Compare commits

21 Commits

| Author | SHA1 | Date |
|---|---|---|
| | b69aace8d8 | |
| | 79097bb43c | |
| | 806fac1742 | |
| | 4f97d7cf8d | |
| | 42acc457af | |
| | c02920607f | |
| | 452885c271 | |
| | 5c242a07b6 | |
| | 088899d59f | |
| | 1faff2a37e | |
| | 23c8d3d045 | |
| | a033388d2b | |
| | 82fe45ac56 | |
| | bcb7fcda6b | |
| | 726a98100b | |
| | 2f021a0c2b | |
| | eb05cb6c6e | |
| | 7530af95da | |
| | 8399e95bda | |
| | 3b4dfe326f | |
| | 2e787a254e | |
@@ -596,12 +596,14 @@ note:
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected

if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`

if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
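To make the difference between the two patterns concrete, here is a minimal sketch of the per-path decision, mirroring the `noidx`/`nohash` handling that the up2k scanner gets further down in this diff; the example patterns and the `scan_decision` helper are illustrative only, not copyparty defaults:

```python
import re

noidx = re.compile(r"\.iso$")   # --no-idx / :c,noidx=  -> not indexed at all
nohash = re.compile(r"\.mkv$")  # --no-hash / :c,nohash= -> indexed without content-hashing

def scan_decision(abspath):
    # same order as the scanner: full skip first, then hash skip
    if noidx and noidx.search(abspath):
        return "ignore"      # invisible to the db, search, and dupe-detection
    if nohash and nohash.search(abspath):
        return "index-only"  # filename/path/size/last-modified only
    return "full"            # also hash the file contents

for p in ("movies/linux.iso", "movies/talk.mkv", "music/song.flac"):
    print(p, "->", scan_decision(p))
```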
## upload rules
@@ -851,7 +853,7 @@ below are some tweaks roughly ordered by usefulness:
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
* huge amount of short-lived connections
* really heavy traffic (downloads/uploads)
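Since `--no-hash` now takes a regex, the tweak above passes `.` (match any character), which matches every path and therefore disables hashing everywhere; a quick sanity check:

```python
import re

# "." matches any single character, so any non-empty path matches the pattern
ptn = re.compile(".")
print(bool(ptn.search("/mnt/nas/music/song.flac")))  # True -> hashing skipped
```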
97
bin/up2k.py
@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2021-10-04, v0.7, ed <irc.rizon.net>, MIT-Licensed
|
||||
2021-10-12, v0.9, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
|
||||
- dependencies: requests
|
||||
@@ -33,11 +33,15 @@ import datetime
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
unicode = str
|
||||
|
||||
@@ -121,17 +125,30 @@ class FileSlice(object):
|
||||
return ret
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
if not PY2:
|
||||
ka["flush"] = True
|
||||
|
||||
print(*a, **ka)
|
||||
if PY2:
|
||||
_print(*a, **ka)
|
||||
if PY2 or not VT100:
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
import os
|
||||
|
||||
@@ -231,16 +248,29 @@ def walkdir(top):
|
||||
|
||||
def walkdirs(tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
stop = top
|
||||
if top[-1:] == sep:
|
||||
stop = os.path.dirname(top.rstrip(sep))
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(top):
|
||||
yield top, ap[len(top) + 1 :], inf
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
d, n = top.rsplit(sep, 1)
|
||||
yield d, n, os.stat(top)
|
||||
|
||||
|
||||
# mostly from copyparty/util.py
|
||||
def quotep(btxt):
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
def humansize(sz, terse=False):
|
||||
"""picks a sensible unit for the given extent"""
|
||||
@@ -334,7 +364,7 @@ def handshake(req_ses, url, file, pw, search):
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += file.rel.rsplit(b"/", 1)[0].decode("utf-8", "replace")
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
try:
|
||||
@@ -403,7 +433,9 @@ class Ctl(object):
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8"))) for x in ar.files
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
@@ -442,13 +474,14 @@ class Ctl(object):
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None)
|
||||
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
print(" found: {0}{1}".format(self.ar.url, hit["rp"]))
|
||||
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||
else:
|
||||
print(" NOT found")
|
||||
break
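For reference, the `burl` expression introduced above just recovers the scheme and host of the destination URL so search hits are printed as absolute links; a worked example with a made-up URL:

```python
# the slice only needs to skip past "://" before looking for the next "/",
# so the same expression works for both https:// and http:// URLs
url = "https://example.com/music/2021/"
burl = url[:8] + url[8:].split("/")[0] + "/"
print(burl)  # https://example.com/

url = "http://example.com/music/2021/"
print(url[:8] + url[8:].split("/")[0] + "/")  # http://example.com/
```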
|
||||
@@ -564,7 +597,36 @@ class Ctl(object):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
if self.ar.z:
|
||||
rd = os.path.dirname(rel)
|
||||
if prd != rd:
|
||||
prd = rd
|
||||
headers = {}
|
||||
if self.ar.a:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
|
||||
|
||||
ls = {}
|
||||
try:
|
||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
r = req_ses.get(
|
||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||
headers=headers,
|
||||
)
|
||||
for f in r.json()["files"]:
|
||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||
ls[unquote(rfn)] = f
|
||||
except:
|
||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
|
||||
rf = ls.get(os.path.basename(rel), None)
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||
self.nfiles -= 1
|
||||
self.nbytes -= inf.st_size
|
||||
continue
|
||||
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
while True:
|
||||
with self.mutex:
|
||||
@@ -598,6 +660,7 @@ class Ctl(object):
|
||||
def handshaker(self):
|
||||
search = self.ar.s
|
||||
q = self.q_handshake
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
file = q.get()
|
||||
if not file:
|
||||
@@ -627,7 +690,7 @@ class Ctl(object):
|
||||
if hs:
|
||||
for hit in hs:
|
||||
m = "found: {0}\n {1}{2}\n"
|
||||
print(m.format(upath, self.ar.url, hit["rp"]), end="")
|
||||
print(m.format(upath, burl, hit["rp"]), end="")
|
||||
else:
|
||||
print("NOT found: {0}\n".format(upath), end="")
|
||||
|
||||
@@ -659,7 +722,8 @@ class Ctl(object):
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
if not hs:
|
||||
print("uploaded {0}".format(upath))
|
||||
kw = "uploaded" if file.up_b else " found"
|
||||
print("{0} {1}".format(kw, upath))
|
||||
for cid in hs:
|
||||
self.q_upload.put([file, cid])
|
||||
|
||||
@@ -696,13 +760,23 @@ class Ctl(object):
|
||||
self.uploader_busy -= 1
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if not VT100:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||
NOTE:
|
||||
source file/folder selection uses rsync syntax, meaning that:
|
||||
"foo" uploads the entire folder to URL/foo/
|
||||
"foo/" uploads the CONTENTS of the folder into URL/
|
||||
""")
|
||||
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
@@ -711,6 +785,7 @@ def main():
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
ap = app.add_argument_group("tls")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
|
||||
@@ -276,7 +276,8 @@ def run_argparse(argv, formatter):
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
|
||||
@@ -380,6 +381,10 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
|
||||
ap2 = ap.add_argument_group('yolo options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
@@ -412,7 +417,8 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (1, 0, 9)
|
||||
VERSION = (1, 0, 11)
|
||||
CODENAME = "sufficient"
|
||||
BUILD_DT = (2021, 10, 9)
|
||||
BUILD_DT = (2021, 10, 18)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -726,6 +726,7 @@ class AuthSrv(object):
|
||||
axs = getattr(vol.axs, axs_key)
|
||||
if usr in axs or "*" in axs:
|
||||
umap[usr].append(mp)
|
||||
umap[usr].sort()
|
||||
setattr(vfs, "a" + perm, umap)
|
||||
|
||||
all_users = {}
|
||||
@@ -865,9 +866,14 @@ class AuthSrv(object):
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
if self.args.no_hash:
|
||||
if "ehash" not in vol.flags:
|
||||
vol.flags["dhash"] = True
|
||||
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||
if vf in vol.flags:
|
||||
ptn = vol.flags.pop(vf)
|
||||
else:
|
||||
ptn = getattr(self.args, ga)
|
||||
|
||||
if ptn:
|
||||
vol.flags[vf] = re.compile(ptn)
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
|
||||
@@ -25,14 +25,14 @@ def lstat(p):
|
||||
def makedirs(name, mode=0o755, exist_ok=True):
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode=mode)
|
||||
os.makedirs(bname, mode)
|
||||
except:
|
||||
if not exist_ok or not os.path.isdir(bname):
|
||||
raise
|
||||
|
||||
|
||||
def mkdir(p, mode=0o755):
|
||||
return os.mkdir(fsenc(p), mode=mode)
|
||||
return os.mkdir(fsenc(p), mode)
|
||||
|
||||
|
||||
def rename(src, dst):
|
||||
|
||||
@@ -10,7 +10,6 @@ import json
|
||||
import base64
|
||||
import string
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
import calendar
|
||||
@@ -20,6 +19,11 @@ try:
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
except:
|
||||
pass
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
|
||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
from .bos import bos
|
||||
@@ -55,7 +59,7 @@ class HttpCli(object):
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.hint = None
|
||||
self.absolute_urls = False
|
||||
self.trailing_slash = True
|
||||
self.out_headers = {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Cache-Control": "no-store; max-age=0",
|
||||
@@ -150,6 +154,8 @@ class HttpCli(object):
|
||||
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.dip = self.ip.replace(":", ".")
|
||||
|
||||
if self.args.ihead:
|
||||
keys = self.args.ihead
|
||||
if "*" in keys:
|
||||
@@ -166,15 +172,11 @@ class HttpCli(object):
|
||||
# split req into vpath + uparam
|
||||
uparam = {}
|
||||
if "?" not in self.req:
|
||||
if not self.req.endswith("/"):
|
||||
self.absolute_urls = True
|
||||
|
||||
self.trailing_slash = self.req.endswith("/")
|
||||
vpath = undot(self.req)
|
||||
else:
|
||||
vpath, arglist = self.req.split("?", 1)
|
||||
if not vpath.endswith("/"):
|
||||
self.absolute_urls = True
|
||||
|
||||
self.trailing_slash = vpath.endswith("/")
|
||||
vpath = undot(vpath)
|
||||
for k in arglist.split("&"):
|
||||
if "=" in k:
|
||||
@@ -468,13 +470,13 @@ class HttpCli(object):
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
ctype = self.headers.get("content-type", "").lower()
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
@@ -535,17 +537,16 @@ class HttpCli(object):
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
if lim:
|
||||
fdir, rem = lim.all(self.ip, rem, remains, fdir)
|
||||
|
||||
fn = None
|
||||
if rem and not self.trailing_slash and not bos.path.isdir(fdir):
|
||||
fdir, fn = os.path.split(fdir)
|
||||
rem, _ = vsplit(rem)
|
||||
|
||||
bos.makedirs(fdir)
|
||||
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
if self.args.nw:
|
||||
path = os.devnull
|
||||
|
||||
open_f = open
|
||||
open_a = [fsenc(path), "wb", 512 * 1024]
|
||||
open_ka = {}
|
||||
open_ka = {"fun": open}
|
||||
open_a = ["wb", 512 * 1024]
|
||||
|
||||
# user-request || config-force
|
||||
if ("gz" in vfs.flags or "xz" in vfs.flags) and (
|
||||
@@ -586,16 +587,28 @@ class HttpCli(object):
|
||||
|
||||
self.log("compressing with {} level {}".format(alg, lv.get(alg)))
|
||||
if alg == "gz":
|
||||
open_f = gzip.GzipFile
|
||||
open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
|
||||
open_ka["fun"] = gzip.GzipFile
|
||||
open_a = ["wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
|
||||
elif alg == "xz":
|
||||
open_f = lzma.open
|
||||
open_a = [fsenc(path), "wb"]
|
||||
open_ka = {"preset": lv[alg]}
|
||||
open_ka = {"fun": lzma.open, "preset": lv[alg]}
|
||||
open_a = ["wb"]
|
||||
else:
|
||||
self.log("fallthrough? thats a bug", 1)
|
||||
|
||||
with open_f(*open_a, **open_ka) as f:
|
||||
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
|
||||
params = {"suffix": suffix, "fdir": fdir}
|
||||
if self.args.nw:
|
||||
params = {}
|
||||
fn = os.devnull
|
||||
|
||||
params.update(open_ka)
|
||||
|
||||
if not fn:
|
||||
fn = "put" + suffix
|
||||
|
||||
with ren_open(fn, *open_a, **params) as f:
|
||||
f, fn = f["orz"]
|
||||
path = os.path.join(fdir, fn)
|
||||
post_sz, _, sha_b64 = hashcopy(reader, f)
|
||||
|
||||
if lim:
|
||||
@@ -1032,7 +1045,7 @@ class HttpCli(object):
|
||||
if not bos.path.isdir(fdir):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
@@ -1917,11 +1930,14 @@ class HttpCli(object):
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
try:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
sv = os.statvfs(fsenc(abspath))
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
@@ -2065,7 +2081,7 @@ class HttpCli(object):
|
||||
for fn in vfs_ls:
|
||||
base = ""
|
||||
href = fn
|
||||
if not is_ls and self.absolute_urls and vpath:
|
||||
if not is_ls and not self.trailing_slash and vpath:
|
||||
base = "/" + vpath + "/"
|
||||
href = base + fn
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@ class SvcHub(object):
|
||||
self.stop_req = False
|
||||
self.stopping = False
|
||||
self.stop_cond = threading.Condition()
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.log_mutex = threading.Lock()
|
||||
@@ -59,7 +60,7 @@ class SvcHub(object):
|
||||
if not args.no_fpool and args.j != 1:
|
||||
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
m = "windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender \"real-time protection\" enabled, so you probably want to use -j 1 instead"
|
||||
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", m, c=3)
|
||||
@@ -98,14 +99,23 @@ class SvcHub(object):
|
||||
|
||||
def thr_httpsrv_up(self):
|
||||
time.sleep(5)
|
||||
failed = self.broker.num_workers - self.httpsrv_up
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
failed = expected - self.httpsrv_up
|
||||
if not failed:
|
||||
return
|
||||
|
||||
m = "{}/{} workers failed to start"
|
||||
m = m.format(failed, self.broker.num_workers)
|
||||
m = m.format(failed, expected)
|
||||
self.log("root", m, 1)
|
||||
os._exit(1)
|
||||
|
||||
if self.args.ign_ebind_all:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||
return
|
||||
|
||||
self.retcode = 1
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def cb_httpsrv_up(self):
|
||||
self.httpsrv_up += 1
|
||||
@@ -242,7 +252,7 @@ class SvcHub(object):
|
||||
print("waiting for thumbsrv (10sec)...")
|
||||
|
||||
print("nailed it", end="")
|
||||
ret = 0
|
||||
ret = self.retcode
|
||||
finally:
|
||||
print("\033[0m")
|
||||
if self.logf:
|
||||
|
||||
@@ -42,9 +42,21 @@ class TcpSrv(object):
|
||||
self.log("tcpsrv", m)
|
||||
|
||||
self.srv = []
|
||||
self.nsrv = 0
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
self.nsrv += 1
|
||||
try:
|
||||
self._listen(ip, port)
|
||||
except Exception as ex:
|
||||
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||
m = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", m.format(ip, port, ex), c=1)
|
||||
else:
|
||||
raise
|
||||
|
||||
if not self.srv and not self.args.ign_ebind_all:
|
||||
raise Exception("could not listen on any of the given interfaces")
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
@@ -52,7 +64,7 @@ class TcpSrv(object):
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
|
||||
@@ -466,7 +466,8 @@ class Up2k(object):
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
nohash = "dhash" in vol.flags
|
||||
rei = vol.flags.get("noidx")
|
||||
reh = vol.flags.get("nohash")
|
||||
with self.mutex:
|
||||
cur, _ = self.register_vpath(top, vol.flags)
|
||||
|
||||
@@ -483,7 +484,7 @@ class Up2k(object):
|
||||
|
||||
n_add = n_rm = 0
|
||||
try:
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
except:
|
||||
m = "failed to index volume [{}]:\n{}"
|
||||
@@ -496,7 +497,7 @@ class Up2k(object):
|
||||
|
||||
return True, n_add or n_rm or do_vac
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
|
||||
def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
|
||||
rcdir = absreal(cdir) # a bit expensive but worth
|
||||
if rcdir in seen:
|
||||
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
||||
@@ -511,6 +512,10 @@ class Up2k(object):
|
||||
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
|
||||
for iname, inf in sorted(g):
|
||||
abspath = os.path.join(cdir, iname)
|
||||
if rei and rei.search(abspath):
|
||||
continue
|
||||
|
||||
nohash = reh.search(abspath) if reh else False
|
||||
lmod = int(inf.st_mtime)
|
||||
sz = inf.st_size
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
@@ -518,7 +523,7 @@ class Up2k(object):
|
||||
continue
|
||||
# self.log(" dir: {}".format(abspath))
|
||||
try:
|
||||
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
|
||||
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
|
||||
except:
|
||||
m = "failed to index subdir [{}]:\n{}"
|
||||
self.log(m.format(abspath, min_ex()), c=1)
|
||||
@@ -1313,7 +1318,7 @@ class Up2k(object):
|
||||
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
||||
suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
|
||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||
return f["orz"][1]
|
||||
|
||||
@@ -1513,6 +1518,7 @@ class Up2k(object):
|
||||
try:
|
||||
permsets = [[True, False, False, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
unpost = False
|
||||
except:
|
||||
# unpost with missing permissions? try read+write and verify with db
|
||||
@@ -1522,6 +1528,7 @@ class Up2k(object):
|
||||
unpost = True
|
||||
permsets = [[True, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
|
||||
|
||||
m = "you cannot delete this: "
|
||||
@@ -1870,7 +1877,8 @@ class Up2k(object):
|
||||
del self.registry[job["ptop"]][job["wark"]]
|
||||
return
|
||||
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
dip = job["addr"].replace(":", ".")
|
||||
suffix = "-{:.6f}-{}".format(job["t0"], dip)
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
if (
|
||||
|
||||
@@ -478,11 +478,12 @@ def min_ex():
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fun = kwargs.pop("fun", open)
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
with fun(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
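A hedged usage sketch of the new `fun` argument: any opener with an `open()`-like signature can now go through `ren_open`'s collision-safe renaming, which is how the PUT handler above routes `gzip.GzipFile` and `lzma.open` through it. The filename, folder, and suffix below are made up, and `ren_open` is assumed to be imported from `copyparty.util` (where it is defined in this diff):

```python
import gzip
from copyparty.util import ren_open

# fun=gzip.GzipFile means the file is created via GzipFile(path, "wb", 6)
# while ren_open still handles name collisions / filename truncation as before
with ren_open("put.bin.gz", "wb", 6, fdir="/srv/up", suffix="-0000001.0-1.2.3.4", fun=gzip.GzipFile) as f:
    f, fname = f["orz"]  # the opened file object and the (possibly renamed) final name
    f.write(b"hello")
```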
|
||||
|
||||
@@ -516,7 +517,7 @@ def ren_open(fname, *args, **kwargs):
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
@@ -1190,6 +1191,9 @@ def sendfile_kern(lower, upper, f, s):
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
if lstat and ANYWIN:
|
||||
lstat = False
|
||||
|
||||
if lstat and not os.supports_follow_symlinks:
|
||||
scandir = False
|
||||
|
||||
|
||||
@@ -289,40 +289,6 @@ html.light #ggrid>a.sel {
|
||||
#files tr:focus+tr td {
|
||||
border-top: 1px solid transparent;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background: #333;
|
||||
font-size: 2.5em;
|
||||
z-index: 99;
|
||||
}
|
||||
#blk_play,
|
||||
#blk_abrt {
|
||||
position: fixed;
|
||||
display: table;
|
||||
width: 80%;
|
||||
}
|
||||
#blk_play {
|
||||
height: 60%;
|
||||
left: 10%;
|
||||
top: 5%;
|
||||
}
|
||||
#blk_abrt {
|
||||
height: 25%;
|
||||
left: 10%;
|
||||
bottom: 5%;
|
||||
}
|
||||
#blk_play a,
|
||||
#blk_abrt a {
|
||||
display: table-cell;
|
||||
vertical-align: middle;
|
||||
text-align: center;
|
||||
background: #444;
|
||||
border-radius: 2em;
|
||||
}
|
||||
#widget {
|
||||
position: fixed;
|
||||
font-size: 1.4em;
|
||||
@@ -629,6 +595,9 @@ input.eq_gain {
|
||||
margin-top: .5em;
|
||||
padding: 1.3em .3em;
|
||||
}
|
||||
#ico1 {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -871,8 +840,8 @@ html.light #tree.nowrap #treeul a+a:hover {
|
||||
.opwide>div {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
border-left: .2em solid #4c4c4c;
|
||||
margin-left: .5em;
|
||||
border-left: .4em solid #4c4c4c;
|
||||
margin: .7em 0 .7em .5em;
|
||||
padding-left: .5em;
|
||||
}
|
||||
.opwide>div.fill {
|
||||
@@ -881,6 +850,10 @@ html.light #tree.nowrap #treeul a+a:hover {
|
||||
.opwide>div>div>a {
|
||||
line-height: 2em;
|
||||
}
|
||||
.opwide>div>h3 {
|
||||
margin: 0 .4em;
|
||||
padding: 0;
|
||||
}
|
||||
#op_cfg>div>div>span {
|
||||
display: inline-block;
|
||||
padding: .2em .4em;
|
||||
@@ -1072,7 +1045,8 @@ a.btn,
|
||||
#rui label,
|
||||
#modal-ok,
|
||||
#modal-ng,
|
||||
#ops {
|
||||
#ops,
|
||||
#ico1 {
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
@@ -1232,14 +1206,6 @@ html.light tr.play a {
|
||||
html.light #files th:hover .cfg {
|
||||
background: #ccc;
|
||||
}
|
||||
html.light #blocked {
|
||||
background: #eee;
|
||||
}
|
||||
html.light #blk_play a,
|
||||
html.light #blk_abrt a {
|
||||
background: #fff;
|
||||
box-shadow: 0 .2em .4em #ddd;
|
||||
}
|
||||
html.light #widget a {
|
||||
color: #06a;
|
||||
}
|
||||
|
||||
@@ -168,6 +168,15 @@ ebi('op_cfg').innerHTML = (
|
||||
' </td>\n' +
|
||||
' </div>\n' +
|
||||
'</div>\n' +
|
||||
'<div>\n' +
|
||||
' <h3>favicon <span id="ico1">🎉</span></h3>\n' +
|
||||
' <div>\n' +
|
||||
' <input type="text" id="icot" style="width:1.3em" value="" tt="favicon text (blank and refresh to disable)" />' +
|
||||
' <input type="text" id="icof" style="width:2em" value="" tt="foreground color" />' +
|
||||
' <input type="text" id="icob" style="width:2em" value="" tt="background color" />' +
|
||||
' </td>\n' +
|
||||
' </div>\n' +
|
||||
'</div>\n' +
|
||||
'<div><h3>key notation</h3><div id="key_notation"></div></div>\n' +
|
||||
'<div class="fill"><h3>hidden columns</h3><div id="hcols"></div></div>'
|
||||
);
|
||||
@@ -1430,12 +1439,7 @@ function play(tid, is_ev, seek, call_depth) {
|
||||
if (!seek) {
|
||||
var o = ebi(oid);
|
||||
o.setAttribute('id', 'thx_js');
|
||||
if (window.history && history.replaceState) {
|
||||
hist_replace(document.location.pathname + '#' + oid);
|
||||
}
|
||||
else {
|
||||
document.location.hash = oid;
|
||||
}
|
||||
sethash(oid);
|
||||
o.setAttribute('id', oid);
|
||||
}
|
||||
|
||||
@@ -1483,44 +1487,14 @@ function evau_error(e) {
|
||||
}
|
||||
|
||||
|
||||
// show a fullscreen message
|
||||
function show_modal(html) {
|
||||
var body = document.body || document.getElementsByTagName('body')[0],
|
||||
div = mknod('div');
|
||||
|
||||
div.setAttribute('id', 'blocked');
|
||||
div.innerHTML = html;
|
||||
unblocked();
|
||||
body.appendChild(div);
|
||||
}
|
||||
|
||||
|
||||
// hide fullscreen message
|
||||
function unblocked(e) {
|
||||
ev(e);
|
||||
var dom = ebi('blocked');
|
||||
if (dom)
|
||||
dom.parentNode.removeChild(dom);
|
||||
}
|
||||
|
||||
|
||||
// show ui to manually start playback of a linked song
|
||||
function autoplay_blocked(seek) {
|
||||
show_modal(
|
||||
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
|
||||
var go = ebi('blk_go'),
|
||||
na = ebi('blk_na'),
|
||||
tid = mp.au.tid,
|
||||
var tid = mp.au.tid,
|
||||
fn = mp.tracks[tid].split(/\//).pop();
|
||||
|
||||
fn = uricom_dec(fn.replace(/\+/g, ' '))[0];
|
||||
|
||||
go.textContent = 'Play "' + fn + '"';
|
||||
go.onclick = function (e) {
|
||||
unblocked(e);
|
||||
toast.hide();
|
||||
modal.confirm('<h6>play this audio file?</h6>\n«' + esc(fn) + '»', function () {
|
||||
if (mp.au !== mp.au_ogvjs)
|
||||
// chrome 91 may permanently taint on a failed play()
|
||||
// depending on win10 settings or something? idk
|
||||
@@ -1533,14 +1507,16 @@ function autoplay_blocked(seek) {
|
||||
|
||||
play(tid, true, seek);
|
||||
mp.fade_in();
|
||||
};
|
||||
na.onclick = unblocked;
|
||||
}, null);
|
||||
}
|
||||
|
||||
|
||||
function play_linked() {
|
||||
function eval_hash() {
|
||||
var v = location.hash;
|
||||
if (v && v.indexOf('#af-') === 0) {
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
if (v.indexOf('#af-') === 0) {
|
||||
var id = v.slice(2).split('&');
|
||||
if (id[0].length != 10)
|
||||
return;
|
||||
@@ -1554,6 +1530,13 @@ function play_linked() {
|
||||
|
||||
return play(id[0], false, parseInt(m[1] || 0) * 60 + parseInt(m[2] || 0));
|
||||
}
|
||||
|
||||
if (v.indexOf('#q=') === 0) {
|
||||
goto('search');
|
||||
var i = ebi('q_raw');
|
||||
i.value = uricom_dec(v.slice(3))[0];
|
||||
return i.oninput();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -2747,28 +2730,28 @@ document.onkeydown = function (e) {
|
||||
(function () {
|
||||
var sconf = [
|
||||
["size",
|
||||
["szl", "sz_min", "minimum MiB", ""],
|
||||
["szu", "sz_max", "maximum MiB", ""]
|
||||
["szl", "sz_min", "minimum MiB", "16"],
|
||||
["szu", "sz_max", "maximum MiB", "16"]
|
||||
],
|
||||
["date",
|
||||
["dtl", "dt_min", "min. iso8601", ""],
|
||||
["dtu", "dt_max", "max. iso8601", ""]
|
||||
["dtl", "dt_min", "min. iso8601", "16"],
|
||||
["dtu", "dt_max", "max. iso8601", "16"]
|
||||
],
|
||||
["path",
|
||||
["path", "path", "path contains (space-separated)", "46"]
|
||||
["path", "path", "path contains (space-separated)", "34"]
|
||||
],
|
||||
["name",
|
||||
["name", "name", "name contains (negate with -nope)", "46"]
|
||||
["name", "name", "name contains (negate with -nope)", "34"]
|
||||
]
|
||||
];
|
||||
var oldcfg = [];
|
||||
|
||||
if (QS('#srch_form.tags')) {
|
||||
sconf.push(["tags",
|
||||
["tags", "tags", "tags contains (^=start, end=$)", "46"]
|
||||
["tags", "tags", "tags contains (^=start, end=$)", "34"]
|
||||
]);
|
||||
sconf.push(["adv.",
|
||||
["adv", "adv", "key>=1A key<=2B .bpm>165", "46"]
|
||||
["adv", "adv", "key>=1A key<=2B .bpm>165", "34"]
|
||||
]);
|
||||
}
|
||||
|
||||
@@ -2785,8 +2768,8 @@ document.onkeydown = function (e) {
|
||||
html.push(
|
||||
'<td colspan="' + csp + '"><input id="' + hn + 'c" type="checkbox">\n' +
|
||||
'<label for="' + hn + 'c">' + sconf[a][b][2] + '</label>\n' +
|
||||
'<br /><input id="' + hn + 'v" type="text" size="' + sconf[a][b][3] +
|
||||
'" name="' + sconf[a][b][1] + '" /></td>');
|
||||
'<br /><input id="' + hn + 'v" type="text" style="width:' + sconf[a][b][3] +
|
||||
'em" name="' + sconf[a][b][1] + '" /></td>');
|
||||
if (csp == 2)
|
||||
break;
|
||||
}
|
||||
@@ -2998,6 +2981,7 @@ document.onkeydown = function (e) {
|
||||
reload_browser();
|
||||
filecols.set_style(['File Name']);
|
||||
|
||||
sethash('q=' + uricom_enc(this.q_raw));
|
||||
ebi('unsearch').onclick = unsearch;
|
||||
}
|
||||
|
||||
@@ -3007,6 +2991,7 @@ document.onkeydown = function (e) {
|
||||
ebi('files').innerHTML = orig_html;
|
||||
ebi('files').removeAttribute('q_raw');
|
||||
orig_html = null;
|
||||
sethash('');
|
||||
reload_browser();
|
||||
}
|
||||
})();
|
||||
@@ -3154,7 +3139,7 @@ var treectl = (function () {
|
||||
|
||||
treectl.goto = function (url, push) {
|
||||
get_tree("", url, true);
|
||||
reqls(url, push);
|
||||
reqls(url, push, true);
|
||||
}
|
||||
|
||||
function get_tree(top, dst, rst) {
|
||||
@@ -3282,7 +3267,7 @@ var treectl = (function () {
|
||||
reqls(this.getAttribute('href'), true);
|
||||
}
|
||||
|
||||
function reqls(url, hpush) {
|
||||
function reqls(url, hpush, no_tree) {
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.top = url;
|
||||
xhr.hpush = hpush;
|
||||
@@ -3290,7 +3275,7 @@ var treectl = (function () {
|
||||
xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true);
|
||||
xhr.onreadystatechange = recvls;
|
||||
xhr.send();
|
||||
if (hpush)
|
||||
if (hpush && !no_tree)
|
||||
get_tree('.', xhr.top);
|
||||
|
||||
enspin(thegrid.en ? '#gfiles' : '#files');
|
||||
@@ -4469,4 +4454,4 @@ function reload_browser(not_mp) {
|
||||
}
|
||||
reload_browser(true);
|
||||
mukey.render();
|
||||
play_linked();
|
||||
setTimeout(eval_hash, 1);
|
||||
|
||||
@@ -1162,6 +1162,11 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
is_busy = st.todo.handshake.length;
|
||||
try {
|
||||
if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
|
||||
treectl.goto(get_evpath());
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
|
||||
if (was_busy != is_busy) {
|
||||
@@ -2012,6 +2017,15 @@ function warn_uploader_busy(e) {
|
||||
|
||||
|
||||
tt.init();
|
||||
favico.init();
|
||||
ebi('ico1').onclick = function () {
|
||||
var a = favico.txt == this.textContent;
|
||||
swrite('icot', a ? 'c' : this.textContent);
|
||||
swrite('icof', a ? null : '000');
|
||||
swrite('icob', a ? null : '');
|
||||
favico.init();
|
||||
};
|
||||
|
||||
|
||||
if (QS('#op_up2k.act'))
|
||||
goto_up2k();
|
||||
|
||||
@@ -621,9 +621,9 @@ function icfg_get(name, defval) {
|
||||
}
|
||||
|
||||
function fcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
var o = ebi(name),
|
||||
val = parseFloat(sread(name));
|
||||
|
||||
var val = parseFloat(sread(name));
|
||||
if (isNaN(val))
|
||||
return parseFloat(o ? o.value : defval);
|
||||
|
||||
@@ -633,6 +633,19 @@ function fcfg_get(name, defval) {
|
||||
return val;
|
||||
}
|
||||
|
||||
function scfg_get(name, defval) {
|
||||
var o = ebi(name),
|
||||
val = sread(name);
|
||||
|
||||
if (val === null)
|
||||
val = defval;
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
@@ -684,6 +697,21 @@ function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||
return v;
|
||||
}
|
||||
|
||||
function scfg_bind(obj, oname, cname, defval, cb) {
|
||||
var v = scfg_get(cname, defval),
|
||||
el = ebi(cname);
|
||||
|
||||
obj[oname] = v;
|
||||
if (el)
|
||||
el.oninput = function (e) {
|
||||
swrite(cname, obj[oname] = this.value);
|
||||
if (cb)
|
||||
cb(obj[oname]);
|
||||
};
|
||||
|
||||
return v;
|
||||
}
|
||||
|
||||
|
||||
function hist_push(url) {
|
||||
console.log("h-push " + url);
|
||||
@@ -695,6 +723,15 @@ function hist_replace(url) {
|
||||
history.replaceState(url, url, url);
|
||||
}
|
||||
|
||||
function sethash(hv) {
|
||||
if (window.history && history.replaceState) {
|
||||
hist_replace(document.location.pathname + '#' + hv);
|
||||
}
|
||||
else {
|
||||
document.location.hash = hv;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
@@ -849,16 +886,7 @@ var tt = (function () {
|
||||
}
|
||||
|
||||
r.init = function () {
|
||||
var ttb = ebi('tooltips');
|
||||
if (ttb) {
|
||||
ttb.onclick = function (e) {
|
||||
ev(e);
|
||||
r.en = !r.en;
|
||||
bcfg_set('tooltips', r.en);
|
||||
r.init();
|
||||
};
|
||||
r.en = bcfg_get('tooltips', true)
|
||||
}
|
||||
bcfg_bind(r, 'en', 'tooltips', r.en, r.init);
|
||||
r.att(document);
|
||||
};
|
||||
|
||||
@@ -1181,3 +1209,54 @@ function repl(e) {
|
||||
}
|
||||
if (ebi('repl'))
|
||||
ebi('repl').onclick = repl;
|
||||
|
||||
|
||||
var favico = (function () {
|
||||
var r = {};
|
||||
r.en = true;
|
||||
|
||||
function gx(txt) {
|
||||
return (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n' +
|
||||
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg"><g>\n' +
|
||||
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
|
||||
'<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' +
|
||||
' font-family="sans-serif" font-weight="bold" font-size="64px"' +
|
||||
' fill="#' + r.fg + '">' + txt + '</text></g></svg>'
|
||||
);
|
||||
}
|
||||
|
||||
r.upd = function () {
|
||||
var i = QS('link[rel="icon"]'), b64;
|
||||
if (!r.txt)
|
||||
return;
|
||||
|
||||
try {
|
||||
b64 = btoa(gx(r.txt));
|
||||
}
|
||||
catch (ex) {
|
||||
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||
function x(m, v) { return String.fromCharCode('0x' + v); });
|
||||
|
||||
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||
}
|
||||
|
||||
if (!i) {
|
||||
i = mknod('link');
|
||||
i.rel = 'icon';
|
||||
document.head.appendChild(i);
|
||||
}
|
||||
i.href = 'data:image/svg+xml;base64,' + b64;
|
||||
};
|
||||
|
||||
r.init = function () {
|
||||
clearTimeout(r.to);
|
||||
scfg_bind(r, 'txt', 'icot', '', r.upd);
|
||||
scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
|
||||
scfg_bind(r, 'bg', 'icob', '333', r.upd);
|
||||
r.upd();
|
||||
};
|
||||
|
||||
r.to = setTimeout(r.init, 100);
|
||||
return r;
|
||||
})();
|
||||
|
||||
@@ -48,7 +48,8 @@ class Cfg(Namespace):
|
||||
mte="a",
|
||||
mth="",
|
||||
hist=None,
|
||||
no_hash=False,
|
||||
no_idx=None,
|
||||
no_hash=None,
|
||||
css_browser=None,
|
||||
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
|
||||
)
|
||||
|
||||
@@ -23,7 +23,8 @@ class Cfg(Namespace):
|
||||
"mte": "a",
|
||||
"mth": "",
|
||||
"hist": None,
|
||||
"no_hash": False,
|
||||
"no_idx": None,
|
||||
"no_hash": None,
|
||||
"css_browser": None,
|
||||
"no_voldump": True,
|
||||
"no_logues": False,