Compare commits

...

11 Commits

Author SHA1 Message Date
ed 96ceccd12a v1.2.3 2022-03-24 02:35:53 +01:00
ed 87994fe006 retry failed uploads with backoff 2022-03-24 02:29:59 +01:00
ed fa12c81a03 zip-download files older than 1980-01-01 2022-03-24 01:31:50 +01:00
ed 344ce63455 basic-browser is implicitly not js 2022-03-21 01:20:47 +01:00
ed ec4daacf9e v1.2.2 2022-03-20 06:15:57 +01:00
ed f3e8308718 eh, better as volflags 2022-03-20 05:45:07 +01:00
ed 515ac5d941 show textfile name in document title 2022-03-20 03:40:21 +01:00
ed 954c7e7e50 add option to request noindex from crawlers 2022-03-20 03:23:42 +01:00
ed 67ff57f3a3 add option to disable html folder listings 2022-03-20 02:45:53 +01:00
ed c10c70c1e5 misc 2022-03-04 21:30:31 +01:00
ed 04592a98d2 include all IPs + link status in server url listing 2022-03-04 21:29:28 +01:00
25 changed files with 160 additions and 41 deletions

View File

@@ -62,6 +62,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
 * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
 * [upload events](#upload-events) - trigger a script/program on each upload
+* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
 * [complete examples](#complete-examples)
 * [browser support](#browser-support) - TLDR: yes
 * [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -792,6 +793,17 @@ and it will occupy the parsing threads, so fork anything expensive, or if you wa
 if this becomes popular maybe there should be a less janky way to do it actually
 
+
+## hiding from google
+
+tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
+
+* `--no-robots` adds HTTP (`X-Robots-Tag`) and HTML (`<meta>`) headers with `noindex, nofollow` globally
+* volume-flag `[...]:c,norobots` does the same thing for that single volume
+* volume-flag `[...]:c,robots` ALLOWS search-engine crawling for that volume, even if `--no-robots` is set globally
+
+also, `--force-js` disables the plain HTML folder listing, making things harder to parse for search engines
+
 ## complete examples
 * read-only music server with bpm and key scanning
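
A quick way to check both new signals on a running server (not part of the diff; assumes a local copyparty started with `--no-robots` on its default port 3923):

import urllib.request

# hypothetical local instance; both markers below are added by this changeset
resp = urllib.request.urlopen("http://127.0.0.1:3923/")
print(resp.headers.get("X-Robots-Tag"))  # expect: noindex, nofollow
page = resp.read().decode("utf-8", "replace")
print('<meta name="robots" content="noindex, nofollow">' in page)  # expect: True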

View File

@@ -471,6 +471,8 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
     ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
     ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
+    ap2.add_argument("--force-js", action="store_true", help="don't send HTML folder listings, force clients to use the embedded json instead")
+    ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
 
     ap2 = ap.add_argument_group('yolo options')
     ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -539,6 +541,7 @@ def run_argparse(argv, formatter):
     ap2 = ap.add_argument_group('ui options')
     ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
     ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
+    ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
     ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
     ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")

View File

@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 2, 1)
+VERSION = (1, 2, 3)
 CODENAME = "ftp btw"
-BUILD_DT = (2022, 3, 3)
+BUILD_DT = (2022, 3, 24)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -14,6 +14,7 @@ from datetime import datetime
 from .__init__ import WINDOWS
 from .util import (
     IMPLICATIONS,
+    META_NOBOTS,
     uncyg,
     undot,
     unhumanize,
@@ -861,6 +862,19 @@ class AuthSrv(object):
             if use:
                 vol.lim = lim
 
+        if self.args.no_robots:
+            for vol in vfs.all_vols.values():
+                # volflag "robots" overrides global "norobots", allowing indexing by search engines for this vol
+                if not vol.flags.get("robots"):
+                    vol.flags["norobots"] = True
+
+        for vol in vfs.all_vols.values():
+            h = [vol.flags.get("html_head", self.args.html_head)]
+            if vol.flags.get("norobots"):
+                h.insert(0, META_NOBOTS)
+
+            vol.flags["html_head"] = "\n".join([x for x in h if x])
+
         for vol in vfs.all_vols.values():
             fk = vol.flags.get("fk")
             if fk:

View File

@@ -65,6 +65,11 @@ class HttpCli(object):
             "Access-Control-Allow-Origin": "*",
             "Cache-Control": "no-store; max-age=0",
         }
+        h = self.args.html_head
+        if self.args.no_robots:
+            h = META_NOBOTS + (("\n" + h) if h else "")
+            self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
+        self.html_head = h
 
     def log(self, msg, c=0):
         ptn = self.asrv.re_pwd
@@ -93,6 +98,7 @@ class HttpCli(object):
         if ka:
             ka["ts"] = self.conn.hsrv.cachebuster()
             ka["svcname"] = self.args.doctitle
+            ka["html_head"] = self.html_head
             return tpl.render(**ka)
 
         return tpl
@@ -1677,13 +1683,15 @@ class HttpCli(object):
         boundary = "\roll\tide"
         targs = {
+            "ts": self.conn.hsrv.cachebuster(),
+            "svcname": self.args.doctitle,
+            "html_head": self.html_head,
             "edit": "edit" in self.uparam,
             "title": html_escape(self.vpath, crlf=True),
             "lastmod": int(ts_md * 1000),
             "md_plug": "true" if self.args.emp else "false",
             "md_chk_rate": self.args.mcr,
             "md": boundary,
-            "ts": self.conn.hsrv.cachebuster(),
            "arg_base": arg_base,
         }
         html = template.render(**targs).encode("utf-8", "replace")
@@ -2066,6 +2074,12 @@ class HttpCli(object):
         ):
             raise Pebkac(403)
 
+        self.html_head = vn.flags.get("html_head", "")
+        if vn.flags.get("norobots"):
+            self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
+        else:
+            self.out_headers.pop("X-Robots-Tag", None)
+
         is_dir = stat.S_ISDIR(st.st_mode)
         if self.can_read:
             th_fmt = self.uparam.get("th")
@@ -2156,11 +2170,12 @@ class HttpCli(object):
         url_suf = self.urlq({}, [])
         is_ls = "ls" in self.uparam
-        is_js = self.cookies.get("js") == "y"
+        is_js = self.args.force_js or self.cookies.get("js") == "y"
 
         tpl = "browser"
         if "b" in self.uparam:
             tpl = "browser2"
+            is_js = False
 
         logues = ["", ""]
         if not self.args.no_logues:

View File

@@ -5,7 +5,7 @@ import tarfile
 import threading
 
 from .sutil import errdesc
-from .util import Queue, fsenc
+from .util import Queue, fsenc, min_ex
 from .bos import bos
 
@@ -88,8 +89,9 @@ class StreamTar(object):
             try:
                 self.ser(f)
-            except Exception as ex:
-                errors.append([f["vp"], repr(ex)])
+            except Exception:
+                ex = min_ex(5, True).replace("\n", "\n-- ")
+                errors.append([f["vp"], ex])
 
         if errors:
             self.errf, txt = errdesc(errors)

View File

@@ -362,7 +362,7 @@ class SvcHub(object):
             src = ansi_re.sub("", src)
         elif c:
             if isinstance(c, int):
-                msg = "\033[3{}m{}".format(c, msg)
+                msg = "\033[3{}m{}\033[0m".format(c, msg)
             elif "\033" not in c:
                 msg = "\033[{}m{}\033[0m".format(c, msg)
             else:

View File

@@ -1,13 +1,12 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
-import os
 import time
 import zlib
 from datetime import datetime
 
 from .sutil import errdesc
-from .util import yieldfile, sanitize_fn, spack, sunpack
+from .util import yieldfile, sanitize_fn, spack, sunpack, min_ex
 from .bos import bos
 
@@ -36,7 +35,10 @@ def unixtime2dos(ts):
     bd = ((dy - 1980) << 9) + (dm << 5) + dd
     bt = (th << 11) + (tm << 5) + ts // 2
-    return spack(b"<HH", bt, bd)
+    try:
+        return spack(b"<HH", bt, bd)
+    except:
+        return b"\x00\x00\x21\x00"
 
 def gen_fdesc(sz, crc32, z64):
@@ -244,8 +246,9 @@ class StreamZip(object):
             try:
                 for x in self.ser(f):
                     yield x
-            except Exception as ex:
-                errors.append([f["vp"], repr(ex)])
+            except Exception:
+                ex = min_ex(5, True).replace("\n", "\n-- ")
+                errors.append([f["vp"], ex])
 
         if errors:
             errf, txt = errdesc(errors)
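
Some context on the zip fallback above (not part of the diff): DOS timestamps store the year as an offset from 1980, so older mtimes make the packed value negative and spack() raises; the four-byte fallback decodes to 1980-01-01 00:00. A rough stand-alone sketch of the same logic:

import struct
from datetime import datetime, timezone

def unixtime2dos(ts):
    # stand-alone approximation of the patched helper (the real one uses spack from util.py)
    dt = datetime.fromtimestamp(ts, timezone.utc)
    bd = ((dt.year - 1980) << 9) + (dt.month << 5) + dt.day
    bt = (dt.hour << 11) + (dt.minute << 5) + dt.second // 2
    try:
        return struct.pack(b"<HH", bt, bd)
    except struct.error:
        # years before 1980 make bd negative; fall back to 1980-01-01 00:00
        return b"\x00\x00\x21\x00"

print(unixtime2dos(1648000000).hex())  # a 2022 mtime packs fine
print(unixtime2dos(0).hex())           # 1970 -> "00002100", the 1980-01-01 fallback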

View File

@@ -57,13 +57,19 @@ class TcpSrv(object):
         msgs = []
         title_tab = {}
         title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
-        m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
+        m = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
         for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
             for port in sorted(self.args.p):
                 if port not in ok.get(ip, ok.get("0.0.0.0", [])):
                     continue
 
+                proto = " http"
+                if self.args.http_only:
+                    pass
+                elif self.args.https_only or port == 443:
+                    proto = "https"
+
-                msgs.append(m.format(ip, port, desc))
+                msgs.append(m.format(proto, ip, port, desc))
 
                 if not self.args.wintitle:
                     continue
@@ -144,10 +150,15 @@ class TcpSrv(object):
             return eps
 
         r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
+        ri = re.compile(r"^\s*[0-9]+\s*:.*")
+        up = False
         for ln in txt.split("\n"):
+            if ri.match(ln):
+                up = "UP" in re.split("[>,< ]", ln)
+
             try:
                 ip, dev = r.match(ln.rstrip()).groups()
-                eps[ip] = dev
+                eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
             except:
                 pass
@@ -177,6 +188,7 @@ class TcpSrv(object):
     def ips_windows_ipconfig(self):
         eps = {}
+        offs = {}
         try:
             txt, _ = chkcmd(["ipconfig"])
         except:
@@ -184,18 +196,29 @@ class TcpSrv(object):
         rdev = re.compile(r"(^[^ ].*):$")
         rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
+        roff = re.compile(r".*: Media disconnected$")
         dev = None
         for ln in txt.replace("\r", "").split("\n"):
             m = rdev.match(ln)
             if m:
+                if dev and dev not in eps.values():
+                    offs[dev] = 1
+
                 dev = m.group(1).split(" adapter ", 1)[-1]
 
+            if dev and roff.match(ln):
+                offs[dev] = 1
+                dev = None
+
             m = rip.match(ln)
             if m and dev:
                 eps[m.group(1)] = dev
                 dev = None
 
-        return eps
+        if dev and dev not in eps.values():
+            offs[dev] = 1
+
+        return eps, offs
 
     def ips_windows_netsh(self):
         eps = {}
@@ -215,7 +238,6 @@ class TcpSrv(object):
             m = rip.match(ln)
             if m and dev:
                 eps[m.group(1)] = dev
-                dev = None
 
         return eps
@@ -223,8 +245,11 @@ class TcpSrv(object):
         if MACOS:
             eps = self.ips_macos()
         elif ANYWIN:
-            eps = self.ips_windows_ipconfig()  # sees more interfaces
+            eps, off = self.ips_windows_ipconfig()  # sees more interfaces + link state
             eps.update(self.ips_windows_netsh())  # has better names
+            for k, v in eps.items():
+                if v in off:
+                    eps[k] += ", \033[31mLINK-DOWN"
         else:
             eps = self.ips_linux()
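
The new link-state detection for `ip addr` output can be exercised on its own; a small sketch (the sample input is made up, and the ANSI color code is dropped for readability):

import re

# made-up `ip addr` output: eth0 is UP, wlan0 is not
txt = """2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 state UP
    inet 192.168.1.2/24 brd 192.168.1.255 scope global eth0
3: wlan0: <BROADCAST,MULTICAST> mtu 1500 state DOWN
    inet 10.1.2.3/24 brd 10.1.2.255 scope global wlan0
"""

r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")   # same address regex as ips_linux
ri = re.compile(r"^\s*[0-9]+\s*:.*")          # new: interface header lines
eps = {}
up = False
for ln in txt.split("\n"):
    if ri.match(ln):
        up = "UP" in re.split("[>,< ]", ln)

    try:
        ip, dev = r.match(ln.rstrip()).groups()
        eps[ip] = dev + ("" if up else ", LINK-DOWN")
    except:
        pass

print(eps)  # {'192.168.1.2': 'eth0', '10.1.2.3': 'wlan0, LINK-DOWN'}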

View File

@@ -1137,9 +1137,9 @@ class Up2k(object):
             m = "database is version {}, this copyparty only supports versions <= {}"
             raise Exception(m.format(ver, DB_VER))
 
-        msg = "creating new DB (old is bad); backup: {}"
+        msg = "creating new DB (old is bad); backup: "
         if ver:
-            msg = "creating new DB (too old to upgrade); backup: {}"
+            msg = "creating new DB (too old to upgrade); backup: "
 
         cur = self._backup_db(db_path, cur, ver, msg)
         db = cur.connection

View File

@@ -71,6 +71,8 @@ SYMTIME = sys.version_info >= (3, 6) and os.supports_follow_symlinks
 HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
 
+META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
+
 HTTPCODE = {
     200: "OK",
     204: "No Content",
@@ -483,13 +485,13 @@ def vol_san(vols, txt):
     return txt
 
-def min_ex():
+def min_ex(max_lines=8, reverse=False):
     et, ev, tb = sys.exc_info()
     tb = traceback.extract_tb(tb)
     fmt = "{} @ {} <{}>: {}"
     ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in tb]
     ex.append("[{}] {}".format(et.__name__, ev))
-    return "\n".join(ex[-8:])
+    return "\n".join(ex[-max_lines:][:: -1 if reverse else 1])
 
 @contextlib.contextmanager
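
For reference (not from the diff), this is how the archivers above use the new min_ex() parameters; a tiny usage sketch, assuming copyparty is importable:

from copyparty.util import min_ex

try:
    1 // 0
except Exception:
    # at most 5 frames, newest first, continuation lines prefixed like star.py / szip.py do
    print(min_ex(5, True).replace("\n", "\n-- "))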

View File

@@ -37,7 +37,7 @@ pre, code, tt, #doc, #doc>code {
 	display: inline-block;
 	padding: .35em .5em .2em .5em;
 	border-radius: 0 .3em .3em 0;
-	margin: 1.3em 0 0 0;
+	margin: 1.3em 0 -.2em 0;
 	font-size: 1.4em;
 }
 #path #entree {

View File

@@ -6,6 +6,7 @@
 	<title>⇆🎉 {{ title }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.8">
+	{{ html_head }}
 	<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
 	<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
 	{%- if css %}

View File

@@ -2426,6 +2426,7 @@ var showfile = (function () {
 	lnh = doc[1],
 	txt = doc[2],
 	name = url.split('/').pop(),
+	tname = uricom_dec(name)[0],
 	lang = r.getlang(name),
 	is_md = lang == 'md';
 
@@ -2472,13 +2473,14 @@ var showfile = (function () {
 	wr.style.display = '';
 	set_tabindex();
+	wintitle(tname + ' \u2014 ');
 	document.documentElement.scrollTop = 0;
 
 	var hfun = no_push ? hist_replace : hist_push;
 	hfun(get_evpath() + '?doc=' + url.split('/').pop());
 
 	qsr('#docname');
 	el = mknod('span');
-	el.textContent = uricom_dec(name)[0];
+	el.textContent = tname;
 	el.setAttribute('id', 'docname');
 	ebi('path').appendChild(el);
 
@@ -2613,6 +2615,7 @@ var thegrid = (function () {
 		return;
 
 	hist_push(get_evpath());
+	wintitle();
 	}
 
 	var vis = has(perms, "read");

View File

@@ -6,6 +6,7 @@
 	<title>{{ title }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.8">
+	{{ html_head }}
 	<style>
 		html{font-family:sans-serif}
 		td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}

View File

@@ -3,6 +3,7 @@
 	<title>📝🎉 {{ title }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.7">
+	{{ html_head }}
 	<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
 	<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
 	{%- if edit %}

View File

@@ -3,6 +3,7 @@
 	<title>📝🎉 {{ title }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.7">
+	{{ html_head }}
 	<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
 	<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
 	<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">

View File

@@ -6,6 +6,7 @@
 	<title>{{ svcname }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.8">
+	{{ html_head }}
 	<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
 </head>

View File

@@ -6,6 +6,7 @@
 	<title>{{ svcname }}</title>
 	<meta http-equiv="X-UA-Compatible" content="IE=edge">
 	<meta name="viewport" content="width=device-width, initial-scale=0.8">
+	{{ html_head }}
 	<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
 	<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
 </head>

View File

@@ -1173,7 +1173,7 @@ function up2k_init(subtle) {
 	var t = st.todo.handshake[0],
 		cd = t.cooldown;
 
-	if (cd && cd - Date.now() > 0)
+	if (cd && cd > Date.now())
 		return false;
 
 	// keepalive or verify
@@ -1370,6 +1370,14 @@ function up2k_init(subtle) {
 		return taskerd;
 	})();
 
+	function chill(t) {
+		var now = Date.now();
+		if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
+			t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);
+
+		t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
+	}
+
 	/////
 	////
 	///  hashing
@@ -1756,8 +1764,12 @@ function up2k_init(subtle) {
 		pvis.move(t.n, 'ok');
 	}
-	else t.t_uploaded = undefined;
+	else {
+		if (t.t_uploaded)
+			chill(t);
+
+		t.t_uploaded = undefined;
+	}
 	tasker();
 }
 else {
@@ -1869,7 +1881,8 @@ function up2k_init(subtle) {
 	else {
 		toast.err(0, "server broke; cu-err {0} on file [{1}]:\n".format(
 			xhr.status, t.name) + (txt || "no further information"));
-		return;
+
+		chill(t);
 	}
 	orz2(xhr);
 }
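
The upload-retry backoff added above doubles a per-file cooldown multiplier while failures keep arriving quickly, capped at 32 seconds; a rough Python sketch of the same curve (not part of the patch, milliseconds converted to seconds):

import time

def chill(t):
    # same idea as the chill() helper above: double the cooldown multiplier
    # (0.5 -> 1 -> 2 -> ... capped at 32) while it is small or failures come fast,
    # then push the next allowed attempt that many seconds into the future
    now = time.time()
    if t.get("coolmul", 0) < 2 or now - t.get("cooldown", 0) < t.get("coolmul", 0) * 0.7:
        t["coolmul"] = min(t.get("coolmul", 0.5) * 2, 32)

    t["cooldown"] = max(t.get("cooldown", 1), now + t["coolmul"])

t = {}
for _ in range(6):
    chill(t)
    print(t["coolmul"])  # 1, 2, 4, 8, 16, 32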

View File

@@ -3,6 +3,12 @@ echo not a script
 exit 1
 
+##
+## add index.html banners
+
+find -name index.html | sed -r 's/index.html$//' | while IFS= read -r dir; do f="$dir/.prologue.html"; [ -e "$f" ] || echo '<h1><a href="index.html">open index.html</a></h1>' >"$f"; done
+
 ##
 ## delete all partial uploads
 ## (supports linux/macos, probably windows+msys2)
@@ -95,6 +101,7 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
 # debug md-editor line tracking
 var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
 
+##
 ##
 ## bash oneliners
@@ -199,6 +206,7 @@ git remote add all git@github.com:9001/copyparty.git
 git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
 git remote set-url --add --push all git@github.com:9001/copyparty.git
 
+##
 ##
 ## http 206

View File

@@ -266,7 +266,7 @@ rm have
 find | grep -E '\.py$' |
 grep -vE '__version__' |
 tr '\n' '\0' |
-xargs -0 $pybin ../scripts/uncomment.py
+xargs -0 "$pybin" ../scripts/uncomment.py
 
 f=dep-j2/jinja2/constants.py
 awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
@@ -356,7 +356,14 @@ for d in copyparty dep-j2 dep-ftp; do find $d -type f; done |
 sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
 
-(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
+for n in {1..50}; do
+	(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
+	s=$(md5sum list | cut -c-16)
+	grep -q $s "$zdir/h" && continue
+	echo $s >> "$zdir/h"
+	break
+done
+[ $n -eq 50 ] && exit
 
 echo creating tar
 args=(--owner=1000 --group=1000)
@@ -386,7 +393,7 @@ suf=
 	suf=-gz
 }
 
-$pybin $py --sfx-make tar.bz2 $ver $ts
+"$pybin" $py --sfx-make tar.bz2 $ver $ts
 mv sfx.out $sfx_out$suf.py
 exts+=($suf.py)

View File

@@ -4,19 +4,22 @@ set -e
 cd ~/dev/copyparty/scripts
 
 v=$1
-printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
-grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
 
-git push all
-git tag v$v
-git push all --tags
+[ "$v" = sfx ] || {
+	printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
+	grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
 
-rm -rf ../dist
+	git push all
+	git tag v$v
+	git push all --tags
 
-./make-pypi-release.sh u
-(cd .. && python3 ./setup.py clean2)
+	rm -rf ../dist
 
-./make-tgz-release.sh $v
+	./make-pypi-release.sh u
+	(cd .. && python3 ./setup.py clean2)
+
+	./make-tgz-release.sh $v
+}
 
 rm -f ../dist/copyparty-sfx.*
 f=../dist/copyparty-sfx.py

View File

@@ -52,9 +52,12 @@ class Cfg(Namespace):
         mth="",
         textfiles="",
         doctitle="",
+        html_head="",
         hist=None,
         no_idx=None,
         no_hash=None,
+        force_js=False,
+        no_robots=False,
         js_browser=None,
         css_browser=None,
         **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}

View File

@@ -17,13 +17,14 @@ from copyparty import util
 class Cfg(Namespace):
     def __init__(self, a=None, v=None, c=None):
-        ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode"
+        ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode force_js no_robots"
         ex = {k: False for k in ex.split()}
         ex2 = {
             "mtp": [],
             "mte": "a",
             "mth": "",
             "doctitle": "",
+            "html_head": "",
             "hist": None,
             "no_idx": None,
             "no_hash": None,