Compare commits

...

49 Commits

Author SHA1 Message Date
ed d30001d23d v1.0.3 2021-09-18 17:50:40 +02:00
ed 06bbf0d656 filekeys in search results 2021-09-18 17:26:13 +02:00
ed 6ddd952e04 return filekeys in upload summary if read-access 2021-09-18 15:57:43 +02:00
ed 027ad0c3ee misc 2021-09-18 15:38:13 +02:00
ed 3abad2b87b fix navpane nowrap 2021-09-18 14:18:23 +02:00
ed 32a1c7c5d5 cosmetic 2021-09-18 02:07:29 +02:00
ed f06e165bd4 retro 2021-09-18 02:07:09 +02:00
ed 1c843b24f7 ensure ffmpeg doesn't transcode video 2021-09-17 23:50:54 +02:00
ed 2ace9ed380 fix filekeys appearing in filenames 2021-09-17 23:12:32 +02:00
ed 5f30c0ae03 fix button hover bg 2021-09-17 22:49:49 +02:00
ed ef60adf7e2 optional navpane wordwrap diasble 2021-09-17 22:49:26 +02:00
ed 7354b462e8 easymde: use extenral marked.js 2021-09-17 09:32:30 +02:00
ed da904d6be8 upgrade marked.js from v1.1.0 to v3.0.4 2021-09-17 09:10:33 +02:00
ed c5fbbbbb5c show current line number in md-editor 2021-09-17 01:36:06 +02:00
ed 5010387d8a markdown modpoll at an interval 2021-09-16 09:31:58 +02:00
ed f00c54a7fb nice 2021-09-16 09:00:36 +02:00
ed 9f52c169d0 more python3 shebangs 2021-09-16 00:28:38 +02:00
ed bf18339404 change sfx shebang to python3 2021-09-16 00:26:52 +02:00
ed 2ad12b074b return 404 on browsing folders with g 2021-09-16 00:17:27 +02:00
ed a6788ffe8d mention e2ts deps 2021-09-16 00:06:19 +02:00
ed 0e884df486 keep empty folders after deleting all files 2021-09-15 23:31:49 +02:00
ed ef1c55286f add filekeys 2021-09-15 23:17:02 +02:00
ed abc0424c26 show login prompt on 404 2021-09-15 21:53:30 +02:00
ed 44e5c82e6d more aggressively no-cache 2021-09-15 20:49:02 +02:00
ed 5849c446ed new access level g 2021-09-15 01:01:20 +02:00
ed 12b7317831 wget: delete url file 2021-09-15 00:18:58 +02:00
ed fe323f59af update readme 2021-09-14 23:05:32 +02:00
ed a00e56f219 lol it works 2021-09-14 22:44:56 +02:00
ed 1a7852794f dry boolean configs 2021-09-14 00:50:27 +02:00
ed 22b1373a57 accessibility: always hilight focused elements 2021-09-14 00:46:53 +02:00
ed 17d78b1469 set max-width for readme.md 2021-09-14 00:46:03 +02:00
ed 4d8b32b249 prevent tooltips on alt-tab 2021-09-14 00:45:30 +02:00
ed b65bea2550 show toast with stack on rejected promises 2021-09-14 00:42:46 +02:00
ed 0b52ccd200 fqdn makes more sense 2021-09-12 23:49:37 +02:00
ed 3006a07059 cfssl: mention arg 3 2021-09-12 23:38:38 +02:00
ed 801dbc7a9a readme: add motivations / future plans 2021-09-12 23:25:34 +02:00
ed 4f4e895fb7 update vscode launch args 2021-09-11 19:59:59 +02:00
ed cc57c3b655 bump deps 2021-09-11 19:59:41 +02:00
ed ca6ec9c5c7 v1.0.2 2021-09-09 09:21:30 +02:00
ed 633b1f0a78 v1.0.1 2021-09-09 00:59:55 +02:00
ed 6136b9bf9c don't double-eof 2021-09-09 00:54:09 +02:00
ed 524a3ba566 actually this is better 2021-09-09 00:41:23 +02:00
ed 58580320f9 make the primary tabs toggle-buttons 2021-09-09 00:35:07 +02:00
ed 759b0a994d alternative equalizer tuning 2021-09-09 00:27:18 +02:00
ed d2800473e4 less aggressive searching, especially on phones 2021-09-08 23:24:32 +02:00
ed f5b1a2065e multipart-parser needs exact reads 2021-09-08 21:07:34 +00:00
ed 5e62532295 minimal-up2k: remove filesearch dropzone 2021-09-08 09:16:02 +02:00
ed c1bee96c40 fix filedrop trying to upload without write access 2021-09-08 00:19:48 +02:00
ed f273253a2b ( ´ w `) 2021-09-08 00:16:08 +02:00
43 changed files with 1142 additions and 547 deletions

2
.vscode/launch.json vendored
View File

@@ -17,7 +17,7 @@
"-mtp",
".bpm=f,bin/mtag/audio-bpm.py",
"-aed:wark",
"-vsrv::r:aed:cnodupe",
"-vsrv::r:rw,ed:c,dupe",
"-vdist:dist:r"
]
},

View File

@@ -55,4 +55,5 @@
"py27"
],
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}

View File

@@ -25,9 +25,12 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [notes](#notes) - general notes
* [status](#status) - feature summary
* [testimonials](#testimonials) - small collection of user feedback
* [motivations](#motivations) - project goals / philosophy
* [future plans](#future-plans) - some improvement ideas
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [FAQ](#FAQ) - "frequently" asked questions
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
* [tabs](#tabs) - the main tabs in the ui
@@ -84,7 +87,7 @@ some recommended options:
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other access levels (`r`ead, `w`rite, `m`ove, `d`elete)
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
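
As a rough illustration of how such a `src:dst:permset:...` volume spec decomposes, here is a hedged sketch in Python -- not copyparty's actual parser, and `parse_vol` is a made-up helper name -- showing how `/mnt/music:/music:r:rw,foo:c,fk=4` splits into source path, mount point, permission sets and volflags:

# hypothetical sketch, NOT copyparty's real parser -- just to illustrate
# how a -v volume spec breaks down into its parts
def parse_vol(v_str):
    src, dst, *opts = v_str.split(":")
    perms, flags = [], {}
    for opt in opts:
        if opt.startswith("c,"):
            # volflag, e.g. "c,fk=4" or "c,e2ts"
            name, _, val = opt[2:].partition("=")
            flags[name] = val or True
        else:
            # permset, e.g. "r" (everyone) or "rw,foo" (user foo only)
            lvl, _, users = opt.partition(",")
            perms.append((lvl, users.split(",") if users else ["*"]))
    return src, dst, perms, flags

print(parse_vol("/mnt/music:/music:r:rw,foo:c,fk=4"))
# ('/mnt/music', '/music', [('r', ['*']), ('rw', ['foo'])], {'fk': '4'})
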
@@ -170,6 +173,41 @@ small collection of user feedback
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
# motivations
project goals / philosophy
* inverse linux philosophy -- do all the things, and do an *okay* job
* quick drop-in service to get a lot of features in a pinch
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
* run anywhere, support everything
* as many web-browsers and python versions as possible
* every browser should at least be able to browse, download, upload files
* be a good emergency solution for transferring stuff between ancient boxes
* minimal dependencies
* but optional dependencies adding bonus-features are ok
* everything being plaintext makes it possible to proofread for malicious code
* no preparations / setup necessary, just run the sfx (which is also plaintext)
* adaptable, malleable, hackable
* no build steps; modify the js/python without needing node.js or anything like that
## future plans
some improvement ideas
* the JS is a mess -- a preact rewrite would be nice
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
* good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
* the UX is a mess -- a proper design would be nice
* very organic (much like the python/js), everything was an afterthought
* true for both the layout and the visual flair
* something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
* some of the python files are way too big
* `up2k.py` ended up doing all the file indexing / db management
* `httpcli.py` should be separated into modules in general
# bugs
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
@@ -194,6 +232,17 @@ small collection of user feedback
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
# FAQ
"frequently" asked questions
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
# accounts and volumes
per-folder, per-user permissions
@@ -208,6 +257,7 @@ permissions:
* `w` (write): upload files, move files *into* this folder
* `m` (move): move files/folders *from* this folder
* `d` (delete): delete files/folders
* `g` (get): only download files, cannot see folder contents or zip/tar
examples:
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
@@ -218,6 +268,10 @@ examples:
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable accesskeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
* `c,fk=4` sets the `fk` volume-flag to 4, meaning each file gets a 4-character accesskey
* `u1` can upload files, browse the folder, and see the generated accesskeys
* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
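
To make the accesskey mechanism concrete, here is a minimal sketch of how such a `?k=` suffix is derived, mirroring the `gen_filekey` helper this changeset adds to copyparty/util.py: a salted sha512 over the file's absolute path, size and inode (0 on Windows), urlsafe-base64 encoded and truncated to the `fk` length. The path, size, inode and salt below are made-up illustration values; the real salt (`--fk-salt`) defaults to the mtime of cert.pem and falls back to "hunter2".

import base64
import hashlib

def gen_filekey(salt, fspath, fsize, inode):
    # same recipe as the gen_filekey added to copyparty/util.py in this diff
    return base64.urlsafe_b64encode(
        hashlib.sha512(
            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
        ).digest()
    ).decode("ascii")

# made-up values for illustration; with c,fk=4 only the first 4 chars are kept
path, size, inode = "/mnt/ss/screenshot.png", 48122, 1054732
print("?k=" + gen_filekey("hunter2", path, size, inode)[:4])
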
# the browser
@@ -672,7 +726,7 @@ TLDR: yes
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
| video player | - | yep | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | yep | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
@@ -687,8 +741,6 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| browser | will it blend |
| ------- | ------------- |
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
@@ -786,6 +838,11 @@ on public copyparty instances with anonymous upload enabled:
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
other misc:
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
## gotchas

View File

@@ -1,11 +1,18 @@
standalone programs which take an audio file as argument
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
these do not have any problematic dependencies:
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
# dependencies

View File

@@ -25,6 +25,7 @@ def det(tf):
"-v", "fatal",
"-ss", "13",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-ac", "1",
"-ar", "22050",
"-t", "300",

View File

@@ -28,6 +28,7 @@ def det(tf):
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-t", "300",
"-sample_fmt", "s16",
tf

View File

@@ -4,7 +4,8 @@ set -e
# install dependencies for audio-*.py
#
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# win64: requires msys2-mingw64 environment
# macos: requires macports
#

85
bin/mtag/wget.py Normal file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env python3
"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)
example copyparty config to use this:
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts:c,mtp=title=ebin,t300,ad,bin/mtag/wget.py
explained:
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
do this on all uploads with the file extension "bin",
t300 = 300 seconds timeout for each dwonload,
ad = parse file regardless if FFmpeg thinks it is audio or not
PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""
import os
import sys
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
def main():
fp = os.path.abspath(sys.argv[1])
fdir = os.path.dirname(fp)
fname = os.path.basename(fp)
if not fname.startswith("put-") or not fname.endswith(".bin"):
raise Exception("not a post file")
buf = b""
with open(fp, "rb") as f:
while True:
b = f.read(4096)
buf += b
if len(buf) > 4096:
raise Exception("too big")
if not b:
break
if not buf:
raise Exception("file is empty")
buf = unquote(buf.replace(b"+", b" "))
url = buf.decode("utf-8")
if not url.startswith("msg="):
raise Exception("does not start with msg=")
url = url[4:]
if "://" not in url:
url = "https://" + url
os.chdir(fdir)
name = url.split("?")[0].split("/")[-1]
tfn = "-- DOWNLOADING " + name
open(tfn, "wb").close()
cmd = ["wget", "--trust-server-names", "--", url]
try:
sp.check_call(cmd)
# OPTIONAL:
# on success, delete the .bin file which contains the URL
os.unlink(fp)
except:
open("-- FAILED TO DONWLOAD " + name, "wb").close()
os.unlink(tfn)
print(url)
if __name__ == "__main__":
main()
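
For completeness, a hedged example of feeding wget.py from a script instead of the web-ui's message box, under a few assumptions not stated in this diff: copyparty listening on its default port 3923, the `--urlform save,get` config from the docstring above, the account `ed:wark` from the vscode launch config at the top of this changeset, and the `?pw=` query parameter (visible in the httpcli.py hunks further down) carrying the password:

import urllib.parse
import urllib.request

# assumptions: default port 3923, /wget volume configured as in the docstring
# above, account ed:wark; the message/pager function boils down to a
# form-urlencoded POST of "msg=<text>" to the volume URL
body = urllib.parse.urlencode({"msg": "https://example.com/some-file.iso"}).encode()
req = urllib.request.Request(
    "http://127.0.0.1:3923/wget/?pw=wark",
    data=body,
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
with urllib.request.urlopen(req) as resp:
    print(resp.status, resp.read()[:120])
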

View File

@@ -1,13 +1,14 @@
#!/bin/bash
set -e
# ca-name and server-name
# ca-name and server-fqdn
ca_name="$1"
srv_name="$2"
srv_fqdn="$2"
[ -z "$srv_name" ] && {
[ -z "$srv_fqdn" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server name"
echo "need arg 2: server fqdn"
echo "optional arg 3: if set, write cert into copyparty cfg"
exit 1
}
@@ -31,15 +32,15 @@ EOF
gen_srv() {
(tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_name"}]}
"names": [{"O":"$ca_name - $srv_fqdn"}]}
EOF
)|
cfssl gencert -ca ca.pem -ca-key ca.key \
-profile=www -hostname="$srv_name.$ca_name" - |
cfssljson -bare "$srv_name"
-profile=www -hostname="$srv_fqdn" - |
cfssljson -bare "$srv_fqdn"
mv "$srv_name-key.pem" "$srv_name.key"
rm "$srv_name.csr"
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
rm "$srv_fqdn.csr"
}
@@ -57,13 +58,13 @@ show() {
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_name.pem"
show "$srv_fqdn.pem"
# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
@@ -104,7 +104,7 @@ def ensure_cert():
cert_insec = os.path.join(E.mod, "res/insecure.pem")
cert_cfg = os.path.join(E.cfg, "cert.pem")
if not os.path.exists(cert_cfg):
shutil.copy2(cert_insec, cert_cfg)
shutil.copy(cert_insec, cert_cfg)
try:
if filecmp.cmp(cert_cfg, cert_insec):
@@ -203,6 +203,11 @@ def run_argparse(argv, formatter):
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
)
try:
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
except:
fk_salt = "hunter2"
sects = [
[
"accounts",
@@ -211,14 +216,15 @@ def run_argparse(argv, formatter):
"""
-a takes username:password,
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
where "perm" is "accesslevels,username1,username2,..."
where "perm" is "permissions,username1,username2,..."
and "volflag" is config flags to set on this volume
list of accesslevels:
list of permissions:
"r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads
"m" (move): move files and folders; need "w" at destination
"d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
too many volflags to list here, see the other sections
@@ -279,6 +285,10 @@ def run_argparse(argv, formatter):
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission
\033[0m"""
),
],
@@ -360,6 +370,7 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
@@ -493,7 +504,7 @@ def main(argv=None):
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 0, 0)
VERSION = (1, 0, 3)
CODENAME = "sufficient"
BUILD_DT = (2021, 9, 7)
BUILD_DT = (2021, 9, 18)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -29,17 +29,18 @@ LEELOO_DALLAS = "leeloo_dallas"
class AXS(object):
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
self.uread = {} if uread is None else {k: 1 for k in uread}
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
self.umove = {} if umove is None else {k: 1 for k in umove}
self.udel = {} if udel is None else {k: 1 for k in udel}
self.uget = {} if uget is None else {k: 1 for k in uget}
def __repr__(self):
return "AXS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "uread uwrite umove udel".split()
for k in "uread uwrite umove udel uget".split()
)
)
@@ -215,6 +216,7 @@ class VFS(object):
self.awrite = {}
self.amove = {}
self.adel = {}
self.aget = {}
else:
self.histpath = None
self.all_vols = None
@@ -222,6 +224,7 @@ class VFS(object):
self.awrite = None
self.amove = None
self.adel = None
self.aget = None
def __repr__(self):
return "VFS({})".format(
@@ -308,7 +311,7 @@ class VFS(object):
def can_access(self, vpath, uname):
# type: (str, str) -> tuple[bool, bool, bool, bool]
"""can Read,Write,Move,Delete"""
"""can Read,Write,Move,Delete,Get"""
vn, _ = self._find(vpath)
c = vn.axs
return [
@@ -316,10 +319,20 @@ class VFS(object):
uname in c.uwrite or "*" in c.uwrite,
uname in c.umove or "*" in c.umove,
uname in c.udel or "*" in c.udel,
uname in c.uget or "*" in c.uget,
]
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
def get(
self,
vpath,
uname,
will_read,
will_write,
will_move=False,
will_del=False,
will_get=False,
):
# type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
c = vn.axs
@@ -329,6 +342,7 @@ class VFS(object):
[will_write, c.uwrite, "write"],
[will_move, c.umove, "move"],
[will_del, c.udel, "delete"],
[will_get, c.uget, "get"],
]:
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
m = "you don't have {}-access for this location"
@@ -368,7 +382,7 @@ class VFS(object):
for name, vn2 in sorted(self.nodes.items()):
ok = False
axs = vn2.axs
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
for pset in permsets:
ok = True
for req, lst in zip(pset, axs):
@@ -561,7 +575,7 @@ class AuthSrv(object):
def _read_vol_str(self, lvl, uname, axs, flags):
# type: (str, str, AXS, any) -> None
if lvl.strip("crwmd"):
if lvl.strip("crwmdg"):
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
if lvl == "c":
@@ -588,6 +602,9 @@ class AuthSrv(object):
if "d" in lvl:
axs.udel[un] = 1
if "g" in lvl:
axs.uget[un] = 1
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
flags[name] = value
@@ -625,7 +642,7 @@ class AuthSrv(object):
if self.args.v:
# list of src:dst:permset:permset:...
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
# permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
for v_str in self.args.v:
m = re_vol.match(v_str)
if not m:
@@ -692,20 +709,21 @@ class AuthSrv(object):
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
for perm in "read write move del".split():
for perm in "read write move del get".split():
axs_key = "u" + perm
unames = ["*"] + list(acct.keys())
umap = {x: [] for x in unames}
for usr in unames:
for mp, vol in vfs.all_vols.items():
if usr in getattr(vol.axs, axs_key):
axs = getattr(vol.axs, axs_key)
if usr in axs or "*" in axs:
umap[usr].append(mp)
setattr(vfs, "a" + perm, umap)
all_users = {}
missing_users = {}
for axs in daxs.values():
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
for usr in d.keys():
all_users[usr] = 1
if usr != "*" and usr not in acct:
@@ -816,6 +834,11 @@ class AuthSrv(object):
if use:
vol.lim = lim
for vol in vfs.all_vols.values():
fk = vol.flags.get("fk")
if fk:
vol.flags["fk"] = int(fk) if fk is not True else 8
for vol in vfs.all_vols.values():
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
vol.flags["gz"] = False # def.pk
@@ -930,6 +953,7 @@ class AuthSrv(object):
[" write", "uwrite"],
[" move", "umove"],
["delete", "udel"],
[" get", "uget"],
]:
u = list(sorted(getattr(v.axs, attr).keys()))
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
@@ -997,10 +1021,10 @@ class AuthSrv(object):
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
m = "/{}: read({}) write({}) move({}) del({})"
m = "/{}: read({}) write({}) move({}) del({}) get({})"
for k, v in self.vfs.all_vols.items():
vc = v.axs
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))
flag_v = "v" in flags
flag_ln = "ln" in flags
@@ -1014,7 +1038,7 @@ class AuthSrv(object):
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False, False, False)
vn, _ = self.vfs.get(v, u, True, False, False, False, False)
except:
continue

View File

@@ -22,13 +22,12 @@ except:
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .bos import bos
from .authsrv import AuthSrv, Lim
from .authsrv import AuthSrv
from .szip import StreamZip
from .star import StreamTar
NO_CACHE = {"Cache-Control": "no-cache"}
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
class HttpCli(object):
@@ -54,7 +53,10 @@ class HttpCli(object):
self.bufsz = 1024 * 32
self.hint = None
self.absolute_urls = False
self.out_headers = {"Access-Control-Allow-Origin": "*"}
self.out_headers = {
"Access-Control-Allow-Origin": "*",
"Cache-Control": "no-store; max-age=0",
}
def log(self, msg, c=0):
ptn = self.asrv.re_pwd
@@ -213,6 +215,7 @@ class HttpCli(object):
self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname]
self.dvol = self.asrv.vfs.adel[self.uname]
self.gvol = self.asrv.vfs.aget[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
@@ -227,6 +230,9 @@ class HttpCli(object):
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
x = self.asrv.vfs.can_access(self.vpath, self.uname)
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
try:
if self.mode in ["GET", "HEAD"]:
return self.handle_get() and self.keepalive
@@ -351,8 +357,7 @@ class HttpCli(object):
).encode("utf-8", "replace")
if use302:
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
self.reply(html, status=302, headers=h)
self.reply(html, status=302, headers={"Location": "/" + vpath})
else:
self.reply(html, status=status)
@@ -378,12 +383,10 @@ class HttpCli(object):
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
x = self.asrv.vfs.can_access(self.vpath, self.uname)
self.can_read, self.can_write, self.can_move, self.can_delete = x
if not self.can_read and not self.can_write:
if not self.can_read and not self.can_write and not self.can_get:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(404)
return self.tx_404()
self.uparam["h"] = False
@@ -887,8 +890,12 @@ class HttpCli(object):
pwd = self.parser.require("cppwd", 64)
self.parser.drop()
dst = "/?h"
if self.vpath:
dst = "/" + quotep(self.vpath)
ck, msg = self.get_pwd_cookie(pwd)
html = self.j2("msg", h1=msg, h2='<a href="/?h">ack</a>', redir="/?h")
html = self.j2("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
@@ -1077,9 +1084,18 @@ class HttpCli(object):
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files:
vsuf = ""
if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey(
self.args.fk_salt,
abspath,
sz,
0 if ANYWIN else bos.stat(os.path.join(vfs.realpath, lfn)).st_ino,
)[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
@@ -1087,13 +1103,13 @@ class HttpCli(object):
"url": "{}://{}/{}".format(
"https" if self.tls else "http",
self.headers.get("host", "copyparty"),
vpath,
vpath + vsuf,
),
"sha512": sha512[:56],
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath,
"path": vpath + vsuf,
}
jmsg["files"].append(jpart)
@@ -1277,7 +1293,7 @@ class HttpCli(object):
break
if not editions:
raise Pebkac(404)
return self.tx_404()
#
# if-modified
@@ -1390,6 +1406,8 @@ class HttpCli(object):
if not is_compressed and "cache" not in self.uparam:
self.out_headers.update(NO_CACHE)
else:
self.out_headers.pop("Cache-Control")
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
@@ -1596,6 +1614,7 @@ class HttpCli(object):
html = self.j2(
"splash",
this=self,
qvpath=quotep(self.vpath),
rvol=rvol,
wvol=wvol,
avol=avol,
@@ -1606,7 +1625,13 @@ class HttpCli(object):
mtpq=vs["mtpq"],
url_suf=suf,
)
self.reply(html.encode("utf-8"), headers=NO_STORE)
self.reply(html.encode("utf-8"))
return True
def tx_404(self):
m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
self.reply(html.encode("utf-8"), status=404)
return True
def scanvol(self):
@@ -1782,15 +1807,15 @@ class HttpCli(object):
try:
st = bos.stat(abspath)
except:
raise Pebkac(404)
return self.tx_404()
if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
):
raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
if self.can_read:
if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
):
raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
th_fmt = self.uparam.get("th")
if th_fmt is not None:
if is_dir:
@@ -1815,11 +1840,23 @@ class HttpCli(object):
return self.tx_ico(rem)
if not is_dir:
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
if not is_dir and (self.can_read or self.can_get):
if not self.can_read and "fk" in vn.flags:
correct = gen_filekey(
self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
)[: vn.flags["fk"]]
got = self.uparam.get("k")
if got != correct:
self.log("wrong filekey, want {}, got {}".format(correct, got))
return self.tx_404()
return self.tx_file(abspath)
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
return self.tx_file(abspath)
elif is_dir and not self.can_read and not self.can_write:
return self.tx_404()
srv_info = []
@@ -1859,6 +1896,8 @@ class HttpCli(object):
perms.append("move")
if self.can_delete:
perms.append("delete")
if self.can_get:
perms.append("get")
url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam
@@ -1918,18 +1957,17 @@ class HttpCli(object):
if not self.can_read:
if is_ls:
ret = json.dumps(ls_ret)
self.reply(
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
if not stat.S_ISDIR(st.st_mode):
raise Pebkac(404)
return self.tx_404()
if "zip" in self.uparam or "tar" in self.uparam:
raise Pebkac(403)
html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
self.reply(html.encode("utf-8", "replace"))
return True
for k in ["zip", "tar"]:
@@ -1973,6 +2011,8 @@ class HttpCli(object):
idx = self.conn.get_u2idx()
icur = idx.get_cur(dbv.realpath)
add_fk = vn.flags.get("fk")
dirs = []
files = []
for fn in vfs_ls:
@@ -2018,9 +2058,19 @@ class HttpCli(object):
except:
ext = "%"
if add_fk:
href = "{}?k={}".format(
quotep(href),
gen_filekey(
self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
)[:add_fk],
)
else:
href = quotep(href)
item = {
"lead": margin,
"href": quotep(href),
"href": href,
"name": fn,
"sz": sz,
"ext": ext,
@@ -2088,11 +2138,7 @@ class HttpCli(object):
ls_ret["files"] = files
ls_ret["taglist"] = taglist
ret = json.dumps(ls_ret)
self.reply(
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
j2a["files"] = dirs + files
@@ -2106,5 +2152,5 @@ class HttpCli(object):
j2a["css"] = self.args.css_browser
html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
self.reply(html.encode("utf-8", "replace"))
return True

View File

@@ -7,8 +7,8 @@ import time
import threading
from datetime import datetime
from .__init__ import unicode
from .util import s3dec, Pebkac, min_ex
from .__init__ import ANYWIN, unicode
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey
from .bos import bos
from .up2k import up2k_wark_from_hashlist
@@ -242,6 +242,7 @@ class U2idx(object):
self.active_cur = cur
sret = []
fk = flags.get("fk")
c = cur.execute(q, v)
for hit in c:
w, ts, sz, rd, fn, ip, at = hit
@@ -252,6 +253,20 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
if fk:
try:
ap = absreal(os.path.join(ptop, rd, fn))
inf = bos.stat(ap)
except:
continue
fn += (
"?k="
+ gen_filekey(
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
)[:fk]
)
rp = "/".join([x for x in [vtop, rd, fn] if x])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

View File

@@ -236,7 +236,7 @@ class Up2k(object):
if vp:
fvp = "{}/{}".format(vp, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp, True)
nrm += 1
if nrm:
@@ -838,6 +838,7 @@ class Up2k(object):
cur.connection.commit()
if n_done:
self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
cur.execute("vacuum")
wcur.close()
@@ -1384,7 +1385,7 @@ class Up2k(object):
ok = {}
ng = {}
for vp in vpaths:
a, b, c = self._handle_rm(uname, ip, vp)
a, b, c = self._handle_rm(uname, ip, vp, False)
n_files += a
for k in b:
ok[k] = 1
@@ -1397,7 +1398,7 @@ class Up2k(object):
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def _handle_rm(self, uname, ip, vpath):
def _handle_rm(self, uname, ip, vpath, rm_topdir):
try:
permsets = [[True, False, False, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
@@ -1466,7 +1467,7 @@ class Up2k(object):
bos.unlink(abspath)
rm = rmdirs(self.log_func, scandir, True, atop)
rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0)
return n_files, rm[0], rm[1]
def handle_mv(self, uname, svp, dvp):
@@ -1508,7 +1509,7 @@ class Up2k(object):
with self.mutex:
self._mv_file(uname, svpf, dvpf)
rmdirs(self.log_func, scandir, True, sabs)
rmdirs(self.log_func, scandir, True, sabs, 1)
return "k"
def _mv_file(self, uname, svp, dvp):

View File

@@ -19,7 +19,7 @@ import subprocess as sp # nosec
from datetime import datetime
from collections import Counter
from .__init__ import PY2, WINDOWS, ANYWIN, VT100
from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
from .stolen import surrogateescape
FAKE_MP = False
@@ -169,7 +169,7 @@ class Cooldown(object):
return ret
class Unrecv(object):
class _Unrecv(object):
"""
undo any number of socket recv ops
"""
@@ -189,10 +189,68 @@ class Unrecv(object):
except:
return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf):
self.buf = buf + self.buf
class _LUnrecv(object):
"""
with expensive debug logging
"""
def __init__(self, s):
self.s = s
self.buf = b""
def recv(self, nbytes):
if self.buf:
ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:]
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(ret, self.buf), end="")
return ret
try:
ret = self.s.recv(nbytes)
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
print(m.format(ret), end="")
return ret
except:
return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf):
self.buf = buf + self.buf
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(buf, self.buf), end="")
Unrecv = _Unrecv
class ProgressPrinter(threading.Thread):
"""
periodically print progress info without linefeeds
@@ -587,19 +645,21 @@ class MultipartParser(object):
yields [fieldname, unsanitized_filename, fieldvalue]
where fieldvalue yields chunks of data
"""
while True:
run = True
while run:
fieldname, filename = self._read_header()
yield [fieldname, filename, self._read_data()]
tail = self.sr.recv(2)
tail = self.sr.recv_ex(2)
if tail == b"--":
# EOF indicated by this immediately after final boundary
self.sr.recv(2)
return
tail = self.sr.recv_ex(2)
run = False
if tail != b"\r\n":
raise Pebkac(400, "protocol error after field value")
m = "protocol error after field value: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(tail))
def _read_value(self, iterator, max_len):
ret = b""
@@ -685,6 +745,14 @@ def read_header(sr):
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def gen_filekey(salt, fspath, fsize, inode):
return base64.urlsafe_b64encode(
hashlib.sha512(
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
).digest()
).decode("ascii")
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
@@ -985,8 +1053,12 @@ def read_socket_chunked(sr, log=None):
raise Pebkac(400, err)
if chunklen == 0:
sr.recv(2) # \r\n after final chunk
return
x = sr.recv_ex(2)
if x == b"\r\n":
return
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
if log:
log("receiving {} byte chunk".format(chunklen))
@@ -994,7 +1066,10 @@ def read_socket_chunked(sr, log=None):
for chunk in read_socket(sr, chunklen):
yield chunk
sr.recv(2) # \r\n after each chunk too
x = sr.recv_ex(2)
if x != b"\r\n":
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
def yieldfile(fn):
@@ -1089,7 +1164,7 @@ def statdir(logger, scandir, lstat, top):
logger(src, "{} @ {}".format(repr(ex), top), 1)
def rmdirs(logger, scandir, lstat, top):
def rmdirs(logger, scandir, lstat, top, depth):
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
top = os.path.dirname(top)
@@ -1099,15 +1174,16 @@ def rmdirs(logger, scandir, lstat, top):
ok = []
ng = []
for d in dirs[::-1]:
a, b = rmdirs(logger, scandir, lstat, d)
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
ok += a
ng += b
try:
os.rmdir(fsenc(top))
ok.append(top)
except:
ng.append(top)
if depth:
try:
os.rmdir(fsenc(top))
ok.append(top)
except:
ng.append(top)
return ok, ng

View File

@@ -1,6 +1,7 @@
:root {
--grid-sz: 10em;
--grid-ln: 3;
--nav-sz: 16em;
}
* {
line-height: 1.2em;
@@ -173,6 +174,9 @@ a, #files tbody div a:last-child {
#epi.logue {
margin: .8em 0;
}
.mdo {
max-width: 52em;
}
.mdo,
.mdo * {
line-height: 1.4em;
@@ -568,7 +572,7 @@ html.light #wfm a:not(.en) {
padding: .5em;
border-radius: 0 1em 1em 0;
border-width: .15em .3em .3em 0;
max-width: 40em;
max-width: 41em;
}
.opbox input {
margin: .5em;
@@ -699,6 +703,7 @@ input.eq_gain {
left: 0;
bottom: 0;
top: 7em;
width: var(--nav-sz);
overflow-x: hidden;
overflow-y: auto;
-ms-scroll-chaining: none;
@@ -753,6 +758,9 @@ input.eq_gain {
color: #400;
text-shadow: none;
}
.tgl.btn.on:hover {
background: #fe8;
}
#detree {
padding: .3em .5em;
font-size: 1.5em;
@@ -788,6 +796,19 @@ input.eq_gain {
width: calc(100% - 2em);
line-height: 1em;
}
#tree.nowrap #treeul li {
min-height: 1.4em;
white-space: nowrap;
}
#tree.nowrap #treeul a+a:hover {
background: rgba(34, 34, 34, 0.67);
min-width: calc(var(--nav-sz) - 2em);
width: auto;
}
html.light #tree.nowrap #treeul a+a:hover {
background: rgba(255, 255, 255, 0.67);
color: #000;
}
#treeul a+a:hover {
background: #222;
color: #fff;
@@ -879,6 +900,9 @@ input.eq_gain {
#u2turbo.on+#u2tdate {
opacity: 1;
}
#wraptree.on+#hovertree {
display: none;
}
#ghead {
background: #3c3c3c;
border: 1px solid #444;
@@ -1113,6 +1137,9 @@ html.light .tgl.btn.on {
background: #4a0;
color: #fff;
}
html.light .tgl.btn.on:hover {
background: #5c0;
}
html.light #srv_info {
color: #c83;
background: #eee;
@@ -1138,7 +1165,7 @@ html.light #treeul a.hl:hover {
html.light #tree li {
border-color: #f7f7f7 #fff #ddd #fff;
}
html.light #tree a:hover {
html.light #treeul a:hover {
background: #fff;
}
html.light #tree ul {
@@ -1648,6 +1675,9 @@ html.light #bbox-overlay figcaption a {
padding: .5em;
font-size: .9em;
}
html.light #u2err.err {
color: #f07;
}
#u2btn {
color: #eee;
background: #555;

View File

@@ -9,7 +9,7 @@ function dbg(msg) {
// toolbar
ebi('ops').innerHTML = (
'<a href="#" data-dest="" tt="close submenu">---</a>\n' +
'<a href="#" data-dest="" tt="close submenu">--</a>\n' +
(have_up2k_idx ? (
'<a href="#" data-perm="read" data-dest="search" tt="search for files by attributes, path/name, music tags, or any combination of those.$N$N&lt;code&gt;foo bar&lt;/code&gt; = must contain both foo and bar,$N&lt;code&gt;foo -bar&lt;/code&gt; = must contain foo but not bar,$N&lt;code&gt;^yana .opus$&lt;/code&gt; = must start with yana and have the opus extension">🔎</a>\n' +
(have_del && have_unpost ? '<a href="#" data-dest="unpost" tt="unpost: delete your recent uploads">🧯</a>\n' : '') +
@@ -179,10 +179,12 @@ ebi('op_cfg').innerHTML = (
ebi('tree').innerHTML = (
'<div id="treeh">\n' +
' <a href="#" id="detree" tt="show breadcrumbs$NHotkey: B">🍞...</a>\n' +
' <a href="#" class="btn" step="2" id="twobytwo" tt="Hotkey: A">+</a>\n' +
' <a href="#" class="btn" step="-2" id="twig" tt="Hotkey: D">&ndash;</a>\n' +
' <a href="#" class="btn" step="2" id="twobytwo" tt="Hotkey: D">+</a>\n' +
' <a href="#" class="btn" step="-2" id="twig" tt="Hotkey: A">&ndash;</a>\n' +
' <a href="#" class="btn" id="visdir" tt="scroll to selected folder">🎯</a>\n' +
' <a href="#" class="tgl btn" id="dyntree" tt="autogrow as tree expands">a</a>\n' +
' <a href="#" class="btn" id="visdir" tt="scroll to selected folder">v</a>\n' +
' <a href="#" class="tgl btn" id="wraptree" tt="word wrap"></a>\n' +
' <a href="#" class="tgl btn" id="hovertree" tt="reveal overflowing lines on hover$N( breaks scrolling unless mouse $N&nbsp; cursor is in the left gutter )">👀</a>\n' +
'</div>\n' +
'<ul id="treeul"></ul>\n' +
'<div id="thx_ff">&nbsp;</div>'
@@ -199,6 +201,9 @@ ebi('tree').innerHTML = (
function opclick(e) {
var dest = this.getAttribute('data-dest');
if (QS('#op_' + dest + '.act'))
dest = '';
swrite('opmode', dest || null);
if (ctrl(e))
return;
@@ -224,9 +229,22 @@ function goto(dest) {
clmod(obj[a], 'act');
if (dest) {
var ui = ebi('op_' + dest);
var ui = ebi('op_' + dest),
lnk = QS('#ops>a[data-dest=' + dest + ']'),
nps = lnk.getAttribute('data-perm');
nps = nps && nps.length ? nps.split(' ') : [];
if (perms.length)
for (var a = 0; a < nps.length; a++)
if (!has(perms, nps[a]))
return;
if (!has(perms, 'read') && !has(perms, 'write') && (dest == 'up2k'))
return;
clmod(ui, 'act', true);
QS('#ops>a[data-dest=' + dest + ']').className += " act";
lnk.className += " act";
var fn = window['goto_' + dest];
if (fn)
@@ -275,24 +293,13 @@ var mpl = (function () {
var r = {
"pb_mode": sread('pb_mode') || 'loop-folder',
"preload": bcfg_get('au_preload', true),
"clip": bcfg_get('au_npclip', false),
"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
"osd_cv": bcfg_get('au_osd_cv', true),
};
ebi('au_preload').onclick = function (e) {
ev(e);
r.preload = !r.preload;
bcfg_set('au_preload', r.preload);
};
ebi('au_npclip').onclick = function (e) {
ev(e);
r.clip = !r.clip;
bcfg_set('au_npclip', r.clip);
clmod(ebi('wtoggle'), 'np', r.clip && mp.au);
};
bcfg_bind(r, 'preload', 'au_preload', true);
bcfg_bind(r, 'osd_cv', 'au_osd_cv', true);
bcfg_bind(r, 'clip', 'au_npclip', false, function (v) {
clmod(ebi('wtoggle'), 'np', v && mp.au);
});
ebi('au_os_ctl').onclick = function (e) {
ev(e);
@@ -302,12 +309,6 @@ var mpl = (function () {
toast.err(5, 'need firefox 82+ or chrome 73+\n(or iOS 15+ supposedly)');
};
ebi('au_osd_cv').onclick = function (e) {
ev(e);
r.osd_cv = !r.osd_cv;
bcfg_set('au_osd_cv', r.osd_cv);
};
function draw_pb_mode() {
var btns = QSA('#pb_mode>a');
for (var a = 0, aa = btns.length; a < aa; a++) {
@@ -1063,6 +1064,33 @@ var audio_eq = (function () {
"last_au": null
};
// some browsers have insane high-frequency boost
// (or rather the actual problem is Q but close enough)
r.cali = (function () {
try {
var ac = new AudioContext(),
fi = ac.createBiquadFilter(),
freqs = new Float32Array(1),
mag = new Float32Array(1),
phase = new Float32Array(1);
freqs[0] = 14000;
fi.type = 'peaking';
fi.frequency.value = 18000;
fi.Q.value = 0.8;
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
return mag[0]; // 1.0407 good, 1.0563 bad
}
catch (ex) {
return 0;
}
})();
console.log('eq cali: ' + r.cali);
var e1 = r.cali < 1.05;
var cfg = [ // hz, q, g
[31.25 * 0.88, 0, 1.4], // shelf
[31.25 * 1.04, 0.7, 0.96], // peak
@@ -1073,10 +1101,10 @@ var audio_eq = (function () {
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
[8000 * 1.006, 0.73, e1 ? 1.24 : 1.2],
[16000 * 0.89, 0.7, e1 ? 1.26 : 1.2], // peak
[16000 * 1.13, 0.82, e1 ? 1.09 : 0.75], // peak
[16000 * 1.205, 0, e1 ? 1.9 : 1.85] // shelf
];
try {
@@ -1253,13 +1281,7 @@ var audio_eq = (function () {
txt[a].onkeydown = eq_keydown;
}
r.en = bcfg_get('au_eq', false);
ebi('au_eq').onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('au_eq', r.en);
r.apply();
};
bcfg_bind(r, 'en', 'au_eq', false, r.apply);
r.draw();
return r;
@@ -1494,6 +1516,7 @@ function autoplay_blocked(seek) {
go.textContent = 'Play "' + fn + '"';
go.onclick = function (e) {
unblocked(e);
toast.hide();
if (mp.au !== mp.au_ogvjs)
// chrome 91 may permanently taint on a failed play()
// depending on win10 settings or something? idk
@@ -1570,7 +1593,7 @@ function sortfiles(nodes) {
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v) {
if (v.slice(-1) == '/')
if (v.split('?')[0].slice(-1) == '/')
v = '\t' + v;
v = uricom_dec(v)[0]
@@ -1881,12 +1904,11 @@ var fileman = (function () {
rn_reset(0);
tt.att(rui);
var adv = bcfg_get('rn_adv', false),
cs = bcfg_get('rn_case', false);
function sadv() {
ebi('rn_vadv').style.display = ebi('rn_case').style.display = adv ? '' : 'none';
ebi('rn_vadv').style.display = ebi('rn_case').style.display = r.adv ? '' : 'none';
}
bcfg_bind(r, 'adv', 'rn_adv', false, sadv);
bcfg_bind(r, 'cs', 'rn_case', false);
sadv();
function rn_ok(n, ok) {
@@ -1906,17 +1928,6 @@ var fileman = (function () {
ebi('rn_cancel').onclick = rn_cancel;
ebi('rn_apply').onclick = rn_apply;
ebi('rn_adv').onclick = function (e) {
ev(e);
adv = !adv;
bcfg_set('rn_adv', adv);
sadv();
};
ebi('rn_case').onclick = function (e) {
ev(e);
cs = !cs;
bcfg_set('rn_case', cs);
};
var ire = ebi('rn_re'),
ifmt = ebi('rn_fmt'),
@@ -1986,7 +1997,7 @@ var fileman = (function () {
try {
if (ptn)
re = new RegExp(ptn, cs ? 'i' : '');
re = new RegExp(ptn, r.cs ? 'i' : '');
}
catch (ex) {
return toast.err(5, esc('invalid regex:\n' + ex));
@@ -2254,36 +2265,12 @@ var thegrid = (function () {
lfiles.parentNode.insertBefore(gfiles, lfiles);
var r = {
'thumbs': bcfg_get('thumbs', true),
'en': bcfg_get('griden', false),
'sel': bcfg_get('gridsel', false),
'sz': clamp(fcfg_get('gridsz', 10), 4, 40),
'ln': clamp(icfg_get('gridln', 3), 1, 7),
'isdirty': true,
'bbox': null
};
ebi('thumbs').onclick = function (e) {
ev(e);
r.thumbs = !r.thumbs;
bcfg_set('thumbs', r.thumbs);
r.setdirty();
};
ebi('griden').onclick = ebi('wtgrid').onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('griden', r.en);
if (r.en) {
loadgrid();
}
else {
ungrid();
}
pbar.onresize();
vbar.onresize();
};
var btnclick = function (e) {
ev(e);
var s = this.getAttribute('s'),
@@ -2310,12 +2297,14 @@ var thegrid = (function () {
for (var a = 0; a < links.length; a++)
links[a].onclick = btnclick;
ebi('gridsel').onclick = function (e) {
ev(e);
r.sel = !r.sel;
bcfg_set('gridsel', r.sel);
r.loadsel();
};
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
bcfg_bind(r, 'en', 'griden', false, function (v) {
v ? loadgrid() : ungrid();
pbar.onresize();
vbar.onresize();
});
ebi('wtgrid').onclick = ebi('griden').onclick;
r.setvis = function (vis) {
(r.en ? gfiles : lfiles).style.display = vis ? '' : 'none';
@@ -2834,14 +2823,15 @@ document.onkeydown = function (e) {
clearTimeout(defer_timeout);
defer_timeout = setTimeout(try_search, 2000);
try_search();
try_search(v);
}
function try_search() {
function try_search(v) {
if (Date.now() - search_in_progress > 30 * 1000) {
clearTimeout(defer_timeout);
clearTimeout(search_timeout);
search_timeout = setTimeout(do_search, 200);
search_timeout = setTimeout(do_search,
v && v.length < (is_touch ? 4 : 3) ? 600 : 200);
}
}
@@ -3019,16 +3009,27 @@ var treectl = (function () {
"hidden": true,
"ls_cb": null,
"dir_cb": tree_scrollto,
"ireadme": bcfg_get('ireadme', true)
},
entreed = false,
fixedpos = false,
prev_atop = null,
prev_winh = null,
dyn = bcfg_get('dyntree', true),
dots = bcfg_get('dotfiles', false),
mentered = null,
treesz = clamp(icfg_get('treesz', 16), 4, 50);
bcfg_bind(treectl, 'ireadme', 'ireadme', true);
bcfg_bind(treectl, 'dyn', 'dyntree', true, onresize);
bcfg_bind(treectl, 'dots', 'dotfiles', false, function (v) {
treectl.goto(get_evpath());
});
setwrap(bcfg_bind(treectl, 'wtree', 'wraptree', true, setwrap));
bcfg_bind(treectl, 'htree', 'hovertree', true, reload_tree);
function setwrap(v) {
clmod(ebi('tree'), 'nowrap', !v);
reload_tree();
}
treectl.entree = function (e) {
ev(e);
entreed = true;
@@ -3069,7 +3070,19 @@ var treectl = (function () {
window.removeEventListener('scroll', onscroll);
}
function unmenter() {
if (mentered) {
mentered.style.position = '';
mentered = null;
}
}
function onscroll() {
unmenter();
onscroll2();
}
function onscroll2() {
if (!entreed || treectl.hidden || document.visibilityState == 'hidden')
return;
@@ -3106,7 +3119,7 @@ var treectl = (function () {
tree.style.height = treeh < 10 ? '' : treeh + 'px';
}
}
timer.add(onscroll, true);
timer.add(onscroll2, true);
function onresize(e) {
if (!entreed || treectl.hidden)
@@ -3115,15 +3128,20 @@ var treectl = (function () {
var q = '#tree',
nq = 0;
while (dyn) {
while (treectl.dyn) {
nq++;
q += '>ul>li';
if (!QS(q))
break;
}
var w = treesz + nq;
ebi('tree').style.width = w + 'em';
ebi('wrap').style.marginLeft = w + 'em';
var w = (treesz + nq) + 'em';
try {
document.documentElement.style.setProperty('--nav-sz', w);
}
catch (ex) {
ebi('tree').style.width = w;
}
ebi('wrap').style.marginLeft = w;
onscroll();
}
@@ -3138,7 +3156,7 @@ var treectl = (function () {
xhr.dst = dst;
xhr.rst = rst;
xhr.ts = Date.now();
xhr.open('GET', dst + '?tree=' + top + (dots ? '&dots' : ''), true);
xhr.open('GET', dst + '?tree=' + top + (treectl.dots ? '&dots' : ''), true);
xhr.onreadystatechange = recvtree;
xhr.send();
enspin('#tree');
@@ -3210,18 +3228,38 @@ var treectl = (function () {
function reload_tree() {
var cdir = get_evpath(),
links = QSA('#treeul a+a');
links = QSA('#treeul a+a'),
nowrap = QS('#tree.nowrap') && QS('#hovertree.on');
for (var a = 0, aa = links.length; a < aa; a++) {
var href = links[a].getAttribute('href');
links[a].setAttribute('class', href == cdir ? 'hl' : '');
links[a].onclick = treego;
links[a].onmouseenter = nowrap ? menter : null;
links[a].onmouseleave = nowrap ? mleave : null;
}
links = QSA('#treeul li>a:first-child');
for (var a = 0, aa = links.length; a < aa; a++) {
links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
links[a].onclick = treegrow;
}
ebi('tree').onscroll = nowrap ? unmenter : null;
}
function menter(e) {
var p = this.offsetParent,
pp = p.offsetParent,
ppy = pp.offsetTop,
y = this.offsetTop + p.offsetTop + ppy - p.scrollTop - pp.scrollTop - (ppy ? document.documentElement.scrollTop : 0);
this.style.top = y + 'px';
this.style.position = 'fixed';
mentered = this;
}
function mleave(e) {
this.style.position = '';
mentered = null;
}
function treego(e) {
@@ -3242,7 +3280,7 @@ var treectl = (function () {
xhr.top = url;
xhr.hpush = hpush;
xhr.ts = Date.now();
xhr.open('GET', xhr.top + '?ls' + (dots ? '&dots' : ''), true);
xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true);
xhr.onreadystatechange = recvls;
xhr.send();
if (hpush)
@@ -3300,8 +3338,9 @@ var treectl = (function () {
nodes = sortfiles(nodes);
for (var a = 0; a < nodes.length; a++) {
var r = nodes[a],
hname = esc(uricom_dec(r.href)[0]),
sortv = (r.href.slice(-1) == '/' ? '\t' : '') + hname,
bhref = r.href.split('?')[0],
hname = esc(uricom_dec(bhref)[0]),
sortv = (bhref.slice(-1) == '/' ? '\t' : '') + hname,
ln = ['<tr><td>' + r.lead + '</td><td sortv="' + sortv +
'"><a href="' + top + r.href + '">' + hname + '</a>', r.sz];
@@ -3342,9 +3381,7 @@ var treectl = (function () {
clmod(ebi('epi'), 'mdo');
if (res.readme)
setTimeout(function () {
show_readme(res.readme);
}, 10);
show_readme(res.readme);
document.title = '⇆🎉 ' + uricom_dec(document.location.pathname.slice(1, -1))[0];
@@ -3391,26 +3428,6 @@ var treectl = (function () {
return ret;
}
function tdots(e) {
ev(e);
dots = !dots;
bcfg_set('dotfiles', dots);
treectl.goto(get_evpath());
}
function treadme(e) {
ev(e);
treectl.ireadme = !treectl.ireadme;
bcfg_set('ireadme', treectl.ireadme);
}
function dyntree(e) {
ev(e);
dyn = !dyn;
bcfg_set('dyntree', dyn);
onresize();
}
function scaletree(e) {
ev(e);
treesz += parseInt(this.getAttribute("step"));
@@ -3424,9 +3441,6 @@ var treectl = (function () {
ebi('entree').onclick = treectl.entree;
ebi('detree').onclick = treectl.detree;
ebi('visdir').onclick = tree_scrollto;
ebi('dotfiles').onclick = tdots;
ebi('ireadme').onclick = treadme;
ebi('dyntree').onclick = dyntree;
ebi('twig').onclick = scaletree;
ebi('twobytwo').onclick = scaletree;
if (sread('entreed') == 'tree')
@@ -3472,7 +3486,7 @@ function apply_perms(newperms) {
var axs = [],
aclass = '>',
chk = ['read', 'write', 'move', 'delete'];
chk = ['read', 'write', 'move', 'delete', 'get'];
for (var a = 0; a < chk.length; a++)
if (has(perms, chk[a]))
@@ -3526,7 +3540,7 @@ function apply_perms(newperms) {
ebi('widget').style.display = have_read ? '' : 'none';
thegrid.setvis(have_read);
if (!have_read)
if (!have_read && have_write)
goto('up2k');
}
@@ -3852,8 +3866,6 @@ function addcrc() {
var light;
(function () {
light = bcfg_get('lightmode', false);
function freshen() {
clmod(document.documentElement, "light", light);
clmod(document.documentElement, "dark", !light);
@@ -3862,12 +3874,7 @@ var light;
vbar.draw();
}
ebi('lightmode').onclick = function (e) {
ev(e);
light = !light;
bcfg_set('lightmode', light);
freshen();
};
bcfg_bind(window, 'light', 'lightmode', false, freshen);
freshen();
})();
@@ -4140,7 +4147,7 @@ function ev_row_tgl(e) {
var unpost = (function () {
ebi('op_unpost').innerHTML = (
"you can delete your recent uploads below &ndash; click the fire-extinguisher icon to refresh" +
'you can delete your recent uploads below &ndash; <a id="unpost_refresh" href="#">refresh list</a>' +
'<p>optional filter:&nbsp; URL must contain <input type="text" id="unpost_filt" size="20" /><a id="unpost_nofilt" href="#">clear filter</a></p>' +
'<div id="unpost"></div>'
);
@@ -4285,6 +4292,11 @@ var unpost = (function () {
r.load();
};
ebi('unpost_refresh').onclick = function (e) {
ev(e);
goto('unpost');
};
return r;
})();

View File

@@ -212,6 +212,10 @@ blink {
#toolsbox a+a {
text-decoration: none;
}
#lno {
position: absolute;
right: 0;
}

View File

@@ -26,6 +26,7 @@
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
<a id="help" href="#">help</a>
</div>
<span id="lno">L#</span>
{%- else %}
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>

View File

@@ -267,7 +267,14 @@ function convert_markdown(md_text, dest_dom) {
throw ex;
}
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
var md_dom = dest_dom;
try {
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
}
catch (ex) {
md_dom.innerHTML = md_html;
window.copydom = noop;
}
var nodes = md_dom.getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) {
@@ -502,9 +509,11 @@ img_load.callbacks = [toc.refresh];
// scroll handler
var redraw = (function () {
var sbs = false;
var sbs = true;
function onresize() {
sbs = window.matchMedia('(min-width: 64em)').matches;
if (window.matchMedia)
sbs = window.matchMedia('(min-width: 64em)').matches;
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
if (sbs) {
dom_toc.style.top = y;

View File

@@ -230,44 +230,40 @@ redraw = (function () {
// modification checker
function Modpoll() {
this.skip_one = true;
this.disabled = false;
this.periodic = function () {
var that = this;
setTimeout(function () {
that.periodic();
}, 1000 * md_opt.modpoll_freq);
var r = {
skip_one: true,
disabled: false
};
r.periodic = function () {
var skip = null;
if (toast.visible)
skip = 'toast';
else if (this.skip_one)
else if (r.skip_one)
skip = 'saved';
else if (this.disabled)
else if (r.disabled)
skip = 'disabled';
if (skip) {
console.log('modpoll skip, ' + skip);
this.skip_one = false;
r.skip_one = false;
return;
}
console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
xhr.modpoll = this;
xhr.open('GET', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = this.cb;
xhr.onreadystatechange = r.cb;
xhr.send();
}
};
this.cb = function () {
if (this.modpoll.disabled || this.modpoll.skip_one) {
r.cb = function () {
if (r.disabled || r.skip_one) {
console.log('modpoll abort');
return;
}
@@ -288,7 +284,7 @@ function Modpoll() {
if (server_ref != server_now) {
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
this.modpoll.disabled = true;
r.disabled = true;
var msg = [
"The document has changed on the server.",
"The changes will NOT be loaded into your editor automatically.",
@@ -302,12 +298,12 @@ function Modpoll() {
}
console.log('modpoll eq');
}
};
if (md_opt.modpoll_freq > 0)
this.periodic();
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
return this;
return r;
}
var modpoll = new Modpoll();
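
The rewrite above drops the `this`-based Modpoll in favor of a closure: all state lives on a local `r`, so the interval callback and the XHR `onreadystatechange` handler can read `r.skip_one` / `r.disabled` directly instead of smuggling the instance through `xhr.modpoll` or fighting with what `this` points at inside timer and XHR callbacks. A minimal sketch of that pattern with hypothetical names, not the actual copyparty code:

    // factory returning a closure-bound "instance"; callbacks capture r, no .bind() needed
    function Poller(freq, onTick) {
        var r = { disabled: false };
        r.tick = function () {
            if (!r.disabled)
                onTick();          // r is captured lexically; `this` is irrelevant here
        };
        setInterval(r.tick, 1000 * freq);
        return r;
    }
    var p = Poller(5, function () { console.log('poll'); });
    // p.disabled = true;  // pause it at any time
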
@@ -879,6 +875,40 @@ function cfg_uni(e) {
}
var set_lno = (function () {
var t = null,
pi = null,
pv = null,
lno = ebi('lno');
function poke() {
clearTimeout(t);
t = setTimeout(fire, 20);
}
function fire() {
try {
clearTimeout(t);
var i = dom_src.selectionStart;
if (i === pi)
return;
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
if (v != pv)
lno.innerHTML = v;
pi = i;
pv = v;
}
catch (e) { }
}
timer.add(fire);
return poke;
})();
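
set_lno above is a small debounce (a 20 ms timeout, also flushed through the shared `timer`) around one line of arithmetic: the caret's line number is the count of newlines before `selectionStart` plus one, which is exactly what `split('\n').length` yields. A rough worked example of that step, assuming a plain textarea:

    // hypothetical textarea; same formula as set_lno above
    var ta = document.querySelector('textarea');
    var i = ta.selectionStart;                           // caret offset into the text
    var line = ta.value.slice(0, i).split('\n').length;
    // value "ab\ncd" with the caret after the "c" (i == 4): slice is "ab\nc" -> 2 parts -> line 2
    console.log('L' + line);                             // what the #lno badge ends up showing
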
// hotkeys / toolbar
(function () {
function keydown(ev) {
@@ -897,6 +927,8 @@ function cfg_uni(e) {
if (document.activeElement != dom_src)
return true;
set_lno();
if (ctrl(ev)) {
if (ev.code == "KeyH" || kc == 72) {
md_header(ev.shiftKey);

View File

@@ -45,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="/.cpr/mde.js?_={{ ts }}"></script>
</body></html>

View File

@@ -55,6 +55,16 @@ table {
.btns {
margin: 1em 0;
}
#msg {
margin: 3em 0;
}
#msg h1 {
margin-bottom: 0;
}
#msg h1 + p {
margin-top: .3em;
text-align: right;
}
html.dark,

View File

@@ -12,7 +12,17 @@
<body>
<div id="wrap">
<p>hello {{ this.uname }}</p>
{%- if this.uname == '*' %}
<p>howdy stranger &nbsp; <small>(you're not logged in)</small></p>
{%- else %}
<p>welcome back, <strong>{{ this.uname }}</strong></p>
{%- endif %}
{%- if msg %}
<div id="msg">
{{ msg }}
</div>
{%- endif %}
{%- if avol %}
<h1>admin panel:</h1>
@@ -60,7 +70,7 @@
<h1>login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/">
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />

View File

@@ -246,6 +246,17 @@ html.light #tt em {
#repl_pre {
max-width: 24em;
}
*:focus,
#pctl *:focus,
.btn:focus {
box-shadow: 0 .1em .2em #fc0 inset;
border-radius: .2em;
}
html.light *:focus,
html.light #pctl *:focus,
html.light .btn:focus {
box-shadow: 0 .1em .2em #037 inset;
}

View File

@@ -528,7 +528,7 @@ function up2k_init(subtle) {
got_deps = true;
}
if (perms.length && !has(perms, 'read'))
if (perms.length && !has(perms, 'read') && has(perms, 'write'))
goto('up2k');
function setmsg(msg, type) {
@@ -572,15 +572,17 @@ function up2k_init(subtle) {
}
var parallel_uploads = icfg_get('nthread'),
multitask = bcfg_get('multitask', true),
ask_up = bcfg_get('ask_up', true),
flag_en = bcfg_get('flag_en', false),
fsearch = bcfg_get('fsearch', false),
turbo = bcfg_get('u2turbo', false),
datechk = bcfg_get('u2tdate', true),
uc = {},
fdom_ctr = 0,
min_filebuf = 0;
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg, false);
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false);
bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false);
var st = {
"files": [],
"todo": {
@@ -715,13 +717,12 @@ function up2k_init(subtle) {
offdrag.bind(this)();
var dz = (this && this.getAttribute('id'));
if ((dz == 'up_dz' && fsearch) || (dz == 'srch_dz' && !fsearch)) {
var err = this.getAttribute('err');
if (err)
return modal.alert('sorry, ' + err);
var err = this.getAttribute('err');
if (err)
return modal.alert('sorry, ' + err);
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
tgl_fsearch();
}
if (!QS('#op_up2k.act'))
goto('up2k');
@@ -884,11 +885,11 @@ function up2k_init(subtle) {
return a < b ? -1 : a > b ? 1 : 0;
});
var msg = ['{0} these {1} files?<ul>'.format(fsearch ? 'search' : 'upload', good_files.length)];
var msg = ['{0} these {1} files?<ul>'.format(uc.fsearch ? 'search' : 'upload', good_files.length)];
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
if (ask_up && !fsearch)
if (uc.ask_up && !uc.fsearch)
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null);
up_them(good_files);
@@ -928,7 +929,7 @@ function up2k_init(subtle) {
},
key = entry.name + '\n' + entry.size;
if (fsearch)
if (uc.fsearch)
entry.srch = 1;
if (seen[key])
@@ -937,7 +938,7 @@ function up2k_init(subtle) {
seen[key] = 1;
pvis.addfile([
fsearch ? esc(entry.name) : linksplit(
uc.fsearch ? esc(entry.name) : linksplit(
uricom_dec(entry.purl)[0] + entry.name).join(' '),
'📐 hash',
''
@@ -945,7 +946,7 @@ function up2k_init(subtle) {
st.bytes.total += fobj.size;
st.files.push(entry);
if (turbo)
if (uc.turbo)
push_t(st.todo.head, entry);
else
push_t(st.todo.hash, entry);
@@ -1020,14 +1021,14 @@ function up2k_init(subtle) {
if (nhash) {
st.time.hashing += td;
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
if (fsearch)
if (uc.fsearch)
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
}
if (nsend) {
st.time.uploading += td;
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
}
if ((nhash || nsend) && !fsearch) {
if ((nhash || nsend) && !uc.fsearch) {
if (!st.bytes.finished) {
ebi('u2etat').innerHTML = '(preparing to upload)';
}
@@ -1085,7 +1086,7 @@ function up2k_init(subtle) {
if (st.files[n].t_uploading)
return false;
if ((multitask ? 1 : 0) <
if ((uc.multitask ? 1 : 0) <
st.todo.upload.length +
st.busy.upload.length)
return false;
@@ -1097,7 +1098,7 @@ function up2k_init(subtle) {
if (!parallel_uploads)
return false;
if (multitask) {
if (uc.multitask) {
var ahead = st.bytes.hashed - st.bytes.finished;
return ahead < 1024 * 1024 * 1024 * 4 &&
st.todo.handshake.length + st.busy.handshake.length < 16;
@@ -1144,13 +1145,13 @@ function up2k_init(subtle) {
"EventListener"]("beforeunload", warn_uploader_busy);
if (!is_busy) {
var k = fsearch ? 'searches' : 'uploads',
ks = fsearch ? 'Search' : 'Upload',
tok = fsearch ? 'successful (found on server)' : 'completed successfully',
tng = fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
var k = uc.fsearch ? 'searches' : 'uploads',
ks = uc.fsearch ? 'Search' : 'Upload',
tok = uc.fsearch ? 'successful (found on server)' : 'completed successfully',
tng = uc.fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
ok = pvis.ctr["ok"],
ng = pvis.ctr["ng"],
t = ask_up ? 0 : 10;
t = uc.ask_up ? 0 : 10;
if (ok && ng)
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng));
@@ -1454,7 +1455,7 @@ function up2k_init(subtle) {
srv_ts = xhr.getResponseHeader('Last-Modified');
ok = t.size == srv_sz;
if (ok && datechk) {
if (ok && uc.datechk) {
srv_ts = new Date(srv_ts) / 1000;
ok = Math.abs(srv_ts - t.lmod) < 2;
}
@@ -1657,7 +1658,7 @@ function up2k_init(subtle) {
err = rsp;
ofs = err.indexOf('\n/');
if (ofs !== -1) {
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2)).join(' ');
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
}
}
if (err != "") {
@@ -1851,42 +1852,21 @@ function up2k_init(subtle) {
bumpthread({ "target": 1 })
}
function tgl_multitask() {
multitask = !multitask;
bcfg_set('multitask', multitask);
}
function tgl_ask_up() {
ask_up = !ask_up;
bcfg_set('ask_up', ask_up);
}
function tgl_fsearch() {
set_fsearch(!fsearch);
}
function tgl_turbo() {
turbo = !turbo;
bcfg_set('u2turbo', turbo);
draw_turbo();
}
function tgl_datechk() {
datechk = !datechk;
bcfg_set('u2tdate', datechk);
set_fsearch(!uc.fsearch);
}
function draw_turbo() {
var msgu = '<p class="warn">WARNING: turbo enabled, <span>&nbsp;client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
msgs = '<p class="warn">WARNING: turbo enabled, <span>&nbsp;search results can be incorrect; see turbo-button tooltip</span></p>',
msg = fsearch ? msgs : msgu,
omsg = fsearch ? msgu : msgs,
msg = uc.fsearch ? msgs : msgu,
omsg = uc.fsearch ? msgu : msgs,
html = ebi('u2foot').innerHTML,
ohtml = html;
if (turbo && html.indexOf(msg) === -1)
if (uc.turbo && html.indexOf(msg) === -1)
html = html.replace(omsg, '') + msg;
else if (!turbo)
else if (!uc.turbo)
html = html.replace(msgu, '').replace(msgs, '');
if (html !== ohtml)
@@ -1912,8 +1892,8 @@ function up2k_init(subtle) {
}
if (new_state !== undefined) {
fsearch = new_state;
bcfg_set('fsearch', fsearch);
uc.fsearch = new_state;
bcfg_set('fsearch', uc.fsearch);
}
try {
@@ -1922,10 +1902,10 @@ function up2k_init(subtle) {
catch (ex) { }
try {
var ico = fsearch ? '🔎' : '🚀',
desc = fsearch ? 'Search' : 'Upload';
var ico = uc.fsearch ? '🔎' : '🚀',
desc = uc.fsearch ? 'Search' : 'Upload';
clmod(ebi('op_up2k'), 'srch', fsearch);
clmod(ebi('op_up2k'), 'srch', uc.fsearch);
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
}
catch (ex) { }
@@ -1934,14 +1914,8 @@ function up2k_init(subtle) {
onresize();
}
function tgl_flag_en() {
flag_en = !flag_en;
bcfg_set('flag_en', flag_en);
apply_flag_cfg();
}
function apply_flag_cfg() {
if (flag_en && !flag) {
if (uc.flag_en && !flag) {
try {
flag = up2k_flagbus();
}
@@ -1950,7 +1924,7 @@ function up2k_init(subtle) {
tgl_flag_en();
}
}
else if (!flag_en && flag) {
else if (!uc.flag_en && flag) {
if (flag.ours)
flag.give();
@@ -1975,14 +1949,6 @@ function up2k_init(subtle) {
ebi('nthread').onkeydown = bumpthread2;
ebi('nthread').oninput = bumpthread;
ebi('multitask').onclick = tgl_multitask;
ebi('ask_up').onclick = tgl_ask_up;
ebi('flag_en').onclick = tgl_flag_en;
ebi('u2turbo').onclick = tgl_turbo;
ebi('u2tdate').onclick = tgl_datechk;
var o = ebi('fsearch');
if (o)
o.onclick = tgl_fsearch;
ebi('u2etas').onclick = function (e) {
ev(e);

View File

@@ -30,7 +30,16 @@ function esc(txt) {
});
}
window.onunhandledrejection = function (e) {
console.log("REJ: " + e.reason);
var err = e.reason;
try {
err += '\n' + e.reason.stack;
}
catch (e) { }
console.log("REJ: " + err);
try {
toast.warn(30, err);
}
catch (e) { }
};
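
The handler above turns unhandled promise rejections into a console line plus a 30-second warning toast, appending `.stack` when the rejection value is an Error (both wrapped in try/catch so a broken reason or a missing toast module cannot take the handler down with it). A quick way to see it fire, assuming a browser that supports `onunhandledrejection`:

    // any rejection that nobody catches will route through the handler above
    Promise.reject(new Error('demo rejection'));
    // e.reason is the Error, so the toast text is the message followed by its stack trace
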
try {
console.hist = [];
@@ -160,6 +169,9 @@ function ignex(all) {
}
function noop() { }
function ctrl(e) {
return e && (e.ctrlKey || e.metaKey);
}
@@ -185,36 +197,40 @@ function ev(e) {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
if (!String.prototype.endsWith)
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
if (!String.startsWith) {
if (!String.startsWith)
String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s;
};
}
if (!Element.prototype.matches) {
if (!String.trimEnd)
String.prototype.trimEnd = String.prototype.trimRight = function () {
return this.replace(/[ \t\r\n]+$/m, '');
};
if (!Element.prototype.matches)
Element.prototype.matches =
Element.prototype.oMatchesSelector ||
Element.prototype.msMatchesSelector ||
Element.prototype.mozMatchesSelector ||
Element.prototype.webkitMatchesSelector;
}
if (!Element.prototype.closest) {
if (!Element.prototype.closest)
Element.prototype.closest = function (s) {
var el = this;
do {
if (el.matches(s)) return el;
el = el.parentElement || el.parentNode;
} while (el !== null && el.nodeType === 1);
}
}
};
// https://stackoverflow.com/a/950146
@@ -361,8 +377,16 @@ function makeSortable(table, cb) {
function linksplit(rp) {
var ret = [];
var apath = '/';
var ret = [],
apath = '/',
q = null;
if (rp && rp.indexOf('?') + 1) {
q = rp.split('?', 2);
rp = q[0];
q = '?' + q[1];
}
if (rp && rp.charAt(0) == '/')
rp = rp.slice(1);
@@ -384,6 +408,9 @@ function linksplit(rp) {
elink = elink.slice(0, -3) + '/';
}
if (!rp && q)
elink += q;
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
apath += elink;
}
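
The query handling added to linksplit above looks like the fix for filekeys leaking into breadcrumb names: a `?k=...` style suffix is split off before the path is broken into per-folder links, then glued back onto the very last component only. A rough illustration of the assumed behavior, with a made-up key:

    // hypothetical call; the key should survive only on the final (file) link
    linksplit('/music/album/track.opus?k=abc123').join(' ');
    // -> three anchors: /music/, /music/album/, /music/album/track.opus?k=abc123
    //    only the href of the last one carries the key, so the folder names stay clean
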
@@ -620,6 +647,24 @@ function bcfg_upd_ui(name, val) {
}
}
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
var v = bcfg_get(cname, defval),
el = ebi(cname);
obj[oname] = v;
if (el)
el.onclick = function (e) {
if (un_ev !== false)
ev(e);
obj[oname] = bcfg_set(cname, !obj[oname]);
if (cb)
cb(obj[oname]);
};
return v;
}
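
bcfg_bind above is the DRY helper the rest of this diff leans on: it reads the persisted boolean, mirrors it onto a state object, and wires the element whose id matches the config name so a click flips the value, saves it, and invokes the optional callback; passing `false` as the last argument skips the `ev(e)` call. A hedged usage sketch in the spirit of the call sites above:

    // 'lightmode' is assumed to be the id of a toggle button; cfg is any state object
    var cfg = {};
    bcfg_bind(cfg, 'light', 'lightmode', false, function (v) {
        console.log('lightmode is now', v);   // cb runs with the freshly toggled value
    });
    // cfg.light already holds the persisted boolean; clicking #lightmode flips and re-saves it
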
function hist_push(url) {
console.log("h-push " + url);
@@ -679,6 +724,14 @@ var tt = (function () {
r.tt.setAttribute('id', 'tt');
document.body.appendChild(r.tt);
var prev = null;
r.cshow = function () {
if (this !== prev)
r.show.bind(this)();
prev = this;
};
r.show = function () {
if (r.skip) {
r.skip = false;
@@ -732,6 +785,7 @@ var tt = (function () {
ev(e);
window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
clmod(r.tt, 'b');
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
};
@@ -761,12 +815,13 @@ var tt = (function () {
r.tt.onclick = r.hide;
r.att = function (ctr) {
var _show = r.en ? r.show : null,
var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null,
_hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _show;
o[a].onfocus = _cshow;
o[a].onblur = _hide;
o[a].onmouseenter = _show;
o[a].onmouseleave = _hide;
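
cshow above (and the switch from _show to _cshow on focus) reads like a guard against the alt-tab case: when the window regains focus, the previously focused element re-fires its focus event, so the tooltip is only re-shown if focus actually moved to a different element. The core of that idea as a standalone sketch, with a hypothetical show():

    var prev = null;
    function cshow() {
        if (this !== prev)        // same element re-focused (window regained focus): skip
            show.call(this);      // the real code binds r.show instead
        prev = this;
    }
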

View File

@@ -3,6 +3,24 @@
setTimeout(location.reload.bind(location), 700);
document.documentElement.scrollLeft = 0;
var cali = (function() {
var ac = new AudioContext(),
fi = ac.createBiquadFilter(),
freqs = new Float32Array(1),
mag = new Float32Array(1),
phase = new Float32Array(1);
freqs[0] = 14000;
fi.type = 'peaking';
fi.frequency.value = 18000;
fi.Q.value = 0.8;
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
return mag[0]; // 1.0407 good, 1.0563 bad
})(),
mp = cali < 1.05;
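
The probe above configures an 18 kHz peaking filter (Q 0.8, +1 dB) and asks it for its magnitude at 14 kHz; `BiquadFilterNode.getFrequencyResponse(freqs, mag, phase)` fills the output arrays in place, and the inline comment suggests the value differs between filter implementations, so `mp` selects between the two high-shelf tunings in the `cfg` table below. The same probe as a standalone snippet, assuming a browser that exposes `AudioContext`:

    var ac = new AudioContext(),
        fi = ac.createBiquadFilter(),
        freqs = new Float32Array([14000]),
        mag = new Float32Array(1),
        phase = new Float32Array(1);
    fi.type = 'peaking';
    fi.frequency.value = 18000;
    fi.Q.value = 0.8;
    fi.gain.value = 1;
    fi.getFrequencyResponse(freqs, mag, phase);
    console.log(mag[0]);   // the comment above reports ~1.0407 on one engine, ~1.0563 on another
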
var can = document.createElement('canvas'),
cc = can.getContext('2d'),
w = 2048,
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
//[16000 * 1.00, 0.5, 1.75], // peak.v1
//[16000 * 1.19, 0, 1.8] // shelf.v1
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
];
var freqs = new Float32Array(22000),

View File

@@ -17,6 +17,7 @@ html.light {
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light .logue,
html.light #ops,
html.light #treeul,
html.light #files td {

View File

@@ -11,6 +11,8 @@
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#srch_dz, #srch_zd, /* the filesearch dropzone */
#u2cards, #u2etaw /* and the upload progress tabs */
{display: none !important} /* do it! */

View File

@@ -41,9 +41,9 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
##
## bad filenames
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh')
mkdir -p "${dirs[@]}"
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
# qw er+ty%20ui%%20op<as>df&gh&amp;jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
##
@@ -79,10 +79,8 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
# get all up2k search result URLs
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
# rename all selected songs to <leading-track-number> + <Title> + <extension>
var sel=msel.getsel(), ci=find_file_col('Title')[0], re=[]; for (var a=0; a<sel.length; a++) { var url=sel[a].vp, tag=ebi(sel[a].id).closest('tr').querySelectorAll('td')[ci].textContent, name=uricom_dec(vsplit(url)[1])[0], m=/^([0-9]+[\. -]+)?.*(\.[^\.]+$)/.exec(name), name2=(m[1]||'')+tag+m[2], url2=vsplit(url)[0]+uricom_enc(name2,false); if (url!=url2) re.push([url, url2]); }
console.log(JSON.stringify(re, null, ' '));
function f() { if (!re.length) return treectl.goto(get_evpath()); var [u1,u2] = re.shift(); fetch(u1+'?move='+u2).then((rsp) => {if (rsp.ok) f(); }); }; f();
# debug md-editor line tracking
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
##
## bash oneliners

View File

@@ -1,11 +1,11 @@
FROM alpine:3.13
FROM alpine:3.14
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.7.0 \
ver_marked=1.1.0 \
ver_hashwasm=4.9.0 \
ver_marked=3.0.4 \
ver_ogvjs=1.8.4 \
ver_mde=2.14.0 \
ver_codemirror=5.59.3 \
ver_mde=2.15.0 \
ver_codemirror=5.62.3 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
@@ -113,9 +113,10 @@ RUN cd CodeMirror-$ver_codemirror \
COPY easymde.patch /z/
RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde.patch \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
&& npm install
COPY easymde-ln.patch /z/

View File

@@ -1,15 +1,15 @@
diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v1.0.0 +git;
adds linetracking to marked.js v3.0.4;
add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -49,4 +49,5 @@ function mangle(text) {
@@ -50,4 +50,5 @@ function mangle(text) {
module.exports = class Lexer {
constructor(options) {
+ this.ln = 1; // like most editors, start counting from 1
this.tokens = [];
this.tokens.links = Object.create(null);
@@ -108,4 +109,15 @@ module.exports = class Lexer {
@@ -127,4 +128,15 @@ module.exports = class Lexer {
}
+ set_ln(token, ln = this.ln) {
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
+
/**
* Lexing
@@ -113,10 +125,15 @@ module.exports = class Lexer {
blockTokens(src, tokens = [], top = true) {
src = src.replace(/^ +$/gm, '');
- let token, i, l, lastToken;
+ let token, i, l, lastToken, ln;
@@ -134,7 +146,11 @@ module.exports = class Lexer {
src = src.replace(/^ +$/gm, '');
}
- let token, lastToken, cutSrc, lastParagraphClipped;
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
while (src) {
+ // this.ln will be bumped by recursive calls into this func;
+ // reset the count and rely on the outermost token's raw only
+ ln = this.ln;
+
// newline
if (this.options.extensions
&& this.options.extensions.block
@@ -142,4 +158,5 @@ module.exports = class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
return true;
@@ -153,4 +170,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token); // is \n if not type
+ this.set_ln(token, ln); // is \n if not type
if (token.type) {
tokens.push(token);
@@ -128,4 +145,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.code(src, tokens)) {
@@ -162,4 +180,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
if (token.type) {
tokens.push(token);
@@ -141,4 +159,5 @@ module.exports = class Lexer {
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
// An indented code block cannot interrupt a paragraph.
@@ -177,4 +196,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.fences(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -148,4 +167,5 @@ module.exports = class Lexer {
@@ -184,4 +204,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.heading(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -155,4 +175,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.nptable(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
tokens.push(token);
continue;
@@ -162,4 +183,5 @@ module.exports = class Lexer {
@@ -191,4 +212,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.hr(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -170,4 +192,7 @@ module.exports = class Lexer {
@@ -198,4 +220,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.blockquote(src)) {
src = src.substring(token.raw.length);
token.tokens = this.blockTokens(token.text, [], top);
+ // recursive call to blockTokens probably bumped this.ln,
+ // token.raw is more reliable so reset this.ln and use that
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -180,5 +205,9 @@ module.exports = class Lexer {
for (i = 0; i < l; i++) {
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
+ // list entries don't bump the linecounter, so let's
+ this.ln++;
}
+ // then reset like blockquote
@@ -205,4 +228,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.list(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -188,4 +217,5 @@ module.exports = class Lexer {
@@ -212,4 +236,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.html(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -195,4 +225,5 @@ module.exports = class Lexer {
if (top && (token = this.tokenizer.def(src))) {
@@ -219,4 +244,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
if (!this.tokens.links[token.tag]) {
this.tokens.links[token.tag] = {
@@ -207,4 +238,5 @@ module.exports = class Lexer {
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
@@ -236,4 +262,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.table(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -214,4 +246,5 @@ module.exports = class Lexer {
@@ -243,4 +270,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.lheading(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -221,4 +254,5 @@ module.exports = class Lexer {
if (top && (token = this.tokenizer.paragraph(src))) {
@@ -263,4 +291,5 @@ module.exports = class Lexer {
}
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
if (lastParagraphClipped && lastToken.type === 'paragraph') {
@@ -280,4 +309,6 @@ module.exports = class Lexer {
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
tokens.push(token);
continue;
@@ -228,4 +262,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.text(src, tokens)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
if (token.type) {
tokens.push(token);
@@ -263,4 +298,7 @@ module.exports = class Lexer {
for (i = 0; i < l; i++) {
token = tokens[i];
+ // this.ln is at EOF when inline() is invoked;
+ // all this affects <br> tags only so no biggie if it breaks
+ this.ln = token.ln || this.ln;
switch (token.type) {
case 'paragraph':
@@ -386,4 +424,6 @@ module.exports = class Lexer {
+ this.set_ln(token, ln);
+ this.ln++;
lastToken = tokens[tokens.length - 1];
if (lastToken && lastToken.type === 'text') {
@@ -355,4 +386,5 @@ module.exports = class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln;
tokens.push(token);
return true;
@@ -420,4 +452,6 @@ module.exports = class Lexer {
if (token = this.tokenizer.br(src)) {
src = src.substring(token.raw.length);
+ // no need to reset (no more blockTokens anyways)
+ token.ln = this.ln++;
tokens.push(token);
continue;
@@ -462,4 +496,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln;
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
prevChar = token.raw.slice(-1);
diff --git a/src/Parser.js b/src/Parser.js
--- a/src/Parser.js
+++ b/src/Parser.js
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
}
@@ -55,4 +56,9 @@ module.exports = class Parser {
@@ -64,4 +65,8 @@ module.exports = class Parser {
for (i = 0; i < l; i++) {
token = tokens[i];
+ // take line-numbers from tokens whenever possible
+ // and update the renderer's html attribute with the new value
+ this.ln = token.ln || this.ln;
+ this.renderer.tag_ln(this.ln);
+
switch (token.type) {
case 'space': {
@@ -105,7 +111,10 @@ module.exports = class Parser {
// Run any renderer extensions
@@ -124,7 +129,10 @@ module.exports = class Parser {
}
- body += this.renderer.tablerow(cell);
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).table(header, body);
continue;
}
@@ -148,8 +157,12 @@ module.exports = class Parser {
@@ -167,8 +175,12 @@ module.exports = class Parser {
itemBody += this.parse(item.tokens, loose);
- body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
continue;
}
@@ -160,5 +173,6 @@ module.exports = class Parser {
@@ -179,5 +191,6 @@ module.exports = class Parser {
}
case 'paragraph': {
- out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
continue;
}
@@ -199,4 +213,6 @@ module.exports = class Parser {
for (i = 0; i < l; i++) {
@@ -221,4 +234,7 @@ module.exports = class Parser {
token = tokens[i];
+ // another thing that only affects <br/> and other inlines
+ this.ln = token.ln || this.ln;
switch (token.type) {
case 'escape': {
@@ -229,5 +245,7 @@ module.exports = class Parser {
}
case 'br': {
- out += renderer.br();
+ // update the html attribute before writing each <br/>,
+ // don't care about the others
+ out += renderer.tag_ln(this.ln).br();
break;
}
+
// Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+
code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0];
@@ -24,10 +30,10 @@ module.exports = class Renderer {
@@ -26,10 +32,10 @@ module.exports = class Renderer {
if (!lang) {
- return '<pre><code>'
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ return '<pre' + this.ln + '><code class="'
+ this.options.langPrefix
+ escape(lang, true)
@@ -38,5 +44,5 @@ module.exports = class Renderer {
@@ -40,5 +46,5 @@ module.exports = class Renderer {
blockquote(quote) {
- return '<blockquote>\n' + quote + '</blockquote>\n';
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
}
@@ -49,4 +55,5 @@ module.exports = class Renderer {
@@ -51,4 +57,5 @@ module.exports = class Renderer {
return '<h'
+ level
+ + this.ln
+ ' id="'
+ this.options.headerPrefix
@@ -59,5 +66,5 @@ module.exports = class Renderer {
@@ -61,5 +68,5 @@ module.exports = class Renderer {
}
// ignore IDs
- return '<h' + level + '>' + text + '</h' + level + '>\n';
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
}
@@ -73,5 +80,5 @@ module.exports = class Renderer {
@@ -75,5 +82,5 @@ module.exports = class Renderer {
listitem(text) {
- return '<li>' + text + '</li>\n';
+ return '<li' + this.ln + '>' + text + '</li>\n';
}
@@ -85,5 +92,5 @@ module.exports = class Renderer {
@@ -87,5 +94,5 @@ module.exports = class Renderer {
paragraph(text) {
- return '<p>' + text + '</p>\n';
+ return '<p' + this.ln + '>' + text + '</p>\n';
}
@@ -100,5 +107,5 @@ module.exports = class Renderer {
@@ -102,5 +109,5 @@ module.exports = class Renderer {
tablerow(content) {
- return '<tr>\n' + content + '</tr>\n';
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
}
@@ -125,5 +132,5 @@ module.exports = class Renderer {
@@ -127,5 +134,5 @@ module.exports = class Renderer {
br() {
- return this.options.xhtml ? '<br/>' : '<br>';
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
}
@@ -151,5 +158,5 @@ module.exports = class Renderer {
@@ -153,5 +160,5 @@ module.exports = class Renderer {
}
- let out = '<img src="' + href + '" alt="' + text + '"';
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
if (title) {
out += ' title="' + title + '"';
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
const l = list.items.length;
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but at least it doesn't get infinitely bad
+ this.lexer.ln--;
+
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {

View File

@@ -1,52 +1,52 @@
diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
/**
* smartypants text replacement
- */
+ *
function smartypants(text) {
return text
@@ -26,5 +26,5 @@ function smartypants(text) {
@@ -27,5 +27,5 @@ function smartypants(text) {
/**
* mangle email addresses
- */
+ *
function mangle(text) {
let out = '',
@@ -439,5 +439,5 @@ module.exports = class Lexer {
@@ -465,5 +465,5 @@ module.exports = class Lexer {
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -446,5 +446,5 @@ module.exports = class Lexer {
@@ -472,5 +472,5 @@ module.exports = class Lexer {
// url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) {
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -453,5 +453,5 @@ module.exports = class Lexer {
// text
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
@@ -493,5 +493,5 @@ module.exports = class Lexer {
}
}
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
+ if (token = this.tokenizer.inlineText(cutSrc)) {
src = src.substring(token.raw.length);
tokens.push(token);
this.ln = token.ln || this.ln;
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -140,5 +140,5 @@ module.exports = class Renderer {
@@ -142,5 +142,5 @@ module.exports = class Renderer {
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
@@ -153,5 +153,5 @@ module.exports = class Renderer {
@@ -155,5 +155,5 @@ module.exports = class Renderer {
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
if (cap) {
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
+ type: 'html',
@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
type: 'html',
raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+ text: cap[0]
+ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
text: cap[0]
};
- if (this.options.sanitize) {
- token.type = 'paragraph';
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
- token.tokens = [];
- this.lexer.inline(token.text, token.tokens);
- }
return token;
}
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
@@ -477,15 +470,9 @@ module.exports = class Tokenizer {
return {
- type: this.options.sanitize
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- : 'html',
+ type: 'html',
raw: cap[0],
inLink,
inRawBlock,
inLink: this.lexer.state.inLink,
inRawBlock: this.lexer.state.inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0]
};
}
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
}
- autolink(src, mangle) {
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]);
href = 'mailto:' + text;
} else {
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
}
- url(src, mangle) {
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]);
href = 'mailto:' + text;
} else {
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
}
- inlineText(src, inRawBlock, smartypants) {
+ inlineText(src, inRawBlock) {
- inlineText(src, smartypants) {
+ inlineText(src) {
const cap = this.rules.inline.text.exec(src);
if (cap) {
let text;
if (inRawBlock) {
if (this.lexer.state.inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -8,12 +8,8 @@ function getDefaults() {
@@ -9,12 +9,8 @@ function getDefaults() {
highlight: null,
langPrefix: 'language-',
- mangle: true,
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
+function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href);
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
}
-function checkSanitizeDeprecation(opt) {
@@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
- }
-}
-
module.exports = {
escape,
@@ -239,5 +220,4 @@ module.exports = {
splitCells,
// copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) {
@@ -260,5 +241,4 @@ module.exports = {
rtrim,
- findClosingBracket,
- checkSanitizeDeprecation
+ findClosingBracket
findClosingBracket,
- checkSanitizeDeprecation,
repeatString
};
diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
- checkSanitizeDeprecation(opt);
if (callback) {
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
return Parser.parse(tokens, opt);
@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
try {
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
return Parser.parseInline(tokens, opt);
} catch (e) {
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
}
- if (spec.options.sanitizer) {
- // eslint-disable-next-line no-eval
- spec.options.sanitizer = eval(spec.options.sanitizer);
- }
-
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new');
runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -465,5 +465,5 @@ a | b
@@ -589,5 +589,5 @@ paragraph
});
- it('sanitize', () => {
+ /*it('sanitize', () => {
expectTokens({
md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b
@@ -607,5 +607,5 @@ paragraph
]
});
- });
+ });*/
});
@@ -587,5 +587,5 @@ a | b
@@ -652,5 +652,5 @@ paragraph
});
- it('html sanitize', () => {
+ /*it('html sanitize', () => {
expectInlineTokens({
md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b
@@ -660,5 +660,5 @@ paragraph
]
});
- });
+ });*/
it('link', () => {
@@ -909,5 +909,5 @@ a | b
@@ -971,5 +971,5 @@ paragraph
});
- it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => {
expectInlineTokens({
md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b
@@ -991,5 +991,5 @@ paragraph
]
});
- });
+ });*/
it('url', () => {
@@ -966,5 +966,5 @@ a | b
@@ -1028,5 +1028,5 @@ paragraph
});
- it('url mangle email', () => {
+ /*it('url mangle email', () => {
expectInlineTokens({
md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b
@@ -1048,5 +1048,5 @@ paragraph
]
});
- });
+ });*/
});
@@ -1002,5 +1002,5 @@ a | b
@@ -1064,5 +1064,5 @@ paragraph
});
- describe('smartypants', () => {
+ /*describe('smartypants', () => {
it('single quotes', () => {
expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b
@@ -1134,5 +1134,5 @@ paragraph
});
});
- });

View File

@@ -206,6 +206,15 @@ while IFS= read -r x; do
tmv "$x"
done
find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
cat have | while IFS= read -r x; do
grep -qF -- "$x" ../scripts/sfx.ls || {
echo "unexpected file: $x"
exit 1
}
done
rm have
[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*

77
scripts/sfx.ls Normal file
View File

@@ -0,0 +1,77 @@
copyparty,
copyparty/__init__.py,
copyparty/__main__.py,
copyparty/__version__.py,
copyparty/authsrv.py,
copyparty/bos,
copyparty/bos/__init__.py,
copyparty/bos/bos.py,
copyparty/bos/path.py,
copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mtag.py,
copyparty/res,
copyparty/res/insecure.pem,
copyparty/star.py,
copyparty/stolen,
copyparty/stolen/__init__.py,
copyparty/stolen/surrogateescape.py,
copyparty/sutil.py,
copyparty/svchub.py,
copyparty/szip.py,
copyparty/tcpsrv.py,
copyparty/th_cli.py,
copyparty/th_srv.py,
copyparty/u2idx.py,
copyparty/up2k.py,
copyparty/util.py,
copyparty/web,
copyparty/web/baguettebox.js,
copyparty/web/browser.css,
copyparty/web/browser.html,
copyparty/web/browser.js,
copyparty/web/browser2.html,
copyparty/web/copyparty.gif,
copyparty/web/dd,
copyparty/web/dd/2.png,
copyparty/web/dd/3.png,
copyparty/web/dd/4.png,
copyparty/web/dd/5.png,
copyparty/web/deps,
copyparty/web/deps/easymde.css,
copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js,
copyparty/web/md.css,
copyparty/web/md.html,
copyparty/web/md.js,
copyparty/web/md2.css,
copyparty/web/md2.js,
copyparty/web/mde.css,
copyparty/web/mde.html,
copyparty/web/mde.js,
copyparty/web/msg.css,
copyparty/web/msg.html,
copyparty/web/splash.css,
copyparty/web/splash.html,
copyparty/web/ui.css,
copyparty/web/up2k.js,
copyparty/web/util.js,

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: latin-1
from __future__ import print_function, unicode_literals

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import os
import sys

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
@@ -8,7 +8,7 @@ import tokenize
def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """
"""modified https://stackoverflow.com/a/62074206"""
with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")

View File

@@ -1,11 +1,17 @@
### hello world
* qwe
* asd
* zxc
* 573
* one
* two
* rty
* uio
* asd
* fgh
* jkl
* zxc
* vbn
* 573
* one
* two
* three
* |||
|--|--|
@@ -134,12 +140,12 @@ a newline toplevel
| a table | on the right |
| second row | foo bar |
||
a||a
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb
||
||||
--|--|--
foo

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
@@ -39,6 +39,8 @@ class Cfg(Namespace):
no_scandir=False,
no_sendfile=True,
no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False,
nih=True,
@@ -96,7 +98,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top):
continue
mode = vol[-2].replace("a", "rwmd")
mode = vol[-2].replace("a", "rw")
usr = vol[-1]
if usr == "a":
usr = ""
@@ -151,6 +153,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except:
tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]]

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
@@ -26,6 +26,8 @@ class Cfg(Namespace):
"no_hash": False,
"css_browser": None,
"no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0,
}
@@ -195,10 +197,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])
# breadth-first construction
vfs = AuthSrv(