Compare commits

...

61 Commits

Author SHA1 Message Date
ed
cadaeeeace v1.9.4 2023-09-02 00:18:53 +00:00
ed
767696185b add ?tar=gz, ?tar=bz2, ?tar=xz with optional level;
defaults are ?tar=gz:3, ?tar=bz2:9, ?tar=xz:1
2023-09-01 23:44:10 +00:00
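a minimal sketch of how a client might use the new parameters, assuming a copyparty instance at a placeholder address; `tarfile`'s stream modes can consume the response directly:

```python
# fetch a folder as xz-compressed tar via the new ?tar=xz:LEVEL url-param
# (the server address and folder name are placeholders)
import tarfile
import urllib.request

url = "http://127.0.0.1:3923/music/?tar=xz:1"

with urllib.request.urlopen(url) as resp:
    # "r|xz" reads a non-seekable xz stream, matching what the server sends
    with tarfile.open(fileobj=resp, mode="r|xz") as tf:
        tf.extractall("music-backup")
```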
ed
c1efd227b7 fix inconsistent use of symlink mtimes in database;
on upload, dupes are by default handled by symlinking to the existing
copy on disk, writing the uploader's local mtime into the symlink mtime,
which is also what gets indexed in the db

this worked as intended, however during an -e2dsa rescan on startup the
symlink destination timestamps would be used instead, causing a reindex
and the resulting loss of uploader metadata (ip, timestamp)

will now always use the symlink's mtime;
worst-case 1% slower startup (no dhash)

this change will cause a reindex of incorrectly indexed files, however
as this has already happened at least once due to the bug being fixed,
there will be no additional loss of metadata
2023-09-01 20:29:55 +00:00
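a small illustration of the distinction the fix relies on, assuming a POSIX filesystem: the uploader's mtime is stored on the symlink itself, so indexing must read `lstat` rather than the target's `stat`:

```python
# dedup stores the dupe as a symlink; the uploader's mtime lives on the
# link (lstat), while the target keeps the original copy's mtime (stat)
import os
import tempfile
import time

d = tempfile.mkdtemp()
orig = os.path.join(d, "original.bin")
link = os.path.join(d, "dupe.bin")

with open(orig, "wb") as f:
    f.write(b"data")

os.symlink("original.bin", link)

# pretend the duplicate was uploaded with a newer local mtime
later = time.time() + 3600
os.utime(link, (later, later), follow_symlinks=False)

print("target mtime :", os.stat(link).st_mtime)   # the original copy's mtime
print("symlink mtime:", os.lstat(link).st_mtime)  # the uploader's mtime
```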
ed
a50d0563c3 instantly perform search when URL contains a raw query 2023-09-01 20:16:19 +00:00
ed
e5641ddd16 update pkgs to 1.9.3 2023-08-31 23:08:32 +00:00
ed
700111ffeb v1.9.3 2023-08-31 22:11:31 +00:00
ed
b8adeb824a misc http correctness;
some of this looks shady af but appears to have been harmless
(decent amount of testing came out ok)

* some location normalization happened before unquoting; however vfs
   handled this correctly so the outcome was just confusing messages
* some url parameters were double-decoded (unpost filter, move
   destinations), causing some operations to fail unexpectedly
* invalid cache-control headers could be generated,
   but not in a maliciously-beneficial way
   (there are safeguards stripping newlines and control-characters)

also adds an exception-message cleanup step to strip away the
filesystem path that copyparty's python files are located at,
in case that could be interesting knowledge
2023-08-31 21:51:58 +00:00
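the double-decoding class of bug is easy to reproduce in isolation; a hedged sketch (not copyparty's parsing code) of how a second `unquote` corrupts values that contain a literal percent-encoding:

```python
# a url parameter must be percent-decoded exactly once;
# decoding it twice silently changes the value
from urllib.parse import quote, unquote

name = "100%25.txt"           # a file literally named "100%25.txt"
enc = quote(name)             # "100%2525.txt" on the wire
print(unquote(enc))           # "100%25.txt" -- decoded once, correct
print(unquote(unquote(enc)))  # "100%.txt"   -- decoded twice, wrong name
```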
ed
30cc9defcb cosmetics:
* in case someone gets a confusing access-related error message,
  include more context in serverlogs (exact path)
* fix js console spam in search results
* same markdown line-height in viewer and browser
2023-08-31 21:27:14 +00:00
ed
61875bd773 slightly reduce flickering during page load on chrome 2023-08-31 20:02:33 +00:00
ed
30905c6f5d add convenient debugs in case the fight is not over 2023-08-31 20:00:14 +00:00
ed
9986136dfb apple/ios/iphone: maybe fix background album playback
good news: apple finally added support for samplerates other than
44100 for AudioContext, meaning it would now have been possible to
set non-100% volume for audio files including opus files

bad news: apple broke AudioContext in a way that makes it bug out
mediaSessions, causing lockscreen controls to become mostly useless

bad news: apple broke AudioContext additionally where it randomly
causes playback issues, blocking playback of audio files, even if
the AudioContext is sitting idle doing nothing (which is a
requirement for reliable upload speeds on other platforms)

disable AudioContext on iOS
2023-08-31 19:57:05 +00:00
ed
1c0d978979 ios/iphone: autoreplace smart-quotes with sane quotes,
as the iphone keyboard is not able to produce ' or "
2023-08-31 19:29:37 +00:00
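a minimal take on the same idea in python (the actual change is client-side js); the four smart-quote codepoints map back to ASCII with a translation table:

```python
# map the iOS smart-quotes back to plain ' and "
SMART = str.maketrans({
    "\u2018": "'", "\u2019": "'",   # left/right single quote
    "\u201c": '"', "\u201d": '"',   # left/right double quote
})

print("it\u2019s a \u201ctest\u201d".translate(SMART))  # it's a "test"
```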
ed
0a0364e9f8 FTPd: fix py3.12 support; workaround until next release:
run sfx twice with PYTHONPATH=/tmp/pe-copyparty.$(id -u)/copyparty/vend
2023-08-28 00:25:33 +00:00
ed
3376fbde1a update pkgs to 1.9.2 2023-08-26 22:09:43 +00:00
ed
ac21fa7782 v1.9.2 2023-08-26 21:16:30 +00:00
ed
c1c8dc5e82 ok lets try that again 2023-08-26 19:07:23 +00:00
ed
5a38311481 mark offline volumes in directory tree sidebar 2023-08-26 19:00:46 +00:00
ed
9f8edb7f32 make markdown slightly safer without the nohtml volflag
by running dompurify after marked.parse if plugins are not enabled;
adds no protection against the more practical approach of just
putting a malicious <script> in an html file and uploading that,
but one footgun less is one less footgun
2023-08-26 17:37:02 +00:00
ed
c5a6ac8417 persist dotfile preference as cookie for initial listing 2023-08-26 15:50:57 +00:00
ed
50e01d6904 add more autoban triggers:
* --ban-url: URLs which 404 and also match --sus-urls (bot-scan)
* --ban-403: trying to access volumes that dont exist or require auth
* --ban-422: invalid POST messages, fuzzing and such
* --nonsus-urls: regex of 404s which shouldn't trigger --ban-404

in many situations it makes sense to handle this logic inside copyparty,
since setups involving cloudflare, or running copyparty on a different
physical box than the nginx frontend, become fairly clunky otherwise
2023-08-26 13:52:24 +00:00
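an illustrative sliding-window counter for the `N,W,B` pattern these options share (more than N hits within W minutes bans the IP for B minutes); a sketch of the idea, not copyparty's implementation:

```python
import time
from collections import defaultdict, deque

class BanWindow:
    def __init__(self, n, w_min, b_min):
        self.n, self.w, self.b = n, w_min * 60, b_min * 60
        self.hits = defaultdict(deque)  # ip -> timestamps of recent offenses
        self.banned = {}                # ip -> unban time

    def hit(self, ip):
        now = time.time()
        if self.banned.get(ip, 0) > now:
            return True                 # still banned
        q = self.hits[ip]
        q.append(now)
        while q and q[0] < now - self.w:
            q.popleft()                 # drop offenses older than the window
        if len(q) >= self.n:
            self.banned[ip] = now + self.b
            return True
        return False

g403 = BanWindow(9, 2, 1440)            # the --ban-403 default "9,2,1440"
```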
ed
9b46291a20 add option to force-disable turbo,
making it safer to enable --ban-404
(u2c can still get banned inadvertently)
2023-08-26 13:19:38 +00:00
ed
14497b2425 docs:
* mention cloudflare-specific nginx config

versus.md:
* seafile has a size limit on zip downloads
* seafile and nextcloud are slow at uploading many small files

u2c: improve error message in funky environments
2023-08-25 21:57:26 +00:00
ed
f7ceae5a5f add filetable range-select with shift-pgup/pgdn,
and retain file selection cursor when lazyloading more files
2023-08-25 19:34:37 +00:00
ed
c9492d16ba fix textfile navigation hotkeys (broke in 5d13ebb4) 2023-08-25 18:41:45 +00:00
ed
9fb9ada3aa dont whine about inaccessible root on rootless configs,
and make it easier for on403 to invoke the homepage-redirect
2023-08-25 18:33:15 +00:00
ed
db0abbfdda typo 2023-08-21 00:05:39 +00:00
ed
e7f0009e57 update pkgs to 1.9.1 2023-08-20 23:53:58 +00:00
ed
4444f0f6ff v1.9.1 2023-08-20 23:38:42 +00:00
ed
418842d2d3 update pkgs to 1.9.0 2023-08-20 23:11:44 +00:00
ed
cafe53c055 v1.9.0 2023-08-20 22:02:40 +00:00
ed
7673beef72 actually impl --mc-hop (and improve --zm-spam) 2023-08-20 21:27:28 +00:00
ed
b28bfe64c0 explain apple bullshit 2023-08-20 22:09:00 +02:00
ed
135ece3fbd immediately allow uploading an interrupted and
deleted incomplete upload to another location
2023-08-20 19:16:35 +00:00
ed
bd3640d256 change to openmetrics 2023-08-20 18:50:14 +00:00
ed
fc0405c8f3 add prometheus metrics; closes #49 2023-08-20 17:58:06 +00:00
ed
7df890d964 wget: only allow http/https/ftp/ftps (#50):
these are all the protocols that are currently supported by wget,
so this has no practical effect aside from making sure we won't
suddenly get file:// support or something (which would be bad)
2023-08-20 09:47:50 +00:00
ed
8341041857 mdns: option to ignore the spec, avoiding issues on
networks where clients have multiple IPs, some of which are in subnets
that the copyparty server is not a member of
2023-08-19 21:45:26 +00:00
ed
1b7634932d tar/zip-download: add opus transcoding filter 2023-08-19 19:40:46 +00:00
ed
48a3898aa6 suggest enabling the database on startup 2023-08-16 19:57:19 +00:00
ed
5d13ebb4ac avoid firefox-android quirk(?):
when repeatedly tapping the next-folder button, occasionally it will
reload the entire page instead of ajax'ing the directory contents.

Navigation happens by simulating a click in the directory sidebar,
so the incorrect behavior matches what would happen if the link to the
folder didn't have its onclick-handler attached; should probably
double-check whether there's some way for that to happen

Issue observed fairly easily in firefox on android, regardless of whether
copyparty is running locally or on a server in a different country.
Unable to reproduce with android-chrome or desktop-firefox

Could also be due to an addon (dark-reader, noscript, ublock-origin)

anyways, avoiding this by doing the navigation more explicitly
2023-08-16 19:56:47 +00:00
ed
015b87ee99 performance / cosmetic:
* js: use .call instead of .bind when possible
* when running without e2d, the message on startup regarding
  unfinished uploads didn't show the correct filesystem path
2023-08-16 19:32:43 +00:00
ed
0a48acf6be limit each column of the files table to screen width 2023-08-16 03:55:53 +00:00
ed
2b6a3afd38 fix iOS randomly increasing fontsize of some things:
* links which are wider than the display width
* probably input fields too
2023-08-16 03:47:19 +00:00
ed
18aa82fb2f make browser resizing smoother / less expensive 2023-08-15 16:55:19 +00:00
ed
f5407b2997 docker: persist autogenerated seeds, disable certgen, and
mention how to run the containers with selinux enabled
* assumes that a /cfg docker volume is provided
2023-08-15 15:07:33 +00:00
ed
474d5a155b android's got hella strict filename rules 2023-08-15 06:46:57 +02:00
ed
afcd98b794 mention some gotchas (thx noktuas) 2023-08-15 03:38:51 +02:00
ed
4f80e44ff7 option to exactly specify browser title prefix 2023-08-15 03:17:01 +02:00
ed
406e413594 hint at additional context in exceptions 2023-08-15 01:42:13 +02:00
ed
033b50ae1b u2c: exclude files by regex 2023-08-15 00:45:12 +02:00
ed
bee26e853b show server hostname in html titles:
* --doctitle defines most titles, prefixed with "--name: " by default
* the file browser is only prefixed with the --name itself
* --nth ("no-title-hostname") removes it
* also removed by --nih ("no-info-hostname")
2023-08-14 23:50:13 +02:00
ed
04a1f7040e adjustable timestamp resolution in log messages 2023-08-14 17:22:22 +02:00
ed
f9d5bb3b29 support upload by dragdrop from other browser windows,
hello from LO484 https://ocv.me/stuff/aircode.jpg
2023-07-28 21:43:40 +02:00
ed
ca0cd04085 update pkgs to 1.8.8 2023-07-25 16:25:27 +00:00
ed
999ee2e7bc v1.8.8 2023-07-25 15:50:48 +00:00
ed
1ff7f968e8 fix tls-cert regeneration on windows 2023-07-25 15:27:27 +00:00
ed
3966266207 remember ?edit and trailing-slash during login redirect 2023-07-25 15:14:47 +00:00
ed
d03e96a392 html5 strips the first leading LF in textareas; stop it 2023-07-25 14:16:54 +00:00
ed
4c843c6df9 fix md-editor lastmod cmp when browsercache is belligerent 2023-07-25 14:06:53 +00:00
ed
0896c5295c range-select fixes:
* dont crash when shiftclicking between folders
* remember origin when lazyloading more files
2023-07-25 14:06:31 +02:00
ed
cc0c9839eb update pkgs to 1.8.7 2023-07-23 16:16:49 +00:00
55 changed files with 1343 additions and 272 deletions

View File

@@ -71,6 +71,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [themes](#themes)
* [complete examples](#complete-examples)
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
* [prometheus](#prometheus) - metrics/stats can be enabled
* [packages](#packages) - the party might be closer than you think
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
@@ -109,7 +110,7 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
* or install through pypi: `python3 -m pip install --user -U copyparty`
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
* or install [on arch](#arch-package) [on NixOS](#nixos-module) [through nix](#nix-package)
* or install [on arch](#arch-package) [on fedora](#fedora-package) [on NixOS](#nixos-module) [through nix](#nix-package)
* or if you are on android, [install copyparty in termux](#install-on-android)
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
* docker has all deps built-in, so skip this step:
@@ -284,8 +285,11 @@ server notes:
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
* `AudioContext` will probably never be a viable workaround as apple introduces new issues faster than they fix current ones
* iPhones: the preload feature (in the media-player-options tab) can cause a tiny audio glitch 20sec before the end of each song, but disabling it may cause worse iOS bugs to appear instead
* just a hunch, but disabling preloading may cause playback to stop entirely, or possibly mess with bluetooth speakers
* tried to add a tooltip regarding this but looks like apple broke my tooltips
* Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug
@@ -295,6 +299,7 @@ server notes:
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
* also happens on mergerfs, so put the db elsewhere
* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
* due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
@@ -504,10 +509,16 @@ select which type of archive you want in the `[⚙️] config` tab:
| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `tar.gz` | `?tar=gz` | gzip compressed tar, for `curl \| tar -xvz` |
| `tar.xz` | `?tar=xz` | gnu-tar with xz / lzma compression (good) |
| `tar.bz2` | `?tar=bz2` | bzip2-compressed tar (mostly useless) |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
* hidden files (dotfiles) are excluded unless `-ed`
* `up2k.db` and `dir.txt` are always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
@@ -518,6 +529,10 @@ you can also zip a selection of files or folders by clicking them in the browser
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/129635374-e5136e01-470a-49b1-a762-848e8a4c9cdc.png)
cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
## uploading
@@ -700,7 +715,7 @@ open the `[🎺]` media-player-settings tab to configure it,
* `[loop]` keeps looping the folder
* `[next]` plays into the next folder
* transcode:
* `[flac]` convers `flac` and `wav` files into opus
* `[flac]` converts `flac` and `wav` files into opus
* `[aac]` converts `aac` and `m4a` files into opus
* `[oth]` converts all other known formats into opus
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
@@ -715,6 +730,8 @@ can also boost the volume in general, or increase/decrease stereo width (like [c
has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)
not available on iPhones / iPads because AudioContext currently breaks background audio playback on iOS (15.7.8)
### fix unreliable playback on android
@@ -1003,6 +1020,9 @@ you can also set transaction limits which apply per-IP and per-volume, but these
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
notes:
* `vmaxb` and `vmaxn` require either the `e2ds` volflag or `-e2dsa` global-option
## compress uploads
@@ -1225,6 +1245,7 @@ you can either:
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
* **warning:** nginx-QUIC is still experimental and can make uploads much slower, so HTTP/2 is recommended for now
example webserver configs:
@@ -1232,6 +1253,51 @@ example webserver configs:
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
## prometheus
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
must be enabled with `--stats` since it increases startup time a tiny bit, and you probably want `-e2dsa` too
the endpoint is only accessible by `admin` accounts, meaning the `a` in `rwmda` in the following example commandline: `python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed --stats -e2dsa`
follow a guide for setting up `node_exporter` except have it read from copyparty instead; example `/etc/prometheus/prometheus.yml` below
```yaml
scrape_configs:
  - job_name: copyparty
    metrics_path: /.cpr/metrics
    basic_auth:
      password: wark
    static_configs:
      - targets: ['192.168.123.1:3923']
```
currently the following metrics are available,
* `cpp_uptime_seconds`
* `cpp_bans` number of banned IPs
and these are available per-volume only:
* `cpp_disk_size_bytes` total HDD size
* `cpp_disk_free_bytes` free HDD space
and these are per-volume and `total`:
* `cpp_vol_bytes` size of all files in volume
* `cpp_vol_files` number of files
* `cpp_dupe_bytes` disk space presumably saved by deduplication
* `cpp_dupe_files` number of dupe files
* `cpp_unf_bytes` currently unfinished / incoming uploads
some of the metrics have additional requirements to function correctly,
* `cpp_vol_*` requires either the `e2ds` volflag or `-e2dsa` global-option
the following options are available to disable some of the metrics:
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
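a hedged way to eyeball the endpoint without a full prometheus setup, assuming a local instance started with `--stats` and an admin password of `wark` (both placeholders):

```python
# print the uptime metric from a local copyparty's openmetrics endpoint
import base64
import urllib.request

url = "http://127.0.0.1:3923/.cpr/metrics"
# the prometheus example above authenticates with only a password,
# so the username here is arbitrary
auth = base64.b64encode(b"prometheus:wark").decode()
req = urllib.request.Request(url, headers={"Authorization": "Basic " + auth})

with urllib.request.urlopen(req) as resp:
    for line in resp.read().decode("utf-8").splitlines():
        if line.startswith("cpp_uptime"):
            print(line)
```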
# packages
the party might be closer than you think
@@ -1544,6 +1610,7 @@ some notes on hardening
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
* cors doesn't work right otherwise
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml`
* this returns html documents as plaintext, and also disables markdown rendering
safety profiles:
@@ -1557,9 +1624,9 @@ safety profiles:
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
* however note if you edit one file it will also affect the other copies
* `--vague-401` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
* **NB:** will ban anyone who enables up2k turbo
* `--turbo=-1` to force-disable turbo-mode in the uploader which could otherwise hit the 404-ban
* `--nih` removes the server hostname from directory listings
* option `-sss` is a shortcut for the above plus:
@@ -1581,10 +1648,12 @@ other misc notes:
behavior that might be unexpected
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
* users can submit `<script>`s which autorun for other visitors in a few ways;
* users can submit `<script>`s which autorun (in a sandbox) for other visitors in a few ways;
* uploading a `README.md` -- avoid with `--no-readme`
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not 100% safe, see below
* markdown documents can contain html and `<script>`s; attempts are made to prevent scripts from executing (unless `-emp` is specified) but this is not 100% bulletproof, so setting the `nohtml` volflag is still the safest choice
* or eliminate the problem entirely by only giving write-access to trustworthy people :^)
## cors

View File

@@ -37,6 +37,10 @@ def main():
if "://" not in url:
url = "https://" + url
proto = url.split("://")[0].lower()
if proto not in ("http", "https", "ftp", "ftps"):
raise Exception("bad proto {}".format(proto))
os.chdir(inf["ap"])
name = url.split("?")[0].split("/")[-1]

View File

@@ -65,6 +65,10 @@ def main():
if "://" not in url:
url = "https://" + url
proto = url.split("://")[0].lower()
if proto not in ("http", "https", "ftp", "ftps"):
raise Exception("bad proto {}".format(proto))
os.chdir(fdir)
name = url.split("?")[0].split("/")[-1]

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
S_VERSION = "1.9"
S_BUILD_DT = "2023-05-07"
S_VERSION = "1.10"
S_BUILD_DT = "2023-08-15"
"""
u2c.py: upload to copyparty
@@ -14,6 +14,7 @@ https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
- if something breaks just try again and it'll autoresume
"""
import re
import os
import sys
import stat
@@ -39,7 +40,7 @@ except:
try:
import requests
except ImportError:
except ImportError as ex:
if EXE:
raise
elif sys.version_info > (2, 7):
@@ -50,7 +51,7 @@ except ImportError:
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
print(m.format(sys.executable))
print(m.format(sys.executable), "\nspecifically,", ex)
sys.exit(1)
@@ -411,10 +412,11 @@ def walkdir(err, top, seen):
err.append((ap, str(ex)))
def walkdirs(err, tops):
def walkdirs(err, tops, excl):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
if not VT100:
excl = excl.replace("/", r"\\")
za = []
for td in tops:
try:
@@ -431,6 +433,8 @@ def walkdirs(err, tops):
za = [x.replace(b"/", b"\\") for x in za]
tops = za
ptn = re.compile(excl.encode("utf-8") or b"\n")
for top in tops:
isdir = os.path.isdir(top)
if top[-1:] == sep:
@@ -443,6 +447,8 @@ def walkdirs(err, tops):
if isdir:
for ap, inf in walkdir(err, top, []):
if ptn.match(ap):
continue
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
@@ -654,7 +660,7 @@ class Ctl(object):
nfiles = 0
nbytes = 0
err = []
for _, _, inf in walkdirs(err, ar.files):
for _, _, inf in walkdirs(err, ar.files, ar.x):
if stat.S_ISDIR(inf.st_mode):
continue
@@ -696,7 +702,7 @@ class Ctl(object):
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs([], ar.files)
self.filegen = walkdirs([], ar.files, ar.x)
self.recheck = [] # type: list[File]
if ar.safe:
@@ -1097,6 +1103,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-v", action="store_true", help="verbose")
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--version", action="store_true", help="show version and exit")
@@ -1113,7 +1120,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

View File

@@ -34,6 +34,8 @@ server {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# NOTE: with cloudflare you want this instead:
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "Keep-Alive";
}

View File

@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.8.6"
pkgver="1.9.3"
pkgrel=1
pkgdesc="Portable file sharing hub"
arch=("any")
@@ -20,7 +20,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("a37aacc30b9bec375ff6e7815fd763ec555b9bfbd70415aefdd18552c6491faa")
sha256sums=("87db55a57adf14b3b875c72d94b5df67560abc6dbfc104104e0c76d7f02848b6")
build() {
cd "${srcdir}/${pkgname}-${pkgver}"

View File

@@ -1,5 +1,5 @@
{
"url": "https://github.com/9001/copyparty/releases/download/v1.8.6/copyparty-sfx.py",
"version": "1.8.6",
"hash": "sha256-yTcMW4QVf1QH8jfYpn5BdG5LXilcrmakdbTk9NsVTGE="
"url": "https://github.com/9001/copyparty/releases/download/v1.9.3/copyparty-sfx.py",
"version": "1.9.3",
"hash": "sha256-ufT7WARaj6nKaLX/r3X/ex/hMLMh1rtG0lkZHCm4Gu4="
}

View File

@@ -716,6 +716,40 @@ def get_sects():
"""
),
],
[
"zm",
"mDNS debugging",
dedent(
"""
the mDNS protocol is multicast-based, which means there are thousands
of fun and interesting ways for it to break unexpectedly
things to check if it does not work at all:
* is there a firewall blocking port 5353 on either the server or client?
(for example, clients may be able to send queries to copyparty,
but the replies could get lost)
* is multicast accidentally disabled on either the server or client?
(look for mDNS log messages saying "new client on [...]")
* the router/switch must be multicast and igmp capable
things to check if it works for a while but then it doesn't:
* is there a firewall blocking port 5353 on either the server or client?
(copyparty may be unable to see the queries from the clients, but the
clients may still be able to see the initial unsolicited announce,
so it works for about 2 minutes after startup until TTL expires)
* does the client have multiple IPs on its interface, and some of the
IPs are in subnets which the copyparty server is not a member of?
for both of the above intermittent issues, try --zm-spam 30
(not spec-compliant but nothing will mind)
"""
),
],
]
@@ -781,7 +815,7 @@ def add_upload(ap):
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m0\033[0m] = off and warn if enabled, [\033[32m1\033[0m] = off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
@@ -846,7 +880,7 @@ def add_zeroconf(ap):
def add_zc_mdns(ap):
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
@@ -860,8 +894,9 @@ def add_zc_mdns(ap):
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server")
def add_zc_ssdp(ap):
@@ -924,7 +959,16 @@ def add_hooks(ap):
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404)")
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404/403/url)")
def add_stats(ap):
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
def add_yolo(ap):
@@ -940,17 +984,19 @@ def add_optouts(ap):
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show --name in <title>")
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
def add_safety(ap):
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 --turbo=-1 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
@@ -965,6 +1011,11 @@ def add_safety(ap):
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (422 is server fuzzing, invalid POSTs and so)")
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (decent replacement for --ban-404 if that can't be used)")
ap2.add_argument("--sus-urls", metavar="REGEX", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--nonsus-urls", metavar="REGEX", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
@@ -995,6 +1046,7 @@ def add_logging(ap):
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-tdec", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
@@ -1039,6 +1091,7 @@ def add_thumbnail(ap):
def add_transcoding(ap):
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
@@ -1100,9 +1153,10 @@ def add_ui(ap, retry):
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible by -np)")
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with -nb)")
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
@@ -1172,6 +1226,7 @@ def run_argparse(
add_yolo(ap)
add_handlers(ap)
add_hooks(ap)
add_stats(ap)
add_ui(ap, retry)
add_admin(ap)
add_logging(ap)

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 8, 7)
CODENAME = "argon"
BUILD_DT = (2023, 7, 23)
VERSION = (1, 9, 4)
CODENAME = "prometheable"
BUILD_DT = (2023, 9, 2)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -52,6 +52,11 @@ if TYPE_CHECKING:
LEELOO_DALLAS = "leeloo_dallas"
SEE_LOG = "see log for details"
SSEELOG = " ({})".format(SEE_LOG)
BAD_CFG = "invalid config; {}".format(SEE_LOG)
SBADCFG = " ({})".format(BAD_CFG)
class AXS(object):
def __init__(
@@ -471,7 +476,8 @@ class VFS(object):
self.log("vfs", "invalid relpath [{}]".format(vpath))
raise Pebkac(404)
vn, rem = self._find(undot(vpath))
cvpath = undot(vpath)
vn, rem = self._find(cvpath)
c: AXS = vn.axs
for req, d, msg in [
@@ -482,6 +488,11 @@ class VFS(object):
(will_get, c.uget, "get"),
]:
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
if self.log and err != 999:
ap = vn.canonical(rem)
t = "{} has no {} in [{}] => [{}] => [{}]"
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
t = "you don't have {}-access for this location"
raise Pebkac(err, t.format(msg))
@@ -795,7 +806,7 @@ class AuthSrv(object):
if dst in mount:
t = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
self.log(t.format(dst, mount[dst], src), c=1)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if src in mount.values():
t = "filesystem-path [{}] mounted in multiple locations:"
@@ -804,7 +815,7 @@ class AuthSrv(object):
t += "\n /{}".format(v)
self.log(t, c=3)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if not bos.path.isdir(src):
self.log("warning: filesystem-path does not exist: {}".format(src), 3)
@@ -903,7 +914,7 @@ class AuthSrv(object):
t = "volume-specific config (anything from --help-flags)"
self._l(ln, 6, t)
else:
raise Exception("invalid section header")
raise Exception("invalid section header" + SBADCFG)
self.indent = " " if subsection else " "
continue
@@ -926,7 +937,7 @@ class AuthSrv(object):
acct[u] = p
except:
t = 'lines inside the [accounts] section must be "username: password"'
raise Exception(t)
raise Exception(t + SBADCFG)
continue
if vp is not None and ap is None:
@@ -954,7 +965,7 @@ class AuthSrv(object):
continue
except:
err += "accs entries must be 'rwmdgGa: user1, user2, ...'"
raise Exception(err)
raise Exception(err + SBADCFG)
if cat == catf:
err = ""
@@ -967,7 +978,7 @@ class AuthSrv(object):
if bad:
err = "bad characters [{}] in volflag name [{}]; "
err = err.format(bad, sk)
raise Exception(err)
raise Exception(err + SBADCFG)
if sv is True:
fstr += "," + sk
else:
@@ -979,9 +990,9 @@ class AuthSrv(object):
continue
except:
err += "flags entries (volflags) must be one of the following:\n 'flag1, flag2, ...'\n 'key: value'\n 'flag1, flag2, key: value'"
raise Exception(err)
raise Exception(err + SBADCFG)
raise Exception("unprocessable line in config")
raise Exception("unprocessable line in config" + SBADCFG)
self._e()
self.line_ctr = 0
@@ -1218,7 +1229,7 @@ class AuthSrv(object):
+ ", ".join(k for k in sorted(missing_users)),
c=1,
)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if LEELOO_DALLAS in all_users:
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
@@ -1228,7 +1239,7 @@ class AuthSrv(object):
if pwd in seenpwds:
t = "accounts [{}] and [{}] have the same password; this is not supported"
self.log(t.format(seenpwds[pwd], usr), 1)
raise Exception("invalid config")
raise Exception(BAD_CFG)
seenpwds[pwd] = usr
promote = []
@@ -1612,6 +1623,7 @@ class AuthSrv(object):
vfs.bubble_flags()
have_e2d = False
have_e2t = False
t = "volumes and permissions:\n"
for zv in vfs.all_vols.values():
if not self.warn_anonwrite:
@@ -1635,6 +1647,9 @@ class AuthSrv(object):
if "e2d" in zv.flags:
have_e2d = True
if "e2t" in zv.flags:
have_e2t = True
t += "\n"
if self.warn_anonwrite:
@@ -1646,6 +1661,13 @@ class AuthSrv(object):
if t:
self.log("\n\033[{}\033[0m\n".format(t))
if not have_e2t:
t = "hint: argument -e2ts enables multimedia indexing (artist/title/...)"
self.log(t, 6)
else:
t = "hint: argument -e2dsa enables searching, upload-undo, and better deduplication"
self.log(t, 6)
zv, _ = vfs.get("/", "*", False, False)
zs = zv.realpath.lower()
if zs in ("/", "c:\\") or zs.startswith(r"c:\windows"):
@@ -1653,7 +1675,7 @@ class AuthSrv(object):
self.log(t.format(zv.realpath), c=1)
try:
zv, _ = vfs.get("/", "*", False, True)
zv, _ = vfs.get("/", "*", False, True, err=999)
if self.warn_anonwrite and os.getcwd() == zv.realpath:
t = "anyone can write to the current directory: {}\n"
self.log(t.format(zv.realpath), c=1)
@@ -2015,13 +2037,19 @@ def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
if os.path.isdir(fp):
names = os.listdir(fp)
ret.append("#\033[36m cfg files in {} => {}\033[0m".format(fp, names))
crumb = "#\033[36m cfg files in {} => {}\033[0m".format(fp, names)
ret.append(crumb)
for fn in sorted(names):
fp2 = os.path.join(fp, fn)
if not fp2.endswith(".conf") or fp2 in ipath:
continue
expand_config_file(ret, fp2, ipath)
if ret[-1] == crumb:
# no config files below; remove breadcrumb
ret.pop()
return
ipath += " -> " + fp

View File

@@ -181,6 +181,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
raise Exception("failed to translate cert: {}, {}".format(rc, se))
bname = os.path.join(args.crt_dir, "srv")
try:
os.unlink(bname + ".key")
except:
pass
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
@@ -216,7 +220,7 @@ def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
HAVE_CFSSL = False
log("cert", "could not create TLS certificates: {}".format(ex), 3)
if getattr(ex, "errno", 0) == errno.ENOENT:
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
log("cert", t, 6)
ensure_cert(log, args)

View File

@@ -9,12 +9,19 @@ import stat
import sys
import time
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
try:
import asynchat
except:
sys.path.append(os.path.join(E.mod, "vend"))
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.ioloop import IOLoop
from pyftpdlib.servers import FTPServer
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
from .authsrv import VFS
from .bos import bos
from .util import (
@@ -30,15 +37,6 @@ from .util import (
vjoin,
)
try:
from pyftpdlib.ioloop import IOLoop
except ImportError:
p = os.path.join(E.mod, "vend")
print("loading asynchat from " + p)
sys.path.append(p)
from pyftpdlib.ioloop import IOLoop
if TYPE_CHECKING:
from .svchub import SvcHub

View File

@@ -33,7 +33,7 @@ from .__version__ import S_VERSION
from .authsrv import VFS # typechk
from .bos import bos
from .star import StreamTar
from .sutil import StreamArc # typechk
from .sutil import StreamArc, gfilter
from .szip import StreamZip
from .util import (
HTTPCODE,
@@ -41,8 +41,8 @@ from .util import (
MultipartParser,
Pebkac,
UnrecvEOF,
alltrace,
absreal,
alltrace,
atomic_move,
exclude_dotfiles,
fsenc,
@@ -141,6 +141,7 @@ class HttpCli(object):
self.vn = self.asrv.vfs
self.rem = " "
self.vpath = " "
self.vpaths = " "
self.uname = " "
self.pw = " "
self.rvol = [" "]
@@ -210,7 +211,8 @@ class HttpCli(object):
ka["ts"] = self.conn.hsrv.cachebuster()
ka["lang"] = self.args.lang
ka["favico"] = self.args.favico
ka["svcname"] = self.args.doctitle
ka["s_name"] = self.args.bname
ka["s_doctitle"] = self.args.doctitle
ka["html_head"] = self.html_head
return tpl.render(**ka) # type: ignore
@@ -331,10 +333,12 @@ class HttpCli(object):
# split req into vpath + uparam
uparam = {}
if "?" not in self.req:
self.trailing_slash = self.req.endswith("/")
vpath = undot(self.req)
vpath = unquotep(self.req) # not query, so + means +
self.trailing_slash = vpath.endswith("/")
vpath = undot(vpath)
else:
vpath, arglist = self.req.split("?", 1)
vpath = unquotep(vpath)
self.trailing_slash = vpath.endswith("/")
vpath = undot(vpath)
@@ -349,6 +353,8 @@ class HttpCli(object):
for k in arglist.split("&"):
if "=" in k:
k, zs = k.split("=", 1)
# x-www-form-urlencoded (url query part) uses
# either + or %20 for 0x20 so handle both
uparam[k.lower()] = unquotep(zs.strip().replace("+", " "))
else:
uparam[k.lower()] = ""
@@ -383,7 +389,10 @@ class HttpCli(object):
self.uparam = uparam
self.cookies = cookies
self.vpath = unquotep(vpath) # not query, so + means +
self.vpath = vpath
self.vpaths = (
self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath
)
ok = "\x00" not in self.vpath
if ANYWIN:
@@ -559,8 +568,8 @@ class HttpCli(object):
self.out_headers.update(NO_CACHE)
return
n = "604869" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n
n = 69 if not cache else 604869 if cache == "i" else int(cache)
self.out_headers["Cache-Control"] = "max-age=" + str(n)
def k304(self) -> bool:
k304 = self.cookies.get("k304")
@@ -620,9 +629,27 @@ class HttpCli(object):
headers: Optional[dict[str, str]] = None,
volsan: bool = False,
) -> bytes:
if status == 404:
g = self.conn.hsrv.g404
if g.lim:
if status > 400 and status in (403, 404, 422):
if status == 404:
g = self.conn.hsrv.g404
elif status == 403:
g = self.conn.hsrv.g403
else:
g = self.conn.hsrv.g422
gurl = self.conn.hsrv.gurl
if (
gurl.lim
and (not g.lim or gurl.lim < g.lim)
and self.args.sus_urls.search(self.vpath)
):
g = self.conn.hsrv.gurl
if g.lim and (
g == self.conn.hsrv.g422
or not self.args.nonsus_urls
or not self.args.nonsus_urls.search(self.vpath)
):
bonk, ip = g.bonk(self.ip, self.vpath)
if bonk:
xban = self.vn.flags.get("xban")
@@ -637,14 +664,19 @@ class HttpCli(object):
0,
self.ip,
time.time(),
"404",
str(status),
):
self.log("client banned: 404s", 1)
self.log("client banned: %ss" % (status,), 1)
self.conn.hsrv.bans[ip] = bonk
if volsan:
vols = list(self.asrv.vfs.all_vols.values())
body = vol_san(vols, body)
try:
zs = absreal(__file__).rsplit(os.path.sep, 2)[0]
body = body.replace(zs.encode("utf-8"), b"PP")
except:
pass
self.send_headers(len(body), status, mime, headers)
@@ -687,6 +719,21 @@ class HttpCli(object):
r = ["%s=%s" % (k, quotep(zs)) if zs else k for k, zs in kv.items()]
return "?" + "&amp;".join(r)
def ourlq(self) -> str:
skip = ("pw", "h", "k")
ret = []
for k, v in self.ouparam.items():
if k in skip:
continue
t = "{}={}".format(quotep(k), quotep(v))
ret.append(t.replace(" ", "+").rstrip("="))
if not ret:
return ""
return "?" + "&".join(ret)
def redirect(
self,
vpath: str,
@@ -802,6 +849,9 @@ class HttpCli(object):
self.reply(b"", 301, headers=h)
return True
if self.vpath == ".cpr/metrics":
return self.conn.hsrv.metrics.tx(self)
path_base = os.path.join(self.E.mod, "web")
static_path = absreal(os.path.join(path_base, self.vpath[5:]))
if static_path in self.conn.hsrv.statics:
@@ -820,14 +870,17 @@ class HttpCli(object):
if not self.can_read and not self.can_write and not self.can_get:
t = "@{} has no access to [{}]"
self.log(t.format(self.uname, self.vpath))
if "on403" in self.vn.flags:
t += " (on403)"
self.log(t.format(self.uname, self.vpath))
ret = self.on40x(self.vn.flags["on403"], self.vn, self.rem)
if ret == "true":
return True
elif ret == "false":
return False
elif ret == "home":
self.uparam["h"] = ""
elif ret == "allow":
self.log("plugin override; access permitted")
self.can_read = self.can_write = self.can_move = True
@@ -837,6 +890,10 @@ class HttpCli(object):
return self.tx_404(True)
else:
if self.vpath:
ptn = self.args.nonsus_urls
if not ptn or not ptn.search(self.vpath):
self.log(t.format(self.uname, self.vpath))
return self.tx_404(True)
self.uparam["h"] = ""
@@ -2025,7 +2082,9 @@ class HttpCli(object):
dst = self.args.SRS
if self.vpath:
dst += quotep(self.vpath)
dst += quotep(self.vpaths)
dst += self.ourlq()
msg = self.get_pwd_cookie(pwd)
html = self.j2s("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
@@ -2634,7 +2693,7 @@ class HttpCli(object):
#
# if request is for foo.js, check if we have foo.js.{gz,br}
file_ts = 0
file_ts = 0.0
editions: dict[str, tuple[str, int]] = {}
for ext in ["", ".gz", ".br"]:
try:
@@ -2652,7 +2711,7 @@ class HttpCli(object):
else:
sz = st.st_size
file_ts = max(file_ts, int(st.st_mtime))
file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = (fs_path, sz)
except:
pass
@@ -2665,11 +2724,14 @@ class HttpCli(object):
#
# if-modified
file_lastmod, do_send = self._chk_lastmod(file_ts)
file_lastmod, do_send = self._chk_lastmod(int(file_ts))
self.out_headers["Last-Modified"] = file_lastmod
if not do_send:
status = 304
if self.can_write:
self.out_headers["X-Lastmod3"] = str(int(file_ts * 1000))
#
# Accept-Encoding and UA decides which edition to send
@@ -2833,12 +2895,26 @@ class HttpCli(object):
logmsg = "{:4} {} ".format("", self.req)
self.keepalive = False
cancmp = not self.args.no_tarcmp
if fmt == "tar":
mime = "application/x-tar"
packer: Type[StreamArc] = StreamTar
if cancmp and uarg.startswith("gz"):
mime = "application/gzip"
ext = "tar.gz"
elif cancmp and uarg.startswith("bz2"):
mime = "application/x-bzip"
ext = "tar.bz2"
elif cancmp and uarg.startswith("xz"):
mime = "application/x-xz"
ext = "tar.xz"
else:
mime = "application/x-tar"
ext = "tar"
else:
mime = "application/zip"
packer = StreamZip
ext = "zip"
fn = items[0] if items and items[0] else self.vpath
if fn:
@@ -2863,7 +2939,7 @@ class HttpCli(object):
ufn = b"".join(zbl).decode("ascii")
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
cdis = cdis.format(afn, fmt, ufn, fmt)
cdis = cdis.format(afn, ext, ufn, ext)
self.log(cdis)
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
@@ -2871,7 +2947,23 @@ class HttpCli(object):
vpath, rem, set(items), self.uname, dots, False, not self.args.no_scandir
)
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
cfmt = ""
if self.thumbcli and not self.args.no_bacode:
for zs in ("opus", "w", "j"):
if zs in self.ouparam or uarg == zs:
cfmt = zs
if cfmt:
self.log("transcoding to [{}]".format(cfmt))
fgen = gfilter(fgen, self.thumbcli, self.uname, vpath, cfmt)
bgen = packer(
self.log,
fgen,
utf8="utf" in uarg,
pre_crc="crc" in uarg,
cmp=uarg if cancmp else "",
)
bsent = 0
for buf in bgen.gen():
if not buf:
@@ -2882,6 +2974,7 @@ class HttpCli(object):
bsent += len(buf)
except:
logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
bgen.stop()
break
spd = self._spd(bsent)
@@ -2936,7 +3029,12 @@ class HttpCli(object):
ts_html = st.st_mtime
sz_md = 0
lead = b""
for buf in yieldfile(fs_path):
if not sz_md and b"\n" in buf[:2]:
lead = buf[: buf.find(b"\n") + 1]
sz_md += len(lead)
sz_md += len(buf)
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
@@ -2955,7 +3053,6 @@ class HttpCli(object):
targs = {
"r": self.args.SR if self.is_vproxied else "",
"ts": self.conn.hsrv.cachebuster(),
"svcname": self.args.doctitle,
"html_head": self.html_head,
"edit": "edit" in self.uparam,
"title": html_escape(self.vpath, crlf=True),
@@ -2982,7 +3079,7 @@ class HttpCli(object):
return True
try:
self.s.sendall(html[0])
self.s.sendall(html[0] + lead)
for buf in yieldfile(fs_path):
self.s.sendall(html_bescape(buf))
@@ -2998,7 +3095,7 @@ class HttpCli(object):
return True
def tx_svcs(self) -> bool:
aname = re.sub("[^0-9a-zA-Z]+", "", self.args.name) or "a"
aname = re.sub("[^0-9a-zA-Z]+", "", self.args.vname) or "a"
ep = self.host
host = ep.split(":")[0]
hport = ep[ep.find(":") :] if ":" in ep else ""
@@ -3082,7 +3179,7 @@ class HttpCli(object):
html = self.j2s(
"splash",
this=self,
qvpath=quotep(self.vpath),
qvpath=quotep(self.vpaths) + self.ourlq(),
rvol=rvol,
wvol=wvol,
avol=avol,
@@ -3140,7 +3237,8 @@ class HttpCli(object):
t = '<h1 id="n">404 not found &nbsp;┐( ´ -`)┌</h1><p><a id="r" href="{}/?h">go home</a></p>'
t = t.format(self.args.SR)
html = self.j2s("splash", this=self, qvpath=quotep(self.vpath), msg=t)
qv = quotep(self.vpaths) + self.ourlq()
html = self.j2s("splash", this=self, qvpath=qv, msg=t)
self.reply(html.encode("utf-8"), status=rc)
return True
@@ -3212,7 +3310,7 @@ class HttpCli(object):
dst = ""
elif top:
if not dst.startswith(top + "/"):
raise Pebkac(400, "arg funk")
raise Pebkac(422, "arg funk")
dst = dst[len(top) + 1 :]
@@ -3234,8 +3332,9 @@ class HttpCli(object):
sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
ret["k" + quotep(excl)] = sub
vfs = self.asrv.vfs
try:
vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
vn, rem = vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem,
self.uname,
@@ -3248,7 +3347,7 @@ class HttpCli(object):
for v in self.rvol:
d1, d2 = v.rsplit("/", 1) if "/" in v else ["", v]
if d1 == top:
vfs_virt[d2] = self.asrv.vfs # typechk, value never read
vfs_virt[d2] = vfs # typechk, value never read
dirs = []
@@ -3262,6 +3361,11 @@ class HttpCli(object):
for x in vfs_virt:
if x != excl:
try:
dvn, drem = vfs.get(vjoin(top, x), self.uname, True, False)
bos.stat(dvn.canonical(drem, False))
except:
x += "\n"
dirs.append(x)
ret["a"] = dirs
@@ -3275,8 +3379,7 @@ class HttpCli(object):
if not idx or not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
filt = self.uparam.get("filter")
filt = unquotep(filt or "")
filt = self.uparam.get("filter") or ""
lm = "ups [{}]".format(filt)
self.log(lm)
@@ -3374,9 +3477,6 @@ class HttpCli(object):
if not dst:
raise Pebkac(400, "need dst vpath")
# x-www-form-urlencoded (url query part) uses
# either + or %20 for 0x20 so handle both
dst = unquotep(dst.replace("+", " "))
return self._mv(self.vpath, dst.lstrip("/"))
def _mv(self, vsrc: str, vdst: str) -> bool:
@@ -3688,7 +3788,7 @@ class HttpCli(object):
"url_suf": url_suf,
"logues": logues,
"readme": readme,
"title": html_escape(self.vpath, crlf=True) or "💾🎉",
"title": html_escape("%s %s" % (self.args.bname, self.vpath), crlf=True),
"srv_info": srv_infot,
"dgrid": "grid" in vf,
"unlist": unlist,
@@ -3756,7 +3856,9 @@ class HttpCli(object):
pass
# show dotfiles if permitted and requested
if not self.args.ed or "dots" not in self.uparam:
if not self.args.ed or (
"dots" not in self.uparam and (is_ls or "dots" not in self.cookies)
):
ls_names = exclude_dotfiles(ls_names)
add_fk = vn.flags.get("fk")
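As a quick reference for the ?tar= dispatch in the hunk further up this HttpCli diff: the value after `tar=` (called `uarg` in the code) selects both the Content-Type and the file extension, and the `no_tarcmp` server option falls back to plain tar. A minimal sketch of that mapping; the function name is invented for illustration:

```python
# sketch of the ?tar= format dispatch shown earlier in this diff;
# the function name is illustrative, not copyparty API
def tar_mime_ext(uarg, no_tarcmp=False):
    cancmp = not no_tarcmp
    if cancmp and uarg.startswith("gz"):
        return "application/gzip", "tar.gz"
    if cancmp and uarg.startswith("bz2"):
        return "application/x-bzip", "tar.bz2"
    if cancmp and uarg.startswith("xz"):
        return "application/x-xz", "tar.xz"
    return "application/x-tar", "tar"

print(tar_mime_ext("gz:3"))  # -> ('application/gzip', 'tar.gz')
```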

View File

@@ -56,6 +56,7 @@ except SyntaxError:
sys.exit(1)
from .httpconn import HttpConn
from .metrics import Metrics
from .u2idx import U2idx
from .util import (
E_SCK,
@@ -99,12 +100,16 @@ class HttpSrv(object):
# redefine in case of multiprocessing
socket.setdefaulttimeout(120)
self.t0 = time.time()
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.magician = Magician()
self.nm = NetMap([], {})
self.ssdp: Optional["SSDPr"] = None
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
self.g403 = Garda(self.args.ban_403)
self.g422 = Garda(self.args.ban_422, False)
self.gurl = Garda(self.args.ban_url)
self.bans: dict[str, int] = {}
self.aclose: dict[str, int] = {}
@@ -122,6 +127,7 @@ class HttpSrv(object):
self.t_periodic: Optional[threading.Thread] = None
self.u2fh = FHC()
self.metrics = Metrics(self)
self.srvs: list[socket.socket] = []
self.ncli = 0 # exact
self.clients: set[HttpConn] = set() # laggy

View File

@@ -295,7 +295,9 @@ class MDNS(MCast):
while self.running:
timeout = (
0.02 + random.random() * 0.07
if self.probing or self.q or self.defend or self.unsolicited
if self.probing or self.q or self.defend
else max(0.05, self.unsolicited[0] - time.time())
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)
@@ -513,6 +515,10 @@ class MDNS(MCast):
for srv in self.srv.values():
tx.add(srv)
if not self.unsolicited and self.args.zm_spam:
zf = time.time() + self.args.zm_spam + random.random() * 0.07
self.unsolicited.append(zf)
for srv, deadline in list(self.defend.items()):
if now < deadline:
continue

copyparty/metrics.py (new file, 165 lines)
View File

@@ -0,0 +1,165 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import json
import time
from .__init__ import TYPE_CHECKING
from .util import Pebkac, get_df, unhumanize
if TYPE_CHECKING:
from .httpcli import HttpCli
from .httpsrv import HttpSrv
class Metrics(object):
def __init__(self, hsrv: "HttpSrv") -> None:
self.hsrv = hsrv
def tx(self, cli: "HttpCli") -> bool:
if not cli.avol:
raise Pebkac(403, "not allowed for user " + cli.uname)
args = cli.args
if not args.stats:
raise Pebkac(403, "the stats feature is not enabled in server config")
conn = cli.conn
vfs = conn.asrv.vfs
allvols = list(sorted(vfs.all_vols.items()))
idx = conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
idx = None
ret: list[str] = []
def addc(k: str, unit: str, v: str, desc: str) -> None:
if unit:
k += "_" + unit
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
else:
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
def addh(k: str, typ: str, desc: str) -> None:
zs = "# TYPE %s %s\n# HELP %s %s"
ret.append(zs % (k, typ, k, desc))
def addbh(k: str, desc: str) -> None:
zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
ret.append(zs % (k, k, k, desc))
def addv(k: str, v: str) -> None:
ret.append("%s %s" % (k, v))
v = "{:.3f}".format(time.time() - self.hsrv.t0)
addc("cpp_uptime", "seconds", v, "time since last server restart")
v = str(len(conn.bans or []))
addc("cpp_bans", "", v, "number of banned IPs")
if not args.nos_hdd:
addbh("cpp_disk_size_bytes", "total HDD size of volume")
addbh("cpp_disk_free_bytes", "free HDD space in volume")
for vpath, vol in allvols:
free, total = get_df(vol.realpath)
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
if idx and not args.nos_vol:
addbh("cpp_vol_bytes", "num bytes of data in volume")
addh("cpp_vol_files", "gauge", "num files in volume")
addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
tnbytes = 0
tnfiles = 0
volsizes = []
try:
ptops = [x.realpath for _, x in allvols]
x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
volsizes = x.get()
except Exception as ex:
cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)
for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
tnbytes += nbytes
tnfiles += nfiles
addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
zi = unhumanize(vol.flags.get("vmaxb") or "0")
if zi:
v = str(zi - nbytes)
addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)
zi = unhumanize(vol.flags.get("vmaxn") or "0")
if zi:
v = str(zi - nfiles)
addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)
if volsizes:
addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
addv('cpp_vol_files{vol="total"}', str(tnfiles))
if idx and not args.nos_dup:
addbh("cpp_dupe_bytes", "num dupe bytes in volume")
addh("cpp_dupe_files", "gauge", "num dupe files in volume")
tnbytes = 0
tnfiles = 0
for vpath, vol in allvols:
cur = idx.get_cur(vol.realpath)
if not cur:
continue
nbytes = 0
nfiles = 0
q = "select sz, count(*)-1 c from up group by w having c"
for sz, c in cur.execute(q):
nbytes += sz * c
nfiles += c
tnbytes += nbytes
tnfiles += nfiles
addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))
addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
addv('cpp_dupe_files{vol="total"}', str(tnfiles))
if not args.nos_unf:
addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
tnbytes = 0
tnfiles = 0
try:
x = self.hsrv.broker.ask("up2k.get_unfinished")
xs = x.get()
xj = json.loads(xs)
for ptop, (nbytes, nfiles) in xj.items():
tnbytes += nbytes
tnfiles += nfiles
vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
if not vol:
t = "tx_stats get_unfinished: could not map {}"
cli.log(t.format(ptop), 3)
continue
addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))
addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
addv('cpp_unf_files{vol="total"}', str(tnfiles))
except Exception as ex:
cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)
ret.append("# EOF")
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
return True
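For readers not familiar with the OpenMetrics exposition format that `tx()` emits: below is a minimal sketch of the counter block which the `addc()` helper above produces. The metric value and the start-timestamp are made-up placeholders.

```python
# illustrative expansion of addc("cpp_uptime", "seconds", ...) from metrics.py above;
# the value and the _created timestamp are placeholders
k, unit, v, desc = "cpp_uptime", "seconds", "123.456", "time since last server restart"
t0 = 1693526400  # stands in for int(self.hsrv.t0)
k += "_" + unit
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
print(zs % (k, k, unit, k, desc, k, t0, k, v))
# prints:
#   # TYPE cpp_uptime_seconds counter
#   # UNIT cpp_uptime_seconds seconds
#   # HELP cpp_uptime_seconds time since last server restart
#   cpp_uptime_seconds_created 1693526400
#   cpp_uptime_seconds_total 123.456
```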

View File

@@ -15,7 +15,7 @@ from ipaddress import (
)
from .__init__ import MACOS, TYPE_CHECKING
from .util import Netdev, find_prefix, min_ex, spack
from .util import Daemon, Netdev, find_prefix, min_ex, spack
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -228,6 +228,7 @@ class MCast(object):
for srv in self.srv.values():
assert srv.ip in self.sips
Daemon(self.hopper, "mc-hop")
return bound
def setup_socket(self, srv: MC_Sck) -> None:
@@ -299,33 +300,57 @@ class MCast(object):
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
self.log(t, 3)
self.hop(srv)
if self.hop(srv, False):
self.log("igmp was already joined?? chilling for a sec", 3)
time.sleep(1.2)
self.hop(srv, True)
self.b4.sort(reverse=True)
self.b6.sort(reverse=True)
def hop(self, srv: MC_Sck) -> None:
def hop(self, srv: MC_Sck, on: bool) -> bool:
"""rejoin to keepalive on routers/switches without igmp-snooping"""
sck = srv.sck
req = srv.mreq
if ":" in srv.ip:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
except:
pass
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
if not on:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
return True
except:
return False
else:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
else:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
time.sleep(1.2)
except:
pass
if not on:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
return True
except:
return False
else:
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
return True
def hopper(self):
while self.args.mc_hop and self.running:
time.sleep(self.args.mc_hop)
if not self.running:
return
for srv in self.srv.values():
self.hop(srv, False)
# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
if not self.running:
return
for srv in self.srv.values():
self.hop(srv, True)
def map_client(self, cip: str) -> Optional[MC_Sck]:
try:

View File

@@ -81,7 +81,7 @@ class SSDPr(object):
ubase = "{}://{}:{}".format(proto, sip, sport)
zsl = self.args.zsl
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
name = "{} @ {}".format(self.args.doctitle, self.args.name)
name = self.args.doctitle
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
hc.reply(zs.encode("utf-8", "replace"))
return False  # close connection

View File

@@ -44,6 +44,7 @@ class StreamTar(StreamArc):
self,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
cmp: str = "",
**kwargs: Any
):
super(StreamTar, self).__init__(log, fgen)
@@ -53,14 +54,36 @@ class StreamTar(StreamArc):
self.qfile = QFile()
self.errf: dict[str, Any] = {}
try:
cmp, lv = cmp.replace(":", ",").split(",")
lv = int(lv)
except:
lv = None
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
if cmp == "gz":
fun = tarfile.TarFile.gzopen
arg["compresslevel"] = lv if lv is not None else 3
elif cmp == "bz2":
fun = tarfile.TarFile.bz2open
arg["compresslevel"] = lv if lv is not None else 2
elif cmp == "xz":
fun = tarfile.TarFile.xzopen
arg["preset"] = lv if lv is not None else 1
else:
fun = tarfile.open
arg["mode"] = "w|"
self.tar = fun(**arg)
Daemon(self._gen, "star-gen")
def gen(self) -> Generator[Optional[bytes], None, None]:
buf = b""
try:
while True:
buf = self.qfile.q.get()
@@ -72,6 +95,12 @@ class StreamTar(StreamArc):
yield None
finally:
while buf:
try:
buf = self.qfile.q.get()
except:
pass
if self.errf:
bos.unlink(self.errf["ap"])
@@ -101,6 +130,9 @@ class StreamTar(StreamArc):
errors.append((f["vp"], f["err"]))
continue
if self.stopped:
break
try:
self.ser(f)
except:
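The `cmp` argument that StreamTar now accepts is the raw value from the URL, e.g. `gz:3` or `xz,1`; a standalone sketch of how that spec is split into codec and level and mapped onto tarfile's compressing openers, using the same defaults as the hunk above:

```python
import tarfile

# standalone sketch of the codec/level selection in StreamTar.__init__ above
def pick_opener(cmp=""):
    try:
        cmp, lv = cmp.replace(":", ",").split(",")
        lv = int(lv)
    except Exception:
        lv = None  # no level given; keep the codec name as-is
    if cmp == "gz":
        return tarfile.TarFile.gzopen, {"compresslevel": lv if lv is not None else 3}
    if cmp == "bz2":
        return tarfile.TarFile.bz2open, {"compresslevel": lv if lv is not None else 2}
    if cmp == "xz":
        return tarfile.TarFile.xzopen, {"preset": lv if lv is not None else 1}
    return tarfile.open, {"mode": "w|"}  # plain uncompressed tar stream
```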

View File

@@ -1,10 +1,14 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tempfile
from datetime import datetime
from .__init__ import CORES
from .bos import bos
from .th_cli import ThumbCli
from .util import vjoin
if True: # pylint: disable=using-constant-test
from typing import Any, Generator, Optional
@@ -21,10 +25,78 @@ class StreamArc(object):
):
self.log = log
self.fgen = fgen
self.stopped = False
def gen(self) -> Generator[Optional[bytes], None, None]:
raise Exception("override me")
def stop(self) -> None:
self.stopped = True
def gfilter(
fgen: Generator[dict[str, Any], None, None],
thumbcli: ThumbCli,
uname: str,
vtop: str,
fmt: str,
) -> Generator[dict[str, Any], None, None]:
from concurrent.futures import ThreadPoolExecutor
pend = []
with ThreadPoolExecutor(max_workers=CORES) as tp:
try:
for f in fgen:
task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
pend.append((task, f))
if pend[0][0].done() or len(pend) > CORES * 4:
task, f = pend.pop(0)
try:
f = task.result(600)
except:
pass
yield f
for task, f in pend:
try:
f = task.result(600)
except:
pass
yield f
except Exception as ex:
thumbcli.log("gfilter flushing ({})".format(ex))
for task, f in pend:
try:
task.result(600)
except:
pass
thumbcli.log("gfilter flushed")
def enthumb(
thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
) -> dict[str, Any]:
rem = f["vp"]
ext = rem.rsplit(".", 1)[-1].lower()
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
raise Exception()
vp = vjoin(vtop, rem.split("/", 1)[1])
vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
dbv, vrem = vn.get_dbv(rem)
thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
if not thp:
raise Exception()
ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
sz = bos.path.getsize(thp)
st: os.stat_result = f["st"]
ts = st.st_mtime
f["ap"] = thp
f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
return f
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""]
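The `gfilter` generator above is a bounded producer/consumer pipeline: it keeps submitting transcode jobs to a thread pool, but starts draining as soon as the oldest job is done or the backlog exceeds a few multiples of the worker count, so archive streaming never stalls behind an unbounded queue. A stripped-down sketch of that pattern (names are illustrative, not copyparty API):

```python
from concurrent.futures import ThreadPoolExecutor

# bounded submit/drain pipeline in the style of gfilter above;
# "work" transforms an item, failures fall back to the original item
def pipeline(items, work, workers=4):
    pend = []
    with ThreadPoolExecutor(max_workers=workers) as tp:
        for it in items:
            pend.append((tp.submit(work, it), it))
            if pend[0][0].done() or len(pend) > workers * 4:
                task, it = pend.pop(0)
                try:
                    it = task.result(600)  # same 600s timeout as the hunk above
                except Exception:
                    pass
                yield it
        for task, it in pend:
            try:
                it = task.result(600)
            except Exception:
                pass
            yield it
```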

View File

@@ -29,7 +29,7 @@ if True: # pylint: disable=using-constant-test
from typing import Any, Optional, Union
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
from .authsrv import AuthSrv
from .authsrv import BAD_CFG, AuthSrv
from .cert import ensure_cert
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
@@ -100,11 +100,6 @@ class SvcHub(object):
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
# for non-http clients (ftp)
self.bans: dict[str, int] = {}
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
if args.sss or args.s >= 3:
args.ss = True
args.no_dav = True
@@ -121,6 +116,7 @@ class SvcHub(object):
args.hardlink = True
args.vague_403 = True
args.ban_404 = "50,60,1440"
args.turbo = -1
args.nih = True
if args.s:
@@ -131,8 +127,19 @@ class SvcHub(object):
args.force_js = True
if not self._process_config():
raise Exception("bad config")
raise Exception(BAD_CFG)
# for non-http clients (ftp)
self.bans: dict[str, int] = {}
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
self.g403 = Garda(self.args.ban_403)
self.g422 = Garda(self.args.ban_422)
self.gurl = Garda(self.args.ban_url)
self.log_div = 10 ** (6 - args.log_tdec)
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
@@ -162,6 +169,14 @@ class SvcHub(object):
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
if args.nih:
args.vname = ""
args.doctitle = args.doctitle.replace(" @ --name", "")
else:
args.vname = args.name
args.doctitle = args.doctitle.replace("--name", args.vname)
args.bname = args.bname.replace("--name", args.vname) or args.vname
if args.log_fk:
args.log_fk = re.compile(args.log_fk)
@@ -388,6 +403,18 @@ class SvcHub(object):
if vs and vs.startswith("~"):
setattr(al, k, os.path.expanduser(vs))
for k in "sus_urls nonsus_urls".split(" "):
vs = getattr(al, k)
if not vs or vs == "no":
setattr(al, k, None)
else:
setattr(al, k, re.compile(vs))
if not al.sus_urls:
al.ban_url = "no"
elif al.ban_url == "no":
al.sus_urls = None
return True
def _setlimits(self) -> None:
@@ -673,11 +700,11 @@ class SvcHub(object):
with self.log_mutex:
zd = datetime.utcnow()
ts = "%04d-%04d-%06d.%03d" % (
ts = self.log_dfmt % (
zd.year,
zd.month * 100 + zd.day,
(zd.hour * 100 + zd.minute) * 100 + zd.second,
zd.microsecond // 1000,
zd.microsecond // self.log_div,
)
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
@@ -729,11 +756,11 @@ class SvcHub(object):
msg = "%s%s\033[0m" % (c, msg)
zd = datetime.utcfromtimestamp(now)
ts = "%02d:%02d:%02d.%03d" % (
ts = self.log_efmt % (
zd.hour,
zd.minute,
zd.second,
zd.microsecond // 1000,
zd.microsecond // self.log_div,
)
msg = fmt % (ts, src, msg)
try:
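A note on the `log_div` / `log_efmt` / `log_dfmt` fields introduced above: both timestamp hunks now derive their sub-second precision from `args.log_tdec` instead of a hardcoded millisecond format. A tiny standalone sketch of the same arithmetic; the time values below are made up:

```python
# sketch of the log-timestamp precision above; log_tdec=3 reproduces the old behaviour
log_tdec = 3
log_div = 10 ** (6 - log_tdec)
log_efmt = "%02d:%02d:%02d.%0{}d".format(log_tdec)
print(log_efmt % (14, 5, 9, 123456 // log_div))  # -> 14:05:09.123
```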

View File

@@ -221,6 +221,7 @@ class StreamZip(StreamArc):
fgen: Generator[dict[str, Any], None, None],
utf8: bool = False,
pre_crc: bool = False,
**kwargs: Any
) -> None:
super(StreamZip, self).__init__(log, fgen)

View File

@@ -108,6 +108,7 @@ class ThumbCli(object):
if st.st_size:
ret = tpath = tp
fmt = ret.rsplit(".")[1]
break
else:
abort = True
except:

View File

@@ -22,7 +22,7 @@ from copy import deepcopy
from queue import Queue
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS
from .authsrv import LEELOO_DALLAS, VFS, AuthSrv
from .authsrv import LEELOO_DALLAS, SSEELOG, VFS, AuthSrv
from .bos import bos
from .cfg import vf_bmap, vf_vmap
from .fsutil import Fstab
@@ -267,11 +267,49 @@ class Up2k(object):
}
return json.dumps(ret, indent=4)
def get_unfinished(self) -> str:
if PY2 or not self.mutex.acquire(timeout=0.5):
return "{}"
ret: dict[str, tuple[int, int]] = {}
try:
for ptop, tab2 in self.registry.items():
nbytes = 0
nfiles = 0
drp = self.droppable.get(ptop, {})
for wark, job in tab2.items():
if wark in drp:
continue
nfiles += 1
try:
# close enough on average
nbytes += len(job["need"]) * job["size"] // len(job["hash"])
except:
pass
ret[ptop] = (nbytes, nfiles)
finally:
self.mutex.release()
return json.dumps(ret, indent=4)
def get_volsize(self, ptop: str) -> tuple[int, int]:
with self.mutex:
return self._get_volsize(ptop)
def get_volsizes(self, ptops: list[str]) -> list[tuple[int, int]]:
ret = []
with self.mutex:
for ptop in ptops:
ret.append(self._get_volsize(ptop))
return ret
def _get_volsize(self, ptop: str) -> tuple[int, int]:
if "e2ds" not in self.flags.get(ptop, {}):
return (0, 0)
cur = self.cur[ptop]
nbytes = self.volsize[cur]
nfiles = self.volnfiles[cur]
@@ -791,9 +829,9 @@ class Up2k(object):
reg = {}
drp = None
path = os.path.join(histpath, "up2k.snap")
if bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
snap = os.path.join(histpath, "up2k.snap")
if bos.path.exists(snap):
with gzip.GzipFile(snap, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
@@ -804,20 +842,20 @@ class Up2k(object):
pass
for k, job in reg2.items():
path = djoin(job["ptop"], job["prel"], job["name"])
if bos.path.exists(path):
fp = djoin(job["ptop"], job["prel"], job["name"])
if bos.path.exists(fp):
reg[k] = job
job["poke"] = time.time()
job["busy"] = {}
else:
self.log("ign deleted file in snap: [{}]".format(path))
self.log("ign deleted file in snap: [{}]".format(fp))
if drp is None:
drp = [k for k, v in reg.items() if not v.get("need", [])]
else:
drp = [x for x in drp if x in reg]
t = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
t = "loaded snap {} |{}| ({})".format(snap, len(reg.keys()), len(drp or []))
ta = [t] + self._vis_reg_progress(reg)
self.log("\n".join(ta))
@@ -829,8 +867,11 @@ class Up2k(object):
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
if bos.makedirs(histpath):
hidedir(histpath)
try:
if bos.makedirs(histpath):
hidedir(histpath)
except:
return None
try:
cur = self._open_db(db_path)
@@ -946,7 +987,11 @@ class Up2k(object):
db.c.connection.commit()
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
if (
vol.flags.get("vmaxb")
or vol.flags.get("vmaxn")
or (self.args.stats and not self.args.nos_vol)
):
zs = "select count(sz), sum(sz) from up"
vn, vb = db.c.execute(zs).fetchone()
vb = vb or 0
@@ -955,7 +1000,7 @@ class Up2k(object):
self.volnfiles[db.c] = vn
vmaxb = unhumanize(vol.flags.get("vmaxb") or "0")
vmaxn = unhumanize(vol.flags.get("vmaxn") or "0")
t = "{} / {} ( {} / {} files) in {}".format(
t = "{:>5} / {:>5} ( {:>5} / {:>5} files) in {}".format(
humansize(vb, True),
humansize(vmaxb, True),
humansize(vn, True).rstrip("B"),
@@ -1005,7 +1050,7 @@ class Up2k(object):
if WINDOWS:
rd = rd.replace("\\", "/").strip("/")
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
g = statdir(self.log_func, not self.args.no_scandir, True, cdir)
gl = sorted(g)
partials = set([x[0] for x in gl if "PARTIAL" in x[0]])
for iname, inf in gl:
@@ -1020,6 +1065,12 @@ class Up2k(object):
continue
lmod = int(inf.st_mtime)
if stat.S_ISLNK(inf.st_mode):
try:
inf = bos.stat(abspath)
except:
continue
sz = inf.st_size
if fat32 and not ffat and inf.st_mtime % 2:
fat32 = False
@@ -1400,9 +1451,11 @@ class Up2k(object):
pf = "v{}, {:.0f}+".format(n_left, b_left / 1024 / 1024)
self.pp.msg = pf + abspath
st = bos.stat(abspath)
# throws on broken symlinks (always did)
stl = bos.lstat(abspath)
st = bos.stat(abspath) if stat.S_ISLNK(stl.st_mode) else stl
mt2 = int(stl.st_mtime)
sz2 = st.st_size
mt2 = int(st.st_mtime)
if nohash or not sz2:
w2 = up2k_wark_from_metadata(self.salt, sz2, mt2, rd, fn)
@@ -1424,6 +1477,13 @@ class Up2k(object):
if w == w2:
continue
# symlink mtime was inconsistent before v1.9.4; check if that's it
if st != stl and (nohash or not sz2):
mt2b = int(st.st_mtime)
w2b = up2k_wark_from_metadata(self.salt, sz2, mt2b, rd, fn)
if w == w2b:
continue
rewark.append((drd, dfn, w2, sz2, mt2))
t = "hash mismatch: {}\n db: {} ({} byte, {})\n fs: {} ({} byte, {})"
@@ -2358,27 +2418,31 @@ class Up2k(object):
cur = jcur
ptop = None # use cj or job as appropriate
if not job and wark in reg:
# ensure the files haven't been deleted manually
rj = reg[wark]
names = [rj[x] for x in ["name", "tnam"] if x in rj]
for fn in names:
path = djoin(rj["ptop"], rj["prel"], fn)
try:
if bos.path.getsize(path) > 0 or not rj["need"]:
# upload completed or both present
break
except:
# missing; restart
if not self.args.nw and not n4g:
t = "forgetting deleted partial upload at {}"
self.log(t.format(path))
del reg[wark]
break
if job or wark in reg:
job = job or reg[wark]
if (
job["ptop"] == cj["ptop"]
and job["prel"] == cj["prel"]
and job["name"] == cj["name"]
job["ptop"] != cj["ptop"]
or job["prel"] != cj["prel"]
or job["name"] != cj["name"]
):
# ensure the files haven't been deleted manually
names = [job[x] for x in ["name", "tnam"] if x in job]
for fn in names:
path = djoin(job["ptop"], job["prel"], fn)
try:
if bos.path.getsize(path) > 0:
# upload completed or both present
break
except:
# missing; restart
if not self.args.nw and not n4g:
job = None
break
else:
# file contents match, but not the path
src = djoin(job["ptop"], job["prel"], job["name"])
dst = djoin(cj["ptop"], cj["prel"], cj["name"])
@@ -2660,7 +2724,7 @@ class Up2k(object):
if not job:
known = " ".join([x for x in self.registry[ptop].keys()])
self.log("unknown wark [{}], known: {}".format(wark, known))
raise Pebkac(400, "unknown wark")
raise Pebkac(400, "unknown wark" + SSEELOG)
if chash not in job["need"]:
msg = "chash = {} , need:\n".format(chash)

View File

@@ -926,7 +926,8 @@ class Magician(object):
class Garda(object):
"""ban clients for repeated offenses"""
def __init__(self, cfg: str) -> None:
def __init__(self, cfg: str, uniq: bool = True) -> None:
self.uniq = uniq
try:
a, b, c = cfg.strip().split(",")
self.lim = int(a)
@@ -972,7 +973,7 @@ class Garda(object):
# assume /64 clients; drop 4 groups
ip = IPv6Address(ip).exploded[:-20]
if prev:
if prev and self.uniq:
if self.prev.get(ip) == prev:
return 0, ip
@@ -1228,12 +1229,15 @@ def ren_open(
except OSError as ex_:
ex = ex_
if ex.errno == errno.EINVAL and not asciified:
# EPERM: android13
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
asciified = True
bname, fname = [
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
for zs in [bname, fname]
]
zsl = []
for zs in (bname, fname):
zs = zs.encode("ascii", "replace").decode("ascii")
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
zsl.append(zs)
bname, fname = zsl
continue
# ENOTSUP: zfs on ubuntu 20.04
@@ -1444,7 +1448,7 @@ class MultipartParser(object):
for buf in iterable:
ret += buf
if len(ret) > max_len:
raise Pebkac(400, "field length is too long")
raise Pebkac(422, "field length is too long")
return ret
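A quick illustration of the stricter ascii fallback added to `ren_open` above (now also triggered by EPERM, which the hunk attributes to android 13): besides the ascii replacement, most punctuation is mapped to underscores. The filename below is made up:

```python
import re

# what the new fallback does to a hypothetical filename
zs = "smörgåsbord #7?.opus".encode("ascii", "replace").decode("ascii")
print(re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs))  # -> sm_rg_sbord__7_.opus
```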

View File

@@ -310,7 +310,7 @@ window.baguetteBox = (function () {
options = {};
setOptions(o);
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function setVmode() {
@@ -356,7 +356,7 @@ window.baguetteBox = (function () {
setVmode();
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function findfile() {
@@ -376,7 +376,12 @@ window.baguetteBox = (function () {
else
(vid() || ebi('bbox-overlay')).requestFullscreen();
}
catch (ex) { alert(ex); }
catch (ex) {
if (IPHONE)
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
else
alert(ex);
}
}
function tglsel() {
@@ -968,7 +973,7 @@ window.baguetteBox = (function () {
clmod(btnPrev, 'off', 't');
clmod(btnNext, 'off', 't');
if (Date.now() - ctime <= 500)
if (Date.now() - ctime <= 500 && !IPHONE)
tglfull();
ctime = Date.now();

View File

@@ -731,14 +731,13 @@ html.y #files thead th {
margin: 0;
padding: .3em .5em;
background: var(--bg);
max-width: var(--file-td-w);
word-wrap: break-word;
overflow: hidden;
}
#files tr:nth-child(2n) td {
background: var(--row-alt);
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files td+td {
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
}
@@ -861,7 +860,7 @@ html.y #path a:hover {
}
.mdo,
.mdo * {
line-height: 1.4em;
line-height: 1.5em;
}
#srv_info,
#srv_info2,
@@ -1621,6 +1620,12 @@ html.cz .tgl.btn.on {
list-style: none;
border-top: 1px solid var(--bg-u5);
}
#tree li.offline>a:first-child:before {
content: '❌';
position: absolute;
margin-left: -.25em;
z-index: 3;
}
#tree ul a.sel {
background: #000;
background: var(--bg-d3);

View File

@@ -29,7 +29,7 @@
<div id="op_player" class="opview opbox opwide"></div>
<div id="op_bup" class="opview opbox act">
<div id="op_bup" class="opview opbox {% if not ls0 %}act{% endif %}">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
@@ -39,7 +39,7 @@
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
</div>
<div id="op_mkdir" class="opview opbox act">
<div id="op_mkdir" class="opview opbox {% if not ls0 %}act{% endif %}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
📂<input type="text" name="name" class="i" placeholder="awesome mix vol.1">
@@ -55,7 +55,7 @@
</form>
</div>
<div id="op_msg" class="opview opbox act">
<div id="op_msg" class="opview opbox {% if not ls0 %}act{% endif %}">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
📟<input type="text" name="msg" class="i" placeholder="lorem ipsum dolor sit amet">
<input type="submit" value="send msg to srv log">
@@ -142,6 +142,7 @@
themes = {{ themes }},
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
s_name = "{{ s_name }}",
lang = "{{ lang }}",
dfavico = "{{ favico }}",
def_hcols = {{ def_hcols|tojson }},

View File

@@ -262,7 +262,8 @@ var Ls = {
"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
"mm_e5xx": "Could not play audio; server error ",
"mm_nof": "not finding any more audio files nearby",
"mm_pwrsv": "<p>it looks like playback is being interrupted by your phone's power-saving settings!</p>" + '<p>please go to <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">the app settings of your browser</a> and then <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">allow unrestricted battery usage</a> to fix it.</p><p>(probably a good idea to use a separate browser dedicated for just music streaming...)</p>',
"mm_pwrsv": "<p>it looks like playback is being interrupted by your phone's power-saving settings!</p>" + '<p>please go to <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">the app settings of your browser</a> and then <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">allow unrestricted battery usage</a> to fix it.</p><p><em>however,</em> it could also be due to the browser\'s autoplay settings;</p><p>Firefox: tap the icon on the left side of the address bar, then select "autoplay" and "allow audio"</p><p>Chrome: the problem will gradually dissipate as you play more music on this site</p>',
"mm_iosblk": "<p>your web browser thinks the audio playback is unwanted, and it decided to block playback until you start another track manually... unfortunately we are both powerless in telling it otherwise</p><p>supposedly this will get better as you continue playing music on this site, but I'm unfamiliar with apple devices so idk if that's true</p><p>you could try another browser, maybe firefox or chrome?</p>",
"mm_hnf": "that song no longer exists",
"im_hnf": "that image no longer exists",
@@ -374,7 +375,9 @@ var Ls = {
"fu_xe1": "failed to load unpost list from server:\n\nerror ",
"fu_xe2": "404: File not found??",
"fz_tar": "plain gnutar file (linux / mac)",
"fz_tar": "plain gnu-tar file (linux / mac)",
"fz_targz": "tar with gzip level 3 compression$N$Nthis is usually very slow, so$Nuse uncompressed tar instead",
"fz_tarxz": "tar with xz level 1 compression$N$Nthis is usually very slow, so$Nuse uncompressed tar instead",
"fz_zip8": "zip with utf8 filenames (maybe wonky on windows 7 and older)",
"fz_zipd": "zip with traditional cp437 filenames, for really old software",
"fz_zipc": "cp437 with crc32 computed early,$Nfor MS-DOS PKZIP v2.04g (october 1993)$N(takes longer to process before download can start)",
@@ -403,6 +406,7 @@ var Ls = {
"u_https3": "for better performance",
"u_ancient": 'your browser is impressively ancient -- maybe you should <a href="#" onclick="goto(\'bup\')">use bup instead</a>',
"u_nowork": "need firefox 53+ or chrome 57+ or iOS 11+",
"u_uri": "to dragdrop images from other browser windows,\nplease drop it onto the big upload button",
"u_enpot": 'switch to <a href="#">potato UI</a> (may improve upload speed)',
"u_depot": 'switch to <a href="#">fancy UI</a> (may reduce upload speed)',
"u_gotpot": 'switching to the potato UI for improved upload speed,\n\nfeel free to disagree and switch back!',
@@ -724,7 +728,8 @@ var Ls = {
"mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.",
"mm_e5xx": "Avspilling feilet: ",
"mm_nof": "finner ikke flere sanger i nærheten",
"mm_pwrsv": "<p>det ser ut som musikken ble avbrutt av telefonen sine strømsparings-innstillinger!</p>" + '<p>ta en tur innom <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">app-innstillingene til nettleseren din</a> og så <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">tillat ubegrenset batteriforbruk</a></p><p>(sikkert smart å ha en egen nettleser kun for musikkspilling...)</p>',
"mm_pwrsv": "<p>det ser ut som musikken ble avbrutt av telefonen sine strømsparings-innstillinger!</p>" + '<p>ta en tur innom <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">app-innstillingene til nettleseren din</a> og så <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">tillat ubegrenset batteriforbruk</a></p><p>NB: det kan også være pga. autoplay-innstillingene, så prøv dette:</p><p>Firefox: klikk på ikonet i venstre side av addressefeltet, velg "autoplay" og "tillat lyd"</p><p>Chrome: problemet vil minske gradvis jo mer musikk du spiller på denne siden</p>',
"mm_iosblk": "<p>nettleseren din tror at musikken er uønsket, og den bestemte seg for å stoppe avspillingen slik at du manuelt må velge en ny sang... dessverre er både du og jeg maktesløse når den har bestemt seg.</p><p>det ryktes at problemet vil minske jo mer musikk du spiller på denne siden, men jeg er ikke godt kjent med apple-dingser så jeg er ikke sikker.</p><p>kanskje firefox eller chrome fungerer bedre?</p>",
"mm_hnf": "sangen finnes ikke lenger",
"im_hnf": "bildet finnes ikke lenger",
@@ -837,6 +842,8 @@ var Ls = {
"fu_xe2": "404: Filen finnes ikke??",
"fz_tar": "ukomprimert gnu-tar arkiv, for linux og mac",
"fz_targz": "gnu-tar pakket med gzip (nivå 3)$N$NNB: denne er veldig treg;$Nukomprimert tar er bedre",
"fz_tarxz": "gnu-tar pakket med xz (nivå 1)$N$NNB: denne er veldig treg;$Nukomprimert tar er bedre",
"fz_zip8": "zip med filnavn i utf8 (noe problematisk på windows 7 og eldre)",
"fz_zipd": "zip med filnavn i cp437, for høggamle maskiner",
"fz_zipc": "cp437 med tidlig crc32,$Nfor MS-DOS PKZIP v2.04g (oktober 1993)$N(øker behandlingstid på server)",
@@ -865,6 +872,7 @@ var Ls = {
"u_https3": "for høyere hastighet",
"u_ancient": 'nettleseren din er prehistorisk -- mulig du burde <a href="#" onclick="goto(\'bup\')">bruke bup istedenfor</a>',
"u_nowork": "krever firefox 53+, chrome 57+, eller iOS 11+",
"u_uri": "for å laste opp bilder ifra andre nettleservinduer,\nslipp bildet rett på den store last-opp-knappen",
"u_enpot": 'bytt til <a href="#">enkelt UI</a> (gir sannsynlig raskere opplastning)',
"u_depot": 'bytt til <a href="#">snæsent UI</a> (gir sannsynlig tregere opplastning)',
"u_gotpot": 'byttet til et enklere UI for å laste opp raskere,\n\ndu kan gjerne bytte tilbake altså!',
@@ -1136,16 +1144,16 @@ ebi('op_cfg').innerHTML = (
'<div>\n' +
' <h3>' + L.cl_favico + ' <span id="ico1">🎉</span></h3>\n' +
' <div>\n' +
' <input type="text" id="icot" style="width:1.3em" value="" tt="' + L.cft_text + '" />' +
' <input type="text" id="icof" style="width:2em" value="" tt="' + L.cft_fg + '" />' +
' <input type="text" id="icob" style="width:2em" value="" tt="' + L.cft_bg + '" />' +
' <input type="text" id="icot" value="" ' + NOAC + ' style="width:1.3em" tt="' + L.cft_text + '" />' +
' <input type="text" id="icof" value="" ' + NOAC + ' style="width:2em" tt="' + L.cft_fg + '" />' +
' <input type="text" id="icob" value="" ' + NOAC + ' style="width:2em" tt="' + L.cft_bg + '" />' +
' </td>\n' +
' </div>\n' +
'</div>\n' +
'<div>\n' +
' <h3>' + L.cl_bigdir + '</h3>\n' +
' <div>\n' +
' <input type="text" id="bd_lim" value="250" style="width:4em" tt="' + L.cdt_lim + '" />' +
' <input type="text" id="bd_lim" value="250" ' + NOAC + ' style="width:4em" tt="' + L.cdt_lim + '" />' +
' <a id="bd_ask" class="tgl btn" href="#" tt="' + L.cdt_ask + '">ask</a>\n' +
' </td>\n' +
' </div>\n' +
@@ -1297,7 +1305,8 @@ function set_files_html(html) {
}
var ACtx = window.AudioContext || window.webkitAudioContext,
// actx breaks background album playback on ios
var ACtx = !IPHONE && (window.AudioContext || window.webkitAudioContext),
noih = /[?&]v\b/.exec('' + location),
hash0 = location.hash,
mp;
@@ -1333,7 +1342,7 @@ var mpl = (function () {
) : '') +
'<div><h3>' + L.ml_tint + '</h3><div>' +
'<input type="text" id="pb_tint" style="width:2.4em" value="0" tt="' + L.mt_tint + '" />' +
'<input type="text" id="pb_tint" value="0" ' + NOAC + ' style="width:2.4em" tt="' + L.mt_tint + '" />' +
'</div></div>' +
'<div><h3>' + L.ml_eq + '</h3><div id="audio_eq"></div></div>');
@@ -2036,8 +2045,7 @@ var pbar = (function () {
t_redraw = setTimeout(r.drawpos, sm > 50 ? 20 : 50);
};
window.addEventListener('resize', r.onresize);
r.onresize();
onresize100.add(r.onresize, true);
return r;
})();
@@ -2099,8 +2107,7 @@ var vbar = (function () {
clearTimeout(untext);
untext = setTimeout(r.draw, 1000);
};
window.addEventListener('resize', r.onresize);
r.onresize();
onresize100.add(r.onresize, true);
var rect;
function mousedown(e) {
@@ -2125,6 +2132,11 @@ var vbar = (function () {
lastv = Date.now();
mp.setvol(mul);
r.draw();
setTimeout(function () {
if (IPHONE && mp.au && mul < 0.9 && mp.au.volume == 1)
toast.inf(6, 'volume doesnt work because <a href="https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11" target="_blank">apple says no</a>');
}, 1);
}
can.onmousedown = function (e) {
if (e.button !== 0)
@@ -2179,6 +2191,7 @@ function song_skip(n, dirskip) {
if (dirskip && ofs + 1 && ofs > mp.order.length - 2) {
toast.inf(10, L.mm_nof);
console.log("mm_nof1");
mpl.traversals = 0;
return;
}
@@ -2205,13 +2218,14 @@ function next_song_cmn(e) {
}
if (mpl.traversals++ < 5) {
if (MOBILE && t_fchg && Date.now() - t_fchg > 30 * 1000)
modal.alert(L.mm_pwrsv);
modal.alert(IPHONE ? L.mm_iosblk : L.mm_pwrsv);
t_fchg = document.hasFocus() ? 0 : Date.now();
treectl.ls_cb = next_song_cmn;
return tree_neigh(1);
}
toast.inf(10, L.mm_nof);
console.log("mm_nof2");
mpl.traversals = 0;
t_fchg = 0;
}
@@ -2361,7 +2375,7 @@ var mpui = (function () {
// cannot check document.hasFocus to avoid false positives;
// it continues on power-on, doesn't need to be in-browser
if (MOBILE && Date.now() - t_fchg > 30 * 1000)
modal.alert(L.mm_pwrsv);
modal.alert(IPHONE ? L.mm_iosblk : L.mm_pwrsv);
t_fchg = 0;
}
@@ -2713,7 +2727,7 @@ var afilt = (function () {
html.push('<td><a href="#" class="eq_step" step="0.5" band="' + b + '">+</a></td>');
h2.push('<td>' + vs[a][1] + '</td>');
h4.push('<td><a href="#" class="eq_step" step="-0.5" band="' + b + '">&ndash;</a></td>');
h3.push('<td><input type="text" class="eq_gain" band="' + b + '" value="' + vs[a][2] + '" /></td>');
h3.push('<td><input type="text" class="eq_gain" ' + NOAC + ' band="' + b + '" value="' + vs[a][2] + '" /></td>');
}
html = html.join('\n') + '</tr><tr>';
html += h2.join('\n') + '</tr><tr>';
@@ -2927,6 +2941,7 @@ function evau_error(e) {
err = e404;
toast.warn(15, esc(basenames(err + mfile)));
console.log(basenames(err + mfile));
if (em.startsWith('MEDIA_ELEMENT_ERROR:')) {
// chromish for 40x
@@ -3038,7 +3053,7 @@ function eval_hash() {
goto('search');
var i = ebi('q_raw');
i.value = uricom_dec(v.slice(3));
return i.oninput();
return i.onkeydown({ 'key': 'Enter' });
}
if (v.indexOf('#v=') === 0) {
@@ -3423,8 +3438,8 @@ var fileman = (function () {
'<a id="rn_case" class="tgl btn" href="#" tt="' + L.fr_case + '</a>',
'</div>',
'<div id="rn_vadv"><table>',
'<tr><td>regex</td><td><input type="text" id="rn_re" tt="regex search pattern to apply to original filenames; capturing groups can be referenced in the format field below like &lt;code&gt;(1)&lt;/code&gt; and &lt;code&gt;(2)&lt;/code&gt; and so on" placeholder="^[0-9]+[\\. ]+(.*) - (.*)" /></td></tr>',
'<tr><td>format</td><td><input type="text" id="rn_fmt" tt="inspired by foobar2000:$N&lt;code&gt;(title)&lt;/code&gt; is replaced by song title,$N&lt;code&gt;[(artist) - ](title)&lt;/code&gt; skips the first part if artist is blank$N&lt;code&gt;$lpad((tn),2,0)&lt;/code&gt; pads tracknumber to 2 digits" placeholder="[(artist) - ](title).(ext)" /></td></tr>',
'<tr><td>regex</td><td><input type="text" id="rn_re" ' + NOAC + ' tt="regex search pattern to apply to original filenames; capturing groups can be referenced in the format field below like &lt;code&gt;(1)&lt;/code&gt; and &lt;code&gt;(2)&lt;/code&gt; and so on" placeholder="^[0-9]+[\\. ]+(.*) - (.*)" /></td></tr>',
'<tr><td>format</td><td><input type="text" id="rn_fmt" ' + NOAC + ' tt="inspired by foobar2000:$N&lt;code&gt;(title)&lt;/code&gt; is replaced by song title,$N&lt;code&gt;[(artist) - ](title)&lt;/code&gt; skips the first part if artist is blank$N&lt;code&gt;$lpad((tn),2,0)&lt;/code&gt; pads tracknumber to 2 digits" placeholder="[(artist) - ](title).(ext)" /></td></tr>',
'<tr><td>preset</td><td><select id="rn_pre"></select>',
'<button id="rn_pdel">❌ ' + L.fr_pdel + '</button>',
'<button id="rn_pnew">💾 ' + L.fr_pnew + '</button>',
@@ -4310,14 +4325,14 @@ var thegrid = (function () {
if (ctrl(e) && !treectl.csel && !r.sel)
return true;
return gclick.bind(this)(e, false);
return gclick.call(this, e, false);
}
function gclick2(e) {
if (ctrl(e) || !r.sel)
return true;
return gclick.bind(this)(e, true);
return gclick.call(this, e, true);
}
function gclick(e, dbl) {
@@ -4332,7 +4347,7 @@ var thegrid = (function () {
tr = td.parentNode;
if ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e)))) {
td.onclick.bind(td)(e);
td.onclick.call(td, e);
if (e.shiftKey)
return r.loadsel();
clmod(this, 'sel', clgot(tr, 'sel'));
@@ -4618,8 +4633,11 @@ function tree_neigh(n) {
if (act >= links.length)
act = 0;
treectl.dir_cb = tree_scrollto;
links[act].click();
if (showfile.active())
links[act].click();
else
treectl.treego.call(links[act]);
links[act].focus();
}
@@ -4671,6 +4689,7 @@ function hkhelp() {
}
var fselgen, fselctr;
document.onkeydown = function (e) {
if (e.altKey || e.isComposing)
return;
@@ -4715,15 +4734,26 @@ document.onkeydown = function (e) {
}
if (aet == 'tr' && ae.closest('#files')) {
var d = '';
var d = '', rem = 0;
if (k == 'ArrowUp') d = 'previous';
if (k == 'ArrowDown') d = 'next';
if (k == 'PageUp') { d = 'previous'; rem = 0.6; }
if (k == 'PageDown') { d = 'next'; rem = 0.6; }
if (d) {
var el = ae[d + 'ElementSibling'];
if (el) {
fselctr = 0;
var gen = fselgen = Date.now();
if (rem)
rem *= window.innerHeight;
function selfun() {
var el = ae[d + 'ElementSibling'];
if (!el || gen != fselgen)
return;
el.focus();
var elh = el.offsetHeight;
if (ctrl(e))
document.documentElement.scrollTop += (d == 'next' ? 1 : -1) * el.offsetHeight;
document.documentElement.scrollTop += (d == 'next' ? 1 : -1) * elh;
if (e.shiftKey) {
clmod(el, 'sel', 't');
@@ -4731,8 +4761,17 @@ document.onkeydown = function (e) {
msel.selui();
}
return ev(e);
rem -= elh;
if (rem > 0) {
ae = document.activeElement;
if (++fselctr % 5 && rem > elh * (FIREFOX ? 5 : 2))
selfun();
else
setTimeout(selfun, 1);
}
}
selfun();
return ev(e);
}
if (k == 'Space') {
clmod(ae, 'sel', 't');
@@ -4913,7 +4952,7 @@ document.onkeydown = function (e) {
for (var a = 0; a < trs.length; a += 2) {
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
}
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" placeholder="( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )" /></td></tr></table>');
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" ' + NOAC + ' placeholder="( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )" /></td></tr></table>');
ebi('srch_form').innerHTML = html.join('\n');
var o = QSA('#op_search input');
@@ -4933,7 +4972,7 @@ document.onkeydown = function (e) {
search_in_progress = 0;
function ev_search_input() {
var v = this.value,
var v = unsmart(this.value),
id = this.getAttribute('id');
if (id.slice(-1) == 'v') {
@@ -4970,7 +5009,7 @@ document.onkeydown = function (e) {
if (search_in_progress)
return;
var q = ebi('q_raw').value,
var q = unsmart(ebi('q_raw').value),
vq = ebi('files').getAttribute('q_raw');
srch_msg(false, (q == vq) ? '' : L.sm_prev + (vq ? vq : '(*)'));
@@ -4982,7 +5021,7 @@ document.onkeydown = function (e) {
for (var b = 1; b < sconf[a].length; b++) {
var k = sconf[a][b][0],
chk = 'srch_' + k + 'c',
vs = ebi('srch_' + k + 'v').value,
vs = unsmart(ebi('srch_' + k + 'v').value),
tvs = [];
if (a == 1)
@@ -5075,7 +5114,7 @@ document.onkeydown = function (e) {
xhr.setRequestHeader('Content-Type', 'text/plain');
xhr.onload = xhr.onerror = xhr_search_results;
xhr.ts = Date.now();
xhr.q_raw = ebi('q_raw').value;
xhr.q_raw = unsmart(ebi('q_raw').value);
xhr.send(JSON.stringify({ "q": xhr.q_raw, "n": cap }));
}
@@ -5181,6 +5220,7 @@ document.onkeydown = function (e) {
}
})();
function aligngriditems() {
if (!treectl)
return;
@@ -5203,7 +5243,25 @@ function aligngriditems() {
ebi('ggrid').style.justifyContent = treectl.hidden ? 'center' : 'space-between';
}
}
window.addEventListener('resize', aligngriditems);
onresize100.add(aligngriditems);
var filecolwidth = (function () {
var lastwidth = -1;
return function () {
var vw = window.innerWidth / parseFloat(getComputedStyle(document.body)['font-size']),
w = Math.floor(vw - 2);
if (w == lastwidth)
return;
lastwidth = w;
document.documentElement.style.setProperty('--file-td-w', w + 'em');
}
})();
onresize100.add(filecolwidth, true);
var treectl = (function () {
var r = {
@@ -5225,6 +5283,9 @@ var treectl = (function () {
bcfg_bind(r, 'csel', 'csel', false);
bcfg_bind(r, 'dots', 'dotfiles', false, function (v) {
r.goto(get_evpath());
var xhr = new XHR();
xhr.open('GET', SR + '/?setck=dots=' + (v ? 'y' : ''), true);
xhr.send();
});
bcfg_bind(r, 'dir1st', 'dir1st', true, function (v) {
treectl.gentab(get_evpath(), treectl.lsc);
@@ -5550,7 +5611,7 @@ var treectl = (function () {
}
links[a].className = cl;
links[a].onclick = treego;
links[a].onclick = r.treego;
links[a].onmouseenter = nowrap ? menter : null;
links[a].onmouseleave = nowrap ? mleave : null;
}
@@ -5612,7 +5673,7 @@ var treectl = (function () {
return els[a].click();
}
function treego(e) {
r.treego = function (e) {
if (ctrl(e))
return true;
@@ -5749,9 +5810,15 @@ var treectl = (function () {
var nodes = res.dirs.concat(res.files),
html = mk_files_header(res.taglist),
sel = r.lsc === res ? msel.getsel() : [],
ae = document.activeElement,
cid = null,
plain = [],
seen = {};
if (ae && /^tr$/i.exec(ae.nodeName))
if (ae = ae.querySelector('a[id]'))
cid = ae.getAttribute('id');
r.lsc = res;
if (res.unlist) {
var ptn = new RegExp(res.unlist);
@@ -5848,6 +5915,12 @@ var treectl = (function () {
}
if (sel.length)
msel.loadsel(sel);
else
msel.origin_id(null);
if (cid) try {
ebi(cid).closest('tr').focus();
} catch (ex) { }
setTimeout(eval_hash, 1);
}
@@ -5965,7 +6038,8 @@ var treectl = (function () {
for (var a = 0; a < keys.length; a++) {
var kk = keys[a],
ks = kk.slice(1),
k = uricom_sdec(ks),
ded = ks.endsWith('\n'),
k = uricom_sdec(ded ? ks.replace(/\n$/, '') : ks),
hek = esc(k[0]),
uek = k[1] ? uricom_enc(k[0], true) : k[0],
url = '/' + (top ? top + uek : uek) + '/',
@@ -5978,7 +6052,7 @@ var treectl = (function () {
ret += '<li>' + link + '\n<ul>\n' + subtree + '</ul></li>\n';
}
else {
ret += '<li>' + link + '</li>\n';
ret += (ded ? '<li class="offline">' : '<li>') + link + '</li>\n';
}
}
return ret;
@@ -6602,6 +6676,8 @@ var arcfmt = (function () {
var html = [],
fmts = [
["tar", "tar", L.fz_tar],
["tgz", "tar=gz", L.fz_targz],
["txz", "tar=xz", L.fz_tarxz],
["zip", "zip=utf8", L.fz_zip8],
["zip_dos", "zip", L.fz_zipd],
["zip_crc", "zip=crc", L.fz_zipc]
@@ -6631,7 +6707,7 @@ var arcfmt = (function () {
for (var a = 0, aa = tds.length; a < aa; a++) {
var o = tds[a], txt = o.textContent, href = o.getAttribute('href');
if (txt != 'tar' && txt != 'zip')
if (!/^(zip|tar|tgz|txz)$/.exec(txt))
continue;
var ofs = href.lastIndexOf('?');
@@ -6718,7 +6794,9 @@ var msel = (function () {
};
r.loadsel = function (sel) {
r.so = r.pr = null;
if (!sel || !r.so || !ebi(r.so))
r.so = r.pr = null;
r.sel = [];
r.load();
@@ -7058,7 +7136,12 @@ function show_md(md, name, div, url, depth) {
try {
clmod(div, 'mdo', 1);
if (sandbox(div, sb_md, 'mdo', marked.parse(md, marked_opts)))
var md_html = marked.parse(md, marked_opts);
if (!have_emp)
md_html = DOMPurify.sanitize(md_html);
if (sandbox(div, sb_md, 'mdo', md_html))
return;
ext = md_plug.post;
@@ -7152,7 +7235,7 @@ function sandbox(tgt, rules, cls, html) {
'function say(m){window.parent.postMessage(m,"*")};' +
'setTimeout(function(){var its=0,pih=-1,f=function(){' +
'var ih=2+Math.min(parseInt(getComputedStyle(d).height),d.scrollHeight);' +
'if(ih!=pih){pih=ih;say("iheight #' + tid + ' "+ih,"*")}' +
'if(ih!=pih&&!isNaN(ih)){pih=ih;say("iheight #' + tid + ' "+ih,"*")}' +
'if(++its<20)return setTimeout(f,20);if(its==20)setInterval(f,200)' +
'};f();' +
'window.onfocus=function(){say("igot #' + tid + '")};' +
@@ -7408,8 +7491,16 @@ function goto_unpost(e) {
}
function wintitle(txt) {
document.title = (txt ? txt : '') + get_vpath().slice(1, -1).split('/').pop();
function wintitle(txt, noname) {
if (txt === undefined)
txt = '';
if (s_name && !noname)
txt = s_name + ' ' + txt;
txt += get_vpath().slice(1, -1).split('/').pop();
document.title = txt;
}

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
</head>

View File

@@ -212,6 +212,8 @@ function convert_markdown(md_text, dest_dom) {
try {
var md_html = marked.parse(md_text, marked_opts);
if (!have_emp)
md_html = DOMPurify.sanitize(md_html);
}
catch (ex) {
if (ext)

View File

@@ -278,6 +278,7 @@ function Modpoll() {
return;
var new_md = this.responseText,
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
server_ref = server_md.replace(/\r/g, ''),
server_now = new_md.replace(/\r/g, '');
@@ -285,6 +286,7 @@ function Modpoll() {
if (r.initial && server_ref != server_now)
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
dom_src.value = server_md = new_md;
last_modified = new_mt;
draw_md();
}, null);

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ args.doctitle }} @ {{ args.name }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -4,6 +4,8 @@
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
}
html {
text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
touch-action: manipulation;
}
#tt, #toast {

View File

@@ -723,7 +723,7 @@ function Donut(uc, st) {
function strobe() {
var txt = strobes.pop();
wintitle(txt);
wintitle(txt, false);
if (!txt)
clearInterval(tstrober);
}
@@ -971,7 +971,7 @@ function up2k_init(subtle) {
if (++nenters <= 0)
nenters = 1;
if (onover.bind(this)(e))
if (onover.call(this, e))
return true;
var mup, up = QS('#up_zd');
@@ -995,16 +995,29 @@ function up2k_init(subtle) {
function onoverb(e) {
// zones are alive; disable cuo2duo branch
document.body.ondragover = document.body.ondrop = null;
return onover.bind(this)(e);
return onover.call(this, e);
}
function onover(e) {
return onovercmn(this, e, false);
}
function onoverbtn(e) {
return onovercmn(this, e, true);
}
function onovercmn(self, e, btn) {
try {
var ok = false, dt = e.dataTransfer.types;
for (var a = 0; a < dt.length; a++)
if (dt[a] == 'Files')
ok = true;
else if (dt[a] == 'text/uri-list')
return true;
else if (dt[a] == 'text/uri-list') {
if (btn) {
ok = true;
if (toast.txt == L.u_uri)
toast.hide();
}
else
return toast.inf(10, L.u_uri) || true;
}
if (!ok)
return true;
@@ -1020,8 +1033,11 @@ function up2k_init(subtle) {
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
return modal.alert('your browser does not support drag-and-drop uploading');
}
if (btn)
return;
clmod(ebi('drops'), 'vis', 1);
var v = this.getAttribute('v');
var v = self.getAttribute('v');
if (v)
clmod(ebi(v), 'hl', 1);
}
@@ -1045,6 +1061,8 @@ function up2k_init(subtle) {
document.body.ondragleave = offdrag;
document.body.ondragover = onover;
document.body.ondrop = gotfile;
ebi('u2btn').ondrop = gotfile;
ebi('u2btn').ondragover = onoverbtn;
var drops = [ebi('up_dz'), ebi('srch_dz')];
for (var a = 0; a < 2; a++) {
@@ -1088,7 +1106,7 @@ function up2k_init(subtle) {
function gotfile(e) {
ev(e);
nenters = 0;
offdrag.bind(this)();
offdrag.call(this);
var dz = this && this.getAttribute('id');
if (!dz && e && e.clientY)
// cuo2duo fallback
@@ -1132,7 +1150,7 @@ function up2k_init(subtle) {
dst = good_files;
if (is_itemlist) {
if (fobj.kind !== 'file')
if (fobj.kind !== 'file' && fobj.type !== 'text/uri-list')
continue;
try {
@@ -1144,6 +1162,8 @@ function up2k_init(subtle) {
}
catch (ex) { }
fobj = fobj.getAsFile();
if (!fobj)
continue;
}
try {
if (fobj.size < 1)
@@ -2610,8 +2630,7 @@ function up2k_init(subtle) {
}
}
}
window.addEventListener('resize', onresize);
onresize();
onresize100.add(onresize, true);
if (MOBILE) {
// android-chrome wobbles for a bit; firefox / iOS-safari are OK
@@ -2679,6 +2698,11 @@ function up2k_init(subtle) {
}
function draw_turbo() {
if (turbolvl < 0 && uc.turbo) {
bcfg_set('u2turbo', uc.turbo = false);
toast.err(10, "turbo is disabled in server config");
}
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
html = ebi('u2foot').innerHTML;

View File

@@ -7,6 +7,7 @@ if (!window.console || !console.log)
var wah = '',
NOAC = 'autocorrect="off" autocapitalize="off"',
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
CB = '?_=' + Date.now(),
R = SR.slice(1),
@@ -368,6 +369,15 @@ function import_js(url, cb) {
}
function unsmart(txt) {
return !IPHONE ? txt : (txt.
replace(/[\u2014]/g, "--").
replace(/[\u2022]/g, "*").
replace(/[\u2018\u2019]/g, "'").
replace(/[\u201c\u201d]/g, '"'));
}
var crctab = (function () {
var c, tab = [];
for (var n = 0; n < 256; n++) {
@@ -1051,10 +1061,73 @@ function cliptxt(txt, ok) {
}
function Debounce(delay) {
var r = this;
r.delay = delay;
r.timer = 0;
r.t_hit = 0;
r.t_run = 0;
r.q = [];
r.add = function (fun, run) {
r.rm(fun);
r.q.push(fun);
if (run)
fun();
};
r.rm = function (fun) {
apop(r.q, fun);
};
r.run = function () {
if (crashed)
return;
r.t_run = Date.now();
var q = r.q.slice(0);
for (var a = 0; a < q.length; a++)
q[a]();
};
r.hit = function () {
if (crashed)
return;
var now = Date.now(),
td_hit = now - r.t_hit,
td_run = now - r.t_run;
if (td_run >= r.delay * 2)
r.t_run = now;
if (td_run >= r.delay && td_run <= r.delay * 2) {
// r.delay is also deadline
clearTimeout(r.timer);
return r.run();
}
if (td_hit < r.delay / 5)
return;
clearTimeout(r.timer);
r.timer = setTimeout(r.run, r.delay);
r.t_hit = now;
};
};
var onresize100 = new Debounce(100);
window.addEventListener('resize', onresize100.hit);
var timer = (function () {
var r = {};
r.q = [];
r.last = 0;
r.fs = 0;
r.fc = 0;
r.add = function (fun, run) {
r.rm(fun);
@@ -1080,6 +1153,7 @@ var timer = (function () {
q[a]();
r.last = Date.now();
//r.fc++; if (r.last - r.fs >= 2000) { console.log(r.last - r.fs, r.fc); r.fs = r.last; r.fc = 0; }
}
setInterval(doevents, 100);
@@ -1104,7 +1178,7 @@ var tt = (function () {
var prev = null;
r.cshow = function () {
if (this !== prev)
r.show.bind(this)();
r.show.call(this);
prev = this;
};
@@ -1116,7 +1190,7 @@ var tt = (function () {
return;
if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
return r.show.call(this);
tev = setTimeout(r.show.bind(this), 800);
if (TOUCH)
@@ -1274,6 +1348,9 @@ var toast = (function () {
r.visible = false;
r.txt = null;
r.tag = obj; // filler value (null is scary)
r.p_txt = '';
r.p_sec = 0;
r.p_t = 0;
function scrollchk() {
if (scrolling)
@@ -1306,10 +1383,23 @@ var toast = (function () {
};
r.show = function (cl, sec, txt, tag) {
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
delta = Date.now() - r.p_t;
if (same && delta < 100)
return;
r.p_txt = txt;
r.p_sec = sec;
r.p_t = Date.now();
clearTimeout(te);
if (sec)
te = setTimeout(r.hide, sec * 1000);
if (same && delta < 1000)
return;
if (txt.indexOf('<body>') + 1)
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
@@ -1488,7 +1578,7 @@ var modal = (function () {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
r.show(html);
ebi('modali').value = v || '';
@@ -1520,7 +1610,7 @@ function repl_load() {
ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
'console.hist.slice(-10).join("\\n")'
'console.hist.slice(-50).join("\\n")'
];
ipre.innerHTML = '<option value=""></option>';
@@ -1576,6 +1666,8 @@ function repl(e) {
if (!cmd)
return toast.inf(3, 'eval aborted');
cmd = unsmart(cmd);
if (cmd.startsWith(',')) {
evalex_fatal = true;
return modal.alert(esc(eval(cmd.slice(1)) + ''));

View File

@@ -1,3 +1,140 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0831-2211 `v1.9.3` iOS and http fixes
## new features
* iPhones and iPads are now able to...
* 9986136dfb2364edb35aa9fbb87410641c6d6af3 play entire albums while the screen is off without the music randomly stopping
* apple keeps breaking AudioContext in new and interesting ways; time to give up (no more equalizer)
* 1c0d978979a703edeb792e552b18d3b7695b2d90 perform search queries and execute js code
* by translating [smart-quotes](https://stackoverflow.com/questions/48678359/ios-11-safari-html-disable-smart-punctuation) into regular `'` and `"` characters
* python 3.12 support
* technically a bugfix since it was added [a year ago](https://github.com/9001/copyparty/commit/32e22dfe84d5e0b13914b4d0e15c1b8c9725a76d) way before the first py3.12 alpha was released, but turns out i botched it, oh well
* filter error messages so they never include the filesystem path where copyparty's python files reside
* print more context in server logs if someone hits an unexpected permission-denied
## bugfixes
found some iffy stuff while combing over the code, but as far as I can tell, luckily none of it was dangerous:
* URL normalization was a bit funky, but it appears everything access-control-related was unaffected
* some url parameters were double-decoded, causing the unpost filtering and file renaming to fail if the values contained `%`
* clients could cause the server to return an invalid cache-control header, but newlines and control-characters got rejected correctly
* minor cosmetics / qol fixes:
* reduced flickering on page load in chrome
* fixed some console spam in search results
* markdown documents now have the same line-height in directory listings and the editor
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0826-2116 `v1.9.2` bigger hammer
## new features
* more ways to automatically ban users! three new sensors, all default-enabled, giving a 1 day ban after 9 hits in 2 minutes:
* `--ban-403`: trying to access volumes that don't exist or require authentication
* `--ban-422`: invalid POST messages (from bruteforcing POST parameters and such)
* `--ban-url`: URLs which 404 and also match `--sus-urls` (scanners/crawlers)
* if you want to run a vulnerability scan on copyparty, please just [download the server](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and do it locally! takes less than 30 seconds to set up, you get lower latency, and you won't be filling up the logfiles on the demo server with junk, thank you 🙏
* more ban-related stuff,
* new global option `--nonsus-urls` specifies regex of URLs which are OK to 404 and shouldn't ban people
* `--turbo` now accepts the value `-1` which makes it impossible for clients to enable it, making `--ban-404` safe to use (see the sketch after this list)
* range-selecting files in the list-view by shift-pgup/pgdn
* volumes which are currently unavailable (dead nfs share, external HDD which is off, ...) are marked with a ❌ in the directory tree sidebar
* the toggle-button to see dotfiles is now persisted as a cookie so it also applies on the initial page load
* more effort is made to prevent `<script>`s inside markdown documents from running in the markdown editor and the fullpage viewer
* anyone who wanted to use markdown files for malicious stuff can still just upload an html file instead, so this doesn't make anything more secure, just less confusing
* the safest approach is still the `nohtml` volflag which disables markdown rendering outside sandboxes entirely, or only giving out write-access to trustworthy people
* enabling markdown plugins with `-emp` now has the side-effect of cancelling this band-aid too
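
a rough sketch of how the ban-related options at the top of this list might be combined on the command line; the hit/window/ban value format is an assumption inferred from the defaults above, and only the `=no` opt-out form appears verbatim in this changeset (in the test config further down):

```bash
# illustrative only; 9,2,1440 is an assumed hits,window-minutes,ban-minutes
# encoding of the described default ("1 day ban after 9 hits in 2 minutes")
python3 copyparty-sfx.py --ban-403=no                    # opt out of one of the new sensors
python3 copyparty-sfx.py --turbo=-1 --ban-404=9,2,1440   # lock turbo off so --ban-404 is safe
```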
## bugfixes
* textfile navigation hotkeys broke in the previous version
## other changes
* example [nginx config](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nginx/copyparty.conf) was not compatible with cloudflare (suggest `$http_cf_connecting_ip` instead of `$proxy_add_x_forwarded_for`)
* `copyparty.exe` is now built with python 3.11.5 which fixes [CVE-2023-40217](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40217)
* `copyparty32.exe` is not, because python understandably ended win7 support
* [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md):
* copyparty appears to be 30x faster than nextcloud and seafile at receiving uploads of many small files
* seafile has a size limit when zip-downloading folders
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0820-2338 `v1.9.1` prometheable
## new features
* #49 prometheus / grafana / openmetrics integration ([see readme](https://github.com/9001/copyparty#prometheus))
* read metrics from http://127.0.0.1:3923/.cpr/metrics after enabling with `--stats`
* download a folder with all music transcoded to opus by adding `?tar=opus` or `?zip&opus` to the URL
* can also be used to download thumbnails instead of full images; `?tar=w` for webp, `?tar=j` for jpg
* so i guess the long-requested feature of pre-generating thumbnails kind of happened after all, if you schedule a `curl http://127.0.0.1:3923/?tar=w >/dev/null` after server startup (see also the curl sketch after this list)
* u2c (commandline uploader): argument `-x` to exclude files by regex (compares absolute filesystem paths)
* `--zm-spam 30` can be used to improve zeroconf / mDNS reliability on crazy networks
* only necessary if there are clients with multiple IPs and some of the IPs are outside the subnets that copyparty is in -- not spec-compliant, not really recommended, but shouldn't cause any issues either
* and `--mc-hop` wasn't actually implemented until now
* dragging an image from another browser window onto the upload button is now possible
* only works on chrome, and only on windows or linux (not macos)
* server hostname is prefixed in all window titles
* can be adjusted with `--bname` (the file explorer) and `--doctitle` (all other documents)
* can be disabled with `--nth` (just window title) or `--nih` (title + header)
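
a hedged curl sketch of the `--stats` metrics endpoint and the `?tar=` parameters above; the `/music/album/` path is a made-up placeholder:

```bash
# host/port taken from the examples above; folder paths are placeholders
curl http://127.0.0.1:3923/.cpr/metrics                           # openmetrics (after enabling --stats)
curl 'http://127.0.0.1:3923/music/album/?tar=opus' -o album.tar   # folder as tar, audio transcoded to opus
curl 'http://127.0.0.1:3923/?tar=w' >/dev/null                    # pre-generate webp thumbnails
```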
## bugfixes
* docker: the autogenerated seeds for filekeys and account passwords now get persisted to the config volume (thx noktuas)
* uploading files with fancy filenames could fail if the copyparty server is running on android
* improve workarounds for some apple/iphone/ios jank (thx noktuas and spiky)
* some ui elements had their font-size selected by fair dice roll
* the volume control does nothing because [apple disabled it](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11), so a warning is now shown
* the image gallery cannot be fullscreened [as apple intended](https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#browser_compatibility), so a warning is now shown
## other changes
* file table columns are now limited to browser window width
* readme: mention that nginx-QUIC is currently very slow (thx noktuas)
* #50 add a safeguard to the wget plugin in case wget at some point adds support for `file://` or similar
* show a suggestion on startup to enable the database
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0725-1550 `v1.8.8` just boring bugfixes
final release until late august unless something bad happens and i end up building this thing on a shinkansen
## recent security / vulnerability fixes
* there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates
* [v1.8.7](https://github.com/9001/copyparty/releases/tag/v1.8.7) (2023-07-23) - [CVE-2023-38501](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-38501) - reflected XSS
* [v1.8.2](https://github.com/9001/copyparty/releases/tag/v1.8.2) (2023-07-14) - [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474) - path traversal (first CVE)
* all serverlogs reviewed so far (5 public servers) showed no signs of exploitation
## bugfixes
* range-select with shiftclick:
* don't crash when entering another folder and shift-clicking some more
* remember selection origin when lazy-loading more stuff into the viewport
* markdown editor:
* fix confusing warnings when the browser cache decides it *really* wants to cache
* and when a document starts with a newline
* remember intended actions such as `?edit` on login prompts
* Windows: TLS-cert generation (triggered by network changes) could occasionally fail
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0723-1543 `v1.8.7` XSS for days
for lack of better ideas, there is now a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all future important updates such as this one
## bugfixes
* reflected XSS through `/?k304` and `/?setck`
* if someone tricked you into clicking a URL containing a chain of `%0d` and `%0a` they could potentially have moved/deleted existing files on the server, or uploaded new files, using your account
* if you use a reverse proxy, you can check if you have been exploited like so:
* nginx: grep your logs for URLs containing `%0d%0a%0d%0a`, for example using the following command:
```bash
(gzip -dc access.log*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -iE '%0[da]%0[da]%0[da]%0[da]'
```
* if you find any traces of exploitation (or just want to be on the safe side) it's recommended to change the passwords of your copyparty accounts
* huge thanks *again* to @TheHackyDog !
* the original fix for CVE-2023-37474 broke the download links for u2c.py and partyfuse.py
* fix mediaplayer spinlock if the server only has a single audio file
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0721-0036 `v1.8.6` fix reflected XSS

View File

@@ -148,7 +148,7 @@ symbol legend,
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| download folder as zip | █ | █ | █ | █ | | | █ | | █ | █ | | █ |
| download folder as zip | █ | █ | █ | █ | | | █ | | █ | █ | | █ |
| download folder as tar | █ | | | | | | | | | █ | | |
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ | |
@@ -183,6 +183,7 @@ symbol legend,
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `` means the software can do this with some help from `rclone mount` as a bridge
* `a`/copyparty can reject uploaded files (based on complex conditions), for example [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `e`/seafile download-as-zip is not streaming; it creates the full zipfile before download can start, and fails on big folders
* `j`/filebrowser remarks:
* can provide checksums for single files on request
* can probably do extension/mimetype rejection similar to copyparty
@@ -432,6 +433,7 @@ symbol legend,
* not that bad, can probably be remedied with bindmounts or maybe symlinks
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ uploading small files is slow; `2.2` files per sec (copyparty does `87`/sec), tested locally with [linuxserver/nextcloud](https://hub.docker.com/r/linuxserver/nextcloud) (sqlite)
* ⚠️ no write-only / upload-only folders
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
@@ -451,7 +453,9 @@ symbol legend,
* *much worse than nextcloud* in that regard
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ uploading small files is slow; `2.7` files per sec (copyparty does `87`/sec), tested locally with [official container](https://manual.seafile.com/docker/deploy_seafile_with_docker/)
* ⚠️ no write-only / upload-only folders
* ⚠️ big folders cannot be zip-downloaded
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads

View File

@@ -3,6 +3,7 @@ WORKDIR /z
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
ver_hashwasm=4.9.0 \
ver_marked=4.3.0 \
ver_dompf=3.0.5 \
ver_mde=2.18.0 \
ver_codemirror=5.65.12 \
ver_fontawesome=5.13.0 \
@@ -13,6 +14,7 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
# https://github.com/markedjs/marked/releases
# https://github.com/Ionaru/easy-markdown-editor/tags
# https://github.com/codemirror/codemirror5/releases
# https://github.com/cure53/DOMPurify/releases
# https://github.com/Daninet/hash-wasm/releases
# https://github.com/openpgpjs/asmcrypto.js
# https://github.com/google/zopfli/tags
@@ -27,6 +29,7 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
&& wget https://github.com/codemirror/codemirror5/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
&& wget https://github.com/cure53/DOMPurify/archive/refs/tags/$ver_dompf.tar.gz -O dompurify.tgz \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
@@ -48,6 +51,7 @@ RUN mkdir -p /z/dist/no-pk \
&& cd easy-markdown-editor* \
&& npm install \
&& npm i gulp-cli -g ) \
&& tar -xf dompurify.tgz \
&& tar -xf prism.tgz \
&& unzip fontawesome.zip \
&& tar -xf zopfli.tgz
@@ -120,6 +124,10 @@ RUN cd easy-markdown-editor-$ver_mde \
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
# build dompurify
RUN (echo; cat DOMPurify-$ver_dompf/dist/purify.min.js) >> /z/dist/marked.js
# build fontawesome and scp
COPY mini-fa.sh /z
COPY mini-fa.css /z

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-ac" \
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
wget \
@@ -19,4 +20,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-dj" \
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
COPY i/bin/mtag/install-deps.sh ./
COPY i/bin/mtag/audio-bpm.py /mtag/
@@ -35,4 +36,4 @@ RUN apk add -U !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-im" \
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
wget \
@@ -18,4 +19,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-iv" \
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk add -U !pyc \
wget \
@@ -25,4 +26,4 @@ RUN apk add -U !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min" \
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
python3 \
@@ -17,4 +18,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min-pip" \
org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add python3 py3-pip !pyc \
&& python3 -m pip install copyparty \
@@ -17,4 +18,4 @@ RUN apk --no-cache add python3 py3-pip !pyc \
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "-m", "copyparty", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]

View File

@@ -15,6 +15,7 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
* `copyparty/ac` is the recommended [image edition](#editions)
* you can download the image from github instead by replacing `copyparty/ac` with `ghcr.io/9001/copyparty-ac`
* if you are using rootless podman, remove `-u 1000`
* if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`)
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
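
for example, a sketch combining the rootless-podman and selinux notes above; same port and volumes as the `docker run` line, pulling the ghcr.io mirror, otherwise unverified:

```bash
# rootless podman: drop -u 1000; selinux: append :z to every -v
podman run --rm -it -p 3923:3923 \
  -v /mnt/nas:/w:z \
  -v "$PWD/cfgdir":/cfg:z \
  ghcr.io/9001/copyparty-ac
```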

View File

@@ -44,7 +44,7 @@ ckgh() {
curl -s https://api.github.com/repos/$dep/releases | tee h |
jq -r 'first|.assets|.[]|.name'
)
[ -z "$k" ] && echo "outdated: $dep" && cp h "ng-$dep" e=1
[ -z "$k" ] && echo "outdated: $dep" && cp h "ng-$dep" && e=1
done
true
}

View File

@@ -1,12 +1,12 @@
d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
2410f79f25b55829169fdd45611c04f51932f7701c0601df64ade0eb545c96ba950b7be186eb082482506bc689fcde5fe09c1f6f7cd77c2107028959b7e0d06f pyinstaller-5.12.0-py3-none-win32.whl
62f4f3dda0526ea88cfc5af1806c7b53094672f4237d64c088626c226ad2fbc7549f6c9c6bbe5b228b1f87faf1e5c343ec468c485e4c17fe6d79c6b1f570153a pyinstaller-5.12.0-py3-none-win_amd64.whl
2612c263f73a02eab41404ba96e0c7cf8be4475104668b47dfbae50fadf977b3621dd4102682b301264d82b6e130d95ea84a28bf2106a626a1a2845dac16df47 pyinstaller_hooks_contrib-2023.3-py2.py3-none-any.whl
132a5380f33a245f2e744413a0e1090bc42b7356376de5121397cec5976b04b79f7c9ebe28af222c9c7b01461f7d7920810d220e337694727e0d7cd9e91fa667 pywin32_ctypes-0.2.0-py2.py3-none-any.whl
f298e34356b5590dde7477d7b3a88ad39c622a2bcf3fcd7c53870ce8384dd510f690af81b8f42e121a22d3968a767d2e07595036b2ed7049c8ef4d112bcf3a61 pyinstaller-5.13.2-py3-none-win32.whl
ea73aa54cc6d5db20dfb127e54562dabf890e4cd6171a91b10a51af2bcfc76e1d64cbdce4546df2dcfe42b624724c85b1cd05934be2413425b1f880222727b4f pyinstaller-5.13.2-py3-none-win_amd64.whl
2f4e3927a38cf7757bc9a1c06370d79209669a285a80f1b09cf9917137825c7022a50a56b351807e6e687e2c3a7bd7b2c5cc6daeb4d90e11920284c1a04a1cc3 pyinstaller_hooks_contrib-2023.8-py2.py3-none-any.whl
749a473646c6d4c7939989649733d4c7699fd1c359c27046bf5bc9c070d1a4b8b986bbc65f60d7da725baf16dbfdd75a4c2f5bb8335f2cb5685073f5fee5c2d1 pywin32_ctypes-0.2.2-py3-none-any.whl
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
4b6e9ae967a769fe32be8cf0bc0d5a213b138d1e0344e97656d08a3d15578d81c06c45b334c872009db2db8f39db0c77c94ff6c35168d5e13801917667c08678 upx-4.0.2-win32.zip
8d16a967a0a7872a7575b1005cf66915deacda6ee8611fbb52f42fc3e3beb2f901a5140c942a5d146bd412b92bfa9cbadd82beeba83df6d70930c6dc26608a5b upx-4.1.0-win32.zip
# u2c (win7)
a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl
2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl
@@ -27,4 +27,4 @@ ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a250675
7f8f4daa4f4f2dbf24cdd534b2952ee3fba6334eb42b37465ccda3aa1cccc3d6204aa6bfffb8a83bf42ec59c702b5b5247d4c8ee0d4df906334ae53072ef8c4c MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
4a20aeb52d4fde6aabcba05ee261595eeb5482c72ee27332690f34dd6e7a49c0b3ba3813202ac15c9d21e29f1cd803f2e79ccc1c45ec314fcd0a937016bcbc56 mutagen-1.46.0-py3-none-any.whl
926d408a886059a75cf12706fa061146f9f042b27fb6e65be7d49f398ed23fb0227639d84804586ac014c6bcf7d08cd86a09c1a20793d341aa0802d3d32a546b Pillow-10.0.0-cp311-cp311-win_amd64.whl
a48ee8992eee60a0d620dced71b9f96596f5dd510e3024015aca55884cdb3f9e2405734bfc13f3f40b79106a77bc442cce02ac4c8f5d16207448052b368fd52a python-3.11.4-amd64.exe
c86bbeacad3ae3c7bde747f5b4f09c11eced841add14e79ec4a064e5e29ebca35460e543ba735b11bfb882837d5ff4371ce64492d28d096b4686233c9a8cda6d python-3.11.5-amd64.exe

View File

@@ -17,10 +17,10 @@ uname -s | grep NT-10 && w10=1 || {
fns=(
altgraph-0.17.3-py2.py3-none-any.whl
pefile-2023.2.7-py3-none-any.whl
pyinstaller-5.10.1-py3-none-win_amd64.whl
pyinstaller_hooks_contrib-2023.2-py2.py3-none-any.whl
pywin32_ctypes-0.2.0-py2.py3-none-any.whl
upx-4.0.2-win32.zip
pyinstaller-5.13.1-py3-none-win_amd64.whl
pyinstaller_hooks_contrib-2023.7-py2.py3-none-any.whl
pywin32_ctypes-0.2.2-py3-none-any.whl
upx-4.1.0-win32.zip
)
[ $w10 ] && fns+=(
mutagen-1.46.0-py3-none-any.whl
@@ -43,12 +43,11 @@ fns=(
)
[ $w7x64 ] && fns+=(
windows6.1-kb2533623-x64.msu
pyinstaller-5.10.1-py3-none-win_amd64.whl
python-3.7.9-amd64.exe
)
[ $w7x32 ] && fns+=(
windows6.1-kb2533623-x86.msu
pyinstaller-5.10.1-py3-none-win32.whl
pyinstaller-5.13.1-py3-none-win32.whl
python-3.7.9.exe
)
dl() { curl -fkLOC- "$1" && return 0; echo "$1"; return 1; }

View File

@@ -21,6 +21,7 @@ copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mdns.py,
copyparty/metrics.py,
copyparty/mtag.py,
copyparty/multicast.py,
copyparty/pwhash.py,

View File

@@ -100,6 +100,7 @@ def tc1(vflags):
"-p4321",
"-e2dsa",
"-e2tsr",
"--ban-403=no",
"--dbd=yolo",
"--no-mutagen",
"--th-ff-jpg",

View File

@@ -12,7 +12,7 @@ import tempfile
import unittest
from tests import util as tu
from tests.util import Cfg
from tests.util import Cfg, eprint
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
@@ -93,7 +93,7 @@ class TestHttpCli(unittest.TestCase):
res = "ok " + fp in ret
print("[{}] {} {} = {}".format(fp, rok, wok, res))
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, furl))
eprint("\033[33m{}\n# {}\033[0m".format(ret, furl))
self.fail()
# file browser: html
@@ -101,7 +101,7 @@ class TestHttpCli(unittest.TestCase):
res = "'{}'".format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, durl))
eprint("\033[33m{}\n# {}\033[0m".format(ret, durl))
self.fail()
# file browser: json
@@ -110,7 +110,7 @@ class TestHttpCli(unittest.TestCase):
res = '"{}"'.format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, url))
eprint("\033[33m{}\n# {}\033[0m".format(ret, url))
self.fail()
# tar
@@ -132,7 +132,9 @@ class TestHttpCli(unittest.TestCase):
if durl.split("/")[-1] in self.can_read:
ref = [x for x in vfiles if self.in_dive(top + "/" + durl, x)]
for f in ref:
print("{}: {}".format("ok" if f in tar_ok else "NG", f))
ok = f in tar_ok
pr = print if ok else eprint
pr("{}: {}".format("ok" if ok else "NG", f))
ref.sort()
tar_ok.sort()
self.assertEqual(ref, tar_ok)

View File

@@ -1,3 +1,7 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import re
import sys
@@ -23,6 +27,12 @@ def nah(*a, **ka):
return False
def eprint(*a, **ka):
ka["file"] = sys.stderr
print(*a, **ka)
sys.stderr.flush()
if MACOS:
import posixpath
@@ -114,7 +124,7 @@ class Cfg(Namespace):
ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
ka.update(**{k: 0 for k in ex.split()})
ex = "ah_alg doctitle favico html_head lg_sbf log_fk md_sbf mth name textfiles unlist R RS SR"
ex = "ah_alg bname doctitle favico html_head lg_sbf log_fk md_sbf mth name textfiles unlist vname R RS SR"
ka.update(**{k: "" for k in ex.split()})
ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
@@ -179,6 +189,8 @@ class VHttpSrv(object):
self.gpwd = Garda("")
self.g404 = Garda("")
self.g403 = Garda("")
self.gurl = Garda("")
self.ptn_cc = re.compile(r"[\x00-\x1f]")