Compare commits

...

504 Commits

Author SHA1 Message Date
ed
ac40dccc8f v1.9.12 2023-10-15 20:06:46 +00:00
ed
9ca8154651 prefer the new TTF in pillow 10.1 + pyinstaller 6.1 fixes 2023-10-15 18:47:34 +00:00
ed
db668ba491 spectrograms are never cropped; share thumbcache 2023-10-15 11:42:57 +00:00
ed
edbafd94c2 avoid iphone jank:
safari can immediately popstate when alt-tabbing back to the browser,
causing the page to load twice in parallel:

2174 log-capture ok
2295 h-repl $location
2498 h-pop $location <==
2551 sha-ok  # from initial load
2023-10-15 11:27:27 +00:00
ed
2df76eb6e1 client decides if thumbnails should be cropped or not
this carries some intentional side-effects; each thumbnail format will
now be stored in its own subfolder under .hist/th/ making cleanup more
effective (jpeg and webm are dropped separately)
2023-10-15 10:21:25 +00:00
ed
9b77c9ce7d more intuitive upload/filesearch toggle:
restore preferred mode after leaving a restricted folder
2023-10-15 09:00:57 +00:00
ed
dc2b67f155 ui-button to use upload-time instead of local last-modified 2023-10-15 08:46:23 +00:00
ed
9f32e9e11d set default sort order; --sort or volflag "sort" 2023-10-14 22:17:37 +00:00
ed
7086d2a305 ie9 support 2023-10-14 10:01:03 +00:00
ed
575615ca2d slight refactor; 7% faster, 1% more maintainable 2023-10-14 09:54:49 +00:00
kipukun ;_
c0da4b09bf contrib: bump python version in rc script
the default version of Python is now 3.9 as of FreeBSD 13.2-RELEASE
2023-10-13 10:15:27 +02:00
ed
22880ccc9a update pkgs to 1.9.11 2023-10-09 00:51:41 +00:00
ed
e4001550c1 v1.9.11 2023-10-09 00:36:54 +00:00
ed
e9f65be86a add cachebuster for dynamically loaded js files 2023-10-09 00:22:16 +00:00
ed
3b9919a486 update pkgs to 1.9.10 2023-10-08 21:16:12 +00:00
ed
acc363133f v1.9.10 2023-10-08 20:51:49 +00:00
ed
8f2d502d4d configurable printing of failed login attempts 2023-10-08 20:41:02 +00:00
ed
2ae93ad715 clear response headers for each request 2023-10-08 20:38:51 +00:00
ed
bb590e364a update pkgs to 1.9.9 2023-10-07 22:49:12 +00:00
ed
e7fff77735 v1.9.9 2023-10-07 22:29:37 +00:00
ed
753e3cfbaf revert 68c6794d (v1.6.2) and fix it better:
moving deduplicated files between volumes could drop some links
2023-10-07 22:25:44 +00:00
ed
99e9cba1f7 update pkgs to 1.9.8 2023-10-06 18:22:01 +00:00
ed
fcc3336760 v1.9.8 2023-10-06 17:50:35 +00:00
ed
0dc3c23b42 add alternative filekey generator; closes #52 2023-10-06 13:41:22 +00:00
ed
6aa10ecedc mention streaming unzip with bsdtar 2023-10-02 07:40:40 +02:00
ed
93125bba4d update pkgs to 1.9.7 2023-09-30 23:56:35 +00:00
ed
fae5a36e6f v1.9.7 2023-09-30 23:32:51 +00:00
ed
fc9b729fc2 fix #51:
* handle unexpected localstorage values
* handle unsupported --lang values
2023-09-30 22:54:21 +00:00
ed
8620ae5bb7 fix column-hiding ux on phones:
table header click-handler didn't cover the entire cell so it was
easy to sort the table by accident; also do not exit hiding mode
automatically since you usually want to hide several columns
(so also adjust css to make it obvious you're in hiding mode)
2023-09-28 09:28:26 +02:00
ed
01a851da28 mtp-deps: fix building on archlinux 2023-09-24 23:17:26 +00:00
ed
309895d39d docker: exploring alternative base images for performance 2023-09-24 22:26:51 +00:00
ed
7ac0803ded update pkgs to 1.9.6 2023-09-23 12:56:47 +00:00
ed
cae5ccea62 v1.9.6 2023-09-23 12:15:24 +00:00
ed
3768cb4723 add chat 2023-09-23 11:34:32 +00:00
ed
0815dce4c1 ensure indexing runs with --ign-ebind-all 2023-09-22 23:20:57 +00:00
ed
a62f744a18 prevent losing an out-of-volume index
if the server is started while an external drive is not mounted,
it would drop the database because all the files are missing
2023-09-22 23:05:07 +00:00
ed
163e3fce46 improve reverse-proxy support when containerized:
the x-forwarded-for header would get rejected since the reverse-proxy
is not asking from 127.0.0.1 or ::1, so make this allowlist configurable
2023-09-22 22:39:20 +00:00
ed
e76a50cb9d add indexer benchmark + bump default num cores from 4 to 5
and make the mtag deps build better on fedora
2023-09-22 20:40:52 +00:00
ed
72fc76ef48 golf / normalize window.location 2023-09-20 22:07:40 +00:00
ed
c47047c30d configurable real-ip header from reverse proxy 2023-09-20 21:56:39 +00:00
ed
3b8f66c0d5 fix a client crash when uploading from glitchy net
prevent reattempting chunks / handshakes after an upload has completed
since that is both pointless and crashy

bugreport ocr'ed from deepfried pic (thx kipu):
stack: exec_handshake -> xhr.onload -> tasked -> exec_upload -> do_send

529226 crash: t.fobj is null; firefox 117, win64
529083 zombie handshake onerror, some.flac
529081 chunkpit onerror,, 1, another.flac
528933 retrying stuck handshake
498842 ^
464213 zombie handshake onload, some.flac
464208 ^
462858 ignoring dupe-segment error, some.flac
462766 ^
462751 ^
462667 ^
462403 ^
462316 ^
461321 zombie handshake onload, some.flac
461302 ^
461152 ^
461114 ^
461110 ^
460769 ^
459954 ^
459492 ignoring dupe-segment error, some.flac
2023-09-20 21:25:59 +00:00
ed
aa96a1acdc misc optimizations / cleanup:
* slightly faster startup / shutdown
* forgot a jinja2 golf
* waste 4KiB changing prismjs back to gz since brotli is https-gated ;_;
* broke support for firefox<52 (non-var functions must be toplevel
   or immediately within another function), now even firefox 10 /
   centos 6 is somewhat supported again
2023-09-17 13:02:18 +00:00
ed
91cafc2511 faster startup on windows by asking for ffmpeg.exe explicitly
rather than just "ffmpeg" which makes windows try to open each of
ffmpeg.BAT,CMD,COM,EXE,JS,JSE,MSC,VBE,VBS,WSF,WSH one by one
(ffmpeg.js? hello??)
2023-09-13 23:32:19 +00:00
ed
23ca00bba8 support jython and graalpy 2023-09-13 23:24:56 +00:00
ed
a75a992951 golf the sfx-gz by ~27.6 kB;
* 11 kB webdeps: brotli easymde+prism instead of zopfli
* 8 kB jinja2
* 5 kB ftp
* 3 kB improve uncommenter
2023-09-13 23:21:22 +00:00
ed
4fbd6853f4 add msg-log.py initially by @clach04, closes #35 2023-09-12 19:56:05 +00:00
ed
71c3ad63b3 fix tests 2023-09-11 01:46:25 +00:00
ed
e1324e37a5 update pkgs to 1.9.5 2023-09-09 14:15:46 +00:00
ed
a996a09bba v1.9.5 2023-09-09 13:36:56 +00:00
ed
18c763ac08 smb: upgrade to impacket 0.11, full user account support,
permissions are now per-account instead of coalescing

also stops windows from freaking out if there's an offline volume
2023-09-09 12:46:37 +00:00
ed
3d9fb753ba stuff 2023-09-08 21:42:05 +00:00
ed
714fd1811a add option to generate pax-format tar archives
and forgot to commit the nix module
2023-09-08 21:13:23 +00:00
ed
4364581705 fix accidental 422-ban when uploading lots of dupes 2023-09-08 19:49:29 +00:00
ed
ba02c9cc12 readme fix + make hacker theme more hacker 2023-09-08 19:35:12 +00:00
ed
11eefaf968 create / edit non-markdown textfiles (if user has delete-access)
also enables the ansi escape code parser if the text looks like ansi
2023-09-08 18:47:31 +00:00
ed
5a968f9e47 add permission 'h': folders redirect to index.html;
safest way to make copyparty like a general-purpose webserver where
index.html is returned as expected yet directory listing is entirely
disabled / unavailable
2023-09-07 23:30:01 +00:00
ed
6420c4bd03 up to 2.6x faster download-as-zip
when there's lots of files, and especially small ones
and also reduces cpu load by at least 15%
2023-09-05 22:57:03 +00:00
ed
0f9877201b support cache directives in --css-browser, --js-browser;
for example --css-browser=/the.css?cache=600 (seconds)
or --js-browser=/.res/the.js?cache=i (7 days)
2023-09-03 19:50:31 +00:00
ed
9ba2dec9b2 lightbox: fix ccw rotation hotkey 2023-09-03 19:23:29 +00:00
ed
ae9cfea939 update pkgs to 1.9.4 2023-09-02 00:45:57 +00:00
ed
cadaeeeace v1.9.4 2023-09-02 00:18:53 +00:00
ed
767696185b add ?tar=gz, ?tar=bz2, ?tar=xz with optional level;
defaults are ?tar=gz:3, ?tar=bz2:9, ?tar=xz:1
2023-09-01 23:44:10 +00:00
ed
c1efd227b7 fix inconsistent use of symlink mtimes in database;
on upload, dupes are by default handled by symlinking to the existing
copy on disk, writing the uploader's local mtime into the symlink mtime,
which is also what gets indexed in the db

this worked as intended, however during an -e2dsa rescan on startup the
symlink destination timestamps would be used instead, causing a reindex
and the resulting loss of uploader metadata (ip, timestamp)

will now always use the symlink's mtime;
worst-case 1% slower startup (no dhash)

this change will cause a reindex of incorrectly indexed files, however
as this has already happened at least once due to the bug being fixed,
there will be no additional loss of metadata
2023-09-01 20:29:55 +00:00
ed
a50d0563c3 instantly perform search when URL contains a raw query 2023-09-01 20:16:19 +00:00
ed
e5641ddd16 update pkgs to 1.9.3 2023-08-31 23:08:32 +00:00
ed
700111ffeb v1.9.3 2023-08-31 22:11:31 +00:00
ed
b8adeb824a misc http correctness;
some of this looks shady af but appears to have been harmless
(decent amount of testing came out ok)

* some location normalization happened before unquoting; however vfs
   handled this correctly so the outcome was just confusing messages
* some url parameters were double-decoded (unpost filter, move
   destinations), causing some operations to fail unexpectedly
* invalid cache-control headers could be generated,
   but not in a maliciously-beneficial way
   (there are safeguards stripping newlines and control-characters)

also adds an exception-message cleanup step to strip away the
filesystem path that copyparty's python files are located at,
in case that could be interesting knowledge
2023-08-31 21:51:58 +00:00
ed
30cc9defcb cosmetics:
* in case someone gets a confusing access-related error message,
  include more context in serverlogs (exact path)
* fix js console spam in search results
* same markdown line-height in viewer and browser
2023-08-31 21:27:14 +00:00
ed
61875bd773 slightly reduce flickering during page load on chrome 2023-08-31 20:02:33 +00:00
ed
30905c6f5d add convenient debugs in case the fight is not over 2023-08-31 20:00:14 +00:00
ed
9986136dfb apple/ios/iphone: maybe fix background album playback
good news: apple finally added support for samplerates other than
44100 for AudioContext, meaning it would now have been possible to
set non-100% volume for audio files including opus files

bad news: apple broke AudioContext in a way that makes it bug out
mediaSessions, causing lockscreen controls to become mostly useless

bad news: apple broke AudioContext additionally where it randomly
causes playback issues, blocking playback of audio files, even if
the AudioContext is sitting idle doing nothing (which is a
requirement for reliable upload speeds on other platforms)

disable AudioContext on iOS
2023-08-31 19:57:05 +00:00
ed
1c0d978979 ios/iphone: autoreplace smart-quotes with sane quotes,
as the iphone keyboard is not able to produce ' or "
2023-08-31 19:29:37 +00:00
ed
0a0364e9f8 FTPd: fix py3.12 support; workaround until next release:
run sfx twice with PYTHONPATH=/tmp/pe-copyparty.$(id -u)/copyparty/vend
2023-08-28 00:25:33 +00:00
ed
3376fbde1a update pkgs to 1.9.2 2023-08-26 22:09:43 +00:00
ed
ac21fa7782 v1.9.2 2023-08-26 21:16:30 +00:00
ed
c1c8dc5e82 ok lets try that again 2023-08-26 19:07:23 +00:00
ed
5a38311481 mark offline volumes in directory tree sidebar 2023-08-26 19:00:46 +00:00
ed
9f8edb7f32 make markdown slightly safer without the nohtml volflag
by running dompurify after marked.parse if plugins are not enabled;
adds no protection against the more practical approach of just
putting a malicious <script> in an html file and uploading that,
but one footgun less is one less footgun
2023-08-26 17:37:02 +00:00
ed
c5a6ac8417 persist dotfile preference as cookie for initial listing 2023-08-26 15:50:57 +00:00
ed
50e01d6904 add more autoban triggers:
* --ban-url: URLs which 404 and also match --sus-urls (bot-scan)
* --ban-403: trying to access volumes that dont exist or require auth
* --ban-422: invalid POST messages, fuzzing and such
* --nonsus-urls: regex of 404s which shouldn't trigger --ban-404

in many situations it makes sense to handle this logic inside copyparty,
since stuff like cloudflare and running copyparty on another physical
box than the nginx frontend is on becomes fairly clunky
2023-08-26 13:52:24 +00:00
ed
9b46291a20 add option to force-disable turbo,
making it safer to enable --ban-404
(u2c can still get banned inadvertently)
2023-08-26 13:19:38 +00:00
ed
14497b2425 docs:
* mention cloudflare-specific nginx config

versus.md:
* seafile has a size limit on zip downloads
* seafile and nextcloud are slow at uploading many small files

u2c: improve error message in funky environments
2023-08-25 21:57:26 +00:00
ed
f7ceae5a5f add filetable range-select with shift-pgup/pgdn,
and retain file selection cursor when lazyloading more files
2023-08-25 19:34:37 +00:00
ed
c9492d16ba fix textfile navigation hotkeys (broke in 5d13ebb4) 2023-08-25 18:41:45 +00:00
ed
9fb9ada3aa dont whine about inaccessible root on rootless configs,
and make it easier for on403 to invoke the homepage-redirect
2023-08-25 18:33:15 +00:00
ed
db0abbfdda typo 2023-08-21 00:05:39 +00:00
ed
e7f0009e57 update pkgs to 1.9.1 2023-08-20 23:53:58 +00:00
ed
4444f0f6ff v1.9.1 2023-08-20 23:38:42 +00:00
ed
418842d2d3 update pkgs to 1.9.0 2023-08-20 23:11:44 +00:00
ed
cafe53c055 v1.9.0 2023-08-20 22:02:40 +00:00
ed
7673beef72 actually impl --mc-hop (and improve --zm-spam) 2023-08-20 21:27:28 +00:00
ed
b28bfe64c0 explain apple bullshit 2023-08-20 22:09:00 +02:00
ed
135ece3fbd immediately allow uploading an interrupted and
deleted incomplete upload to another location
2023-08-20 19:16:35 +00:00
ed
bd3640d256 change to openmetrics 2023-08-20 18:50:14 +00:00
ed
fc0405c8f3 add prometheus metrics; closes #49 2023-08-20 17:58:06 +00:00
ed
7df890d964 wget: only allow http/https/ftp/ftps (#50):
these are all the protocols that are currently supported by wget,
so this has no practical effect aside from making sure we won't
suddenly get file:// support or something (which would be bad)
2023-08-20 09:47:50 +00:00
ed
8341041857 mdns: option to ignore spec to avoid issues on
networks where clients have multiple IPs of which some are subnets that
the copyparty server is not
2023-08-19 21:45:26 +00:00
ed
1b7634932d tar/zip-download: add opus transcoding filter 2023-08-19 19:40:46 +00:00
ed
48a3898aa6 suggest enabling the database on startup 2023-08-16 19:57:19 +00:00
ed
5d13ebb4ac avoid firefox-android quirk(?):
when repeatedly tapping the next-folder button, occasionally it will
reload the entire page instead of ajax'ing the directory contents.

Navigation happens by simulating a click in the directory sidebar,
so the incorrect behavior matches what would happen if the link to the
folder didn't have its onclick-handler attached, so should probably
double-check if there's some way for that to happen

Issue observed fairly easily in firefox on android, regardless if
copyparty is running locally or on a server in a different country.
Unable to reproduce with android-chrome or desktop-firefox

Could also be due to an addon (dark-reader, noscript, ublock-origin)

anyways, avoiding this by doing the navigation more explicitly
2023-08-16 19:56:47 +00:00
ed
015b87ee99 performance / cosmetic:
* js: use .call instead of .bind when possible
* when running without e2d, the message on startup regarding
  unfinished uploads didn't show the correct filesystem path
2023-08-16 19:32:43 +00:00
ed
0a48acf6be limit each column of the files table to screen width 2023-08-16 03:55:53 +00:00
ed
2b6a3afd38 fix iOS randomly increasing fontsize of some things:
* links which are wider than the display width
* probably input fields too
2023-08-16 03:47:19 +00:00
ed
18aa82fb2f make browser resizing smoother / less expensive 2023-08-15 16:55:19 +00:00
ed
f5407b2997 docker: persist autogenerated seeds, disable certgen, and
mention how to run the containers with selinux enabled
* assumes that a /cfg docker volume is provided
2023-08-15 15:07:33 +00:00
ed
474d5a155b android's got hella strict filename rules 2023-08-15 06:46:57 +02:00
ed
afcd98b794 mention some gotchas (thx noktuas) 2023-08-15 03:38:51 +02:00
ed
4f80e44ff7 option to exactly specify browser title prefix 2023-08-15 03:17:01 +02:00
ed
406e413594 hint at additional context in exceptions 2023-08-15 01:42:13 +02:00
ed
033b50ae1b u2c: exclude files by regex 2023-08-15 00:45:12 +02:00
ed
bee26e853b show server hostname in html titles:
* --doctitle defines most titles, prefixed with "--name: " by default
* the file browser is only prefixed with the --name itself
* --nth ("no-title-hostname") removes it
* also removed by --nih ("no-info-hostname")
2023-08-14 23:50:13 +02:00
ed
04a1f7040e adjustable timestamp resolution in log messages 2023-08-14 17:22:22 +02:00
ed
f9d5bb3b29 support upload by dragdrop from other browser windows,
hello from LO484 https://ocv.me/stuff/aircode.jpg
2023-07-28 21:43:40 +02:00
ed
ca0cd04085 update pkgs to 1.8.8 2023-07-25 16:25:27 +00:00
ed
999ee2e7bc v1.8.8 2023-07-25 15:50:48 +00:00
ed
1ff7f968e8 fix tls-cert regeneration on windows 2023-07-25 15:27:27 +00:00
ed
3966266207 remember ?edit and trailing-slash during login redirect 2023-07-25 15:14:47 +00:00
ed
d03e96a392 html5 strips the first leading LF in textareas; stop it 2023-07-25 14:16:54 +00:00
ed
4c843c6df9 fix md-editor lastmod cmp when browsercache is belligerent 2023-07-25 14:06:53 +00:00
ed
0896c5295c range-select fixes:
* dont crash when shiftclicking between folders
* remember origin when lazyloading more files
2023-07-25 14:06:31 +02:00
ed
cc0c9839eb update pkgs to 1.8.7 2023-07-23 16:16:49 +00:00
ed
d0aa20e17c v1.8.7 2023-07-23 15:43:38 +00:00
ed
1a658dedb7 fix infinite playback spin on servers with one single file 2023-07-23 14:52:42 +00:00
ed
8d376b854c this is the wrong way around 2023-07-23 14:10:23 +00:00
ed
490c16b01d be even stricter with ?hc 2023-07-23 13:23:52 +00:00
ed
2437a4e864 the CVE-2023-37474 fix was overly strict; loosen 2023-07-23 11:31:11 +00:00
ed
007d948cb9 fix GHSA-f54q-j679-p9hh: reflected-XSS in cookie-setters;
it was possible to set cookie values which contained newlines,
thus terminating the http header and bleeding into the body.

We now disallow control-characters in queries,
but still allow them in paths, as copyparty supports
filenames containing newlines and other mojibake.

The changes in `set_k304` are not necessary in fixing the vulnerability,
but makes the behavior more correct.
2023-07-23 10:55:08 +00:00
ed
335fcc8535 update pkgs to 1.8.6 2023-07-21 01:12:55 +00:00
ed
9eaa9904e0 v1.8.6 2023-07-21 00:36:37 +00:00
ed
0778da6c4d fix GHSA-cw7j-v52w-fp5r: reflected-XSS through /?hc 2023-07-21 00:35:43 +00:00
ed
a1bb10012d update pkgs to 1.8.4 2023-07-18 08:26:39 +00:00
ed
1441ccee4f v1.8.4 2023-07-18 07:46:22 +00:00
ed
491803d8b7 update pkgs to 1.8.3 2023-07-16 23:03:30 +00:00
ed
3dcc386b6f v1.8.3 2023-07-16 22:00:04 +00:00
ed
5aa54d1217 shift/ctrl-click improvements:
* always enable shift-click selection in list-view
* shift-clicking thumbnails opens in new window by default as expected
* enable shift-select in grid-view when multiselect is on
* invert select when the same shift-select is made repeatedly
2023-07-16 18:15:56 +00:00
ed
88b876027c option to range-select files with shift-click; closes #47
also restores the browser-default behavior of
opening links in a new tab with CTRL / new window with SHIFT
2023-07-16 14:05:09 +00:00
ed
fcc3aa98fd add path-traversal scanners 2023-07-16 13:09:31 +00:00
ed
f2f5e266b4 support listing uploader IPs in d2t volumes 2023-07-15 18:50:35 +00:00
ed
e17bf8f325 require the new admin permission for the admin-panel 2023-07-15 18:39:41 +00:00
ed
d19cb32bf3 update pkgs to 1.8.2 2023-07-14 16:05:57 +00:00
ed
85a637af09 v1.8.2 2023-07-14 15:58:39 +00:00
ed
043e3c7dd6 fix traversal vulnerability GHSA-pxfv-7rr3-2qjg:
the /.cpr endpoint allowed full access to server filesystem,
unless mitigated by prisonparty
2023-07-14 15:55:49 +00:00
ed
8f59afb159 fix another race (unpost):
unposting could collide with most other database-related activities,
causing one or the other to fail.
luckily the unprotected query performed by the unpost API happens to be
very cheap, so also the most likely to fail, and would succeed upon a
manual reattempt from the UI.
even in the worst case scenario, there would be no unrecoverable damage
as the next rescan would auto-repair any resulting inconsistencies.
2023-07-14 15:21:14 +00:00
ed
77f1e51444 fix unlikely race (e2tsr):
if someone with admin rights refreshes the homepage exactly as the
directory indexer decides to `_drop_caches`, the indexer thread would
die and the up2k instance would become inoperable...
luckily the probability of hitting this by chance is absolutely minimal,
and the worst case scenario is having to restart copyparty if this
happens immediately after startup; there is no risk of database damage
2023-07-14 15:20:25 +00:00
ed
22fc4bb938 add event-hook for banning users 2023-07-13 22:29:32 +00:00
ed
50c7bba6ea volflag "nohtml" to never return html or rendered markdown from potentially unsafe volumes 2023-07-13 21:57:52 +00:00
ed
551d99b71b add permission "a" to show uploader IPs (#45) 2023-07-12 21:36:55 +00:00
ed
b54b7213a7 more thumbnailer configs available as volflags:
--th-convt = convt
--th-no-crop = nocrop
--th-size = thsize
2023-07-11 22:15:37 +00:00
ed
a14943c8de update pkgs to 1.8.1 2023-07-07 23:58:16 +00:00
ed
a10cad54fc v1.8.1 2023-07-07 22:20:01 +00:00
ed
8568b7702a add pillow10 support + improve text rendering 2023-07-07 22:13:04 +00:00
ed
5d8cb34885 404/403 can be handled with plugins 2023-07-07 21:33:40 +00:00
ed
8d248333e8 dont disable quickedit when hashing passwords interactively 2023-07-07 18:29:30 +00:00
ed
99e2ef7f33 ux: fix tabs clipping in fedora-ff, hackertheme up2k flags 2023-07-07 18:24:58 +00:00
ed
e767230383 very-bad-idea: prefer mpv / streamlink; closes #42 2023-06-28 21:25:40 +00:00
ed
90601314d6 better explain why very-bad-idea is a very bad idea 2023-06-27 22:30:14 +00:00
ed
9c5eac1274 add fedora package 2023-06-27 22:22:42 +00:00
ed
50905439e4 update pkgs to 1.8.0 2023-06-26 00:46:55 +00:00
ed
a0c1239246 v1.8.0 2023-06-26 00:05:12 +00:00
ed
b8e851c332 cloudflare update + cosmetics:
* toastb padding fixes scrollbar on norwegian 403 in firefox
* fix text aspect ratio in seekbar on compact toggle
* crashpage had link overlaps on homepage
2023-06-25 23:09:29 +00:00
ed
baaf2eb24d include mdns names in tls cert 2023-06-25 22:06:35 +00:00
ed
e197895c10 support hashed passwords; closes #39 2023-06-25 21:50:33 +00:00
ed
cb75efa05d md-editor: index file and trigger upload hooks 2023-06-20 18:11:35 +00:00
ed
8b0cf2c982 volflags to limit volume size / num files; closes #40 2023-06-19 00:42:45 +00:00
ed
fc7d9e1f9c update pkgs to 1.7.6 2023-06-11 09:13:58 +00:00
ed
10caafa34c v1.7.6 2023-06-11 08:14:45 +00:00
ed
22cc22225a v1.7.5 2023-06-11 01:32:56 +00:00
ed
22dff4b0e5 update pkgs to 1.7.4 2023-06-11 01:26:25 +00:00
ed
a00ff2b086 v1.7.4 2023-06-11 00:07:38 +00:00
ed
e4acddc23b v1.7.3 2023-06-11 00:03:03 +00:00
ed
2b2d8e4e02 tls / gencert fixes 2023-06-10 23:34:34 +00:00
ed
5501d49032 prefer urandom for fk-salt unless cert.pem exists 2023-06-10 22:47:39 +00:00
ed
fa54b2eec4 generate tls certs 2023-06-10 22:46:24 +00:00
ed
cb0160021f upgrade pyinstaller env/deps 2023-06-10 11:58:58 +00:00
ed
93a723d588 add --ansi to systemd, fix grid controls bg,
mention folder thumbs dependency on -e2d,
improve make-sfx warnings,
update changelog
2023-06-06 22:04:39 +00:00
ed
8ebe1fb5e8 mention cfssl.sh in the default-certificate warning,
and improve documentation inside cfssl.sh
2023-06-06 21:41:19 +00:00
clach04
2acdf685b1 Fix issue #33 - no color output expected when redirecting stdout 2023-06-05 01:58:49 +02:00
ed
9f122ccd16 make-sfx: option to auto-obtain webdeps 2023-06-04 23:46:38 +00:00
ed
03be26fafc improve check for type-hint support 2023-06-04 22:59:25 +00:00
ed
df5d309d6e document the make-sfx.sh fast option 2023-06-04 14:13:35 +00:00
ed
c355f9bd91 catch common environment issues (#32):
* error-message which explains how to run on py2 / older py3
   when trying to run from source
* check compatibility between jinja2 and cpython on startup
* verify that webdeps are present on startup
* verify that webdeps are present when building sfx
* make-sfx.sh grabs the strip-hints dependency
2023-06-04 13:13:36 +00:00
ed
9c28ba417e option to regex-exclude files in browser listings 2023-06-02 21:54:25 +00:00
ed
705b58c741 support the NO_COLOR environment variable
https://no-color.org/ and more importantly
https://youtu.be/biW5UVGkPMA?t=150
2023-06-02 20:22:57 +00:00
ed
510302d667 support ftps-only; closes #30 2023-06-02 19:02:50 +00:00
ed
025a537413 add option to show thumbs by default; closes #31 2023-06-02 18:41:21 +00:00
ed
60a1ff0fc0 macos: mute select() noise on wake from suspend 2023-05-19 16:37:52 +02:00
ed
f94a0b1bff update pkgs to 1.7.2 2023-05-13 00:49:46 +00:00
ed
4ccfeeb2cd v1.7.2 2023-05-13 00:00:07 +00:00
ed
2646f6a4f2 oh nice, looks like 3.18 fixed whatever broke in 3.17 2023-05-12 23:38:10 +00:00
ed
b286ab539e readme: add more examples 2023-05-12 22:41:06 +00:00
ed
2cca6e0922 warn when sharing certain system locations 2023-05-12 21:38:16 +00:00
ed
db51f1b063 cfg: allow trailing colon on category headers 2023-05-12 21:01:34 +00:00
ed
d979c47f50 optimize clearTimeout + always shrink upload panes after completion + fix GET alignment 2023-05-12 20:46:45 +00:00
ed
e64b87b99b dont hardlink symlinks (they could be relative) 2023-05-12 20:41:09 +00:00
ed
b985011a00 upgrade docker to alpine 3.18:
* enables chiptune player
* smaller containers (generate pycache at runtime)
2023-05-11 06:56:21 +00:00
ed
c2ed2314c8 pkg/arch: add setuptools 2023-05-08 22:24:46 +00:00
ed
cd496658c3 update pkgs to 1.7.1 2023-05-07 19:51:59 +00:00
ed
deca082623 v1.7.1 2023-05-07 18:34:39 +00:00
ed
0ea8bb7c83 forgot the u2c symlink + sfx listing 2023-05-07 15:45:20 +00:00
ed
1fb251a4c2 was moved to pyproject 2023-05-07 15:41:00 +00:00
ed
4295923b76 rename up2k.py (client) to u2c.py 2023-05-07 15:37:52 +00:00
ed
572aa4b26c rename up2k.py (client) to u2c.py 2023-05-07 15:35:56 +00:00
ed
b1359f039f linter cleanup 2023-05-07 14:38:30 +00:00
ed
867d8ee49e replace setup.py with pyproject.toml + misc cleanup 2023-05-07 14:37:57 +00:00
ed
04c86e8a89 webdav: support write-only folders + force auth option 2023-05-06 20:33:29 +00:00
ed
bc0cb43ef9 include usernames in request logs 2023-05-06 20:17:56 +00:00
ed
769454fdce ftpd: only log invalid passwords 2023-05-06 19:16:52 +00:00
ed
4ee81af8f6 support ';' in passwords 2023-05-06 18:54:55 +00:00
ed
8b0e66122f smoother playback cursor on short songs + optimize 2023-05-06 16:31:04 +00:00
ed
8a98efb929 adapt to new archpkg layout 2023-05-05 20:51:18 +00:00
ed
b6fd555038 panic if two accounts have the same password 2023-05-05 20:24:24 +00:00
ed
7eb413ad51 doc tweaks 2023-05-05 19:39:10 +00:00
ixces
4421d509eb update PKGBUILD 2023-05-02 17:21:12 +02:00
ed
793ffd7b01 update pkgs to 1.7.0 2023-04-29 22:50:36 +00:00
ed
1e22222c60 v1.7.0 2023-04-29 21:14:38 +00:00
ed
544e0549bc make xvol and xdev apply at runtime (closes #24):
* when accessing files inside an xdev volume, verify that the file
   exists on the same device/filesystem as the volume root

* when accessing files inside an xvol volume, verify that the file
   exists within any volume where the user has read access
2023-04-29 21:10:02 +00:00
ed
83178d0836 preserve empty folders (closes #23):
* when deleting files, do not cascade upwards through empty folders
* when moving folders, also move any empty folders inside

the only remaining action which autoremoves empty folders is
files getting deleted as they expire volume lifetimes

also prevents accidentally moving parent folders into subfolders
(even though that actually worked surprisingly well)
2023-04-29 11:30:43 +00:00
ed
c44f5f5701 nit 2023-04-29 09:44:46 +00:00
ed
138f5bc989 warn about android powersave settings on music interruption + fix eq on folder change 2023-04-29 09:31:53 +00:00
ed
e4759f86ef ftpd correctness:
* winscp mkdir failed because the folder-not-found error got repeated
* rmdir fails after all files in the folder have poofed; that's OK
* add --ftp4 as a precaution
2023-04-28 20:50:45 +00:00
ed
d71416437a show file selection summary 2023-04-27 19:33:52 +00:00
ed
a84c583b2c ok that wasn't enough 2023-04-27 19:06:35 +00:00
ed
cdacdccdb8 update pkgs to 1.6.15 2023-04-27 00:36:56 +00:00
ed
d3ccd3f174 v1.6.15 2023-04-26 23:00:55 +00:00
ed
cb6de0387d a bit faster 2023-04-26 19:56:27 +00:00
ed
abff40519d eyecandy: restore playback indicator on folder hop 2023-04-26 19:09:16 +00:00
ed
55c74ad164 30% faster folder listings (wtf...) 2023-04-26 18:55:53 +00:00
ed
673b4f7e23 option to show symlink's lastmod instead of deref;
mainly motivated by u2cli's folder syncing in turbo mode
which would un-turbo on most dupes due to wrong lastmod

disabled by default for regular http listings
(to avoid confusion in most regular usecases),
enable per-request with urlparam lt

enabled by default for single-level webdav listings
(because rclone hits the same issue as u2cli),
can be disabled with arg --dav-rt or volflag davrt

impossible to enable for recursive webdav listings
2023-04-26 18:54:21 +00:00
ed
d11e02da49 u2cli: avoid dns lookups while uploading 2023-04-26 18:46:42 +00:00
ed
8790f89e08 fix installing from source tarball 2023-04-26 18:40:47 +00:00
ed
33442026b8 try to discourage android from stopping playback...
...when continuing into the next folder

accidentally introduces a neat bonus feature where the music
no longer stops while you go looking for stuff to play next
2023-04-26 18:33:30 +00:00
ed
03193de6d0 socket read/write timeout 2023-04-24 20:04:22 +00:00
ed
8675ff40f3 update pkgs to 1.6.14 2023-04-24 07:52:12 +00:00
ed
d88889d3fc v1.6.14 2023-04-24 06:09:44 +00:00
ed
6f244d4335 update pkgs to 1.6.13 2023-04-24 00:46:47 +00:00
ed
cacca663b3 v1.6.13 2023-04-23 23:05:31 +00:00
ed
d5109be559 ftp: track login state isolated from pyftpdlib;
for convenience, the password can be provided as the username
but that confuses pyftpd a little so let's do this
2023-04-23 21:06:19 +00:00
ed
d999f06bb9 volflags can be -unset 2023-04-23 21:05:29 +00:00
ed
a1a8a8c7b5 configurable tls-certificate location 2023-04-23 20:56:55 +00:00
ed
fdd6f3b4a6 tar/zip: use volume name as toplevel fallback 2023-04-23 20:55:34 +00:00
ed
f5191973df docs cleanup:
* mostly deprecate --http-only and --https-only since there is zero
   performance gain in recent python versions, however could still be
   useful for avoiding limitations in alternative python interpreters
   (and forcing http/https with mdns/ssdp/qr)

* mention antivirus being useless as usual
2023-04-23 20:25:44 +00:00
ed
ddbaebe779 update pkgs to 1.6.12 2023-04-20 22:47:37 +00:00
ed
42099baeff v1.6.12 2023-04-20 21:41:47 +00:00
ed
2459965ca8 u2cli: dont enter delete stage if something failed 2023-04-20 20:40:09 +00:00
ed
6acf436573 u2idx pool instead of per-socket;
prevents running out of FDs thanks to thousands of sqlite3 sessions
and neatly sidesteps what could possibly be a race in python's
sqlite3 bindings where it sometimes forgets to close the fd
2023-04-20 20:36:13 +00:00
ed
f217e1ce71 correctly ignore multirange requests 2023-04-20 19:14:38 +00:00
ed
418000aee3 explain tus incompatibility + update docs 2023-04-19 21:46:33 +00:00
ed
dbbba9625b nix: make deps optional + update docs 2023-04-17 13:17:53 +02:00
Chinpo Nya
397bc92fbc rewrite the nix module config with nix options 2023-04-17 00:26:57 +02:00
Chinpo Nya
6e615dcd03 fix: remove ffmpeg from python env build inputs 2023-04-17 00:26:57 +02:00
Chinpo Nya
9ac5908b33 refactor: remove unnecessary use of 'rec' 2023-04-17 00:26:57 +02:00
Chinpo Nya
50912480b9 automate nix package updates 2023-04-17 00:26:57 +02:00
Chinpo Nya
24b9b8319d nix/nixos documentation 2023-04-17 00:26:57 +02:00
Chinpo Nya
b0f4f0b653 nixos module 2023-04-17 00:26:57 +02:00
Chinpo Nya
05bbd41c4b nix package 2023-04-17 00:26:57 +02:00
ed
8f5f8a3cda expand userhomes everywhere:
* -c
* -lo
* --hist
* hist volflag
* --ssl-log
2023-04-14 18:55:19 +02:00
ed
c8938fc033 fix ipv4 location header on dualstack 2023-04-14 14:06:44 +02:00
ed
1550350e05 update docs (performance tips, windows example) 2023-04-13 21:36:55 +00:00
ed
5cc190c026 better 2023-04-12 22:09:46 +00:00
ed
d6a0a738ce add windows example + update docs + some cosmetics 2023-04-12 22:06:44 +00:00
ed
f5fe3678ee more safari-on-touchbar-macbook workarounds:
* safari invokes pause on the mediasession
   whenever any Audio loads a new src (preload)

* ...and on some(?) seeks
2023-04-07 23:04:01 +02:00
ed
f2a7925387 avoid safari bugs on touchbar macbooks:
* songs would play backwards
* playback started immediately on folder change
2023-04-07 12:38:37 +02:00
ed
fa953ced52 update archpkg to 1.6.11 2023-04-01 22:59:20 +00:00
ed
f0000d9861 v1.6.11 2023-04-01 21:12:54 +00:00
ed
4e67516719 last.fm web-scrobbler support 2023-04-01 21:02:03 +00:00
ed
29db7a6270 deps: automate prismjs build 2023-04-01 17:46:42 +00:00
ed
852499e296 dont panic in case of extension-injected css 2023-04-01 16:08:45 +00:00
ed
f1775fd51c update deps 2023-04-01 15:15:53 +00:00
ed
4bb306932a update systemd notes 2023-04-01 10:32:12 +00:00
ed
2a37e81bd8 add rclone optimization, closes #21 2023-04-01 10:21:21 +00:00
ed
6a312ca856 something dumb 2023-04-01 00:16:30 +00:00
ed
e7f3e475a2 more accurate bpm detector 2023-03-31 21:20:37 +00:00
ed
854ba0ec06 add audio filter plugin thing 2023-03-31 20:20:28 +00:00
ed
209b49d771 remind sqlite we have indexes 2023-03-30 21:45:58 +00:00
ed
949baae539 integrate markdown thumbs with image gallery 2023-03-30 21:21:21 +00:00
ed
5f4ea27586 new hook: exif stripper 2023-03-26 22:19:15 +00:00
ed
099cc97247 hooks: more correct usage examples 2023-03-26 22:18:48 +00:00
ed
592b7d6315 gdi js 2023-03-26 02:06:49 +00:00
ed
0880bf55a1 markdown thumbnails 2023-03-26 01:53:41 +00:00
ed
4cbffec0ec u2cli: show more errors + drop --ws (does nothing) 2023-03-23 23:47:41 +00:00
ed
cc355417d4 update docs 2023-03-23 23:37:45 +00:00
ed
e2bc573e61 webdav correctness:
* generally respond without body
   (rclone likes this)
* don't connection:close on most mkcol errors
2023-03-23 23:25:00 +00:00
ed
41c0376177 update archpkg to 1.6.10 2023-03-20 23:37:20 +00:00
ed
c01cad091e v1.6.10 2023-03-20 21:56:31 +00:00
ed
eb349f339c update foldersync / rclone docs 2023-03-20 21:54:08 +00:00
ed
24d8caaf3e switch rclone to owncloud mode so it sends lastmod 2023-03-20 21:45:52 +00:00
ed
5ac2c20959 basic support for rclone sync 2023-03-20 21:17:53 +00:00
ed
bb72e6bf30 support propfind of files (not just dirs) 2023-03-20 20:58:51 +00:00
ed
d8142e866a accept last-modified from owncloud webdav extension 2023-03-20 20:28:26 +00:00
ed
7b7979fd61 add sftpgo to comparison + update docs 2023-03-19 21:45:35 +00:00
ed
749616d09d help iOS understand short audio files 2023-03-19 20:03:35 +00:00
ed
5485c6d7ca prisonparty: FFmpeg runs faster with /dev/urandom 2023-03-19 18:32:35 +00:00
ed
b7aea38d77 add iOS uploader (mk.ii) 2023-03-18 18:38:37 +00:00
ed
0ecd9f99e6 update archpkg to 1.6.9 2023-03-16 22:34:09 +00:00
ed
ca04a00662 v1.6.9 2023-03-16 21:06:18 +00:00
ed
8a09601be8 url-param ?v disables index.html 2023-03-16 20:52:43 +00:00
ed
1fe0d4693e fix logues bleeding into navpane 2023-03-16 20:23:01 +00:00
ed
bba8a3c6bc fix truncated search results 2023-03-16 20:12:13 +00:00
ed
e3d7f0c7d5 add tooltip delay to android too 2023-03-16 19:48:44 +00:00
ed
be7bb71bbc add option to show index.html instead of listing 2023-03-16 19:41:33 +00:00
ed
e0c4829ec6 verify covers against db instead of fs 2023-03-15 19:48:43 +00:00
ed
5af1575329 readme: ideas welcome w 2023-03-14 22:24:43 +00:00
ed
884f966b86 update archpkg to 1.6.8 2023-03-12 18:55:02 +00:00
ed
f6c6fbc223 fix exe builder 2023-03-12 18:54:16 +00:00
ed
b0cc396bca v1.6.8 2023-03-12 16:10:07 +00:00
ed
ae463518f6 u2cli: send upload stats to server + fix py2.6 support 2023-03-11 21:39:56 +00:00
ed
2be2e9a0d8 index folder thumbs in db 2023-03-11 11:43:29 +00:00
ed
e405fddf74 specify that only up2k clients will resume uploads 2023-03-09 22:47:37 +00:00
ed
c269b0dd91 show an error (instead of crashing) if a pic is 404 2023-03-09 22:37:12 +00:00
ed
8c3211263a keep scanning folders for more music to play 2023-03-09 22:26:41 +00:00
ed
bf04e7c089 update some docs 2023-03-09 22:11:39 +00:00
ed
c7c6e48b1a didn't compress numbered logfiles 2023-03-09 21:59:59 +00:00
ed
974ca773be just to be extra sure 2023-03-09 21:49:29 +00:00
ed
9270c2df19 evict basic-browser from crawlers 2023-03-09 21:35:07 +00:00
ed
b39ff92f34 u2cli: support long paths on win7 2023-03-08 22:27:13 +00:00
ed
7454167f78 add DCO PR template 2023-03-08 08:27:17 +01:00
ed
5ceb3a962f build up2k.exe 2023-03-07 22:58:14 +00:00
ed
52bd5642da update archpkg to 1.6.7 2023-03-05 20:20:15 +00:00
ed
c39c93725f v1.6.7 2023-03-05 20:18:16 +00:00
ed
d00f0b9fa7 ftp: support filezilla mkdir 2023-03-05 20:18:02 +00:00
ed
01cfc70982 add example for webdav automount 2023-03-05 19:52:45 +00:00
ed
e6aec189bd fix flickering toast on upload finish 2023-03-05 19:49:54 +00:00
ed
c98fff1647 fix chunkpost-handshake race (affects --no-dedup only);
a handshake arriving in the middle of the final chunk could cause
dupes to become empty -- worst case leading to loss of data
2023-03-05 19:45:50 +00:00
ed
0009e31bd3 heavy webworker load can park the main thread of a
background chrome tab for 10sec; piggyback some pokes off postmessage
2023-03-02 22:35:32 +00:00
ed
db95e880b2 thats not how it works 2023-02-28 22:19:06 +00:00
ed
e69fea4a59 exe: update screenshots 2023-02-26 22:26:40 +00:00
ed
4360800a6e update archpkg to 1.6.6 2023-02-26 22:11:56 +00:00
ed
b179e2b031 prisonparty: ignore unresolvable mount paths;
allows startup even if some locations are missing,
for example if a server rebooted and some disks aren't up yet
2023-02-26 22:11:15 +00:00
ed
ecdec75b4e v1.6.6 2023-02-26 20:30:17 +00:00
ed
5cb2e33353 update readmes + fix typo 2023-02-26 19:22:54 +00:00
ed
43ff2e531a add deadline for filling data into a reserved filename 2023-02-26 19:13:35 +00:00
ed
1c2c9db8f0 retain upload time (but not ip) on file reindex 2023-02-26 19:09:24 +00:00
ed
7ea183baef let http thread handle upload verification plugins 2023-02-26 19:07:49 +00:00
ed
ab87fac6d8 db got the wrong lastmod when linking dupes 2023-02-26 18:52:04 +00:00
ed
1e3b7eee3b dont rmdir volume top on cleanup 2023-02-26 18:28:37 +00:00
ed
4de028fc3b let controlpanel rescan button override lack of e2dsa 2023-02-26 18:27:10 +00:00
ed
604e5dfaaf improve error handling / messages 2023-02-26 18:26:13 +00:00
ed
05e0c2ec9e add xiu (batching hook; runs on idle after uploads) +
bunch of tweaks/fixes for hooks
2023-02-26 18:23:32 +00:00
ed
76bd005bdc cgen fixes 2023-02-21 19:42:08 +00:00
ed
5effaed352 add reminder that SSDP launches IE by default 2023-02-21 19:38:35 +00:00
ed
cedaf4809f add exe integrity selfcheck 2023-02-21 19:18:10 +00:00
ed
6deaf5c268 add jitter simlation 2023-02-20 21:34:30 +00:00
ed
9dc6a26472 webdav.bat and readme tweaks 2023-02-20 21:00:04 +00:00
ed
14ad5916fc freebsd: fancy console listing for fetch 2023-02-19 22:14:21 +00:00
ed
1a46738649 raise edgecases (broken envs on windows) 2023-02-19 22:13:33 +00:00
ed
9e5e3b099a add optional deps to quickstart section 2023-02-19 22:13:02 +00:00
ed
292ce75cc2 return to previous url after login 2023-02-19 19:58:15 +00:00
ed
ce7df7afd4 update platform support listing 2023-02-19 15:16:50 +00:00
ed
e28e793f81 whoops 2023-02-19 15:11:04 +00:00
ed
3e561976db optimize docker build times (884 to 379 sec) 2023-02-19 14:19:35 +00:00
ed
273a4eb7d0 list supported platforms 2023-02-19 01:00:37 +00:00
ed
6175f85bb6 more docker images for arm, arm64, s390x 2023-02-19 00:50:07 +00:00
ed
a80579f63a build docker for x32 aarch64 armhf ppc64 s390x 2023-02-18 23:04:55 +00:00
ed
96d6bcf26e if non-TLS, show warning in the login form 2023-02-17 22:49:03 +00:00
ed
49e8df25ac ie11: support back button 2023-02-17 22:21:13 +00:00
ed
6a05850f21 also undupe search hits from overlapping volumes 2023-02-17 20:48:57 +00:00
ed
5e7c3defe3 update pypi description + docker links 2023-02-16 19:56:57 +00:00
ed
6c0987d4d0 mention --daw 2023-02-15 17:51:20 +00:00
ed
6eba9feffe condense uploads listing on view change 2023-02-14 21:58:15 +00:00
ed
8adfcf5950 win10-based copyparty64.exe 2023-02-14 21:50:14 +00:00
ed
36d6fa512a mention upcoming libopenmpt availability 2023-02-13 06:57:47 +00:00
ed
79b6e9b393 update archpkg to 1.6.5 2023-02-12 15:38:03 +00:00
ed
dc2e2cbd4b v1.6.5 2023-02-12 14:11:45 +00:00
ed
5c12dac30f most ffmpeg builds dont support compressed modules 2023-02-12 14:02:43 +00:00
ed
641929191e fix reading smb shares on windows 2023-02-12 13:59:34 +00:00
ed
617321631a docker: add annotations 2023-02-11 21:10:28 +00:00
ed
ddc0c899f8 update archpkg to 1.6.4 2023-02-11 21:01:45 +00:00
ed
cdec42c1ae v1.6.4 2023-02-11 18:02:05 +00:00
ed
c48f469e39 park all clients waiting for a transcode 2023-02-11 17:23:29 +00:00
ed
44909cc7b8 print ffmpeg download url on windows 2023-02-11 17:22:24 +00:00
ed
8f61e1568c transcode chiptunes to opus;
* new audio/MPT formats: apac bonk dfpwm ilbc it itgz itr itz mo3 mod mptm mt2 okt s3gz s3m s3r s3z xm xmgz xmr xmz xpk
* new image/PIL formats: blp dcx emf eps fits flc fli fpx im j2k j2p psd spi wmf
2023-02-11 11:17:37 +00:00
ed
b7be7a0fd8 mirror docker images to ghcr 2023-02-10 23:40:30 +00:00
ed
1526a4e084 add docker packaging 2023-02-10 23:02:01 +00:00
ed
dbdb9574b1 doc-browser: fix md scaling + download hotkey 2023-02-10 21:33:48 +00:00
ed
853ae6386c config load summary + safer windows defaults 2023-02-10 21:32:42 +00:00
ed
a4b56c74c7 support long filepaths on win7 + misc windows fixes 2023-02-10 18:37:37 +00:00
ed
d7f1951e44 fix --cgen for 'g' perms 2023-02-08 22:38:21 +00:00
ed
7e2ff9825e ensure -e2tsr takes effect by ignoring dhash 2023-02-08 22:33:02 +00:00
ed
9b423396ec better description for anonymous permissions 2023-02-07 20:12:45 +00:00
ed
781146b2fb describe all database volflags in --help-flags 2023-02-07 20:07:06 +00:00
ed
84937d1ce0 add v2 config syntax (#20) 2023-02-07 19:54:08 +00:00
ed
98cce66aa4 cgen: update set of multivalue keys 2023-02-06 07:26:23 +00:00
ed
043c2d4858 cgen: fix permissions listing 2023-02-06 07:23:35 +00:00
ed
99cc434779 add config explainer + generator (#20) 2023-02-05 22:09:17 +00:00
ed
5095d17e81 more interesting config example 2023-02-05 21:32:20 +00:00
ed
87d835ae37 dont allow multiple volumes at the same fs-path 2023-02-05 21:16:36 +00:00
ed
6939ca768b pkg/arch: add prisonparty 2023-02-05 00:07:04 +00:00
ed
e3957e8239 systemd: prisonparty improvements 2023-02-05 00:03:40 +00:00
ed
4ad6e45216 only load *.conf files when including a folder 2023-02-05 00:01:10 +00:00
ed
76e5eeea3f prisonparty: fix reload signal 2023-02-05 00:00:18 +00:00
ed
eb17f57761 pypi fixes 2023-02-04 17:35:20 +00:00
ed
b0db14d8b0 indicate forced-randomized filenames 2023-02-04 15:18:09 +00:00
ed
2b644fa81b don't alias randomized filenames 2023-02-04 13:41:43 +00:00
ed
190ccee820 add optional version number on controlpanel 2023-02-04 13:41:34 +00:00
JeremyStarTM
4e7dd32e78 Added "wow this is better than nextcloud" (#19)
* Added "wow this is better than nextcloud"
2023-02-04 13:00:16 +00:00
john smith
5817fb66ae goddamn tabs 2023-02-03 12:50:17 +01:00
john smith
9cb04eef93 misc PKGBUILD fixes 2023-02-03 12:50:17 +01:00
john smith
0019fe7f04 indent PKGBUILD with spaces instead of tabs 2023-02-03 12:50:17 +01:00
john smith
852c6f2de1 remove unnecessary dependencies from PKGBUILD 2023-02-03 12:50:17 +01:00
john smith
c4191de2e7 improve PKGBUILD based on stuff in https://github.com/9001/copyparty/issues/17 2023-02-03 12:50:17 +01:00
ed
4de61defc9 add a link exporter to the unpost ui too 2023-02-02 22:57:59 +00:00
ed
0aa88590d0 should generalize this somehow 2023-02-02 22:35:13 +00:00
ed
405f3ee5fe adjustable toast position 2023-02-02 22:28:31 +00:00
ed
bc339f774a button to show/copy links for all recent uploads 2023-02-02 22:27:53 +00:00
ed
e67b695b23 show filekeys in recent-uploads ui 2023-02-02 21:22:51 +00:00
ed
4a7633ab99 fix outdated docs mentioned in #17 sry 2023-02-02 20:12:32 +00:00
john smith
c58f2ef61f fix PKGBUILD more 2023-02-02 20:48:20 +01:00
john smith
3866e6a3f2 fix PKGBUILD indentation 2023-02-02 20:30:48 +01:00
john smith
381686fc66 add PKGBUILD 2023-02-02 20:30:48 +01:00
ed
a918c285bf up2k-ui: button to randomize upload filenames 2023-02-01 22:26:18 +00:00
ed
1e20eafbe0 volflag to randomize all upload filenames 2023-02-01 21:58:01 +00:00
ed
39399934ee v1.6.3 2023-01-31 21:03:43 +00:00
ed
b47635150a shove #files aside while prologue sandbox is loading 2023-01-31 21:02:58 +00:00
ed
78d2f69ed5 prisonparty: support opus transcoding on debian
libblas.so and liblapack.so are symlinks into /etc/alternatives
2023-01-31 20:50:59 +00:00
ed
7a98dc669e block alerts in sandbox by default + add translation 2023-01-31 19:16:28 +00:00
ed
2f15bb5085 include filesize in notification 2023-01-31 19:03:13 +00:00
ed
712a578e6c indicate when a readme/logue was hidden 2023-01-31 19:01:24 +00:00
ed
d8dfc4ccb2 support davfs2 LOCK (uploads) + misc windows support + logue filtering 2023-01-31 18:53:38 +00:00
ed
e413007eb0 hide dotfiles from search results by default 2023-01-31 18:13:33 +00:00
ed
6d1d3e48d8 sandbox height didnt account for scrollbars 2023-01-31 17:54:04 +00:00
ed
04966164ce more iframe-resize-concealing tricks 2023-01-31 17:43:21 +00:00
ed
8b62aa7cc7 unlink files before replacing them
to avoid hardlink-related surprises
2023-01-31 17:17:18 +00:00
ed
1088e8c6a5 optimize 2023-01-30 22:53:27 +00:00
ed
8c54c2226f cover up most of the layout jank 2023-01-30 22:52:16 +00:00
ed
f74ac1f18b fix sandbox lag by helping the iframe cache js 2023-01-30 22:36:05 +00:00
ed
25931e62fd and nofollow the basic-browser link too 2023-01-29 22:15:22 +00:00
ed
707a940399 add nofollow to zip links 2023-01-29 22:10:03 +00:00
ed
87ef50d384 doc 2023-01-29 21:23:48 +00:00
ed
dcadf2b11c v1.6.2 2023-01-29 18:42:21 +00:00
ed
37a690a4c3 fix cookie + rproxy oversights 2023-01-29 18:34:48 +00:00
ed
87ad23fb93 docs + chmod 2023-01-29 18:28:53 +00:00
ed
5f54d534e3 hook/notify: add android support 2023-01-29 15:14:22 +00:00
ed
aecae552a4 v1.6.1 2023-01-29 04:41:16 +00:00
ed
eaa6b3d0be mute some startup noise 2023-01-29 04:33:28 +00:00
ed
c2ace91e52 v1.6.0 2023-01-29 02:55:44 +00:00
ed
0bac87c36f make loss of hotkeys more obvious 2023-01-29 01:40:02 +00:00
ed
e650d05939 shovel across most of the env too 2023-01-29 01:19:53 +00:00
ed
85a96e4446 add custom text selection colors because chrome is broken on fedora 2023-01-29 01:03:10 +00:00
ed
2569005139 support sandboxed markdown plugins 2023-01-29 00:57:08 +00:00
ed
c50cb66aef sandboxed other-origin iframes dont cache css 2023-01-28 23:40:25 +00:00
ed
d4c5fca15b sandbox readme.md / prologue / epilogue 2023-01-28 21:24:40 +00:00
ed
75cea4f684 misc 2023-01-28 13:35:49 +00:00
ed
68c6794d33 rewrite other symlinks after the actual move;
fixes volumes where symlinking is disabled
2023-01-28 01:14:29 +00:00
ed
82f98dd54d delete/move is now POST 2023-01-28 01:02:50 +00:00
ed
741d781c18 add cors controls + improve preflight + pw header 2023-01-28 00:59:04 +00:00
ed
0be1e43451 mention mtp in the hooks readme 2023-01-28 00:07:50 +00:00
ed
5366bf22bb describe detected network changes 2023-01-27 23:56:54 +00:00
ed
bcd91b1809 add eventhook examples 2023-01-27 23:55:57 +00:00
ed
9bd5738e6f shorter fallback hostname 2023-01-27 22:19:25 +00:00
ed
bab4aa4c0a mkdir fix 2023-01-27 22:16:10 +00:00
ed
e965b9b9e2 mkdir missing volumes on startup 2023-01-27 21:52:28 +00:00
ed
31101427d3 support downloading blockdev contents 2023-01-27 21:09:57 +00:00
ed
a083dc36ba dont get confused by dangling symlinks at target 2023-01-27 20:27:00 +00:00
ed
9b7b9262aa promote dedup control to volflags 2023-01-25 21:46:15 +00:00
ed
660011fa6e md-editor: make hotkey ^e more global 2023-01-25 20:58:28 +00:00
ed
ead31b6823 add eventhook sanchecks 2023-01-25 20:51:02 +00:00
ed
4310580cd4 separate http/https logins (breaks ie4 / win3.11 login) 2023-01-24 21:23:57 +00:00
ed
b005acbfda enable text selection between breadcrumbs + update vs 2023-01-23 22:44:29 +00:00
ed
460709e6f3 upgrade wget downloader to use event hooks 2023-01-22 23:45:11 +00:00
ed
a8768d05a9 add comparison to similar software 2023-01-22 23:39:19 +00:00
ed
f8e3e87a52 add event hooks 2023-01-22 23:35:31 +00:00
ed
70f1642d0d allow tar/zip download of hidden folders 2023-01-21 20:56:44 +00:00
ed
3fc7561da4 macos 2023-01-21 10:36:31 +00:00
ed
9065226c3d oh great its in lts too 2023-01-21 10:19:04 +00:00
ed
b7e321fa47 cleanup 2023-01-19 22:26:49 +00:00
ed
664665b86b fix some location-rproxy bugs 2023-01-19 22:26:24 +00:00
ed
f4f362b7a4 add --freebind 2023-01-18 21:55:36 +00:00
ed
577d23f460 zeroconf: detect network change and reannounce 2023-01-18 21:27:27 +00:00
ed
504e168486 compensate avg.speed for single-chunk uploads 2023-01-18 19:53:19 +00:00
ed
f2f9640371 workaround firefox layout bug:
three-line toasts get a scrollbar even if it doesn't need one
and the width is not adjusted correctly when that happens
2023-01-18 19:45:04 +00:00
ed
ee46f832b1 u2cli: add option -ns for slow terminals 2023-01-17 23:29:51 +00:00
ed
b0e755d410 give curl colored (yet sortable) plaintext listings 2023-01-17 23:22:43 +00:00
ed
cfd24604d5 ux tweaks 2023-01-17 23:21:31 +00:00
ed
264894e595 add cursed usecases 2023-01-16 21:46:11 +00:00
ed
5bb9f56247 linux 6.1 fixed the 6.0 bugs; remove workarounds 2023-01-16 20:44:57 +00:00
ed
18942ed066 location-based rproxy fixes 2023-01-16 20:09:45 +00:00
ed
85321a6f31 stale tree is better than no tree 2023-01-15 20:54:03 +00:00
ed
baf641396d add optional powered-by footnode 2023-01-15 20:52:38 +00:00
ed
17c91e7014 override bogus mimetypes 2023-01-14 15:10:32 +00:00
ed
010770684d workaround another linux kernel bug 2023-01-14 08:16:15 +00:00
ed
b4c503657b ignore loss of stdout 2023-01-14 07:35:44 +00:00
ed
71bd306268 fix unpost filters with slashes 2023-01-13 17:56:32 +00:00
ed
dd7fab1352 u2cli: properly retry failed handshakes 2023-01-13 07:17:41 +00:00
ed
dacca18863 v1.5.6 2023-01-12 05:15:30 +00:00
ed
53d92cc0a6 faster upload of small files on high-latency nets 2023-01-12 02:53:22 +00:00
ed
434823f6f0 ui: allow changing num.threads in search-only 2023-01-11 16:14:02 +00:00
ed
2cb1f50370 fix dualstack on lo 2023-01-11 16:10:07 +00:00
ed
03f53f6392 gallery: fix js error on digit-keypress viewing pics 2023-01-11 16:08:15 +00:00
ed
a70ecd7af0 v1.5.5 2022-12-30 07:54:34 +00:00
ed
8b81e58205 mdns fixes 2022-12-30 07:47:53 +00:00
ed
4500c04edf v1.5.4 2022-12-29 04:44:15 +00:00
ed
6222ddd720 fix ssdp on dualstack 2022-12-22 16:50:46 +00:00
ed
8a7135cf41 support fat32 time precision, avoiding rescans
posted from warzaw airport otw to japan
2022-12-20 22:19:32 +01:00
ed
b4c7282956 password from file 2022-12-20 13:28:48 +00:00
ed
8491a40a04 Create SECURITY.md 2022-12-19 21:18:27 +00:00
ed
343d38b693 extend image-viewer with modern formats 2022-12-15 22:38:33 +00:00
ed
6cf53d7364 try next thumbnailer if one fails;
libvips assumes imagemagick was built with avif
2022-12-15 22:34:51 +00:00
ed
b070d44de7 libvips logging + raise codec errors 2022-12-15 22:22:04 +00:00
ed
79aa40fdea cosmetic fixes 2022-12-14 23:12:51 +00:00
ed
dcaff2785f v1.5.3 2022-12-13 19:56:34 +00:00
ed
497f5b4307 add hotkey to enable download mode 2022-12-13 19:50:20 +00:00
ed
be32ad0da6 add sfx tester 2022-12-13 19:05:10 +00:00
ed
8ee2bf810b stop battleplan from indirectly crashing the browser 2022-12-13 18:58:16 +00:00
ed
28232656a9 folder-sync optimizations 2022-12-13 18:56:40 +00:00
180 changed files with 14702 additions and 2408 deletions

2
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,2 @@
To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/

14
.gitignore vendored
View File

@@ -21,11 +21,23 @@ copyparty.egg-info/
# winmerge
*.bak
# apple pls
.DS_Store
# derived
copyparty/res/COPYING.txt
copyparty/web/deps/
srv/
scripts/docker/i/
contrib/package/arch/pkg/
contrib/package/arch/src/
# state/logs
up.*.txt
.hist/
.hist/
scripts/docker/*.out
scripts/docker/*.err
/perf.*
# nix build output link
result

1
.vscode/launch.json vendored
View File

@@ -8,6 +8,7 @@
"module": "copyparty",
"console": "integratedTerminal",
"cwd": "${workspaceFolder}",
"justMyCode": false,
"args": [
//"-nw",
"-ed",

10
.vscode/launch.py vendored
View File

@@ -30,9 +30,17 @@ except:
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
sfx = ""
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
sfx = sys.argv[1]
sys.argv = [sys.argv[0]] + sys.argv[2:]
argv += sys.argv[1:]
if re.search(" -j ?[0-9]", " ".join(argv)):
if sfx:
argv = [sys.executable, sfx] + argv
sp.check_call(argv)
elif re.search(" -j ?[0-9]", " ".join(argv)):
argv = [sys.executable, "-m", "copyparty"] + argv
sp.check_call(argv)
else:

31
.vscode/settings.json vendored
View File

@@ -35,35 +35,22 @@
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true,
"python.linting.mypyArgs": [
"--ignore-missing-imports",
"--follow-imports=silent",
"--show-column-numbers",
"--strict"
],
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
],
"python.linting.banditArgs": [
"--ignore=B104"
],
"python.linting.pylintArgs": [
"--disable=missing-module-docstring",
"--disable=missing-class-docstring",
"--disable=missing-function-docstring",
"--disable=wrong-import-position",
"--disable=raise-missing-from",
"--disable=bare-except",
"--disable=invalid-name",
"--disable=line-too-long",
"--disable=consider-using-f-string"
"--ignore=B104,B110,B112"
],
// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "black",
"python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"editor.formatOnSave": true,
"[html]": {
"editor.formatOnSave": false,
"editor.autoIndent": "keep",
},
"[css]": {
"editor.formatOnSave": false,
@@ -71,10 +58,6 @@
"files.associations": {
"*.makefile": "makefile"
},
"python.formatting.blackArgs": [
"-t",
"py27"
],
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}

698
README.md

File diff suppressed because it is too large Load Diff

9
SECURITY.md Normal file
View File

@@ -0,0 +1,9 @@
# Security Policy
if you hit something extra juicy pls let me know on either of the following
* email -- `copyparty@ocv.ze` except `ze` should be `me`
* [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space`
* [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated
* [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet)
no bug bounties sorry! all i can offer is greetz in the release notes

View File

@@ -1,4 +1,4 @@
# [`up2k.py`](up2k.py)
# [`u2c.py`](u2c.py)
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* sync local folder to server

35
bin/handlers/README.md Normal file
View File

@@ -0,0 +1,35 @@
replace the standard 404 / 403 responses with plugins
# usage
load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py`
# api
each plugin must define a `main()` which takes 3 arguments;
* `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself)
* `vn` is the VFS which overlaps with the requested URL, and
* `rem` is the URL remainder below the VFS mountpoint
* so `vn.vpath + rem` == `cli.vpath` == original request
# examples
## on404
* [sorry.py](answer.py) replies with a custom message instead of the usual 404
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
* [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff
## on403
* [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4
# notes
* on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404)

36
bin/handlers/caching-proxy.py Executable file
View File

@@ -0,0 +1,36 @@
# assume each requested file exists on another webserver and
# download + mirror them as they're requested
# (basically pretend we're warnish)
import os
import requests
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from copyparty.httpcli import HttpCli
def main(cli: "HttpCli", vn, rem):
url = "https://mirrors.edge.kernel.org/alpine/" + rem
abspath = os.path.join(vn.realpath, rem)
# sneaky trick to preserve a requests-session between downloads
# so it doesn't have to spend ages reopening https connections;
# luckily we can stash it inside the copyparty client session,
# name just has to be definitely unused so "hacapo_req_s" it is
req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
setattr(cli.conn, "hacapo_req_s", req_s)
try:
os.makedirs(os.path.dirname(abspath), exist_ok=True)
with req_s.get(url, stream=True, timeout=69) as r:
r.raise_for_status()
with open(abspath, "wb", 64 * 1024) as f:
for buf in r.iter_content(chunk_size=64 * 1024):
f.write(buf)
except:
os.unlink(abspath)
return "false"
return "retry"

6
bin/handlers/ip-ok.py Executable file
View File

@@ -0,0 +1,6 @@
# disable permission checks and allow access if client-ip is 1.2.3.4
def main(cli, vn, rem):
if cli.ip == "1.2.3.4":
return "allow"

11
bin/handlers/never404.py Executable file
View File

@@ -0,0 +1,11 @@
# create a dummy file and let copyparty return it
def main(cli, vn, rem):
print("hello", cli.ip)
abspath = vn.canonical(rem)
with open(abspath, "wb") as f:
f.write(b"404? not on MY watch!")
return "retry"

16
bin/handlers/nooo.py Executable file
View File

@@ -0,0 +1,16 @@
# reply with an endless "noooooooooooooooooooooooo"
def say_no():
yield b"n"
while True:
yield b"o" * 4096
def main(cli, vn, rem):
cli.send_headers(None, 404, "text/plain")
for chunk in say_no():
cli.s.sendall(chunk)
return "false"

7
bin/handlers/sorry.py Executable file
View File

@@ -0,0 +1,7 @@
# sends a custom response instead of the usual 404
def main(cli, vn, rem):
msg = f"sorry {cli.ip} but {cli.vpath} doesn't exist"
return str(cli.reply(msg.encode("utf-8"), 404, "text/plain"))

29
bin/hooks/README.md Normal file
View File

@@ -0,0 +1,29 @@
standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...)
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
# after upload
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
* [notify2.py](notify2.py) uses the json API to show more context
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
# upload batches
these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root
# before upload
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
# on message
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty

68
bin/hooks/discord-announce.py Executable file
View File

@@ -0,0 +1,68 @@
#!/usr/bin/env python3
import sys
import json
import requests
from copyparty.util import humansize, quotep
_ = r"""
announces a new upload on discord
example usage as global config:
--xau f,t5,j,bin/hooks/discord-announce.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
parameters explained,
xbu = execute after upload
f = fork; don't wait for it to finish
t5 = timeout if it's still running after 5 sec
j = provide upload information as json; not just the filename
replace "xau" with "xbu" to announce Before upload starts instead of After completion
# how to discord:
first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks
then use this to design your message: https://discohook.org/
"""
def main():
WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5"
# read info from copyparty
inf = json.loads(sys.argv[1])
vpath = inf["vp"]
filename = vpath.split("/")[-1]
url = f"https://{inf['host']}/{quotep(vpath)}"
# compose the message to discord
j = {
"title": filename,
"url": url,
"description": url.rsplit("/", 1)[0],
"color": 0x449900,
"fields": [
{"name": "Size", "value": humansize(inf["sz"])},
{"name": "User", "value": inf["user"]},
{"name": "IP", "value": inf["ip"]},
],
}
for v in j["fields"]:
v["inline"] = True
r = requests.post(WEBHOOK, json={"embeds": [j]})
print(f"discord: {r}\n", end="")
if __name__ == "__main__":
main()

72
bin/hooks/image-noexif.py Executable file
View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
import os
import sys
import subprocess as sp
_ = r"""
remove exif tags from uploaded images; the eventhook edition of
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py
dependencies:
exiftool / perl-Image-ExifTool
being an upload hook, this will take effect after upload completion
but before copyparty has hashed/indexed the file, which means that
copyparty will never index the original file, so deduplication will
not work as expected... which is mostly OK but ehhh
note: modifies the file in-place, so don't set the `f` (fork) flag
example usages; either as global config (all volumes) or as volflag:
--xau bin/hooks/image-noexif.py
-v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
explained:
share fs-path srv/inc at /inc (readable by all, read-write for user ed)
running this xau (execute-after-upload) plugin for all uploaded files
"""
# filetypes to process; ignores everything else
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p.encode("utf-8")
def main():
    """Strip exif/iptc/xmp metadata from the uploaded image, in-place."""
    path = sys.argv[1]
    ext = path.lower().split(".")[-1]
    if ext not in EXTS:
        return

    dirname, basename = os.path.split(path)
    os.chdir(dirname)

    args = [
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-overwrite_original",
        b"--",
        fsenc(basename),
    ]
    sp.check_output(args)
    print("image-noexif: stripped")


if __name__ == "__main__":
    # best-effort: never let a metadata-stripping failure surface upstream
    try:
        main()
    except:
        pass

115
bin/hooks/msg-log.py Executable file
View File

@@ -0,0 +1,115 @@
#!/usr/bin/env python
# coding: utf-8
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
from __future__ import print_function, unicode_literals
import json
import os
import sys
import time
from datetime import datetime
"""
use copyparty as a dumb messaging server / guestbook thing;
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
Sample usage:
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
Where:
xm = execute on message-to-server-log
j = provide message information as json; not just the text - this script REQUIRES json
t10 = timeout and kill the hook after 10 secs
"""
# output filename
FILENAME = os.environ.get("COPYPARTY_MESSAGE_FILENAME", "") or "README.md"
# set True to write in descending order (newest message at top of file);
# note that this becomes very slow/expensive as the file gets bigger
DESCENDING = True
# the message template; the following parameters are provided by copyparty and can be referenced below:
# 'ap' = absolute filesystem path where the message was posted
# 'vp' = virtual path (URL 'path') where the message was posted
# 'mt' = 'at' = unix-timestamp when the message was posted
# 'datetime' = ISO-8601 time when the message was posted
# 'sz' = message size in bytes
# 'host' = the server hostname which the user was accessing (URL 'host')
# 'user' = username (if logged in), otherwise '*'
# 'txt' = the message text itself
# (uncomment the print(msg_info) to see if additional information has been introduced by copyparty since this was written)
TEMPLATE = """
🕒 %(datetime)s, 👤 %(user)s @ %(ip)s
%(txt)s
"""
def write_ascending(filepath, msg_text):
    """Append the message to the end of the logfile (oldest entries first)."""
    with open(filepath, "a", encoding="utf-8", errors="replace") as f:
        f.write(msg_text)
def write_descending(filepath, msg_text):
    """Prepend the message to the logfile (newest entry at the top).

    Rewrites the whole file: the new message first, then the previous
    contents copied over in chunks. A lock-directory serializes
    concurrent writers; exits the process if the lock cannot be
    acquired within ~1.6 seconds.
    """
    lockpath = filepath + ".lock"
    # os.mkdir is atomic, so it doubles as a cross-process lock
    got_it = False
    for _ in range(16):
        try:
            os.mkdir(lockpath)
            got_it = True
            break
        except:
            time.sleep(0.1)
            continue

    if not got_it:
        return sys.exit(1)

    try:
        oldpath = filepath + ".old"
        os.rename(filepath, oldpath)
        with open(oldpath, "r", encoding="utf-8", errors="replace") as infile, open(
            filepath, "w", encoding="utf-8", errors="replace"
        ) as outfile:
            outfile.write(msg_text)  # new message goes on top
            while True:
                buf = infile.read(4096)
                if not buf:
                    break
                outfile.write(buf)
    finally:
        try:
            os.unlink(oldpath)
        except:
            pass
        # release the lock in the finally-block as well, so an exception
        # during the rewrite cannot leak the lock-dir and wedge every
        # subsequent writer
        os.rmdir(lockpath)
def main(argv=None):
    """Render the message received from copyparty and log it to disk."""
    argv = sys.argv if argv is None else argv

    msg_info = json.loads(sys.argv[1])
    # print(msg_info)

    # copyparty hands us a unix timestamp; add a human-readable variant
    # for the template
    stamp = datetime.utcfromtimestamp(msg_info["at"])
    msg_info["datetime"] = stamp.strftime("%Y-%m-%d, %H:%M:%S")
    msg_text = TEMPLATE % msg_info

    filepath = os.path.join(msg_info["ap"], FILENAME)

    # prepend only makes sense once the file exists; otherwise append
    if DESCENDING and os.path.exists(filepath):
        writer = write_descending
    else:
        writer = write_ascending
    writer(filepath, msg_text)

    print(msg_text)


if __name__ == "__main__":
    main()

66
bin/hooks/notify.py Executable file
View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
import os
import sys
import subprocess as sp
from plyer import notification
_ = r"""
show os notification on upload; works on windows, linux, macos, android
dependencies:
windows: python3 -m pip install --user -U plyer
linux: python3 -m pip install --user -U plyer
macos: python3 -m pip install --user -U plyer pyobjus
android: just termux and termux-api
example usages; either as global config (all volumes) or as volflag:
--xau f,bin/hooks/notify.py
-v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
parameters explained,
xau = execute after upload
f = fork so it doesn't block uploads
"""
try:
    from copyparty.util import humansize
except ImportError:
    # standalone fallback: just show the raw byte count.
    # (was a bare `except:`, which could also mask unrelated errors
    # raised while importing copyparty itself)
    def humansize(n):
        return n
def main():
    """Show a desktop (or termux) notification for the uploaded file."""
    path = sys.argv[1]
    folder, name = os.path.split(path)

    try:
        size = humansize(os.path.getsize(path))
    except:
        size = "?"

    msg = "{} ({})\n📁 {}".format(name, size, folder)
    title = "File received"

    # android: the termux api is used instead of plyer
    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()

72
bin/hooks/notify2.py Executable file
View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
import json
import os
import sys
import subprocess as sp
from datetime import datetime
from plyer import notification
_ = r"""
same as notify.py but with additional info (uploader, ...)
and also supports --xm (notify on 📟 message)
example usages; either as global config (all volumes) or as volflag:
--xm f,j,bin/hooks/notify2.py
--xau f,j,bin/hooks/notify2.py
-v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
-v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads / msgs with the params listed below)
parameters explained,
xau = execute after upload
f = fork so it doesn't block uploads
j = provide json instead of filepath list
"""
try:
    from copyparty.util import humansize
except ImportError:
    # standalone fallback: just show the raw byte count.
    # (was a bare `except:`, which could also mask unrelated errors
    # raised while importing copyparty itself)
    def humansize(n):
        return n
def main():
    """Notify about an upload or a 📟 message, including uploader details."""
    inf = json.loads(sys.argv[1])
    fp = inf["ap"]
    sz = humansize(inf["sz"])
    dp, fn = os.path.split(fp)
    mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")

    if inf.get("txt"):
        # message event (--xm): show the message text itself
        msg = inf["txt"]
        title = "Message received"
        icon = "mail-unread-symbolic" if sys.platform == "linux" else ""
    else:
        # upload event (--xau): show filename, size, and folder
        msg = f"{fn} ({sz})\n📁 {dp}"
        title = "File received"
        icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""

    msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"

    # android: the termux api is used instead of plyer
    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()

35
bin/hooks/reject-extension.py Executable file
View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python3
import sys
_ = r"""
reject file uploads by file extension
example usage as global config:
--xbu c,bin/hooks/reject-extension.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
parameters explained,
xbu = execute before upload
c = check result, reject upload if error
"""
def main():
    """Reject the upload (exit 1) if its extension is blacklisted.

    argv[1] is the upload's filesystem path; exit status 0 accepts it.
    """
    # known-dangerous extensions; compare case-insensitively so that
    # e.g. "payload.EXE" cannot slip past the filter (the original
    # comparison was case-sensitive)
    bad = {"exe", "scr", "com", "pif", "bat", "ps1", "jar", "msi"}
    ext = sys.argv[1].rsplit(".", 1)[-1].lower()
    sys.exit(1 if ext in bad else 0)
if __name__ == "__main__":
main()

44
bin/hooks/reject-mimetype.py Executable file
View File

@@ -0,0 +1,44 @@
#!/usr/bin/env python3
import sys
import magic
_ = r"""
reject file uploads by mimetype
dependencies (linux, macos):
python3 -m pip install --user -U python-magic
dependencies (windows):
python3 -m pip install --user -U python-magic-bin
example usage as global config:
--xau c,bin/hooks/reject-mimetype.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
parameters explained,
xau = execute after upload
c = check result, reject upload if error
"""
def main():
    """Print the detected mimetype; exit 1 (reject) unless it is allowed."""
    allowed = ["image/jpeg", "image/png"]
    # sniff the actual content; do not trust the file extension
    detected = magic.from_file(sys.argv[1], mime=True)
    print(detected)
    sys.exit(0 if detected in allowed else 1)


if __name__ == "__main__":
    main()

64
bin/hooks/wget.py Executable file
View File

@@ -0,0 +1,64 @@
#!/usr/bin/env python3
import os
import sys
import json
import subprocess as sp
_ = r"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)
example usage as global config:
--xm f,j,t3600,bin/hooks/wget.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all messages with the params listed below)
parameters explained,
xm = execute on message-to-server-log
f = fork so it doesn't block uploads
j = provide message information as json; not just the text
c3 = mute all output
t3600 = timeout and kill download after 1 hour
"""
def main():
    """Download the URL that was posted to the message log, into the volume.

    Reads the message metadata (json) from argv[1], drops a visible
    "-- DOWNLOADING <name>" marker file while wget runs, and replaces it
    with a failure marker if the download dies.
    """
    inf = json.loads(sys.argv[1])
    url = inf["txt"]
    if "://" not in url:
        url = "https://" + url

    # refuse schemes wget might interpret as local files or worse
    proto = url.split("://")[0].lower()
    if proto not in ("http", "https", "ftp", "ftps"):
        raise Exception("bad proto {}".format(proto))

    os.chdir(inf["ap"])

    name = url.split("?")[0].split("/")[-1]
    tfn = "-- DOWNLOADING " + name
    print(f"{tfn}\n", end="")
    open(tfn, "wb").close()  # progress marker, visible in the folder listing

    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        sp.check_call(cmd)
    except:
        # fix: the failure-marker filename used to read "DONWLOAD"
        t = "-- FAILED TO DOWNLOAD " + name
        print(f"{t}\n", end="")
        open(t, "wb").close()

    os.unlink(tfn)


if __name__ == "__main__":
    main()

108
bin/hooks/xiu-sha.py Executable file
View File

@@ -0,0 +1,108 @@
#!/usr/bin/env python3
import hashlib
import json
import sys
from datetime import datetime
_ = r"""
this hook will produce a single sha512 file which
covers all recent uploads (plus metadata comments)
use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)
example usage as global config:
--xiu i5,j,bin/hooks/xiu-sha.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on batches of uploads with the params listed below)
parameters explained,
xiu = execute after uploads...
i5 = ...after volume has been idle for 5sec
j = provide json instead of filepath list
note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""
try:
    from copyparty.util import fsenc
except ImportError:
    # standalone fallback: open() accepts str paths, so pass through as-is.
    # (was a bare `except:`, which could also mask unrelated errors
    # raised while importing copyparty itself)
    def fsenc(p):
        return p
def humantime(ts):
    """Format a unix timestamp as a UTC 'YYYY-MM-DD HH:MM:SS' string."""
    # datetime.utcfromtimestamp is deprecated since python 3.12; an aware
    # UTC datetime produces the exact same strftime output
    from datetime import timezone

    return datetime.fromtimestamp(ts, timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
def find_files_root(inf):
    """Length of the common directory-prefix of all uploaded files, +1
    to skip past the trailing slash; used to derive relative paths.

    NOTE(review): currently unused by main() (the volume-root variant is
    used instead). For a single-file batch the loop never runs and this
    returns 9001, which would slice paths nonsensically -- confirm and
    handle that case before re-enabling this variant.
    """
    di = 9000
    for f1, f2 in zip(inf, inf[1:]):
        # parent-directory paths of each adjacent pair, with windows
        # separators normalized
        p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
        p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
        di = min(len(p1), len(p2), di)
        # shrink to the first differing character position, if any
        di = next((i for i in range(di) if p1[i] != p2[i]), di)
    return di + 1
def find_vol_root(inf):
    """Length of the filesystem prefix that precedes the volume root.

    Strips the volume-relative path (vp) off the end of the absolute
    path (ap) of the first upload; what remains is the volume's prefix.
    """
    first = inf[0]
    prefix = first["ap"][: -len(first["vp"])]
    return len(prefix)
def main():
    """Read the batch of upload-infos (json) from stdin, sha512 every
    file, and write a checksum manifest at the volume root."""
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    # root directory (where to put the sha512 file);
    # di = find_files_root(inf) # next to the file closest to volume root
    di = find_vol_root(inf) # top of the entire volume

    ret = []
    total_sz = 0
    for md in inf:
        ap = md["ap"]
        rp = ap[di:]  # path relative to the chosen root
        total_sz += md["sz"]
        fsize = "{:,}".format(md["sz"])
        mtime = humantime(md["mt"])  # file's last-modified time
        up_ts = humantime(md["at"])  # time of upload

        # hash in 512 KiB chunks to keep memory flat on huge files
        h = hashlib.sha512()
        with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
            while True:
                buf = f.read(512 * 1024)
                if not buf:
                    break
                h.update(buf)

        cksum = h.hexdigest()
        # one metadata comment-line, then the conventional "<hash> *<path>"
        meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
        ret.append("# {}\n{} *{}".format(meta, cksum, rp))

    ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
    ret.append("")
    # NOTE(review): datetime.utcnow() is deprecated since python 3.12
    ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
    fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
    with open(fsenc(fp), "wb") as f:
        f.write("\n".join(ret).encode("utf-8", "replace"))

    print("wrote checksums to {}".format(fp))


if __name__ == "__main__":
    main()

50
bin/hooks/xiu.py Executable file
View File

@@ -0,0 +1,50 @@
#!/usr/bin/env python3
import json
import sys
_ = r"""
this hook prints absolute filepaths + total size
use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)
example usage as global config:
--xiu i1,j,bin/hooks/xiu.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on batches of uploads with the params listed below)
parameters explained,
xiu = execute after uploads...
i1 = ...after volume has been idle for 1sec
j = provide json instead of filepath list
note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""
def main():
    """Print each uploaded file's size and path, then a summary line."""
    raw = sys.stdin.buffer.read().decode("utf-8", "replace")
    uploads = json.loads(raw)

    total = 0
    for item in uploads:
        size = item["sz"]
        total += size
        print("{:9} {}".format(size, item["ap"]))

    print("{} files, {} bytes total".format(len(uploads), total))
if __name__ == "__main__":
main()

View File

@@ -1,5 +1,9 @@
standalone programs which take an audio file as argument
you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)
----
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
some of these rely on libraries which are not MIT-compatible
@@ -17,6 +21,16 @@ these do not have any problematic dependencies at all:
* [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
* also available as an [event hook](../hooks/wget.py)
## dangerous plugins
plugins in this section should only be used with appropriate precautions:
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
# dependencies
@@ -26,7 +40,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
* from pypy: `keyfinder vamp`
* from pip: `keyfinder vamp`
# usage from copyparty

View File

@@ -16,6 +16,10 @@ dep: ffmpeg
"""
# save beat timestamps to ".beats/filename.txt"
SAVE = False
def det(tf):
# fmt: off
sp.check_call([
@@ -23,12 +27,11 @@ def det(tf):
b"-nostdin",
b"-hide_banner",
b"-v", b"fatal",
b"-ss", b"13",
b"-y", b"-i", fsenc(sys.argv[1]),
b"-map", b"0:a:0",
b"-ac", b"1",
b"-ar", b"22050",
b"-t", b"300",
b"-t", b"360",
b"-f", b"f32le",
fsenc(tf)
])
@@ -47,10 +50,29 @@ def det(tf):
print(c["list"][0]["label"].split(" ")[0])
return
# throws if detection failed:
bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
bpm = round(60 * ((len(cl) - 1) / bpm), 2)
print(f"{bpm:.2f}")
# throws if detection failed:
beats = [float(x["timestamp"]) for x in cl]
bds = [b - a for a, b in zip(beats, beats[1:])]
bds.sort()
n0 = int(len(bds) * 0.2)
n1 = int(len(bds) * 0.75) + 1
bds = bds[n0:n1]
bpm = sum(bds)
bpm = round(60 * (len(bds) / bpm), 2)
print(f"{bpm:.2f}")
if SAVE:
fdir, fname = os.path.split(sys.argv[1])
bdir = os.path.join(fdir, ".beats")
try:
os.mkdir(fsenc(bdir))
except:
pass
fp = os.path.join(bdir, fname) + ".txt"
with open(fsenc(fp), "wb") as f:
txt = "\n".join([f"{x:.2f}" for x in beats])
f.write(txt.encode("utf-8"))
def main():

View File

@@ -61,7 +61,7 @@ def main():
os.chdir(cwd)
f1 = fsenc(fn)
f2 = os.path.join(b"noexif", f1)
f2 = fsenc(os.path.join(b"noexif", fn))
cmd = [
b"exiftool",
b"-exif:all=",

View File

@@ -7,6 +7,7 @@ set -e
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins
# linux/arch: requires gcc make cmake patchelf python3 ffmpeg fftw libsndfile python-{numpy,wheel,pip,setuptools}
# win64: requires msys2-mingw64 environment
# macos: requires macports
#
@@ -57,6 +58,7 @@ hash -r
command -v python3 && pybin=python3 || pybin=python
}
$pybin -c 'import numpy' ||
$pybin -m pip install --user numpy
@@ -224,17 +226,18 @@ install_vamp() {
$pybin -m pip install --user vamp
cd "$td"
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
sha512sum -c <(
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
) <vamp-plugin-sdk-2.9.0.tar.gz
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
) <vamp-plugin-sdk-2.10.0.tar.gz
tar -xf vamp-plugin-sdk-2.10.0.tar.gz
rm -- *.tar.gz
ls -al
cd vamp-plugin-sdk-*
./configure --prefix=$HOME/pe/vamp-sdk
printf '%s\n' "int main(int argc, char **argv) { return 0; }" > host/vamp-simple-host.cpp
./configure --disable-programs --prefix=$HOME/pe/vamp-sdk
make -j1 install
}
@@ -249,8 +252,9 @@ install_vamp() {
rm -- *.tar.gz
cd beatroot-vamp-v1.0
[ -e ~/pe/vamp-sdk ] &&
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux ||
sed -ri 's`^(CFLAGS :=.*)`\1 -I/usr/include/vamp-sdk`' Makefile.linux
make -f Makefile.linux -j4 LDFLAGS="-L$HOME/pe/vamp-sdk/lib -L/usr/lib64"
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
mkdir ~/vamp
cp -pv beatroot-vamp.* ~/vamp/

View File

@@ -1,6 +1,11 @@
#!/usr/bin/env python3
"""
WARNING -- DANGEROUS PLUGIN --
if someone is able to upload files to a copyparty which is
running this plugin, they can execute malware on your machine
so please keep this on a LAN and protect it with a password
use copyparty as a chromecast replacement:
* post a URL and it will open in the default browser
* upload a file and it will open in the default application
@@ -10,16 +15,17 @@ use copyparty as a chromecast replacement:
the android app makes it a breeze to post pics and links:
https://github.com/9001/party-up/releases
(iOS devices have to rely on the web-UI)
goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
iOS devices can use the web-UI or the shortcut instead:
https://github.com/9001/copyparty#ios-shortcuts
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
example copyparty config to use this;
lets the user "kevin" with password "hunter2" use this plugin:
-a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin
apt install xdotool libnotify-bin mpv
python3 -m pip install --user -U streamlink yt-dlp
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
and you probably want `twitter-unmute.user.js` from the res folder
@@ -63,8 +69,10 @@ set -e
EOF
chmod 755 /usr/local/bin/chromium-browser
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
# start the server
# note 1: replace hunter2 with a better password to access the server
# note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff
cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
"""
@@ -72,11 +80,23 @@ cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mt
import os
import sys
import time
import shutil
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote
have_mpv = shutil.which("mpv")
have_vlc = shutil.which("vlc")
def main():
if len(sys.argv) > 2 and sys.argv[1] == "x":
# invoked on commandline for testing;
# python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ
txt = " ".join(sys.argv[2:])
txt = quote(txt.replace(" ", "+"))
return open_post(txt.encode("utf-8"))
fp = os.path.abspath(sys.argv[1])
with open(fp, "rb") as f:
txt = f.read(4096)
@@ -92,7 +112,7 @@ def open_post(txt):
try:
k, v = txt.split(" ", 1)
except:
open_url(txt)
return open_url(txt)
if k == "key":
sp.call(["xdotool", "key"] + v.split(" "))
@@ -128,6 +148,17 @@ def open_url(txt):
# else:
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused window
# mpv is probably smart enough to use streamlink automatically
if try_mpv(txt):
print("mpv got it")
return
# or maybe streamlink would be a good choice to open this
if try_streamlink(txt):
print("streamlink got it")
return
# nope,
# close any error messages:
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
@@ -136,4 +167,39 @@ def open_url(txt):
sp.call(["xdg-open", txt])
def try_mpv(url):
    """Attempt fullscreen playback with mpv; True if it (probably) worked."""
    started = time.time()
    try:
        print("trying mpv...")
        sp.check_call(["mpv", "--fs", url])
        return True
    except:
        # mpv exited nonzero (or is missing) -- but if it ran for over
        # 15 seconds, assume the stream simply ended and call it a success
        return time.time() - started > 15
def try_streamlink(url):
    """Attempt playback through streamlink, preferring mpv as the player."""
    started = time.time()
    try:
        import streamlink

        print("trying streamlink...")
        # raises if streamlink has no plugin for this url
        streamlink.Streamlink().resolve_url(url)

        if have_mpv:
            args = "-m streamlink -p mpv -a --fs"
        else:
            args = "-m streamlink"

        cmd = [sys.executable] + args.split() + [url, "best"]
        # restart the clock: only the playback itself should count
        started = time.time()
        sp.check_call(cmd)
        return True
    except:
        # playback failed -- but if it ran for over 10 seconds, assume
        # the stream simply ended and call it a success
        return time.time() - started > 10
main()

View File

@@ -1,6 +1,11 @@
#!/usr/bin/env python3
"""
DEPRECATED -- replaced by event hooks;
https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py
---
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)
@@ -60,6 +65,10 @@ def main():
if "://" not in url:
url = "https://" + url
proto = url.split("://")[0].lower()
if proto not in ("http", "https", "ftp", "ftps"):
raise Exception("bad proto {}".format(proto))
os.chdir(fdir)
name = url.split("?")[0].split("/")[-1]

View File

@@ -997,7 +997,7 @@ def main():
ap.add_argument(
"-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
)
ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-d", action="store_true", help="enable debug")
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
ap.add_argument("-td", action="store_true", help="disable certificate check")

View File

@@ -4,8 +4,9 @@ set -e
# runs copyparty (or any other program really) in a chroot
#
# assumption: these directories, and everything within, are owned by root
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
[ -e $v ] && sysdirs+=($v)
done
# error-handler
help() { cat <<'EOF'
@@ -38,7 +39,7 @@ while true; do
v="$1"; shift
[ "$v" = -- ] && break # end of volumes
[ "$#" -eq 0 ] && break # invalid usage
vols+=( "$(realpath "$v")" )
vols+=( "$(realpath "$v" || echo "$v")" )
done
pybin="$1"; shift
pybin="$(command -v "$pybin")"
@@ -82,7 +83,7 @@ jail="${jail%/}"
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
while IFS= read -r v; do
[ -e "$v" ] || {
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
continue
}
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
@@ -97,9 +98,11 @@ done
cln() {
rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
wait -f -p rv $p || true
cd /
echo "stopping chroot..."
lsof "$jail" | grep -F "$jail" &&
echo "chroot is in use; will not unmount" ||
{
mount | grep -F " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
@@ -115,6 +118,15 @@ mkdir -p "$jail/tmp"
chmod 777 "$jail/tmp"
# create a dev
(cd $jail; mkdir -p dev; cd dev
[ -e null ] || mknod -m 666 null c 1 3
[ -e zero ] || mknod -m 666 zero c 1 5
[ -e random ] || mknod -m 444 random c 1 8
[ -e urandom ] || mknod -m 444 urandom c 1 9
)
# run copyparty
export HOME=$(getent passwd $uid | cut -d: -f6)
export USER=$(getent passwd $uid | cut -d: -f1)
@@ -124,5 +136,6 @@ export LOGNAME="$USER"
#echo "cpp [$cpp]"
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
p=$!
trap 'kill -USR1 $p' USR1
trap 'kill $p' INT TERM
wait

View File

@@ -1,16 +1,20 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
S_VERSION = "1.10"
S_BUILD_DT = "2023-08-15"
"""
up2k.py: upload to copyparty
2022-12-12, v1.0, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
u2c.py: upload to copyparty
2021, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.12
- if something breaks just try again and it'll autoresume
"""
import re
import os
import sys
import stat
@@ -18,12 +22,15 @@ import math
import time
import atexit
import signal
import socket
import base64
import hashlib
import platform
import threading
import datetime
EXE = sys.executable.endswith("exe")
try:
import argparse
except:
@@ -33,8 +40,10 @@ except:
try:
import requests
except ImportError:
if sys.version_info > (2, 7):
except ImportError as ex:
if EXE:
raise
elif sys.version_info > (2, 7):
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
else:
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
@@ -42,7 +51,7 @@ except ImportError:
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
print(m.format(sys.executable))
print(m.format(sys.executable), "\nspecifically,", ex)
sys.exit(1)
@@ -51,6 +60,7 @@ PY2 = sys.version_info < (3,)
if PY2:
from Queue import Queue
from urllib import quote, unquote
from urlparse import urlsplit, urlunsplit
sys.dont_write_bytecode = True
bytes = str
@@ -58,6 +68,7 @@ else:
from queue import Queue
from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote_from_bytes as quote
from urllib.parse import urlsplit, urlunsplit
unicode = str
@@ -245,7 +256,13 @@ def eprint(*a, **ka):
def flushing_print(*a, **ka):
_print(*a, **ka)
try:
_print(*a, **ka)
except:
v = " ".join(str(x) for x in a)
v = v.encode("ascii", "replace").decode("ascii")
_print(v, **ka)
if "flush" not in ka:
sys.stdout.flush()
@@ -324,6 +341,32 @@ class CTermsize(object):
ss = CTermsize()
def undns(url):
    """Resolve the hostname in `url` to an IP address and return the
    rewritten url, preserving port and userinfo.

    NOTE(review): presumably done so every request in an upload session
    hits the same server even if DNS answers rotate -- confirm with the
    caller before relying on that.
    """
    usp = urlsplit(url)
    hn = usp.hostname
    gai = None
    eprint("resolving host [{0}] ...".format(hn), end="")
    try:
        gai = socket.getaddrinfo(hn, None)
        hn = gai[0][4][0]  # address of the first result
    except KeyboardInterrupt:
        raise
    except:
        t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
        eprint(t.format(repr(gai)))
        raise

    if usp.port:
        hn = "{0}:{1}".format(hn, usp.port)
    if usp.username or usp.password:
        hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)

    usp = usp._replace(netloc=hn)
    url = urlunsplit(usp)
    eprint(" {0}".format(url))
    return url
def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
with os.scandir(top) as dh:
@@ -369,9 +412,29 @@ def walkdir(err, top, seen):
err.append((ap, str(ex)))
def walkdirs(err, tops):
def walkdirs(err, tops, excl):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
if not VT100:
excl = excl.replace("/", r"\\")
za = []
for td in tops:
try:
ap = os.path.abspath(os.path.realpath(td))
if td[-1:] in (b"\\", b"/"):
ap += sep
except:
# maybe cpython #88013 (ok)
ap = td
za.append(ap)
za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
za = [x.replace(b"/", b"\\") for x in za]
tops = za
ptn = re.compile(excl.encode("utf-8") or b"\n")
for top in tops:
isdir = os.path.isdir(top)
if top[-1:] == sep:
@@ -384,6 +447,8 @@ def walkdirs(err, tops):
if isdir:
for ap, inf in walkdir(err, top, []):
if ptn.match(ap):
continue
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
@@ -506,25 +571,35 @@ def handshake(ar, file, search):
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
while True:
sc = 600
txt = ""
try:
r = req_ses.post(url, headers=headers, json=req)
break
sc = r.status_code
txt = r.text
if sc < 400:
break
raise Exception("http {0}: {1}".format(sc, txt))
except Exception as ex:
em = str(ex).split("SSLError(")[-1]
em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
if (
sc == 422
or "<pre>partial upload exists at a different" in txt
or "<pre>source file busy; please try again" in txt
):
file.recheck = True
return [], False
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
return [], False
elif "<pre>you don't have " in txt:
raise
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
time.sleep(1)
sc = r.status_code
if sc >= 400:
txt = r.text
if sc == 422 or "<pre>partial upload exists at a different" in txt:
file.recheck = True
return [], False
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
return [], False
raise Exception("http {0}: {1}".format(sc, txt))
try:
r = r.json()
except:
@@ -546,8 +621,8 @@ def handshake(ar, file, search):
return r["hash"], r["sprs"]
def upload(file, cid, pw):
# type: (File, str, str) -> None
def upload(file, cid, pw, stats):
# type: (File, str, str, str) -> None
"""upload one specific chunk, `cid` (a chunk-hash)"""
headers = {
@@ -555,6 +630,10 @@ def upload(file, cid, pw):
"X-Up2k-Wark": file.wark,
"Content-Type": "application/octet-stream",
}
if stats:
headers["X-Up2k-Stat"] = stats
if pw:
headers["Cookie"] = "=".join(["cppwd", pw])
@@ -575,13 +654,13 @@ class Ctl(object):
(hashing, handshakes, uploads)
"""
def __init__(self, ar):
def _scan(self):
ar = self.ar
eprint("\nscanning {0} locations\n".format(len(ar.files)))
self.ar = ar
nfiles = 0
nbytes = 0
err = []
for _, _, inf in walkdirs(err, ar.files):
for _, _, inf in walkdirs(err, ar.files, ar.x):
if stat.S_ISDIR(inf.st_mode):
continue
@@ -606,8 +685,16 @@ class Ctl(object):
return
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles
self.nbytes = nbytes
return nfiles, nbytes
def __init__(self, ar, stats=None):
self.ok = False
self.ar = ar
self.stats = stats or self._scan()
if not self.stats:
return
self.nfiles, self.nbytes = self.stats
if ar.td:
requests.packages.urllib3.disable_warnings()
@@ -615,7 +702,9 @@ class Ctl(object):
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs([], ar.files)
self.filegen = walkdirs([], ar.files, ar.x)
self.recheck = [] # type: list[File]
if ar.safe:
self._safe()
else:
@@ -634,11 +723,11 @@ class Ctl(object):
self.t0 = time.time()
self.t0_up = None
self.spd = None
self.eta = "99:99:99"
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.recheck = [] # type: list[File]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
@@ -647,6 +736,8 @@ class Ctl(object):
self._fancy()
self.ok = True
def _safe(self):
"""minimal basic slow boring fallback codepath"""
search = self.ar.s
@@ -680,7 +771,8 @@ class Ctl(object):
ncs = len(hs)
for nc, cid in enumerate(hs):
print(" {0} up {1}".format(ncs - nc, cid))
upload(file, cid, self.ar.a)
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
upload(file, cid, self.ar.a, stats)
print(" ok!")
if file.recheck:
@@ -694,7 +786,7 @@ class Ctl(object):
handshake(self.ar, file, search)
def _fancy(self):
if VT100:
if VT100 and not self.ar.ns:
atexit.register(self.cleanup_vt100)
ss.scroll_region(3)
@@ -718,7 +810,7 @@ class Ctl(object):
else:
idles = 0
if VT100:
if VT100 and not self.ar.ns:
maxlen = ss.w - len(str(self.nfiles)) - 14
txt = "\033[s\033[{0}H".format(ss.g)
for y, k, st, f in [
@@ -755,12 +847,12 @@ class Ctl(object):
eta = (self.nbytes - self.up_b) / (spd + 1)
spd = humansize(spd)
eta = str(datetime.timedelta(seconds=int(eta)))
self.eta = str(datetime.timedelta(seconds=int(eta)))
sleft = humansize(self.nbytes - self.up_b)
nleft = self.nfiles - self.up_f
tail = "\033[K\033[u" if VT100 else "\r"
tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
if not self.recheck:
@@ -796,7 +888,10 @@ class Ctl(object):
print(" ls ~{0}".format(srd))
zb = self.ar.url.encode("utf-8")
zb += quotep(rd.replace(b"\\", b"/"))
r = req_ses.get(zb + b"?ls&dots", headers=headers)
r = req_ses.get(zb + b"?ls&lt&dots", headers=headers)
if not r:
raise Exception("HTTP {0}".format(r.status_code))
j = r.json()
for f in j["dirs"] + j["files"]:
rfn = f["href"].split("?")[0].rstrip("/")
@@ -870,6 +965,9 @@ class Ctl(object):
self.handshaker_busy += 1
upath = file.abs.decode("utf-8", "replace")
if not VT100:
upath = upath.lstrip("\\?")
hs, sprs = handshake(self.ar, file, search)
if search:
if hs:
@@ -935,11 +1033,23 @@ class Ctl(object):
self.uploader_busy += 1
self.t0_up = self.t0_up or time.time()
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
stats = zs.format(
self.up_f,
len(self.recheck),
self.uploader_busy,
self.nfiles - self.up_f,
int(self.nbytes / (1024 * 1024)),
int((self.nbytes - self.up_b) / (1024 * 1024)),
self.eta,
)
file, cid = task
try:
upload(file, cid, self.ar.a)
except:
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
upload(file, cid, self.ar.a, stats)
except Exception as ex:
t = "upload failed, retrying: {0} #{1} ({2})\n"
eprint(t.format(file.name, cid[:8], ex))
# handshake will fix it
with self.mutex:
@@ -973,8 +1083,15 @@ def main():
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
if "--version" in sys.argv:
print(ver)
return
sys.argv = [x for x in sys.argv if x != "--ws"]
# fmt: off
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
"foo" uploads the entire folder to URL/foo/
@@ -984,13 +1101,15 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("url", type=unicode, help="server url, including destination folder")
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-v", action="store_true", help="verbose")
ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--version", action="store_true", help="show version and exit")
ap = app.add_argument_group("compatibility")
ap.add_argument("--cls", action="store_true", help="clear screen before start")
ap.add_argument("--ws", action="store_true", help="copyparty is running on windows; wait before deleting files after uploading")
ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
ap = app.add_argument_group("folder sync")
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
@@ -1001,6 +1120,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
@@ -1009,7 +1129,16 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-td", action="store_true", help="disable certificate check")
# fmt: on
ar = app.parse_args()
try:
ar = app.parse_args()
finally:
if EXE and not sys.argv[1:]:
eprint("*** hit enter to exit ***")
try:
input()
except:
pass
if ar.drd:
ar.dr = True
@@ -1023,7 +1152,7 @@ source file/folder selection uses rsync syntax, meaning that:
ar.files = [
os.path.abspath(os.path.realpath(x.encode("utf-8")))
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
+ (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
for x in ar.files
]
@@ -1031,20 +1160,34 @@ source file/folder selection uses rsync syntax, meaning that:
if "://" not in ar.url:
ar.url = "http://" + ar.url
if ar.a and ar.a.startswith("$"):
fn = ar.a[1:]
print("reading password from file [{0}]".format(fn))
with open(fn, "rb") as f:
ar.a = f.read().decode("utf-8").strip()
for n in range(ar.rh):
try:
ar.url = undns(ar.url)
break
except KeyboardInterrupt:
raise
except:
if n > ar.rh - 2:
raise
if ar.cls:
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
eprint("\033[H\033[2J\033[3J", end="")
ctl = Ctl(ar)
if ar.dr and not ar.drd:
# run another pass for the deletes
if getattr(ctl, "up_br") and ar.ws:
# wait for up2k to mtime if there was uploads
time.sleep(4)
if ar.dr and not ar.drd and ctl.ok:
print("\npass 2/2: delete")
ar.drd = True
ar.z = True
Ctl(ar)
ctl = Ctl(ar, ctl.stats)
sys.exit(0 if ctl.ok else 1)
if __name__ == "__main__":

View File

@@ -29,11 +29,11 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as partyfuse))
### [`webdav-basicauth.reg`](webdav-basicauth.reg)
* enables webdav basic-auth over plaintext http; takes effect after a reboot OR after running `webdav-unlimit.bat`
### [`webdav-unlimit.bat`](webdav-unlimit.bat)
* removes the 47.6 MiB filesize limit when downloading from webdav
### [`webdav-cfg.reg`](webdav-cfg.bat)
* improves the native webdav support in windows;
* removes the 47.6 MiB filesize limit when downloading from webdav
* optionally enables webdav basic-auth over plaintext http
* optionally helps disable wpad, removing the 10sec latency
### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl

View File

@@ -1,7 +1,6 @@
# when running copyparty behind a reverse proxy,
# the following arguments are recommended:
#
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx
#
# if you are doing location-based proxying (such as `/stuff` below)

View File

@@ -1,14 +1,44 @@
#!/bin/bash
set -e
cat >/dev/null <<'EOF'
NOTE: copyparty is now able to do this automatically;
however you may wish to use this script instead if
you have specific needs (or if copyparty breaks)
this script generates a new self-signed TLS certificate and
replaces the default insecure one that comes with copyparty
as it is trivial to impersonate a copyparty server using the
default certificate, it is highly recommended to do this
this will create a self-signed CA, and a Server certificate
which gets signed by that CA -- you can run it multiple times
with different server-FQDNs / IPs to create additional certs
for all your different servers / (non-)copyparty services
EOF
# ca-name and server-fqdn
ca_name="$1"
srv_fqdn="$2"
[ -z "$srv_fqdn" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server fqdn and/or IPs, comma-separated"
echo "optional arg 3: if set, write cert into copyparty cfg"
[ -z "$srv_fqdn" ] && { cat <<'EOF'
need arg 1: ca name
need arg 2: server fqdn and/or IPs, comma-separated
optional arg 3: if set, write cert into copyparty cfg
example:
./cfssl.sh PartyCo partybox.local y
EOF
exit 1
}
command -v cfssljson 2>/dev/null || {
echo please install cfssl and try again
exit 1
}
@@ -59,12 +89,14 @@ show() {
}
show ca.pem
show "$srv_fqdn.pem"
echo
echo "successfully generated new certificates"
# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
echo "successfully replaced copyparty certificate"
}

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>🎉 redirect</title>
<title>💾🎉 redirect</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
@@ -26,8 +26,8 @@ a {
<script>
var a = document.getElementById('redir'),
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
loc = window.location.hostname || '127.0.0.1',
proto = location.protocol.indexOf('https') === 0 ? 'https' : 'http',
loc = location.hostname || '127.0.0.1',
port = a.getAttribute('href').split(':').pop().split('/')[0],
url = proto + '://' + loc + ':' + port + '/';
@@ -35,7 +35,7 @@ a.setAttribute('href', url);
document.getElementById('desc').innerHTML = 'redirecting to';
setTimeout(function() {
window.location.href = url;
location.href = url;
}, 500);
</script>

Binary file not shown.

View File

@@ -1,7 +1,6 @@
# when running copyparty behind a reverse proxy,
# the following arguments are recommended:
#
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx
#
# -nc must match or exceed the webserver's max number of concurrent clients;
@@ -9,7 +8,7 @@
# nginx default is 512 (worker_processes 1, worker_connections 512)
#
# you may also consider adding -j0 for CPU-intensive configurations
# (not that i can really think of any good examples)
# (5'000 requests per second, or 20gbps upload/download in parallel)
#
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
@@ -35,7 +34,15 @@ server {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# NOTE: with cloudflare you want this instead:
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "Keep-Alive";
}
}
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
client_max_body_size 1024M;
client_header_timeout 610m;
client_body_timeout 610m;
send_timeout 610m;

View File

@@ -0,0 +1,283 @@
{ config, pkgs, lib, ... }:
with lib;
let
mkKeyValue = key: value:
if value == true then
# sets with a true boolean value are coerced to just the key name
key
else if value == false then
# or omitted completely when false
""
else
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
mkValueString = value:
if isList value then
(concatStringsSep ", " (map mkValueString value))
else if isAttrs value then
"\n" + (mkAttrsString value)
else
(generators.mkValueStringDefault { } value);
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
mkSection = name: attrs: ''
${mkSectionName name}
${mkAttrsString attrs}
'';
mkVolume = name: attrs: ''
${mkSectionName name}
${attrs.path}
${mkAttrsString {
accs = attrs.access;
flags = attrs.flags;
}}
'';
passwordPlaceholder = name: "{{password-${name}}}";
accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name);
configStr = ''
${mkSection "global" cfg.settings}
${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)}
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
'';
name = "copyparty";
cfg = config.services.copyparty;
configFile = pkgs.writeText "${name}.conf" configStr;
runtimeConfigPath = "/run/${name}/${name}.conf";
home = "/var/lib/${name}";
defaultShareDir = "${home}/data";
in {
options.services.copyparty = {
enable = mkEnableOption "web-based file manager";
package = mkOption {
type = types.package;
default = pkgs.copyparty;
defaultText = "pkgs.copyparty";
description = ''
Package of the application to run, exposed for overriding purposes.
'';
};
openFilesLimit = mkOption {
default = 4096;
type = types.either types.int types.str;
description = "Number of files to allow copyparty to open.";
};
settings = mkOption {
type = types.attrs;
description = ''
Global settings to apply.
Directly maps to values in the [global] section of the copyparty config.
See `${getExe cfg.package} --help` for more details.
'';
default = {
i = "127.0.0.1";
no-reload = true;
};
example = literalExpression ''
{
i = "0.0.0.0";
no-reload = true;
}
'';
};
accounts = mkOption {
type = types.attrsOf (types.submodule ({ ... }: {
options = {
passwordFile = mkOption {
type = types.str;
description = ''
Runtime file path to a file containing the user password.
Must be readable by the copyparty user.
'';
example = "/run/keys/copyparty/ed";
};
};
}));
description = ''
A set of copyparty accounts to create.
'';
default = { };
example = literalExpression ''
{
ed.passwordFile = "/run/keys/copyparty/ed";
};
'';
};
volumes = mkOption {
type = types.attrsOf (types.submodule ({ ... }: {
options = {
path = mkOption {
type = types.str;
description = ''
Path of a directory to share.
'';
};
access = mkOption {
type = types.attrs;
description = ''
Attribute list of permissions and the users to apply them to.
The key must be a string containing any combination of allowed permission:
"r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads
"m" (move): move files and folders; need "w" at destination
"d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
"G" (upget): "get", but can see filekeys of their own uploads
"h" (html): "get", but folders return their index.html
"a" (admin): can see uploader IPs, config-reload
For example: "rwmd"
The value must be one of:
an account name, defined in `accounts`
a list of account names
"*", which means "any account"
'';
example = literalExpression ''
{
# wG = write-upget = see your own uploads only
wG = "*";
# read-write-modify-delete for users "ed" and "k"
rwmd = ["ed" "k"];
};
'';
};
flags = mkOption {
type = types.attrs;
description = ''
Attribute list of volume flags to apply.
See `${getExe cfg.package} --help-flags` for more details.
'';
example = literalExpression ''
{
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
fk = 4;
# scan for new files every 60sec
scan = 60;
# volflag "e2d" enables the uploads database
e2d = true;
# "d2t" disables multimedia parsers (in case the uploads are malicious)
d2t = true;
# skips hashing file contents if path matches *.iso
nohash = "\.iso$";
};
'';
default = { };
};
};
}));
description = "A set of copyparty volumes to create";
default = {
"/" = {
path = defaultShareDir;
access = { r = "*"; };
};
};
example = literalExpression ''
{
"/" = {
path = ${defaultShareDir};
access = {
# wG = write-upget = see your own uploads only
wG = "*";
# read-write-modify-delete for users "ed" and "k"
rwmd = ["ed" "k"];
};
};
};
'';
};
};
config = mkIf cfg.enable {
systemd.services.copyparty = {
description = "http file sharing hub";
wantedBy = [ "multi-user.target" ];
environment = {
PYTHONUNBUFFERED = "true";
XDG_CONFIG_HOME = "${home}/.config";
};
preStart = let
replaceSecretCommand = name: attrs:
"${getExe pkgs.replace-secret} '${
passwordPlaceholder name
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
in ''
set -euo pipefail
install -m 600 ${configFile} ${runtimeConfigPath}
${concatStringsSep "\n"
(mapAttrsToList replaceSecretCommand cfg.accounts)}
'';
serviceConfig = {
Type = "simple";
ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}";
# Hardening options
User = "copyparty";
Group = "copyparty";
RuntimeDirectory = name;
RuntimeDirectoryMode = "0700";
StateDirectory = [ name "${name}/data" "${name}/.config" ];
StateDirectoryMode = "0700";
WorkingDirectory = home;
TemporaryFileSystem = "/:ro";
BindReadOnlyPaths = [
"/nix/store"
"-/etc/resolv.conf"
"-/etc/nsswitch.conf"
"-/etc/hosts"
"-/etc/localtime"
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes);
# Would re-mount paths ignored by temporary root
#ProtectSystem = "strict";
ProtectHome = true;
PrivateTmp = true;
PrivateDevices = true;
ProtectKernelTunables = true;
ProtectControlGroups = true;
RestrictSUIDSGID = true;
PrivateMounts = true;
ProtectKernelModules = true;
ProtectKernelLogs = true;
ProtectHostname = true;
ProtectClock = true;
ProtectProc = "invisible";
ProcSubset = "pid";
RestrictNamespaces = true;
RemoveIPC = true;
UMask = "0077";
LimitNOFILE = cfg.openFilesLimit;
NoNewPrivileges = true;
LockPersonality = true;
RestrictRealtime = true;
};
};
users.groups.copyparty = { };
users.users.copyparty = {
description = "Service user for copyparty";
group = "copyparty";
home = home;
isSystemUser = true;
};
};
}

View File

@@ -14,5 +14,5 @@ name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::rw"

View File

@@ -0,0 +1,55 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.9.11"
pkgrel=1
pkgdesc="Portable file sharing hub"
arch=("any")
url="https://github.com/9001/${pkgname}"
license=('MIT')
depends=("python" "lsof" "python-jinja")
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
"python-mutagen: music tags (alternative)"
"python-pillow: thumbnails for images"
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
"libkeyfinder-git: detection of musical keys"
"qm-vamp-plugins: BPM detection"
"python-pyopenssl: ftps functionality"
"python-argon2_cffi: hashed passwords in config"
"python-impacket-git: smb support (bad idea)"
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("67d08a0ca8426a068e6461243612dda347e42f056b6312348f5bfa109de2688f")
build() {
    cd "${srcdir}/${pkgname}-${pkgver}"

    # build/minify the bundled web assets first, then drop the Makefile
    # so it does not get packaged into the wheel
    pushd copyparty/web
    make -j$(nproc)
    rm Makefile
    popd

    # build a wheel only (-w), without build isolation (-n)
    python3 -m build -wn
}
package() {
    cd "${srcdir}/${pkgname}-${pkgver}"

    # install the wheel built in build() into the package root
    python3 -m installer -d "$pkgdir" dist/*.whl

    # config dir, helper script, default config, service units, docs, license
    install -dm755 "${pkgdir}/etc/${pkgname}.d"
    install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
    install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
    install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
    install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
    install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
    install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"

    # skip the first-install hints if the machine already has config files
    # NOTE(review): this inspects the *build host's* /etc, not $pkgdir;
    # first-install messages are normally emitted from a .install file's
    # post_install() hook -- confirm this is intentional
    find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
    echo "┏━━━━━━━━━━━━━━━──-"
    echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/"
    echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:"
    echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service (standard)"
    echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)"
    echo "┗━━━━━━━━━━━━━━━──-"
}

View File

@@ -0,0 +1,7 @@
## import all *.conf files from the current folder (/etc/copyparty.d)
% ./
# add additional .conf files to this folder;
# see example config files for reference:
# https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf
# https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d

View File

@@ -0,0 +1,32 @@
# this will start `/usr/bin/copyparty` (see ExecStart below)
# and read config from `/etc/copyparty.d/*.conf`
#
# you probably want to:
# change "User=cpp" and "/home/cpp/" to another user
#
# unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server
[Service]
Type=notify
SyslogIdentifier=copyparty
Environment=PYTHONUNBUFFERED=x
WorkingDirectory=/var/lib/copyparty-jail
ExecReload=/bin/kill -s USR1 $MAINPID
# user to run as + where the TLS certificate is (if any)
User=cpp
Environment=XDG_CONFIG_HOME=/home/cpp/.config
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# run copyparty
ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,3 @@
this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured
please add some `*.conf` files to `/etc/copyparty.d/`

View File

@@ -0,0 +1,31 @@
# this will start `/usr/bin/copyparty` (see ExecStart below)
# in a chroot, preventing accidental access elsewhere
# and read config from `/etc/copyparty.d/*.conf`
#
# expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--`
#
# `1000 1000` = what user to run copyparty as
#
# unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server
[Service]
SyslogIdentifier=prisonparty
Environment=PYTHONUNBUFFERED=x
WorkingDirectory=/var/lib/copyparty-jail
ExecReload=/bin/kill -s USR1 $MAINPID
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# run copyparty
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,59 @@
# nix derivation for copyparty; wraps the prebuilt single-file release
# (copyparty-sfx.py, pinned by ./pin.json) in a python environment that
# carries whichever optional dependencies the with* flags below enable
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
# use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true,
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
withThumbnails ? true,
# create thumbnails with PyVIPS; even faster, uses more memory
# -- can be combined with Pillow to support more filetypes
withFastThumbnails ? false,
# enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus
# -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface
# -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both
withMediaProcessing ? true,
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
withBasicAudioMetadata ? false,
# enable FTPS support in the FTP server
withFTPS ? false,
# samba/cifs server; dangerous and buggy, enable if you really need it
withSMB ? false,
}:
let
# url/version/hash of the pinned release; regenerated by update.py
pinData = lib.importJSON ./pin.json;
# python interpreter bundled with the enabled optional packages
# NOTE(review): `pyftpdlib` is accepted as a function input but never
# added to pyEnv, and `ffmpeg` below resolves inside `with ps;` scope
# (python package set) rather than the top-level argument -- confirm
pyEnv = python.withPackages (ps:
with ps; [
jinja2
]
++ lib.optional withSMB impacket
++ lib.optional withFTPS pyopenssl
++ lib.optional withThumbnails pillow
++ lib.optional withFastThumbnails pyvips
++ lib.optional withMediaProcessing ffmpeg
++ lib.optional withBasicAudioMetadata mutagen
++ lib.optional withHashedPasswords argon2-cffi
);
in stdenv.mkDerivation {
pname = "copyparty";
version = pinData.version;
src = fetchurl {
url = pinData.url;
hash = pinData.hash;
};
buildInputs = [ makeWrapper ];
# the sfx is a ready-to-run single file: nothing to unpack or build,
# just install it and generate a wrapper that runs it under pyEnv
dontUnpack = true;
dontBuild = true;
installPhase = ''
install -Dm755 $src $out/share/copyparty-sfx.py
makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
--add-flags "$out/share/copyparty-sfx.py"
'';
}

View File

@@ -0,0 +1,5 @@
{
"url": "https://github.com/9001/copyparty/releases/download/v1.9.11/copyparty-sfx.py",
"version": "1.9.11",
"hash": "sha256-A3nQ/b3KLDQVuioXYf3kgcrXSBOEt+KBCq2tnc5PDWg="
}

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
# Update the Nix package pin
#
# Usage: ./update.sh [PATH]
# When the [PATH] is not set, it will fetch the latest release from the repo.
# With [PATH] set, it will hash the given file and generate the URL,
# based on the version contained within the file
import base64
import json
import hashlib
import sys
import re
from pathlib import Path
# file the release pin is written to (consumed by ./default.nix)
OUTPUT_FILE = Path("pin.json")

# name of the github release asset being pinned
TARGET_ASSET = "copyparty-sfx.py"

# hash algorithm; also used as the SRI prefix in the pin ("sha256-...")
HASH_TYPE = "sha256"

LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"


def DOWNLOAD_URL(version):
    """build the canonical download URL for TARGET_ASSET at `version`

    (was a lambda assigned to a name; PEP 8 / E731 prefers a def --
    same callable name and signature, so callers are unaffected)
    """
    return f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}"
def get_formatted_hash(binary):
    """hash `binary` (bytes) and return it in SRI form, e.g. "sha256-<base64>",
    which is the format Nix expects in the `hash` attribute of fetchurl"""
    # use the HASH_TYPE constant for the digest as well, so the algorithm
    # and the SRI prefix in the returned string cannot silently drift apart
    # (the original hardcoded "sha256" here but used HASH_TYPE below)
    hasher = hashlib.new(HASH_TYPE)
    hasher.update(binary)
    asset_hash = hasher.digest()
    encoded_hash = base64.b64encode(asset_hash).decode("ascii")
    return f"{HASH_TYPE}-{encoded_hash}"
def version_from_sfx(binary):
    """extract the copyparty version from the bytes of a copyparty-sfx.py

    scans for a line exactly of the form `VER = "..."`;
    raises ValueError if no such line exists in `binary`
    """
    result = re.search(b'^VER = "(.*)"$', binary, re.MULTILINE)
    if result:
        # group(1), not groups(1): `groups()` takes a *default* value for
        # non-participating groups, so the original call only worked by
        # accident (group 1 always participates when the match succeeds)
        return result.group(1).decode("ascii")
    raise ValueError("version not found in provided file")
def remote_release_pin():
    """pin the newest GitHub release: fetch the latest-release metadata,
    download TARGET_ASSET, and return {"url", "version", "hash"}"""
    import requests  # deferred import; only the remote path needs it

    rel = requests.get(LATEST_RELEASE_URL).json()
    ver = rel["tag_name"].lstrip("v")
    # an IndexError here means the release has no asset named TARGET_ASSET
    asset_meta = [x for x in rel["assets"] if x["name"] == TARGET_ASSET][0]
    url = asset_meta["browser_download_url"]
    blob = requests.get(url).content
    return {
        "url": url,
        "version": ver,
        "hash": get_formatted_hash(blob),
    }
def local_release_pin(path):
    """pin a local copy of the asset at `path` (a pathlib.Path):
    read it, extract the embedded version, and return {"url", "version", "hash"};
    the url is derived from that version, so the file must be a release build"""
    blob = path.read_bytes()
    ver = version_from_sfx(blob)
    return {
        "url": DOWNLOAD_URL(ver),
        "version": ver,
        "hash": get_formatted_hash(blob),
    }
def main():
    """entry point: build a release pin from a local file when a path
    argument is given, otherwise from the latest GitHub release,
    then print it and write it to OUTPUT_FILE as indented JSON"""
    args = sys.argv[1:]
    pin = local_release_pin(Path(args[0])) if args else remote_release_pin()
    print(pin)
    OUTPUT_FILE.write_text(json.dumps(pin, indent=4))
if __name__ == "__main__":
main()

View File

@@ -1,13 +1,22 @@
<!--
NOTE: DEPRECATED; please use the javascript version instead:
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js
----
save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
only works if you disable the prologue/epilogue sandbox with --no-sb-lg
which should probably be combined with --no-dot-ren to prevent damage
(`no_sb_lg` can also be set per-volume with volflags)
-->
<style>
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
#ops, #tree, #path, #wfp, /* main tabs and navigators (tree/breadcrumbs) */
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */

View File

@@ -17,7 +17,7 @@ almost the same as minimal-up2k.html except this one...:
var u2min = `
<style>
#ops, #path, #tree, #files, #epi+div+h2,
#ops, #path, #tree, #files, #wfp,
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
display: none !important;
}
@@ -55,5 +55,5 @@ var u2min = `
if (!has(perms, 'read')) {
var e2 = mknod('div');
e2.innerHTML = u2min;
ebi('wrap').insertBefore(e2, QS('#epi+h2'));
ebi('wrap').insertBefore(e2, QS('#wfp'));
}

208
contrib/plugins/rave.js Normal file
View File

@@ -0,0 +1,208 @@
/* untz untz untz untz */
(function () {
var can, ctx, W, H, fft, buf, bars, barw, pv,
hue = 0,
ibeat = 0,
beats = [9001],
beats_url = '',
uofs = 0,
ops = ebi('ops'),
raving = false,
recalc = 0,
cdown = 0,
FC = 0.9,
css = `<style>
#fft {
position: fixed;
top: 0;
left: 0;
z-index: -1;
}
body {
box-shadow: inset 0 0 0 white;
}
#ops>a,
#path>a {
display: inline-block;
}
/*
body.untz {
animation: untz-body 200ms ease-out;
}
@keyframes untz-body {
0% {inset 0 0 20em white}
100% {inset 0 0 0 white}
}
*/
:root, html.a, html.b, html.c, html.d, html.e {
--row-alt: rgba(48,52,78,0.2);
}
#files td {
background: none;
}
</style>`;
QS('body').appendChild(mknod('div', null, css));
function rave_load() {
console.log('rave_load');
can = mknod('canvas', 'fft');
QS('body').appendChild(can);
ctx = can.getContext('2d');
fft = new AnalyserNode(actx, {
"fftSize": 2048,
"maxDecibels": 0,
"smoothingTimeConstant": 0.7,
});
ibeat = 0;
beats = [9001];
buf = new Uint8Array(fft.frequencyBinCount);
bars = buf.length * FC;
afilt.filters.push(fft);
if (!raving) {
raving = true;
raver();
}
beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
console.log("reading beats from", beats_url);
var xhr = new XHR();
xhr.open('GET', beats_url, true);
xhr.onload = readbeats;
xhr.url = beats_url;
xhr.send();
}
function rave_unload() {
qsr('#fft');
can = null;
}
function readbeats() {
if (this.url != beats_url)
return console.log('old beats??', this.url, beats_url);
var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g);
if (sbeats.length < 3)
return;
beats = [];
for (var a = 0; a < sbeats.length; a++)
beats.push(parseFloat(sbeats[a]));
var end = beats.slice(-2),
t = end[1],
d = t - end[0];
while (d > 0.1 && t < 1200)
beats.push(t += d);
}
function hrand() {
return Math.random() - 0.5;
}
// per-frame render loop: draws the fft spectrum, applies the decaying
// screen-shake, and fires untz() when playback crosses the next beat.
// fix: removed the unreachable legacy onset-detection code (the
// cv/pv/cdown energy-delta block) that sat after the unconditional
// `return untz()` and could never execute
function raver() {
    // canvas gone (rave_unload ran): let the loop die
    if (!can) {
        raving = false;
        return;
    }
    requestAnimationFrame(raver);
    if (!mp || !mp.au || mp.au.paused)
        return;

    // screen-shake: uofs is set by untz() and decays one step per frame
    if (--uofs >= 0) {
        document.body.style.marginLeft = hrand() * uofs + 'px';
        ebi('tree').style.marginLeft = hrand() * uofs + 'px';
        for (var a of QSA('#ops>a, #path>a, #pctl>a'))
            a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)'
    }

    // recompute canvas geometry every 60 frames (cheap resize handling);
    // backing store is one pixel per bar, css stretches it to fit
    if (--recalc < 0) {
        recalc = 60;
        var tree = ebi('tree'),
            x = tree.style.display == 'none' ? 0 : tree.offsetWidth;

        W = can.width = bars;
        H = can.height = 512;
        barw = 1;
        can.style.left = x + 'px';
        can.style.width = (window.innerWidth - x) + 'px';
        can.style.height = ebi('widget').offsetTop + 'px';
    }

    fft.getByteFrequencyData(buf);

    // dominant low-frequency bin drives the hue, smoothed over time
    var imax = 0, vmax = 0;
    for (var a = 10; a < 50; a++)
        if (vmax < buf[a]) {
            vmax = buf[a];
            imax = a;
        }
    hue = hue * 0.93 + imax * 0.07;

    // draw the bars
    ctx.fillStyle = 'rgba(0,0,0,0)';
    ctx.fillRect(0, 0, W, H);
    ctx.clearRect(0, 0, W, H);
    ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)';
    var x = 0, mul = (H / 256) * 0.5;
    for (var a = 0; a < buf.length * FC; a++) {
        var v = buf[a] * mul * (1 + 0.69 * a / buf.length);
        ctx.fillRect(x, H - v, barw, v);
        x += barw;
    }

    // fire the beat effect when playback crosses the next beat timestamp
    // (0.05s lookahead to compensate for output latency); skip past any
    // beats we missed while paused/seeking
    var t = mp.au.currentTime + 0.05;
    if (ibeat >= beats.length || beats[ibeat] > t)
        return;

    while (ibeat < beats.length && beats[ibeat++] < t)
        continue;

    untz();
}
// one beat pulse: arm the screen-shake for 14 frames and flash an
// inward box-shadow on the page body via the web-animations api
function untz() {
    console.log('untz');
    uofs = 14;
    var frames = [
        { boxShadow: 'inset 0 0 1em #f0c' },
        { boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 },
        { boxShadow: 'inset 0 0 0 #f0c' }
    ];
    document.body.animate(frames, { duration: 200, iterations: 1 });
}
// register the visualizer with the audio-filter plugin framework;
// load/unload fire when the rave plugin is toggled on/off
afilt.plugs.push({
    "en": true,
    "load": rave_load,
    "unload": rave_unload
});
})();

View File

@@ -10,7 +10,7 @@ name="copyparty"
rcvar="copyparty_enable"
copyparty_user="copyparty"
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
pidfile="/var/run/copyparty/${name}.pid"
command="/usr/sbin/daemon"
command_args="-P ${pidfile} -r -f ${copyparty_command}"

View File

@@ -1,3 +1,6 @@
# NOTE: this is now a built-in feature in copyparty
# but you may still want this if you have specific needs
#
# systemd service which generates a new TLS certificate on each boot,
# that way the one-year expiry time won't cause any issues --
# just have everyone trust the ca.pem once every 10 years

View File

@@ -2,12 +2,16 @@
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty.service /etc/systemd/system
# restorecon -vr /etc/systemd/system/copyparty.service
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
# cp -pv copyparty.service /etc/systemd/system/
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# firewall-cmd --reload
# systemctl daemon-reload && systemctl enable --now copyparty
#
# if it fails to start, first check this: systemctl status copyparty
# then try starting it while viewing logs: journalctl -fan 100
#
# you may want to:
# change "User=cpp" and "/home/cpp/" to another user
# remove the nft lines to only listen on port 3923
@@ -18,6 +22,7 @@
# add '-i 127.0.0.1' to only allow local connections
# add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing
# remove '--ansi' to disable colored logs
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
@@ -44,7 +49,7 @@ ExecReload=/bin/kill -s USR1 $MAINPID
User=cpp
Environment=XDG_CONFIG_HOME=/home/cpp/.config
# setup forwarding from ports 80 and 443 to port 3923
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
ExecStartPre=+nft add table ip nat
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
@@ -55,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# copyparty settings
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
[Install]
WantedBy=multi-user.target

View File

@@ -6,12 +6,17 @@
# 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
#
# expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--`
#
# `1000 1000` = what user to run copyparty as
#
# you may want to:
# change '/mnt::rw' to another location or permission-set
# (remember to change the '/mnt' chroot arg too)
#
# enable line-buffering for realtime logging (slight performance cost):
# inside the [Service] block, add the following line:
# unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
[Unit]
@@ -19,7 +24,14 @@ Description=copyparty file server
[Service]
SyslogIdentifier=prisonparty
WorkingDirectory=/usr/local/bin
Environment=PYTHONUNBUFFERED=x
WorkingDirectory=/var/lib/copyparty-jail
ExecReload=/bin/kill -s USR1 $MAINPID
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# run copyparty
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw

View File

@@ -1,9 +1,7 @@
@echo off
rem removes the 47.6 MiB filesize limit when downloading from webdav
rem + optionally allows/enables password-auth over plaintext http
rem + optionally helps disable wpad
setlocal enabledelayedexpansion
rem + optionally helps disable wpad, removing the 10sec latency
net session >nul 2>&1
if %errorlevel% neq 0 (
@@ -20,30 +18,26 @@ echo OK;
echo allow webdav basic-auth over plaintext http?
echo Y: login works, but the password will be visible in wireshark etc
echo N: login will NOT work unless you use https and valid certificates
set c=.
set /p "c=(Y/N): "
echo(
if /i not "!c!"=="y" goto :g1
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
rem default is 1 (require tls)
choice
if %errorlevel% equ 1 (
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
rem default is 1 (require tls)
)
:g1
echo(
echo OK;
echo do you want to disable wpad?
echo can give a HUGE speed boost depending on network settings
set c=.
set /p "c=(Y/N): "
echo(
if /i not "!c!"=="y" goto :g2
echo(
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
echo please click [LAN settings] and disable [Automatically detect settings]
echo(
pause
control inetcpl.cpl,,4
choice
if %errorlevel% equ 1 (
echo(
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
echo please click [LAN settings] and disable [Automatically detect settings]
echo(
pause
control inetcpl.cpl,,4
)
:g2
net stop webclient
net start webclient
echo(

View File

@@ -0,0 +1,2 @@
rem run copyparty.exe on machines with busted environment variables
cmd /v /c "set TMP=\tmp && copyparty.exe"

View File

@@ -6,6 +6,10 @@ import platform
import sys
import time
# fmt: off
_:tuple[int,int]=(0,0) # _____________________________________________________________________ hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-) ************************************************************************************************************************************************
# fmt: on
try:
from typing import TYPE_CHECKING
except:
@@ -27,13 +31,20 @@ WINDOWS: Any = (
else False
)
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
VT100 = "--ansi" in sys.argv or (
os.environ.get("NO_COLOR", "").lower() in ("", "0", "false")
and sys.stdout.isatty()
and "--no-ansi" not in sys.argv
and (not WINDOWS or WINDOWS >= [10, 0, 14393])
)
# introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
MACOS = platform.system() == "Darwin"
EXE = bool(getattr(sys, "frozen", False))
try:
CORES = len(os.sched_getaffinity(0))
except:

View File

@@ -10,11 +10,9 @@ __url__ = "https://github.com/9001/copyparty/"
import argparse
import base64
import filecmp
import locale
import os
import re
import shutil
import socket
import sys
import threading
@@ -23,9 +21,10 @@ import traceback
import uuid
from textwrap import dedent
from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .authsrv import expand_config_file, re_vol
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
from .cfg import flagcats, onedash
from .svchub import SvcHub
from .util import (
IMPLICATIONS,
@@ -37,6 +36,7 @@ from .util import (
ansi_re,
min_ex,
py_desc,
pybin,
termsize,
wrap,
)
@@ -53,8 +53,9 @@ try:
except:
HAVE_SSL = False
printed: list[str] = []
u = unicode
printed: list[str] = []
zsid = uuid.uuid4().urn[4:]
class RiceFormatter(argparse.HelpFormatter):
@@ -183,7 +184,7 @@ def init_E(E: EnvParams) -> None:
with open_binary("copyparty", "z.tar") as tgz:
with tarfile.open(fileobj=tgz) as tf:
tf.extractall(tdn)
tf.extractall(tdn) # nosec (archive is safe)
return tdn
@@ -198,7 +199,7 @@ def init_E(E: EnvParams) -> None:
E.mod = _unpack()
if sys.platform == "win32":
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP")
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
E.cfg = os.path.normpath(bdir + "/copyparty")
elif sys.platform == "darwin":
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
@@ -229,15 +230,42 @@ def get_srvname() -> str:
ret = f.read().decode("utf-8", "replace").strip()
except:
ret = ""
while len(ret) < 7:
namelen = 5
while len(ret) < namelen:
ret += base64.b32encode(os.urandom(4))[:7].decode("utf-8").lower()
ret = re.sub("[234567=]", "", ret)[:7]
ret = re.sub("[234567=]", "", ret)[:namelen]
with open(fp, "wb") as f:
f.write(ret.encode("utf-8") + b"\n")
return ret
def get_fk_salt(cert_path) -> str:
fp = os.path.join(E.cfg, "fk-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")
return ret.decode("utf-8")
def get_ah_salt() -> str:
fp = os.path.join(E.cfg, "ah-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")
return ret.decode("utf-8")
def ensure_locale() -> None:
safe = "en_US.UTF-8"
for x in [
@@ -253,34 +281,26 @@ def ensure_locale() -> None:
except:
continue
t = "setlocale {} failed,\n sorting and dates will be funky"
t = "setlocale {} failed,\n sorting and dates might get funky\n"
warn(t.format(safe))
def ensure_cert() -> None:
def ensure_webdeps() -> None:
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
if os.path.exists(ap):
return
warn(
"""could not find webdeps;
if you are running the sfx, or exe, or pypi package, or docker image,
then this is a bug! Please let me know so I can fix it, thanks :-)
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
however, if you are a dev, or running copyparty from source, and you want
full client functionality, you will need to build or obtain the webdeps:
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
"""
the default cert (and the entire TLS support) is only here to enable the
crypto.subtle javascript API, which is necessary due to the webkit guys
being massive memers (https://www.chromium.org/blink/webcrypto)
i feel awful about this and so should they
"""
cert_insec = os.path.join(E.mod, "res/insecure.pem")
cert_cfg = os.path.join(E.cfg, "cert.pem")
if not os.path.exists(cert_cfg):
shutil.copy(cert_insec, cert_cfg)
try:
if filecmp.cmp(cert_cfg, cert_insec):
lprint(
"\033[33musing default TLS certificate; https will be insecure."
+ "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
)
except:
pass
# speaking of the default cert,
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
)
def configure_ssl_ver(al: argparse.Namespace) -> None:
@@ -353,27 +373,28 @@ def configure_ssl_ciphers(al: argparse.Namespace) -> None:
def args_from_cfg(cfg_path: str) -> list[str]:
lines: list[str] = []
expand_config_file(lines, cfg_path, "")
lines = upgrade_cfg_fmt(None, argparse.Namespace(vc=False), lines, "")
ret: list[str] = []
skip = False
skip = True
for ln in lines:
if not ln:
sn = ln.split(" #")[0].strip()
if sn.startswith("["):
skip = True
if sn.startswith("[global]"):
skip = False
continue
if ln.startswith("#"):
if skip or not sn.split("#")[0].strip():
continue
if not ln.startswith("-"):
continue
if skip:
continue
try:
ret.extend(ln.split(" ", 1))
except:
ret.append(ln)
for k, v in split_cfg_ln(sn).items():
k = k.lstrip("-")
if not k:
continue
prefix = "-" if k in onedash else "--"
if v is True:
ret.append(prefix + k)
else:
ret.append(prefix + k + "=" + v)
return ret
@@ -466,8 +487,10 @@ def get_sects():
"d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
"G" (upget): "get", but can see filekeys of their own uploads
"h" (html): "get", but folders return their index.html
"a" (admin): can see uploader IPs, config-reload
too many volflags to list here, see the other sections
too many volflags to list here, see --help-flags
example:\033[35m
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
@@ -494,65 +517,115 @@ def get_sects():
"""
volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub\033[0m
\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
if global config defines a volflag for all volumes,
you can unset it for a specific volume with -flag
"""
).rstrip()
+ build_flags_desc(),
],
[
"handlers",
"use plugins to handle certain events",
dedent(
"""
usually copyparty returns a \033[33m404\033[0m if a file does not exist, and
\033[33m403\033[0m if a user tries to access a file they don't have access to
\033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
you can load a plugin which will be invoked right before this
happens, and the plugin can choose to override this behavior
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
load the plugin using --args or volflags; for example \033[36m
--on404 ~/partyhandlers/not404.py
-v .::r:c,on404=~/partyhandlers/not404.py
\033[0m
the file must define the function \033[35mmain(cli,vn,rem)\033[0m:
\033[35mcli\033[0m: the copyparty HttpCli instance
\033[35mvn\033[0m: the VFS which overlaps with the requested URL
\033[35mrem\033[0m: the remainder of the URL below the VFS mountpoint
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2v\033[35m disables file verification, overrides -e2v*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mnoforget$\033[35m don't forget files when deleted from disk
\033[36mdbd=[acid|swal|wal|yolo]\033[35m database speed-durability tradeoff
\033[36mxlink$\033[35m cross-volume dupe detection / linking
\033[36mxdev\033[35m do not descend into other filesystems
\033[36mxvol\033[35m skip symlinks leaving the volume root
`main` must return a string; one of the following:
\033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
> \033[32m"true"\033[0m: the plugin has responded to the request,
and the TCP connection should be kept open
\033[0mthumbnails:
\033[36mdthumb\033[35m disables all thumbnails
\033[36mdvthumb\033[35m disables video thumbnails
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
\033[36mdithumb\033[35m disables image thumbnails
> \033[32m"false"\033[0m: the plugin has responded to the request,
and the TCP connection should be terminated
\033[0mclient and ux:
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
\033[36mrobots\033[35m allows indexing by search engines (default)
\033[36mnorobots\033[35m kindly asks search engines to leave
> \033[32m"retry"\033[0m: the plugin has done something to resolve the 404
situation, and copyparty should reattempt reading the file.
if it still fails, a regular 404 will be returned
\033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission
\033[0m"""
> \033[32m"allow"\033[0m: should ignore the insufficient permissions
and let the client continue anyways
> \033[32m""\033[0m: the plugin has not handled the request;
try the next plugin or return the usual 404 or 403
\033[1;35mPS!\033[0m the folder that contains the python file should ideally
not contain many other python files, and especially nothing
with filenames that overlap with modules used by copyparty
"""
),
],
[
"hooks",
"execute commands before/after various events",
dedent(
"""
execute a command (a program or script) before or after various events;
\033[36mxbu\033[35m executes CMD before a file upload starts
\033[36mxau\033[35m executes CMD after a file upload finishes
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
\033[36mxbr\033[35m executes CMD before a file rename/move
\033[36mxar\033[35m executes CMD after a file rename/move
\033[36mxbd\033[35m executes CMD before a file delete
\033[36mxad\033[35m executes CMD after a file delete
\033[36mxm\033[35m executes CMD on message
\033[36mxban\033[35m executes CMD if someone gets banned
\033[0m
can be defined as --args or volflags; for example \033[36m
--xau notify-send
-v .::r:c,xau=notify-send
\033[0m
commands specified as --args are appended to volflags;
each --arg and volflag can be specified multiple times,
each command will execute in order unless one returns non-zero
optionally prefix the command with comma-sep. flags similar to -mtp:
\033[36mf\033[35m forks the process, doesn't wait for completion
\033[36mc\033[35m checks return code, blocks the action if non-zero
\033[36mj\033[35m provides json with info as 1st arg instead of filepath
\033[36mwN\033[35m waits N sec after command has been started before continuing
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
\033[36mkt\033[35m kills the entire process tree on timeout (default),
\033[36mkm\033[35m kills just the main process
\033[36mkn\033[35m lets it continue running until copyparty is terminated
\033[36mc0\033[35m show all process output (default)
\033[36mc1\033[35m show only stderr
\033[36mc2\033[35m show only stdout
\033[36mc3\033[35m mute all process output
\033[0m
each hook is executed once for each event, except for \033[36mxiu\033[0m
which builds up a backlog of uploads, running the hook just once
as soon as the volume has been idle for iN seconds (5 by default)
\033[36mxiu\033[0m is also unique in that it will pass the metadata to the
executed program on STDIN instead of as argv arguments, and
it also includes the wark (file-id/hash) as a json property
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
if the program returns 0 (true/OK) then the ban will NOT happen
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
so it's recommended to use the \033[36mf\033[0m flag unless you really need
to wait for the hook to finish before continuing (without \033[36mf\033[0m
the upload speed can easily drop to 10% for small files)"""
),
],
[
@@ -597,9 +670,9 @@ def get_sects():
\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
\033[32mwal\033[0m = another 21x faster on HDDs yet 90% as safe; same pitfall as \033[33mswal\033[0m except more likely
\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash
@@ -607,9 +680,86 @@ def get_sects():
"""
),
],
[
"pwhash",
"password hashing",
dedent(
"""
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (passwords which do not start with '+') and then terminate afterwards
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
\033[36m--ah-alg argon2\033[0m # which is the same as:
\033[36m--ah-alg argon2,3,256,4,19\033[0m
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
\033[36m--ah-alg scrypt\033[0m # which is the same as:
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
\033[36m--ah-alg sha2\033[0m # which is the same as:
\033[36m--ah-alg sha2,424242\033[0m
use sha2-512 with 424242 iterations
recommended: \033[32m--ah-alg argon2\033[0m
(takes about 0.4 sec and 256M RAM to process a new password)
argon2 needs python-package argon2-cffi,
scrypt needs openssl,
sha2 is always available
"""
),
],
[
"zm",
"mDNS debugging",
dedent(
"""
the mDNS protocol is multicast-based, which means there are thousands
of fun and interesting ways for it to break unexpectedly
things to check if it does not work at all:
* is there a firewall blocking port 5353 on either the server or client?
(for example, clients may be able to send queries to copyparty,
but the replies could get lost)
* is multicast accidentally disabled on either the server or client?
(look for mDNS log messages saying "new client on [...]")
* the router/switch must be multicast and igmp capable
things to check if it works for a while but then it doesn't:
* is there a firewall blocking port 5353 on either the server or client?
(copyparty may be unable to see the queries from the clients, but the
clients may still be able to see the initial unsolicited announce,
so it works for about 2 minutes after startup until TTL expires)
* does the client have multiple IPs on its interface, and some of the
IPs are in subnets which the copyparty server is not a member of?
for both of the above intermittent issues, try --zm-spam 30
(not spec-compliant but nothing will mind)
"""
),
],
]
def build_flags_desc():
ret = ""
for grp, flags in flagcats.items():
ret += "\n\n\033[0m" + grp
for k, v in flags.items():
v = v.replace("\n", "\n ")
ret += "\n \033[36m{}\033[35m {}".format(k, v)
return ret + "\033[0m"
# fmt: off
@@ -647,18 +797,22 @@ def add_upload(ap):
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m0\033[0m] = off and warn if enabled, [\033[32m1\033[0m] = off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
@@ -668,36 +822,64 @@ def add_network(ap):
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd), [\033[32m2\033[0m]=cloudflare, [\033[32m3\033[0m]=nginx, [\033[32m-1\033[0m]=closest proxy")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from (argument must be lowercase, but not the actual header)")
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (--xff-hdr) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using --xff-hdr=cf-connecting-ip (or similar)")
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
if ANYWIN:
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
else:
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
def add_tls(ap):
def add_tls(ap, cert_path):
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
def add_cert(ap, cert_path):
cert_dir = os.path.dirname(cert_path)
ap2 = ap.add_argument_group('TLS certificate generator options')
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
def add_zeroconf(ap):
ap2 = ap.add_argument_group("Zeroconf options")
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
def add_zc_mdns(ap):
ap2 = ap.add_argument_group("Zeroconf-mDNS options:")
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
@@ -711,18 +893,19 @@ def add_zc_mdns(ap):
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server")
def add_zc_ssdp(ap):
ap2 = ap.add_argument_group("Zeroconf-SSDP options:")
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
ap2.add_argument("--zsid", metavar="UUID", type=u, default=uuid.uuid4().urn[4:], help="USN (device identifier) to announce")
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
def add_ftp(ap):
@@ -730,6 +913,7 @@ def add_ftp(ap):
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
ap2.add_argument("--ftpv", action="store_true", help="verbose")
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
@@ -737,24 +921,62 @@ def add_ftp(ap):
def add_webdav(ap):
ap2 = ap.add_argument_group('WebDAV options')
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
def add_smb(ap):
ap2 = ap.add_argument_group('SMB/CIFS options')
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet!")
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
ap2.add_argument("--smb-nwa-1", action="store_true", help="disable impacket#1433 workaround (truncate directory listings to 64kB)")
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
ap2.add_argument("--smbv", action="store_true", help="verbose")
ap2.add_argument("--smbvv", action="store_true", help="verboser")
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
def add_handlers(ap):
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing PY file")
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing PY file")
ap2.add_argument("--hot-handlers", action="store_true", help="reload handlers on each request -- expensive but convenient when hacking on stuff")
def add_hooks(ap):
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404/403/url)")
def add_stats(ap):
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
def add_yolo(ap):
ap2 = ap.add_argument_group('yolo options')
ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
ap2.add_argument("--getmod", action="store_true", help="permit ?move=[...] and ?delete as GET")
def add_optouts(ap):
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
@@ -762,20 +984,23 @@ def add_optouts(ap):
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show --name in <title>")
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
def add_safety(ap, fk_salt):
def add_safety(ap):
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 --turbo=-1 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
@@ -786,22 +1011,43 @@ def add_safety(ap, fk_salt):
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (422 is server fuzzing, invalid POSTs and so)")
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (decent replacement for --ban-404 if that can't be used)")
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
def add_salt(ap, fk_salt, ah_salt):
ap2 = ap.add_argument_group('salting options')
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example [\033[32midx\033[0m] will do volume indexing + metadata analysis")
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
def add_logging(ap):
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log passphrase of failed login attempts: 0=terse, 1=plaintext, 2=hashed")
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
@@ -820,10 +1066,10 @@ def add_thumbnail(ap):
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image by default (volflag=nocrop)")
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
@@ -832,46 +1078,46 @@ def add_thumbnail(ap):
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
def add_transcoding(ap):
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
def add_db_general(ap, hcores):
ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans (volflag=nohash)")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans (volflag=noidx)")
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
def add_db_metadata(ap):
@@ -887,7 +1133,7 @@ def add_db_metadata(ap):
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/ffprobe parsers")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash,up_ip,.up_at")
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
default=".vq,.aq,vc,ac,fmt,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
@@ -895,21 +1141,34 @@ def add_db_metadata(ap):
def add_ui(ap, retry):
ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: eng nor")
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with -nb)")
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
def add_debug(ap):
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
@@ -935,12 +1194,15 @@ def run_argparse(
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
)
try:
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
except:
fk_salt = "hunter2"
cert_path = os.path.join(E.cfg, "cert.pem")
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
fk_salt = get_fk_salt(cert_path)
ah_salt = get_ah_salt()
# alpine peaks at 5 threads for some reason,
# all others scale past that (but try to avoid SMT),
# 5 should be plenty anyways (3 GiB/s on most machines)
hcores = min(CORES, 5 if CORES > 8 else 4)
tty = os.environ.get("TERM", "").lower() == "linux"
@@ -948,7 +1210,8 @@ def run_argparse(
add_general(ap, nc, srvname)
add_network(ap)
add_tls(ap)
add_tls(ap, cert_path)
add_cert(ap, cert_path)
add_qr(ap, tty)
add_zeroconf(ap)
add_zc_mdns(ap)
@@ -961,9 +1224,14 @@ def run_argparse(
add_ftp(ap)
add_webdav(ap)
add_smb(ap)
add_safety(ap, fk_salt)
add_safety(ap)
add_salt(ap, fk_salt, ah_salt)
add_optouts(ap)
add_shutdown(ap)
add_yolo(ap)
add_handlers(ap)
add_hooks(ap)
add_stats(ap)
add_ui(ap, retry)
add_admin(ap)
add_logging(ap)
@@ -1026,9 +1294,12 @@ def main(argv: Optional[list[str]] = None) -> None:
showlic()
sys.exit(0)
if EXE:
print("pybin: {}\n".format(pybin), end="")
ensure_locale()
if HAVE_SSL:
ensure_cert()
ensure_webdeps()
for k, v in zip(argv[1:], argv[2:]):
if k == "-c" and os.path.isfile(v):
@@ -1041,16 +1312,22 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v)
argv.extend(supp)
deprecated: list[tuple[str, str]] = []
deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
for dk, nk in deprecated:
try:
idx = argv.index(dk)
except:
idx = -1
ov = ""
for n, k in enumerate(argv):
if k == dk or k.startswith(dk + "="):
idx = n
if "=" in k:
ov = "=" + k.split("=", 1)[1]
if idx < 0:
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
lprint(msg.format(dk, nk))
argv[idx] = nk
argv[idx] = nk + ov
time.sleep(2)
da = len(argv) == 1
@@ -1058,7 +1335,8 @@ def main(argv: Optional[list[str]] = None) -> None:
if da:
argv.extend(["--qr"])
if ANYWIN or not os.geteuid():
argv.extend(["-p80,443,3923", "--ign-ebind"])
# win10 allows symlinks if admin; can be unexpected
argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
except:
pass
@@ -1080,6 +1358,7 @@ def main(argv: Optional[list[str]] = None) -> None:
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
try:
al = run_argparse(argv, fmtr, retry, nc)
dal = run_argparse([], fmtr, retry, nc)
break
except SystemExit:
raise
@@ -1089,17 +1368,23 @@ def main(argv: Optional[list[str]] = None) -> None:
try:
assert al # type: ignore
assert dal # type: ignore
al.E = E # __init__ is not shared when oxidized
except:
sys.exit(1)
if WINDOWS and not al.keep_qem:
if al.ansi:
al.no_ansi = False
elif not al.no_ansi:
al.ansi = VT100
if WINDOWS and not al.keep_qem and not al.ah_cli:
try:
disable_quickedit()
except:
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100:
if al.ansi:
al.wintitle = ""
nstrs: list[str] = []
@@ -1118,11 +1403,9 @@ def main(argv: Optional[list[str]] = None) -> None:
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
elif re.sub("^[rwmdgG]*", "", opt) and "," not in opt:
elif re.sub("^[rwmdgGha]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":
perm = "rw"
na.append(perm + "," + opt[1:])
else:
na.append(opt)
@@ -1178,6 +1461,7 @@ def main(argv: Optional[list[str]] = None) -> None:
configure_ssl_ciphers(al)
else:
warn("ssl module does not exist; cannot enable https")
al.http_only = True
if PY2 and WINDOWS and al.e2d:
warn(
@@ -1194,7 +1478,7 @@ def main(argv: Optional[list[str]] = None) -> None:
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al, argv, "".join(printed)).run()
SvcHub(al, dal, argv, "".join(printed)).run()
if __name__ == "__main__":

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 5, 2)
CODENAME = "babel"
BUILD_DT = (2022, 12, 12)
VERSION = (1, 9, 12)
CODENAME = "prometheable"
BUILD_DT = (2023, 10, 15)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

File diff suppressed because it is too large Load Diff

View File

@@ -9,7 +9,7 @@ import queue
from .__init__ import CORES, TYPE_CHECKING
from .broker_mpw import MpWorker
from .broker_util import try_exec
from .broker_util import ExceptionalQueue, try_exec
from .util import Daemon, mp
if TYPE_CHECKING:
@@ -69,7 +69,7 @@ class BrokerMp(object):
while procs:
if procs[-1].is_alive():
time.sleep(0.1)
time.sleep(0.05)
continue
procs.pop()
@@ -107,6 +107,19 @@ class BrokerMp(object):
if retq_id:
proc.q_pend.put((retq_id, "retq", rv))
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
    # new non-ipc invoking managed service in hub
    #
    # resolve the dotted path "dest" into an attribute chain on the hub
    # (each segment via getattr) and invoke the resulting callable
    # in-process; try_exec captures any exception into the return value,
    # which is handed back through a queue so callers can use the same
    # recv-style pattern as the cross-process broker variant
    obj = self.hub
    for node in dest.split("."):
        obj = getattr(obj, node)

    rv = try_exec(True, obj, *args)

    retq = ExceptionalQueue(1)
    retq.put(rv)
    return retq
def say(self, dest: str, *args: Any) -> None:
"""
send message to non-hub component in other process,

226
copyparty/cert.py Normal file
View File

@@ -0,0 +1,226 @@
import calendar
import errno
import filecmp
import json
import os
import shutil
import time
from .util import Netdev, runcmd
HAVE_CFSSL = True
if True: # pylint: disable=using-constant-test
from .util import RootLogger
def ensure_cert(log: "RootLogger", args) -> None:
    """
    make sure args.cert points at a usable pem file, falling back to the
    bundled snakeoil cert when nothing else exists; the default cert (and
    the entire TLS support) is only here to enable the crypto.subtle
    javascript API, which is necessary due to the webkit guys being
    massive memers (https://www.chromium.org/blink/webcrypto)

    i feel awful about this and so should they
    """
    insec_path = os.path.join(args.E.mod, "res/insecure.pem")
    appdata_path = os.path.join(args.E.cfg, "cert.pem")

    # only autocreate the default appdata location;
    # a user-specified cert path must already exist
    if not os.path.isfile(args.cert):
        if args.cert != appdata_path:
            raise Exception("certificate file does not exist: " + args.cert)

        shutil.copy(insec_path, args.cert)

    # sanity-check the pem layout: private key first, then server cert
    with open(args.cert, "rb") as f:
        pem = f.read()

    key_ofs = pem.find(b" PRIVATE KEY-")
    crt_ofs = pem.find(b" CERTIFICATE-")
    m = "unsupported certificate format: "
    if key_ofs < 0:
        raise Exception(m + "no private key inside pem")
    if crt_ofs < 0:
        raise Exception(m + "no server certificate inside pem")
    if key_ofs > crt_ofs:
        raise Exception(m + "private key must appear before server certificate")

    # best-effort warning when the well-known insecure cert is in use
    try:
        if filecmp.cmp(args.cert, insec_path):
            t = "using default TLS certificate; https will be insecure:\033[36m {}"
            log("cert", t.format(args.cert), 3)
    except:
        pass
# speaking of the default cert,
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def _read_crt(args, fn):
    """
    read certificate metadata from args.crt_dir/fn via cfssl-certinfo;
    returns (expiry-unixtime, info-dict), or (0, {}) when the cert is
    missing or could not be parsed
    """
    try:
        if not os.path.exists(os.path.join(args.crt_dir, fn)):
            return 0, {}

        rc, so, _ = runcmd(["cfssl-certinfo", "-cert", fn], cwd=args.crt_dir)
        if rc:
            return 0, {}

        inf = json.loads(so)
        ts = time.strptime(inf["not_after"], "%Y-%m-%dT%H:%M:%SZ")
        return calendar.timegm(ts), inf
    except OSError as ex:
        # ENOENT presumably means the cfssl-certinfo binary itself is
        # absent (TODO confirm against runcmd); propagate so the caller
        # can fall back to the bundled cert
        if ex.errno == errno.ENOENT:
            raise
        return 0, {}
    except:
        return 0, {}
def _gen_ca(log: "RootLogger", args):
    """
    create a new self-signed CA with cfssl, unless the existing one
    (args.crt_dir/ca.pem) still has more than 10% of args.crt_cdays
    remaining before expiry

    raises Exception if any cfssl subcommand fails
    """
    # skip regeneration while the current ca is still fresh enough
    expiry = _read_crt(args, "ca.pem")[0]
    if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
        return

    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
    cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "CN": cn,
        "CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")
    log("cert", "creating new ca ...", 6)
    cmd = "cfssl gencert -initca -"
    rc, so, se = runcmd(cmd.split(), 30, sin=sin)
    if rc:
        # bugfix: was Exception(msg, 3) -- the stray log-severity arg
        # made str(ex) render as a tuple in gencert's error log
        raise Exception("failed to create ca-cert: {}, {}".format(rc, se))

    # split the cfssl json output into ca.pem / ca-key.pem / ca.csr
    cmd = "cfssljson -bare ca"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate ca-cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "ca")
    os.rename(bname + "-key.pem", bname + ".key")  # normalize key filename
    os.unlink(bname + ".csr")  # signing request no longer needed
    log("cert", "new ca OK", 2)
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """
    create/refresh the cfssl server certificate so it covers every
    configured hostname and detected IP; reuses the existing cert when
    it is still fresh and already lists all required names
    """
    # build the list of subject-alternative-names for the cert
    names = args.crt_ns.split(",") if args.crt_ns else []
    if not args.crt_exact:
        # also cover subdomains of each configured name
        for n in names[:]:
            names.append("*.{}".format(n))

    if not args.crt_noip:
        # include the address of every detected network device
        for ip in netdevs.keys():
            names.append(ip.split("/")[0])

    if args.crt_nolo:
        # localhost names explicitly excluded by config
        names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]

    if not args.crt_nohn:
        names.append(args.name)
        names.append(args.name + ".local")

    if not names:
        names = ["127.0.0.1"]

    if "127.0.0.1" in names or "::1" in names:
        names.append("localhost")

    # dedupe while preserving insertion order
    names = list({x: 1 for x in names}.keys())

    try:
        # keep the existing cert if it has >10% of crt_sdays remaining,
        # covers all names, and is not the bundled insecure cert;
        # any raise below falls through to regeneration
        expiry, inf = _read_crt(args, "srv.pem")
        expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
        cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
        for n in names:
            if n not in inf["sans"]:
                raise Exception("does not have {}".format(n))
        if expired:
            raise Exception("old server-cert has expired")
        if not filecmp.cmp(args.cert, cert_insec):
            return
    except Exception as ex:
        log("cert", "will create new server-cert; {}".format(ex))

    log("cert", "creating server-cert ...", 6)
    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
    # signing profile consumed by cfssl via -config=cfssl.json
    cfg = {
        "signing": {
            "default": {
                "backdate": backdate,
                "expiry": expiry,
                "usages": ["signing", "key encipherment", "server auth"],
            }
        }
    }
    with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
        f.write(json.dumps(cfg).encode("utf-8"))

    cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")
    cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
    acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
    rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to create cert: {}, {}".format(rc, se))

    # split the cfssl json output into srv.pem / srv-key.pem / srv.csr
    cmd = "cfssljson -bare srv"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "srv")
    try:
        # drop any stale key so the rename below cannot fail on windows
        os.unlink(bname + ".key")
    except:
        pass
    os.rename(bname + "-key.pem", bname + ".key")
    os.unlink(bname + ".csr")

    # bundle key + server-cert + ca-cert into the single pem at args.cert
    with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
        ca = f.read()
    with open(bname + ".key", "rb") as f:
        skey = f.read()
    with open(bname + ".pem", "rb") as f:
        scrt = f.read()
    with open(args.cert, "wb") as f:
        f.write(skey + scrt + ca)

    log("cert", "new server-cert OK", 2)
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """
    create/refresh the TLS certificates using cfssl when available,
    falling back to the bundled insecure cert otherwise
    """
    global HAVE_CFSSL

    if args.http_only:
        return  # TLS disabled entirely; no cert needed

    if not HAVE_CFSSL or args.no_crt:
        # cert generation disabled, or cfssl already failed once
        ensure_cert(log, args)
        return

    try:
        _gen_ca(log, args)
        _gen_srv(log, args, netdevs)
        return
    except Exception as ex:
        HAVE_CFSSL = False  # don't retry cfssl after a failure
        log("cert", "could not create TLS certificates: {}".format(ex), 3)
        if getattr(ex, "errno", 0) == errno.ENOENT:
            zs = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
            log("cert", zs, 6)

    ensure_cert(log, args)

175
copyparty/cfg.py Normal file
View File

@@ -0,0 +1,175 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
# every option the argparser accepts with a single leading dash;
# regenerate with:
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
onedash = {flag for flag in zs.split()}
def vf_bmap() -> dict[str, str]:
    """argv-to-volflag: simple bools"""
    # options whose volflag name differs from the argv name
    ret = {
        "never_symlink": "neversymlink",
        "no_dedup": "copydupes",
        "no_dupe": "nodupe",
        "no_forget": "noforget",
        "th_no_crop": "nocrop",
        "dav_auth": "davauth",
        "dav_rt": "davrt",
    }
    # options which map straight through under the same name
    passthru = (
        "dotsrch",
        "e2t",
        "e2ts",
        "e2tsr",
        "e2v",
        "e2vu",
        "e2vp",
        "grid",
        "hardlink",
        "magic",
        "no_sb_md",
        "no_sb_lg",
        "rand",
        "xdev",
        "xlink",
        "xvol",
    )
    ret.update({k: k for k in passthru})
    return ret
def vf_vmap() -> dict[str, str]:
    """argv-to-volflag: simple values"""
    # two renamed options, then the ones that keep their argv name
    renamed = {"th_convt": "convt", "th_size": "thsize"}
    passthru = ("dbd", "lg_sbf", "md_sbf", "nrand", "sort", "unlist", "u2ts")
    return {**renamed, **{k: k for k in passthru}}
def vf_cmap() -> dict[str, str]:
    """argv-to-volflag: complex/lists"""
    # all of these map straight through under the same name
    return {k: k for k in ("html_head", "mte", "mth")}
# permission-letter -> description of what that letter grants a user
permdescs = {
    "r": "read; list folder contents, download files",
    "w": 'write; upload files; need "r" to see the uploads',
    "m": 'move; move files and folders; need "w" at destination',
    "d": "delete; permanently delete files and folders",
    "g": "get; download files, but cannot see folder contents",
    "G": 'upget; same as "g" but can see filekeys of their own uploads',
    "h": 'html; same as "g" but folders return their index.html',
    "a": "admin; can see uploader IPs, config-reload",
}
# volflag descriptions grouped by category; keys may carry an example
# value after "=" which is stripped when building flagdescs below
flagcats = {
    "uploads, general": {
        "nodupe": "rejects existing files (instead of symlinking them)",
        "hardlink": "does dedup with hardlinks instead of symlinks",
        "neversymlink": "disables symlink fallback; full copy instead",
        "copydupes": "disables dedup, always saves full copies of dupes",
        "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
        "nosub": "forces all uploads into the top folder of the vfs",
        "magic": "enables filetype detection for nameless uploads",
        "gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
        "pk": "forces server-side compression, optional arg: xz,9",
    },
    "upload rules": {
        "maxn=250,600": "max 250 uploads over 15min",
        "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
        "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
        "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
        "rand": "force randomized filenames, 9 chars long by default",
        "nrand=N": "randomized filenames are N chars long",
        "u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
        "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
        "df=1g": "ensure 1 GiB free disk space",
    },
    "upload rotation\n(moves all uploads into the specified folder structure)": {
        "rotn=100,3": "3 levels of subfolders with 100 entries in each",
        "rotf=%Y-%m/%d-%H": "date-formatted organizing",
        "lifetime=3600": "uploads are deleted after 1 hour",
    },
    "database, general": {
        "e2d": "enable database; makes files searchable + enables upload dedup",
        "e2ds": "scan writable folders for new files on startup; also sets -e2d",
        "e2dsa": "scans all folders for new files on startup; also sets -e2d",
        "e2t": "enable multimedia indexing; makes it possible to search for tags",
        "e2ts": "scan existing files for tags on startup; also sets -e2t",
        "e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
        "d2ts": "disables metadata collection for existing files",
        "d2ds": "disables onboot indexing, overrides -e2ds*",
        "d2t": "disables metadata collection, overrides -e2t*",
        "d2v": "disables file verification, overrides -e2v*",
        "d2d": "disables all database stuff, overrides -e2*",
        "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
        "scan=60": "scan for new files every 60sec, same as --re-maxage",
        "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
        "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
        "noforget": "don't forget files when deleted from disk",
        "fat32": "avoid excessive reindexing on android sdcardfs",
        "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
        "xlink": "cross-volume dupe detection / linking",
        "xdev": "do not descend into other filesystems",
        "xvol": "do not follow symlinks leaving the volume root",
        "dotsrch": "show dotfiles in search results",
        "nodotsrch": "hide dotfiles in search results (default)",
    },
    'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
        "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
        "mtp=ahash,vhash=media-hash.py": "collects two tags at once",
    },
    "thumbnails": {
        "dthumb": "disables all thumbnails",
        "dvthumb": "disables video thumbnails",
        "dathumb": "disables audio thumbnails (spectrograms)",
        "dithumb": "disables image thumbnails",
        "thsize": "thumbnail res; WxH",
        "nocrop": "disable center-cropping by default",
        "convt": "conversion timeout in seconds",
    },
    "handlers\n(better explained in --help-handlers)": {
        "on404=PY": "handle 404s by executing PY file",
        "on403=PY": "handle 403s by executing PY file",
    },
    "event hooks\n(better explained in --help-hooks)": {
        "xbu=CMD": "execute CMD before a file upload starts",
        "xau=CMD": "execute CMD after a file upload finishes",
        "xiu=CMD": "execute CMD after all uploads finish and volume is idle",
        "xbr=CMD": "execute CMD before a file rename/move",
        "xar=CMD": "execute CMD after a file rename/move",
        "xbd=CMD": "execute CMD before a file delete",
        "xad=CMD": "execute CMD after a file delete",
        "xm=CMD": "execute CMD on message",
        "xban=CMD": "execute CMD if someone gets banned",
    },
    "client and ux": {
        "grid": "show grid/thumbnails by default",
        "sort": "default sort order",
        "unlist": "dont list files matching REGEX",
        "html_head=TXT": "includes TXT in the <head>",
        "robots": "allows indexing by search engines (default)",
        "norobots": "kindly asks search engines to leave",
        "no_sb_md": "disable js sandbox for markdown files",
        "no_sb_lg": "disable js sandbox for prologue/epilogue",
        "sb_md": "enable js sandbox for markdown files (default)",
        "sb_lg": "enable js sandbox for prologue/epilogue (default)",
        "md_sbf": "list of markdown-sandbox safeguards to disable",
        "lg_sbf": "list of *logue-sandbox safeguards to disable",
        "nohtml": "return html and markdown as text/html",
    },
    "others": {
        "fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
        "fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
        "davauth": "ask webdav clients to login for all folders",
        "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
    },
}

# flat lookup: volflag name (example values stripped) -> description
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}

View File

@@ -2,29 +2,40 @@
from __future__ import print_function, unicode_literals
import argparse
import errno
import logging
import os
import stat
import sys
import time
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
try:
import asynchat
except:
sys.path.append(os.path.join(E.mod, "vend"))
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.ioloop import IOLoop
from pyftpdlib.servers import FTPServer
from .__init__ import PY2, TYPE_CHECKING, E
from .authsrv import VFS
from .bos import bos
from .util import Daemon, Pebkac, exclude_dotfiles, fsenc, ipnorm
try:
from pyftpdlib.ioloop import IOLoop
except ImportError:
p = os.path.join(E.mod, "vend")
print("loading asynchat from " + p)
sys.path.append(p)
from pyftpdlib.ioloop import IOLoop
from .util import (
Daemon,
Pebkac,
exclude_dotfiles,
fsenc,
ipnorm,
pybin,
relchk,
runhook,
sanitize_fn,
vjoin,
)
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -34,6 +45,12 @@ if True: # pylint: disable=using-constant-test
from typing import Any, Optional
class FSE(FilesystemError):
    """FilesystemError carrying a severity; callers in this file treat
    severity > 0 as "must propagate" (e.g. malicious paths) instead of
    falling back to best-effort behavior"""

    def __init__(self, msg: str, severity: int = 0) -> None:
        # bugfix: was super(FilesystemError, self), which skips
        # FilesystemError itself in the MRO -- harmless only as long as
        # FilesystemError never defines its own __init__
        super(FSE, self).__init__(msg)
        self.severity = severity
class FtpAuth(DummyAuthorizer):
def __init__(self, hub: "SvcHub") -> None:
super(FtpAuth, self).__init__()
@@ -43,6 +60,7 @@ class FtpAuth(DummyAuthorizer):
self, username: str, password: str, handler: Any
) -> None:
handler.username = "{}:{}".format(username, password)
handler.uname = "*"
ip = handler.addr[0]
if ip.startswith("::ffff:"):
@@ -59,10 +77,13 @@ class FtpAuth(DummyAuthorizer):
raise AuthenticationFailed("banned")
asrv = self.hub.asrv
if username == "anonymous":
uname = "*"
else:
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*"
uname = "*"
if username != "anonymous":
for zs in (password, username):
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
if zs:
uname = zs
break
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
g = self.hub.gpwd
@@ -74,14 +95,14 @@ class FtpAuth(DummyAuthorizer):
raise AuthenticationFailed("Authentication failed.")
handler.username = uname
handler.uname = handler.username = uname
def get_home_dir(self, username: str) -> str:
return "/"
def has_user(self, username: str) -> bool:
asrv = self.hub.asrv
return username in asrv.acct
return username in asrv.acct or username in asrv.iacct
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
return True # handled at filesystem layer
@@ -100,17 +121,18 @@ class FtpFs(AbstractedFS):
def __init__(
self, root: str, cmd_channel: Any
) -> None: # pylint: disable=super-init-not-called
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
self.h = cmd_channel # type: FTPHandler
self.cmd_channel = cmd_channel # type: FTPHandler
self.hub: "SvcHub" = cmd_channel.hub
self.args = cmd_channel.args
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
self.uname = cmd_channel.uname
self.cwd = "/" # pyftpdlib convention of leading slash
self.root = "/var/lib/empty"
self.can_read = self.can_write = self.can_move = False
self.can_delete = self.can_get = self.can_upget = False
self.can_admin = False
self.listdirinfo = self.listdir
self.chdir(".")
@@ -122,16 +144,36 @@ class FtpFs(AbstractedFS):
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
) -> tuple[str, VFS, str]:
try:
vpath = vpath.replace("\\", "/").lstrip("/")
vpath = vpath.replace("\\", "/").strip("/")
rd, fn = os.path.split(vpath)
if ANYWIN and relchk(rd):
logging.warning("malicious vpath: %s", vpath)
t = "Unsupported characters in [{}]"
raise FSE(t.format(vpath), 1)
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
vpath = vjoin(rd, fn)
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
if not vfs.realpath:
raise FilesystemError("no filesystem mounted at this path")
t = "No filesystem mounted at [{}]"
raise FSE(t.format(vpath))
return os.path.join(vfs.realpath, rem)
if "xdev" in vfs.flags or "xvol" in vfs.flags:
ap = vfs.canonical(rem)
avfs = vfs.chk_ap(ap)
t = "Permission denied in [{}]"
if not avfs:
raise FSE(t.format(vpath), 1)
cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
if r and not cr or w and not cw or m and not cm or d and not cd:
raise FSE(t.format(vpath), 1)
return os.path.join(vfs.realpath, rem), vfs, rem
except Pebkac as ex:
raise FilesystemError(str(ex))
raise FSE(str(ex))
def rv2a(
self,
@@ -140,7 +182,7 @@ class FtpFs(AbstractedFS):
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
) -> tuple[str, VFS, str]:
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
def ftp2fs(self, ftppath: str) -> str:
@@ -154,7 +196,7 @@ class FtpFs(AbstractedFS):
def validpath(self, path: str) -> bool:
if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"):
raise FilesystemError("access to this file is forbidden")
raise FSE("Access to this file is forbidden", 1)
return True
@@ -162,7 +204,7 @@ class FtpFs(AbstractedFS):
r = "r" in mode
w = "w" in mode or "a" in mode or "+" in mode
ap = self.rv2a(filename, r, w)
ap = self.rv2a(filename, r, w)[0]
if w:
try:
st = bos.stat(ap)
@@ -171,7 +213,7 @@ class FtpFs(AbstractedFS):
td = 0
if td < -1 or td > self.args.ftp_wt:
raise FilesystemError("cannot open existing file for writing")
raise FSE("Cannot open existing file for writing")
self.validpath(ap)
return open(fsenc(ap), mode)
@@ -180,9 +222,17 @@ class FtpFs(AbstractedFS):
nwd = join(self.cwd, path)
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
ap = vfs.canonical(rem)
if not bos.path.isdir(ap):
try:
st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode):
raise Exception()
except:
# returning 550 is library-default and suitable
raise FilesystemError("Failed to change directory")
raise FSE("No such file or directory")
avfs = vfs.chk_ap(ap, st)
if not avfs:
raise FSE("Permission denied", 1)
self.cwd = nwd
(
@@ -192,16 +242,19 @@ class FtpFs(AbstractedFS):
self.can_delete,
self.can_get,
self.can_upget,
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
self.can_admin,
) = avfs.can_access("", self.h.uname)
def mkdir(self, path: str) -> None:
ap = self.rv2a(path, w=True)
bos.mkdir(ap)
ap = self.rv2a(path, w=True)[0]
bos.makedirs(ap) # filezilla expects this
def listdir(self, path: str) -> list[str]:
vpath = join(self.cwd, path).lstrip("/")
vpath = join(self.cwd, path)
try:
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
ap, vfs, rem = self.v2a(vpath, True, False)
if not bos.path.isdir(ap):
raise FSE("No such file or directory", 1)
fsroot, vfs_ls1, vfs_virt = vfs.ls(
rem,
@@ -217,8 +270,12 @@ class FtpFs(AbstractedFS):
vfs_ls.sort()
return vfs_ls
except:
if vpath:
except Exception as ex:
# panic on malicious names
if getattr(ex, "severity", 0):
raise
if vpath.strip("/"):
# display write-only folders as empty
return []
@@ -227,43 +284,49 @@ class FtpFs(AbstractedFS):
return list(sorted(list(r.keys())))
def rmdir(self, path: str) -> None:
ap = self.rv2a(path, d=True)
bos.rmdir(ap)
ap = self.rv2a(path, d=True)[0]
try:
bos.rmdir(ap)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def remove(self, path: str) -> None:
if self.args.no_del:
raise FilesystemError("the delete feature is disabled in server config")
raise FSE("The delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/")
try:
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp], [])
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
except Exception as ex:
raise FilesystemError(str(ex))
raise FSE(str(ex))
def rename(self, src: str, dst: str) -> None:
if not self.can_move:
raise FilesystemError("not allowed for user " + self.h.username)
raise FSE("Not allowed for user " + self.h.uname)
if self.args.no_mv:
t = "the rename/move feature is disabled in server config"
raise FilesystemError(t)
raise FSE("The rename/move feature is disabled in server config")
svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/")
try:
self.hub.up2k.handle_mv(self.uname, svp, dvp)
except Exception as ex:
raise FilesystemError(str(ex))
raise FSE(str(ex))
def chmod(self, path: str, mode: str) -> None:
pass
def stat(self, path: str) -> os.stat_result:
try:
ap = self.rv2a(path, r=True)
ap = self.rv2a(path, r=True)[0]
return bos.stat(ap)
except:
ap = self.rv2a(path)
except FSE as ex:
if ex.severity:
raise
ap = self.rv2a(path)[0]
st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode):
raise
@@ -271,44 +334,50 @@ class FtpFs(AbstractedFS):
return st
def utime(self, path: str, timeval: float) -> None:
ap = self.rv2a(path, w=True)
ap = self.rv2a(path, w=True)[0]
return bos.utime(ap, (timeval, timeval))
def lstat(self, path: str) -> os.stat_result:
ap = self.rv2a(path)
ap = self.rv2a(path)[0]
return bos.stat(ap)
def isfile(self, path: str) -> bool:
try:
st = self.stat(path)
return stat.S_ISREG(st.st_mode)
except:
except Exception as ex:
if getattr(ex, "severity", 0):
raise
return False # expected for mojibake in ftp_SIZE()
def islink(self, path: str) -> bool:
ap = self.rv2a(path)
ap = self.rv2a(path)[0]
return bos.path.islink(ap)
def isdir(self, path: str) -> bool:
try:
st = self.stat(path)
return stat.S_ISDIR(st.st_mode)
except:
except Exception as ex:
if getattr(ex, "severity", 0):
raise
return True
def getsize(self, path: str) -> int:
ap = self.rv2a(path)
ap = self.rv2a(path)[0]
return bos.path.getsize(ap)
def getmtime(self, path: str) -> float:
ap = self.rv2a(path)
ap = self.rv2a(path)[0]
return bos.path.getmtime(ap)
def realpath(self, path: str) -> str:
return path
def lexists(self, path: str) -> bool:
ap = self.rv2a(path)
ap = self.rv2a(path)[0]
return bos.path.lexists(ap)
def get_user_by_uid(self, uid: int) -> str:
@@ -322,16 +391,21 @@ class FtpHandler(FTPHandler):
abstracted_fs = FtpFs
hub: "SvcHub"
args: argparse.Namespace
uname: str
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
self.hub: "SvcHub" = FtpHandler.hub
self.args: argparse.Namespace = FtpHandler.args
self.uname = "*"
if PY2:
FTPHandler.__init__(self, conn, server, ioloop)
else:
super(FtpHandler, self).__init__(conn, server, ioloop)
cip = self.remote_ip
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
# abspath->vpath mapping to resolve log_transfer paths
self.vfs_map: dict[str, str] = {}
@@ -341,8 +415,24 @@ class FtpHandler(FTPHandler):
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
# Optional[str]
vp = join(self.fs.cwd, file).lstrip("/")
ap = self.fs.v2a(vp)
ap, vfs, rem = self.fs.v2a(vp, w=True)
self.vfs_map[ap] = vp
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
None,
xbu,
ap,
vfs.canonical(rem),
"",
self.uname,
0,
0,
self.cli_ip,
0,
"",
):
raise FSE("Upload blocked by xbu server config")
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
ret = FTPHandler.ftp_STOR(self, file, mode)
# print("ftp_STOR: {} {} OK".format(vp, mode))
@@ -363,15 +453,17 @@ class FtpHandler(FTPHandler):
# print("xfer_end: {} => {}".format(ap, vp))
if vp:
vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True)
vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file(
vfs.realpath,
vfs.vpath,
vfs.flags,
rem,
fn,
self.remote_ip,
self.cli_ip,
time.time(),
self.uname,
)
return FTPHandler.log_transfer(
@@ -402,10 +494,10 @@ class Ftpd(object):
h1 = SftpHandler
except:
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
print(t.format(sys.executable))
print(t.format(pybin))
sys.exit(1)
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
h1.certfile = self.args.cert
h1.tls_control_required = True
h1.tls_data_required = True
@@ -413,9 +505,9 @@ class Ftpd(object):
for h_lp in hs:
h2, lp = h_lp
h2.hub = hub
h2.args = hub.args
h2.authorizer = FtpAuth(hub)
FtpHandler.hub = h2.hub = hub
FtpHandler.args = h2.args = hub.args
FtpHandler.authorizer = h2.authorizer = FtpAuth(hub)
if self.args.ftp_pr:
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
@@ -435,10 +527,21 @@ class Ftpd(object):
lgr = logging.getLogger("pyftpdlib")
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
ips = self.args.i
if "::" in ips:
ips.append("0.0.0.0")
if self.args.ftp4:
ips = [x for x in ips if ":" not in x]
ioloop = IOLoop()
for ip in self.args.i:
for ip in ips:
for h, lp in hs:
FTPServer((ip, int(lp)), h, ioloop)
try:
FTPServer((ip, int(lp)), h, ioloop)
except:
if ip != "0.0.0.0" or "::" not in ips:
raise
Daemon(ioloop.loop, "ftp")

File diff suppressed because it is too large Load Diff

View File

@@ -54,7 +54,6 @@ class HttpConn(object):
self.args: argparse.Namespace = hsrv.args # mypy404
self.E: EnvParams = self.args.E
self.asrv: AuthSrv = hsrv.asrv # mypy404
self.cert_path = hsrv.cert_path
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
self.iphash: HMaccas = hsrv.broker.iphash
self.bans: dict[str, int] = hsrv.bans
@@ -65,6 +64,7 @@ class HttpConn(object):
self.ico: Ico = Ico(self.args) # mypy404
self.t0: float = time.time() # mypy404
self.freshen_pwd: float = 0.0
self.stopping = False
self.nreq: int = -1 # mypy404
self.nbyte: int = 0 # mypy404
@@ -102,17 +102,18 @@ class HttpConn(object):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func(self.log_src, msg, c)
def get_u2idx(self) -> U2idx:
# one u2idx per tcp connection;
def get_u2idx(self) -> Optional[U2idx]:
# grab from a pool of u2idx instances;
# sqlite3 fully parallelizes under python threads
# but avoid running out of FDs by creating too many
if not self.u2idx:
self.u2idx = U2idx(self)
self.u2idx = self.hsrv.get_u2idx(str(self.addr))
return self.u2idx
def _detect_https(self) -> bool:
method = None
if self.cert_path:
if True:
try:
method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout:
@@ -146,7 +147,7 @@ class HttpConn(object):
self.sr = None
if self.args.https_only:
is_https = True
elif self.args.http_only or not HAVE_SSL:
elif self.args.http_only:
is_https = False
else:
# raise Exception("asdf")
@@ -160,7 +161,7 @@ class HttpConn(object):
self.log_src = self.log_src.replace("[36m", "[35m")
try:
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(self.cert_path)
ctx.load_cert_chain(self.args.cert)
if self.args.ssl_ver:
ctx.options &= ~self.args.ssl_flags_en
ctx.options |= self.args.ssl_flags_de
@@ -214,3 +215,7 @@ class HttpConn(object):
self.cli = HttpCli(self)
if not self.cli.run():
return
if self.u2idx:
self.hsrv.put_u2idx(str(self.addr), self.u2idx)
self.u2idx = None

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import base64
import math
import os
import re
import socket
import sys
import threading
@@ -11,9 +12,19 @@ import time
import queue
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
try:
MNFE = ModuleNotFoundError
except:
MNFE = ImportError
try:
import jinja2
except ImportError:
except MNFE:
if EXE:
raise
print(
"""\033[1;31m
you do not have jinja2 installed,\033[33m
@@ -23,14 +34,30 @@ except ImportError:
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
os.path.basename(sys.executable)
sys.executable
)
)
sys.exit(1)
except SyntaxError:
if EXE:
raise
print(
"""\033[1;31m
your jinja2 version is incompatible with your python version;\033[33m
please try to replace it with an older version:\033[0m
* {} -m pip install --user jinja2==2.11.3
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
sys.executable
)
)
sys.exit(1)
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, EnvParams
from .bos import bos
from .httpconn import HttpConn
from .metrics import Metrics
from .u2idx import U2idx
from .util import (
E_SCK,
FHC,
@@ -39,6 +66,7 @@ from .util import (
Magician,
Netdev,
NetMap,
absreal,
ipnorm,
min_ex,
shut_socket,
@@ -72,17 +100,20 @@ class HttpSrv(object):
# redefine in case of multiprocessing
socket.setdefaulttimeout(120)
self.t0 = time.time()
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.magician = Magician()
self.nm = NetMap([], {})
self.ssdp: Optional["SSDPr"] = None
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
self.g403 = Garda(self.args.ban_403)
self.g422 = Garda(self.args.ban_422, False)
self.gurl = Garda(self.args.ban_url)
self.bans: dict[str, int] = {}
self.aclose: dict[str, int] = {}
self.ip = ""
self.port = 0
self.bound: set[tuple[str, int]] = set()
self.name = "hsrv" + nsuf
self.mutex = threading.Lock()
self.stopping = False
@@ -96,6 +127,7 @@ class HttpSrv(object):
self.t_periodic: Optional[threading.Thread] = None
self.u2fh = FHC()
self.metrics = Metrics(self)
self.srvs: list[socket.socket] = []
self.ncli = 0 # exact
self.clients: set[HttpConn] = set() # laggy
@@ -103,6 +135,9 @@ class HttpSrv(object):
self.cb_ts = 0.0
self.cb_v = ""
self.u2idx_free: dict[str, U2idx] = {}
self.u2idx_n = 0
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
@@ -110,17 +145,21 @@ class HttpSrv(object):
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
self.prism = os.path.exists(zs)
self.statics: set[str] = set()
self._build_statics()
self.ptn_cc = re.compile(r"[\x00-\x1f]")
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
if not self.args.no_dav:
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
self.mallow += zs.split()
if self.args.zs:
from .ssdp import SSDPr
self.ssdp = SSDPr(broker)
cert_path = os.path.join(self.E.cfg, "cert.pem")
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
self.cert_path = ""
if self.tp_q:
self.start_threads(4)
@@ -131,7 +170,7 @@ class HttpSrv(object):
if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid)
self.th_cfg: dict[str, Any] = {}
self.th_cfg: dict[str, set[str]] = {}
Daemon(self.post_init, "hsrv-init2")
def post_init(self) -> None:
@@ -141,8 +180,20 @@ class HttpSrv(object):
except:
pass
def _build_statics(self) -> None:
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
for fn in df:
ap = absreal(os.path.join(dp, fn))
self.statics.add(ap)
if ap.endswith(".gz") or ap.endswith(".br"):
self.statics.add(ap[:-3])
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
self.nm = NetMap([self.ip], netdevs)
ips = set()
for ip, _ in self.bound:
ips.add(ip)
self.nm = NetMap(list(ips), netdevs)
def start_threads(self, n: int) -> None:
self.tp_nthr += n
@@ -184,12 +235,13 @@ class HttpSrv(object):
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sck.settimeout(None) # < does not inherit, ^ opts above do
self.ip, self.port = sck.getsockname()[:2]
ip, port = sck.getsockname()[:2]
self.srvs.append(sck)
self.bound.add((ip, port))
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
Daemon(
self.thr_listen,
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", self.ip, self.port),
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
(sck,),
)
@@ -427,6 +479,9 @@ class HttpSrv(object):
self.clients.remove(cli)
self.ncli -= 1
if cli.u2idx:
self.put_u2idx(str(addr), cli.u2idx)
def cachebuster(self) -> str:
if time.time() - self.cb_ts < 1:
return self.cb_v
@@ -448,3 +503,31 @@ class HttpSrv(object):
self.cb_v = v.decode("ascii")[-4:]
self.cb_ts = time.time()
return self.cb_v
def get_u2idx(self, ident: str) -> Optional[U2idx]:
    """check out a u2idx (search-db handle) from the shared pool,
    creating a fresh one if the pool is empty and we are still
    below the CORES cap; polls for ~5 sec before giving up (None)"""
    pool = self.u2idx_free
    attempts = 100  # 100 * 0.05s = 5sec
    while attempts > 0:
        attempts -= 1
        with self.mutex:
            if pool:
                # prefer the instance this client used previously
                key = ident if ident in pool else next(iter(pool))
                return pool.pop(key)
            if self.u2idx_n < CORES:
                self.u2idx_n += 1
                return U2idx(self)
        time.sleep(0.05)
    # busy-polling instead of conditional waits, on a hunch that
    # average performance will be faster like this
    # since most servers won't be fully saturated
    return None
def put_u2idx(self, ident: str, u2idx: U2idx) -> None:
    """return a u2idx to the pool; the key is suffixed until unique
    so a previously-returned instance is never overwritten/lost"""
    with self.mutex:
        key = ident
        while key in self.u2idx_free:
            key = key + "a"
        self.u2idx_free[key] = u2idx

View File

@@ -4,9 +4,10 @@ from __future__ import print_function, unicode_literals
import argparse # typechk
import colorsys
import hashlib
import re
from .__init__ import PY2
from .th_srv import HAVE_PIL
from .th_srv import HAVE_PIL, HAVE_PILF
from .util import BytesIO
@@ -17,12 +18,14 @@ class Ico(object):
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break"""
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
bext = ext.encode("ascii", "replace")
ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4]
if PY2:
zb = [ord(x) for x in zb]
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
ci = [int(x * 255) for x in list(c1) + list(c2)]
c = "".join(["{:02x}".format(x) for x in ci])
@@ -33,8 +36,34 @@ class Ico(object):
h = int(100 / (float(sw) / float(sh)))
w = 100
if chrome and as_thumb:
if chrome:
# cannot handle more than ~2000 unique SVGs
if HAVE_PILF:
# pillow 10.1 made this the default font;
# svg: 3.7s, this: 36s
try:
from PIL import Image, ImageDraw
# [.lt] are hard to see lowercase / unspaced
ext2 = re.sub("(.)", "\\1 ", ext).upper()
h = int(128 * h / w)
w = 128
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)
_, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
xy = ((w - tw) // 2, (h - th) // 2)
pb.text(xy, ext2, fill="#" + c[6:], font_size=16)
img = img.resize((w * 2, h * 2), Image.NEAREST)
buf = BytesIO()
img.save(buf, format="PNG", compress_level=1)
return "image/png", buf.getvalue()
except:
pass
if HAVE_PIL:
# svg: 3s, cache: 6s, this: 8s
from PIL import Image, ImageDraw
@@ -43,8 +72,19 @@ class Ico(object):
w = 64
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)
tw, th = pb.textsize(ext)
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
try:
_, _, tw, th = pb.textbbox((0, 0), ext)
except:
tw, th = pb.textsize(ext)
tw += len(ext)
cw = tw // len(ext)
x = ((w - tw) // 2) - (cw * 2) // 3
fill = "#" + c[6:]
for ch in ext:
pb.text((x, (h - th) // 2), " %s " % (ch,), fill=fill)
x += cw
img = img.resize((w * 3, h * 3), Image.NEAREST)
buf = BytesIO()

View File

@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import errno
import random
import select
import socket
@@ -11,6 +12,7 @@ from ipaddress import IPv4Network, IPv6Network
from .__init__ import TYPE_CHECKING
from .__init__ import unicode as U
from .multicast import MC_Sck, MCast
from .stolen.dnslib import AAAA
from .stolen.dnslib import CLASS as DC
from .stolen.dnslib import (
NSEC,
@@ -20,12 +22,11 @@ from .stolen.dnslib import (
SRV,
TXT,
A,
AAAA,
DNSHeader,
DNSQuestion,
DNSRecord,
)
from .util import CachedSet, Daemon, Netdev, min_ex
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -55,10 +56,11 @@ class MDNS_Sck(MC_Sck):
self.bp_bye = b""
self.last_tx = 0.0
self.tx_ex = False
class MDNS(MCast):
def __init__(self, hub: "SvcHub") -> None:
def __init__(self, hub: "SvcHub", ngen: int) -> None:
al = hub.args
grp4 = "" if al.zm6 else MDNS4
grp6 = "" if al.zm4 else MDNS6
@@ -66,7 +68,8 @@ class MDNS(MCast):
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
)
self.srv: dict[socket.socket, MDNS_Sck] = {}
self.logsrc = "mDNS-{}".format(ngen)
self.ngen = ngen
self.ttl = 300
zs = self.args.name + ".local."
@@ -89,7 +92,7 @@ class MDNS(MCast):
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("mDNS", msg, c)
self.log_func(self.logsrc, msg, c)
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
zms = self.args.zms
@@ -275,24 +278,41 @@ class MDNS(MCast):
zf = time.time() + 2
self.probing = zf # cant unicast so give everyone an extra sec
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3
try:
self.run2()
except OSError as ex:
if ex.errno != errno.EBADF:
raise
self.log("stopping due to {}".format(ex), "90")
self.log("stopped", 2)
def run2(self) -> None:
last_hop = time.time()
ihop = self.args.mc_hop
while self.running:
timeout = (
0.02 + random.random() * 0.07
if self.probing or self.q or self.defend or self.unsolicited
if self.probing or self.q or self.defend
else max(0.05, self.unsolicited[0] - time.time())
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)
rx: list[socket.socket] = rdy[0] # type: ignore
self.rx4.cln()
self.rx6.cln()
buf = b""
addr = ("0", 0)
for sck in rx:
buf, addr = sck.recvfrom(4096)
try:
buf, addr = sck.recvfrom(4096)
self.eat(buf, addr, sck)
except:
if not self.running:
self.log("stopped", 2)
return
t = "{} {} \033[33m|{}| {}\n{}".format(
@@ -311,12 +331,14 @@ class MDNS(MCast):
def stop(self, panic=False) -> None:
self.running = False
if not panic:
for srv in self.srv.values():
try:
for srv in self.srv.values():
try:
if panic:
srv.sck.close()
else:
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
except:
pass
except:
pass
self.srv = {}
@@ -374,6 +396,14 @@ class MDNS(MCast):
# avahi broadcasting 127.0.0.1-only packets
return
# check if we've been given additional IPs
for ip in list_ips():
if ip in cips:
self.sips.add(ip)
if not self.sips.isdisjoint(cips):
return
t = "mdns zeroconf: "
if self.probing:
t += "Cannot start; hostname '{}' is occupied"
@@ -485,6 +515,10 @@ class MDNS(MCast):
for srv in self.srv.values():
tx.add(srv)
if not self.unsolicited and self.args.zm_spam:
zf = time.time() + self.args.zm_spam + random.random() * 0.07
self.unsolicited.append(zf)
for srv, deadline in list(self.defend.items()):
if now < deadline:
continue
@@ -507,6 +541,15 @@ class MDNS(MCast):
if now < srv.last_tx + cooldown:
return False
srv.sck.sendto(msg, (srv.grp, 5353))
srv.last_tx = now
try:
srv.sck.sendto(msg, (srv.grp, 5353))
srv.last_tx = now
except Exception as ex:
if srv.tx_ex:
return True
srv.tx_ex = True
t = "tx({},|{}|,{}): {}"
self.log(t.format(srv.ip, len(msg), cooldown, ex), 3)
return True

165
copyparty/metrics.py Normal file
View File

@@ -0,0 +1,165 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import json
import time
from .__init__ import TYPE_CHECKING
from .util import Pebkac, get_df, unhumanize
if TYPE_CHECKING:
from .httpcli import HttpCli
from .httpsrv import HttpSrv
class Metrics(object):
    """renders server statistics in the OpenMetrics text format
    (prometheus-compatible) for the /.cpr/metrics endpoint"""

    def __init__(self, hsrv: "HttpSrv") -> None:
        self.hsrv = hsrv

    def tx(self, cli: "HttpCli") -> bool:
        # only users with admin on at least one volume may scrape
        if not cli.avol:
            raise Pebkac(403, "not allowed for user " + cli.uname)

        args = cli.args
        if not args.stats:
            raise Pebkac(403, "the stats feature is not enabled in server config")

        conn = cli.conn
        vfs = conn.asrv.vfs
        allvols = list(sorted(vfs.all_vols.items()))

        # u2idx gives access to the file-index db; may be None if the
        # pool is exhausted, and p_end is absent when db is disabled
        # (NOTE(review): inferred from the `if idx and ...` guards below)
        idx = conn.get_u2idx()
        if not idx or not hasattr(idx, "p_end"):
            idx = None

        # output lines; each helper below appends to this
        ret: list[str] = []

        def addc(k: str, unit: str, v: str, desc: str) -> None:
            # append a counter metric (with optional UNIT clause)
            if unit:
                k += "_" + unit
                zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
            else:
                zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))

        def addh(k: str, typ: str, desc: str) -> None:
            # append a metric header (TYPE + HELP) without a value
            zs = "# TYPE %s %s\n# HELP %s %s"
            ret.append(zs % (k, typ, k, desc))

        def addbh(k: str, desc: str) -> None:
            # append a header for a bytes-valued gauge
            zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
            ret.append(zs % (k, k, k, desc))

        def addv(k: str, v: str) -> None:
            # append one "name value" sample line
            ret.append("%s %s" % (k, v))

        v = "{:.3f}".format(time.time() - self.hsrv.t0)
        addc("cpp_uptime", "seconds", v, "time since last server restart")

        v = str(len(conn.bans or []))
        addc("cpp_bans", "", v, "number of banned IPs")

        if not args.nos_hdd:
            # disk capacity/free per volume, straight from the OS
            addbh("cpp_disk_size_bytes", "total HDD size of volume")
            addbh("cpp_disk_free_bytes", "free HDD space in volume")
            for vpath, vol in allvols:
                free, total = get_df(vol.realpath)
                addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
                addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))

        if idx and not args.nos_vol:
            # indexed size/filecount per volume (needs the db)
            addbh("cpp_vol_bytes", "num bytes of data in volume")
            addh("cpp_vol_files", "gauge", "num files in volume")
            addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
            addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
            tnbytes = 0
            tnfiles = 0

            volsizes = []
            try:
                ptops = [x.realpath for _, x in allvols]
                x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
                volsizes = x.get()
            except Exception as ex:
                # best-effort; log and emit whatever we got (possibly nothing)
                cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)

            for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))

                # remaining quota, if the volume has vmaxb/vmaxn limits
                if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
                    zi = unhumanize(vol.flags.get("vmaxb") or "0")
                    if zi:
                        v = str(zi - nbytes)
                        addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)

                    zi = unhumanize(vol.flags.get("vmaxn") or "0")
                    if zi:
                        v = str(zi - nfiles)
                        addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)

            if volsizes:
                addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
                addv('cpp_vol_files{vol="total"}', str(tnfiles))

        if idx and not args.nos_dup:
            # duplicate-file stats per volume, from the up2k db;
            # counts every copy beyond the first (count(*)-1)
            addbh("cpp_dupe_bytes", "num dupe bytes in volume")
            addh("cpp_dupe_files", "gauge", "num dupe files in volume")
            tnbytes = 0
            tnfiles = 0
            for vpath, vol in allvols:
                cur = idx.get_cur(vol.realpath)
                if not cur:
                    continue

                nbytes = 0
                nfiles = 0
                q = "select sz, count(*)-1 c from up group by w having c"
                for sz, c in cur.execute(q):
                    nbytes += sz * c
                    nfiles += c

                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))

            addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
            addv('cpp_dupe_files{vol="total"}', str(tnfiles))

        if not args.nos_unf:
            # unfinished (in-flight) uploads, asked from up2k over the broker
            addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
            addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
            tnbytes = 0
            tnfiles = 0
            try:
                x = self.hsrv.broker.ask("up2k.get_unfinished")
                xs = x.get()
                xj = json.loads(xs)
                for ptop, (nbytes, nfiles) in xj.items():
                    tnbytes += nbytes
                    tnfiles += nfiles
                    # map filesystem path back to its volume for labeling
                    vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
                    if not vol:
                        t = "tx_stats get_unfinished: could not map {}"
                        cli.log(t.format(ptop), 3)
                        continue

                    addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
                    addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))

                addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
                addv('cpp_unf_files{vol="total"}', str(tnfiles))
            except Exception as ex:
                cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)

        # openmetrics requires the EOF marker and this exact content-type
        ret.append("# EOF")

        mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
        cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
        return True

View File

@@ -8,9 +8,19 @@ import shutil
import subprocess as sp
import sys
from .__init__ import PY2, WINDOWS, E, unicode
from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
from .bos import bos
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
from .util import (
FFMPEG_URL,
REKOBO_LKEY,
fsenc,
min_ex,
pybin,
retchk,
runcmd,
sfsenc,
uncyg,
)
if True: # pylint: disable=using-constant-test
from typing import Any, Union
@@ -19,6 +29,9 @@ if True: # pylint: disable=using-constant-test
def have_ff(scmd: str) -> bool:
if ANYWIN:
scmd += ".exe"
if PY2:
print("# checking {}".format(scmd))
acmd = (scmd + " -version").encode("ascii").split(b" ")
@@ -259,7 +272,9 @@ class MTag(object):
self.args = args
self.usable = True
self.prefer_mt = not args.no_mtag_ff
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
self.backend = (
"ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen"
)
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
mappings = args.mtm
or_ffprobe = " or FFprobe"
@@ -285,9 +300,14 @@ class MTag(object):
self.log(msg, c=3)
if not self.usable:
if EXE:
t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: "
self.log(t + FFMPEG_URL)
return
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
pybin = os.path.basename(sys.executable)
self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
pyname = os.path.basename(pybin)
self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1)
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
@@ -456,7 +476,10 @@ class MTag(object):
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
if not md.info.length and not md.info.codec:
raise Exception()
except:
except Exception as ex:
if self.args.mtag_v:
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = bos.path.getsize(abspath)
@@ -519,12 +542,15 @@ class MTag(object):
env = os.environ.copy()
try:
if EXE:
raise Exception()
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(zsl))
env["PYTHONPATH"] = pypath
except:
if not E.ox:
if not E.ox and not EXE:
raise
ret: dict[str, Any] = {}
@@ -532,7 +558,7 @@ class MTag(object):
try:
cmd = [parser.bin, abspath]
if parser.bin.endswith(".py"):
cmd = [sys.executable] + cmd
cmd = [pybin] + cmd
args = {
"env": env,
@@ -551,7 +577,7 @@ class MTag(object):
else:
cmd = ["nice"] + cmd
bcmd = [fsenc(x) for x in cmd]
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
rc, v, err = runcmd(bcmd, **args) # type: ignore
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
v = v.strip()

View File

@@ -14,8 +14,8 @@ from ipaddress import (
ip_network,
)
from .__init__ import TYPE_CHECKING
from .util import MACOS, Netdev, min_ex, spack
from .__init__ import MACOS, TYPE_CHECKING
from .util import Daemon, Netdev, find_prefix, min_ex, spack
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -110,9 +110,7 @@ class MCast(object):
)
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
# ip -> ip/prefix
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
ips = find_prefix(ips, netdevs)
on = self.on[:]
off = self.off[:]
@@ -230,6 +228,7 @@ class MCast(object):
for srv in self.srv.values():
assert srv.ip in self.sips
Daemon(self.hopper, "mc-hop")
return bound
def setup_socket(self, srv: MC_Sck) -> None:
@@ -301,33 +300,57 @@ class MCast(object):
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
self.log(t, 3)
self.hop(srv)
if self.hop(srv, False):
self.log("igmp was already joined?? chilling for a sec", 3)
time.sleep(1.2)
self.hop(srv, True)
self.b4.sort(reverse=True)
self.b6.sort(reverse=True)
def hop(self, srv: MC_Sck) -> None:
def hop(self, srv: MC_Sck, on: bool) -> bool:
"""rejoin to keepalive on routers/switches without igmp-snooping"""
sck = srv.sck
req = srv.mreq
if ":" in srv.ip:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
except:
pass
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
if not on:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
return True
except:
return False
else:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
else:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
time.sleep(1.2)
except:
pass
if not on:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
return True
except:
return False
else:
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
return True
def hopper(self):
    # daemon-thread: periodically leave + rejoin all multicast groups
    # ("hop") every --mc-hop seconds so membership survives
    # routers/switches without igmp-snooping
    while self.args.mc_hop and self.running:
        time.sleep(self.args.mc_hop)
        if not self.running:
            return

        for srv in self.srv.values():
            self.hop(srv, False)  # leave the group

        # linux does leaves/joins twice with 0.2~1.05s spacing
        time.sleep(1.2)
        if not self.running:
            return

        for srv in self.srv.values():
            self.hop(srv, True)  # rejoin
def map_client(self, cip: str) -> Optional[MC_Sck]:
try:

145
copyparty/pwhash.py Normal file
View File

@@ -0,0 +1,145 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import sys
import threading
from .__init__ import unicode
class PWHash(object):
def __init__(self, args: argparse.Namespace):
self.args = args
try:
alg, ac = args.ah_alg.split(",")
except:
alg = args.ah_alg
ac = {}
if alg == "none":
alg = ""
self.alg = alg
self.ac = ac
if not alg:
self.on = False
self.hash = unicode
return
self.on = True
self.salt = args.ah_salt.encode("utf-8")
self.cache: dict[str, str] = {}
self.mutex = threading.Lock()
self.hash = self._cache_hash
if alg == "sha2":
self._hash = self._gen_sha2
elif alg == "scrypt":
self._hash = self._gen_scrypt
elif alg == "argon2":
self._hash = self._gen_argon2
else:
t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
raise Exception(t.format(alg))
def _cache_hash(self, plain: str) -> str:
with self.mutex:
try:
return self.cache[plain]
except:
pass
if not plain:
return ""
if len(plain) > 255:
raise Exception("password too long")
if len(self.cache) > 9000:
self.cache = {}
ret = self._hash(plain)
self.cache[plain] = ret
return ret
def _gen_sha2(self, plain: str) -> str:
its = int(self.ac[0]) if self.ac else 424242
bplain = plain.encode("utf-8")
ret = b"\n"
for _ in range(its):
ret = hashlib.sha512(self.salt + bplain + ret).digest()
return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")
def _gen_scrypt(self, plain: str) -> str:
cost = 2 << 13
its = 2
blksz = 8
para = 4
try:
cost = 2 << int(self.ac[0])
its = int(self.ac[1])
blksz = int(self.ac[2])
para = int(self.ac[3])
except:
pass
ret = plain.encode("utf-8")
for _ in range(its):
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
def _gen_argon2(self, plain: str) -> str:
from argon2.low_level import Type as ArgonType
from argon2.low_level import hash_secret
time_cost = 3
mem_cost = 256
parallelism = 4
version = 19
try:
time_cost = int(self.ac[0])
mem_cost = int(self.ac[1])
parallelism = int(self.ac[2])
version = int(self.ac[3])
except:
pass
bplain = plain.encode("utf-8")
bret = hash_secret(
secret=bplain,
salt=self.salt,
time_cost=time_cost,
memory_cost=mem_cost * 1024,
parallelism=parallelism,
hash_len=24,
type=ArgonType.ID,
version=version,
)
ret = bret.split(b"$")[-1].decode("utf-8")
return "+" + ret.replace("/", "_").replace("+", "-")
def stdin(self) -> None:
while True:
ln = sys.stdin.readline().strip()
if not ln:
break
print(self.hash(ln))
def cli(self) -> None:
import getpass
while True:
p1 = getpass.getpass("password> ")
p2 = getpass.getpass("again or just hit ENTER> ")
if p2 and p1 != p2:
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
continue
print(self.hash(p1))
print()

View File

View File

@@ -9,13 +9,13 @@ import sys
import time
from types import SimpleNamespace
from .__init__ import ANYWIN, TYPE_CHECKING
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
from .authsrv import LEELOO_DALLAS, VFS
from .bos import bos
from .util import Daemon, min_ex
from .util import Daemon, min_ex, pybin, runhook
if True: # pylint: disable=using-constant-test
from typing import Any
from typing import Any, Union
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -32,6 +32,8 @@ class SMB(object):
self.asrv = hub.asrv
self.log = hub.log
self.files: dict[int, tuple[float, str]] = {}
self.noacc = self.args.smba
self.accs = not self.args.smba
lg.setLevel(logging.DEBUG if self.args.smbvvv else logging.INFO)
for x in ["impacket", "impacket.smbserver"]:
@@ -42,8 +44,12 @@ class SMB(object):
from impacket import smbserver
from impacket.ntlm import compute_lmhash, compute_nthash
except ImportError:
if EXE:
print("copyparty.exe cannot do SMB")
sys.exit(1)
m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
print(m.format(min_ex(), sys.executable))
print(m.format(min_ex(), pybin))
sys.exit(1)
# patch vfs into smbserver.os
@@ -90,6 +96,14 @@ class SMB(object):
port = int(self.args.smb_port)
srv = smbserver.SimpleSMBServer(listenAddress=ip, listenPort=port)
try:
if self.accs:
srv.setAuthCallback(self._auth_cb)
except:
self.accs = False
self.noacc = True
t = "impacket too old; access permissions will not work! all accounts are admin!"
self.log("smb", t, 1)
ro = "no" if self.args.smbw else "yes" # (does nothing)
srv.addShare("A", "/", readOnly=ro)
@@ -109,27 +123,80 @@ class SMB(object):
self.stop = srv.stop
self.log("smb", "listening @ {}:{}".format(ip, port))
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
# logging adapter (e.g. handed to runhook); forwards to the hub logger under the "smb" tag
self.log("smb", msg, c)
def start(self) -> None:
# run the impacket SMB server loop in a background daemon thread
Daemon(self.srv.start)
def _v2a(self, caller: str, vpath: str, *a: Any) -> tuple[VFS, str]:
def _auth_cb(self, *a, **ka):
# impacket auth callback: resolve the authenticated account name and
# stash it on the connection-state dict so _uname() can recover it later
debug("auth-result: %s %s", a, ka)
conndata = ka["connData"]
auth_ok = conndata["Authenticated"]
# ka["user_name"] is what the client sent; "*" is the anonymous account
uname = ka["user_name"] if auth_ok else "*"
# NOTE(review): iacct looks like a credential->account mapping -- confirm against authsrv
uname = self.asrv.iacct.get(uname, uname) or "*"
oldname = conndata.get("partygoer", "*") or "*"
cli_ip = conndata["ClientIP"]
cli_hn = ka["host_name"]
if uname != "*":
# remember the resolved username on this connection
conndata["partygoer"] = uname
info("client %s [%s] authed as %s", cli_ip, cli_hn, uname)
elif oldname != "*":
# this attempt resolved to anon but the connection was already authed; keep the old identity
info("client %s [%s] keeping old auth as %s", cli_ip, cli_hn, oldname)
elif auth_ok:
info("client %s [%s] authed as [*] (anon)", cli_ip, cli_hn)
else:
info("client %s [%s] rejected", cli_ip, cli_hn)
def _uname(self) -> str:
# best-effort recovery of the username for the current smb request;
# returns the all-powerful LEELOO_DALLAS account when accounts are disabled
if self.noacc:
return LEELOO_DALLAS
try:
# you found it! my single worst bit of code so far
# (if you can think of a better way to track users through impacket i'm all ears)
# walk up the call stack looking for the impacket frame that has
# "connData" in its locals; _auth_cb stored the username in there
cf0 = inspect.currentframe().f_back.f_back
cf = cf0.f_back
for n in range(3):
cl = cf.f_locals
if "connData" in cl:
return cl["connData"]["partygoer"]
cf = cf.f_back
raise Exception()
except:
# frame not found (or no partygoer key); log the callstack shape and fall back to anon
warning(
"nyoron... %s <<-- %s <<-- %s <<-- %s",
cf0.f_code.co_name,
cf0.f_back.f_code.co_name,
cf0.f_back.f_back.f_code.co_name,
cf0.f_back.f_back.f_back.f_code.co_name,
)
return "*"
def _v2a(
self, caller: str, vpath: str, *a: Any, uname="", perms=None
) -> tuple[VFS, str]:
vpath = vpath.replace("\\", "/").lstrip("/")
# cf = inspect.currentframe().f_back
# c1 = cf.f_back.f_code.co_name
# c2 = cf.f_code.co_name
debug('%s("%s", %s)\033[K\033[0m', caller, vpath, str(a))
if not uname:
uname = self._uname()
if not perms:
perms = [True, True]
# TODO find a way to grab `identity` in smbComSessionSetupAndX and smb2SessionSetup
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, True, True)
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
return vfs, vfs.canonical(rem)
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
vpath = vpath.replace("\\", "/").lstrip("/")
# caller = inspect.currentframe().f_back.f_code.co_name
debug('listdir("%s", %s)\033[K\033[0m', vpath, str(a))
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, False, False)
uname = self._uname()
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
_, vfs_ls, vfs_virt = vfs.ls(
rem, LEELOO_DALLAS, not self.args.no_scandir, [[False, False]]
rem, uname, not self.args.no_scandir, [[False, False]]
)
dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
fils = [x[0] for x in vfs_ls if x[0] not in dirs]
@@ -142,8 +209,8 @@ class SMB(object):
sz = 112 * 2 # ['.', '..']
for n, fn in enumerate(ls):
if sz >= 64000:
t = "listing only %d of %d files (%d byte); see impacket#1433"
warning(t, n, len(ls), sz)
t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
warning(t, n, len(ls), sz, vpath)
break
nsz = len(fn.encode("utf-16", "replace"))
@@ -164,9 +231,18 @@ class SMB(object):
if wr and not self.args.smbw:
yeet("blocked write (no --smbw): " + vpath)
vfs, ap = self._v2a("open", vpath, *a)
if wr and not vfs.axs.uwrite:
yeet("blocked write (no-write-acc): " + vpath)
uname = self._uname()
vfs, ap = self._v2a("open", vpath, *a, uname=uname, perms=[True, wr])
if wr:
if not vfs.axs.uwrite:
t = "blocked write (no-write-acc %s): /%s @%s"
yeet(t % (vfs.axs.uwrite, vpath, uname))
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
):
yeet("blocked by xbu server config: " + vpath)
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
if wr:
@@ -190,15 +266,17 @@ class SMB(object):
_, vp = self.files.pop(fd)
vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, LEELOO_DALLAS, False, True)
vfs, rem = self.hub.asrv.vfs.get(vp, self._uname(), False, True)
vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file(
vfs.realpath,
vfs.vpath,
vfs.flags,
rem,
fn,
"1.7.6.2",
time.time(),
"",
)
def _rename(self, vp1: str, vp2: str) -> None:
@@ -208,15 +286,18 @@ class SMB(object):
vp1 = vp1.lstrip("/")
vp2 = vp2.lstrip("/")
vfs2, ap2 = self._v2a("rename", vp2, vp1)
uname = self._uname()
vfs2, ap2 = self._v2a("rename", vp2, vp1, uname=uname)
if not vfs2.axs.uwrite:
yeet("blocked rename (no-write-acc): " + vp2)
t = "blocked write (no-write-acc %s): /%s @%s"
yeet(t % (vfs2.axs.uwrite, vp2, uname))
vfs1, _ = self.asrv.vfs.get(vp1, LEELOO_DALLAS, True, True)
vfs1, _ = self.asrv.vfs.get(vp1, uname, True, True, True)
if not vfs1.axs.umove:
yeet("blocked rename (no-move-acc): " + vp1)
t = "blocked rename (no-move-acc %s): /%s @%s"
yeet(t % (vfs1.axs.umove, vp1, uname))
self.hub.up2k.handle_mv(LEELOO_DALLAS, vp1, vp2)
self.hub.up2k.handle_mv(uname, vp1, vp2)
try:
bos.makedirs(ap2)
except:
@@ -226,52 +307,74 @@ class SMB(object):
if not self.args.smbw:
yeet("blocked mkdir (no --smbw): " + vpath)
vfs, ap = self._v2a("mkdir", vpath)
uname = self._uname()
vfs, ap = self._v2a("mkdir", vpath, uname=uname)
if not vfs.axs.uwrite:
yeet("blocked mkdir (no-write-acc): " + vpath)
t = "blocked mkdir (no-write-acc %s): /%s @%s"
yeet(t % (vfs.axs.uwrite, vpath, uname))
return bos.mkdir(ap)
def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result:
return bos.stat(self._v2a("stat", vpath, *a)[1], *a, **ka)
try:
ap = self._v2a("stat", vpath, *a, perms=[True, False])[1]
ret = bos.stat(ap, *a, **ka)
# debug(" `-stat:ok")
return ret
except:
# white lie: windows freaks out if we raise due to an offline volume
# debug(" `-stat:NOPE (faking a directory)")
ts = int(time.time())
return os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, ts, ts, ts))
def _unlink(self, vpath: str) -> None:
if not self.args.smbw:
yeet("blocked delete (no --smbw): " + vpath)
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
vfs, ap = self._v2a("delete", vpath)
uname = self._uname()
vfs, ap = self._v2a(
"delete", vpath, uname=uname, perms=[True, False, False, True]
)
if not vfs.axs.udel:
yeet("blocked delete (no-del-acc): " + vpath)
vpath = vpath.replace("\\", "/").lstrip("/")
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [])
self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False)
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
if not self.args.smbw:
yeet("blocked utime (no --smbw): " + vpath)
vfs, ap = self._v2a("utime", vpath)
uname = self._uname()
vfs, ap = self._v2a("utime", vpath, uname=uname)
if not vfs.axs.uwrite:
yeet("blocked utime (no-write-acc): " + vpath)
t = "blocked utime (no-write-acc %s): /%s @%s"
yeet(t % (vfs.axs.uwrite, vpath, uname))
return bos.utime(ap, times)
def _p_exists(self, vpath: str) -> bool:
# ap = "?"
try:
bos.stat(self._v2a("p.exists", vpath)[1])
ap = self._v2a("p.exists", vpath, perms=[True, False])[1]
bos.stat(ap)
# debug(" `-exists((%s)->(%s)):ok", vpath, ap)
return True
except:
# debug(" `-exists((%s)->(%s)):NOPE", vpath, ap)
return False
def _p_getsize(self, vpath: str) -> int:
st = bos.stat(self._v2a("p.getsize", vpath)[1])
st = bos.stat(self._v2a("p.getsize", vpath, perms=[True, False])[1])
return st.st_size
def _p_isdir(self, vpath: str) -> bool:
try:
st = bos.stat(self._v2a("p.isdir", vpath)[1])
return stat.S_ISDIR(st.st_mode)
st = bos.stat(self._v2a("p.isdir", vpath, perms=[True, False])[1])
ret = stat.S_ISDIR(st.st_mode)
# debug(" `-isdir:%s:%s", st.st_mode, ret)
return ret
except:
return False

View File

@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import errno
import re
import select
import socket
@@ -8,7 +9,7 @@ from email.utils import formatdate
from .__init__ import TYPE_CHECKING
from .multicast import MC_Sck, MCast
from .util import CachedSet, min_ex, html_escape
from .util import CachedSet, html_escape, min_ex
if TYPE_CHECKING:
from .broker_util import BrokerCli
@@ -75,11 +76,12 @@ class SSDPr(object):
c = html_escape
sip, sport = hc.s.getsockname()[:2]
sip = sip.replace("::ffff:", "")
proto = "https" if self.args.https_only else "http"
ubase = "{}://{}:{}".format(proto, sip, sport)
zsl = self.args.zsl
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
name = "{} @ {}".format(self.args.doctitle, self.args.name)
name = self.args.doctitle
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
hc.reply(zs.encode("utf-8", "replace"))
return False # close connectino
@@ -88,19 +90,22 @@ class SSDPr(object):
class SSDPd(MCast):
"""communicates with ssdp clients over multicast"""
def __init__(self, hub: "SvcHub") -> None:
def __init__(self, hub: "SvcHub", ngen: int) -> None:
al = hub.args
vinit = al.zsv and not al.zmv
super(SSDPd, self).__init__(
hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
)
self.srv: dict[socket.socket, SSDP_Sck] = {}
self.logsrc = "SSDP-{}".format(ngen)
self.ngen = ngen
self.rxc = CachedSet(0.7)
self.txc = CachedSet(5) # win10: every 3 sec
self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("SSDP", msg, c)
self.log_func(self.logsrc, msg, c)
def run(self) -> None:
try:
@@ -125,17 +130,30 @@ class SSDPd(MCast):
srv.hport = hp
self.log("listening")
try:
self.run2()
except OSError as ex:
if ex.errno != errno.EBADF:
raise
self.log("stopping due to {}".format(ex), "90")
self.log("stopped", 2)
def run2(self) -> None:
while self.running:
rdy = select.select(self.srv, [], [], 180)
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
rx: list[socket.socket] = rdy[0] # type: ignore
self.rxc.cln()
buf = b""
addr = ("0", 0)
for sck in rx:
buf, addr = sck.recvfrom(4096)
try:
buf, addr = sck.recvfrom(4096)
self.eat(buf, addr)
except:
if not self.running:
return
break
t = "{} {} \033[33m|{}| {}\n{}".format(
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
@@ -144,6 +162,12 @@ class SSDPd(MCast):
def stop(self) -> None:
self.running = False
for srv in self.srv.values():
try:
srv.sck.close()
except:
pass
self.srv = {}
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
@@ -160,7 +184,7 @@ class SSDPd(MCast):
self.rxc.add(buf)
if not buf.startswith(b"M-SEARCH * HTTP/1."):
raise Exception("not an ssdp message")
return
if not self.ptn_st.search(buf):
return
@@ -184,12 +208,13 @@ BOOTID.UPNP.ORG: 0
CONFIGID.UPNP.ORG: 1
"""
zs = zs.format(formatdate(usegmt=True), srv.ip, srv.hport, self.args.zsid)
v4 = srv.ip.replace("::ffff:", "")
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
srv.sck.sendto(zb, addr[:2])
if cip not in self.txc.c:
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), "6")
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), 6)
self.txc.add(cip)
self.txc.cln()

View File

@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import stat
import tarfile
@@ -44,6 +45,7 @@ class StreamTar(StreamArc):
self,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
cmp: str = "",
**kwargs: Any
):
super(StreamTar, self).__init__(log, fgen)
@@ -53,14 +55,41 @@ class StreamTar(StreamArc):
self.qfile = QFile()
self.errf: dict[str, Any] = {}
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
# python 3.8 changed to PAX_FORMAT as default;
# slower, bigger, and no particular advantage
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
if "pax" in cmp:
# unless a client asks for it (currently
# gnu-tar has wider support than pax-tar)
fmt = tarfile.PAX_FORMAT
cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
try:
cmp, lv = cmp.replace(":", ",").split(",")
lv = int(lv)
except:
lv = None
arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
if cmp == "gz":
fun = tarfile.TarFile.gzopen
arg["compresslevel"] = lv if lv is not None else 3
elif cmp == "bz2":
fun = tarfile.TarFile.bz2open
arg["compresslevel"] = lv if lv is not None else 2
elif cmp == "xz":
fun = tarfile.TarFile.xzopen
arg["preset"] = lv if lv is not None else 1
else:
fun = tarfile.open
arg["mode"] = "w|"
self.tar = fun(**arg)
Daemon(self._gen, "star-gen")
def gen(self) -> Generator[Optional[bytes], None, None]:
buf = b""
try:
while True:
buf = self.qfile.q.get()
@@ -72,6 +101,12 @@ class StreamTar(StreamArc):
yield None
finally:
while buf:
try:
buf = self.qfile.q.get()
except:
pass
if self.errf:
bos.unlink(self.errf["ap"])
@@ -101,6 +136,9 @@ class StreamTar(StreamArc):
errors.append((f["vp"], f["err"]))
continue
if self.stopped:
break
try:
self.ser(f)
except:

View File

@@ -1,10 +1,14 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tempfile
from datetime import datetime
from .__init__ import CORES
from .bos import bos
from .th_cli import ThumbCli
from .util import vjoin
if True: # pylint: disable=using-constant-test
from typing import Any, Generator, Optional
@@ -21,10 +25,78 @@ class StreamArc(object):
):
self.log = log
self.fgen = fgen
self.stopped = False
def gen(self) -> Generator[Optional[bytes], None, None]:
raise Exception("override me")
def stop(self) -> None:
self.stopped = True
def gfilter(
fgen: Generator[dict[str, Any], None, None],
thumbcli: ThumbCli,
uname: str,
vtop: str,
fmt: str,
) -> Generator[dict[str, Any], None, None]:
# wrap a file generator, converting each entry to its thumbnail /
# transcoded form (fmt) on a threadpool while preserving input order
from concurrent.futures import ThreadPoolExecutor
pend = []
with ThreadPoolExecutor(max_workers=CORES) as tp:
try:
for f in fgen:
task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
pend.append((task, f))
# drain the head of the queue when it is ready, or when the
# backlog grows past CORES*4 (bounds memory use)
if pend[0][0].done() or len(pend) > CORES * 4:
task, f = pend.pop(0)
try:
f = task.result(600)
except:
pass  # enthumb failed; yield the original entry unmodified
yield f
# input exhausted; flush the remaining tasks in order
for task, f in pend:
try:
f = task.result(600)
except:
pass
yield f
except Exception as ex:
# consumer aborted mid-iteration; wait out the pending tasks
# without yielding so the executor can shut down cleanly
thumbcli.log("gfilter flushing ({})".format(ex))
for task, f in pend:
try:
task.result(600)
except:
pass
thumbcli.log("gfilter flushed")
def enthumb(
thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
) -> dict[str, Any]:
# rewrite one file-dict to point at its thumbnail / transcoded file;
# raises (caught by gfilter) when the file should pass through as-is
rem = f["vp"]
ext = rem.rsplit(".", 1)[-1].lower()
# audio that is already compressed is not transcoded to opus
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
raise Exception()
vp = vjoin(vtop, rem.split("/", 1)[1])
vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
dbv, vrem = vn.get_dbv(rem)
thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
if not thp:
# no thumbnail produced; keep the original file
raise Exception()
ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
sz = bos.path.getsize(thp)
st: os.stat_result = f["st"]
ts = st.st_mtime
# swap in the thumbnail: new abspath, new extension on the virtual
# path, and a synthetic stat with the thumb's size + original mtime
f["ap"] = thp
f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
return f
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""]

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import argparse
import base64
import calendar
import errno
import gzip
import logging
import os
@@ -27,13 +28,15 @@ if True: # pylint: disable=using-constant-test
import typing
from typing import Any, Optional, Union
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
from .authsrv import AuthSrv
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
from .authsrv import BAD_CFG, AuthSrv
from .cert import ensure_cert
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k
from .util import (
FFMPEG_URL,
VERSIONS,
Daemon,
Garda,
@@ -43,6 +46,7 @@ from .util import (
ansi_re,
min_ex,
mp,
pybin,
start_log_thrs,
start_stackmon,
)
@@ -66,10 +70,18 @@ class SvcHub(object):
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
"""
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
def __init__(
self,
args: argparse.Namespace,
dargs: argparse.Namespace,
argv: list[str],
printed: str,
) -> None:
self.args = args
self.dargs = dargs
self.argv = argv
self.E: EnvParams = args.E
self.no_ansi = args.no_ansi
self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = ""
self.stop_req = False
@@ -88,27 +100,23 @@ class SvcHub(object):
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
# for non-http clients (ftp)
self.bans: dict[str, int] = {}
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
if args.sss or args.s >= 3:
args.ss = True
args.no_dav = True
args.no_logues = True
args.no_readme = True
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
args.ls = args.ls or "**,*,ln,p,r"
if args.ss or args.s >= 2:
args.s = True
args.no_logues = True
args.no_readme = True
args.unpost = 0
args.no_del = True
args.no_mv = True
args.hardlink = True
args.vague_403 = True
args.ban_404 = "50,60,1440"
args.turbo = -1
args.nih = True
if args.s:
@@ -118,6 +126,20 @@ class SvcHub(object):
args.no_robots = True
args.force_js = True
if not self._process_config():
raise Exception(BAD_CFG)
# for non-http clients (ftp)
self.bans: dict[str, int] = {}
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
self.g403 = Garda(self.args.ban_403)
self.g422 = Garda(self.args.ban_422)
self.gurl = Garda(self.args.ban_url)
self.log_div = 10 ** (6 - args.log_tdec)
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
@@ -139,25 +161,34 @@ class SvcHub(object):
self.log("root", t.format(args.j))
if not args.no_fpool and args.j != 1:
t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
if ANYWIN:
t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
args.no_fpool = True
self.log("root", t, c=3)
t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
self.log("root", t.format(args.j), c=3)
args.no_fpool = True
bri = "zy"[args.theme % 2 :][:1]
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
if not args.hardlink and args.never_symlink:
args.no_dedup = True
if args.nih:
args.vname = ""
args.doctitle = args.doctitle.replace(" @ --name", "")
else:
args.vname = args.name
args.doctitle = args.doctitle.replace("--name", args.vname)
args.bname = args.bname.replace("--name", args.vname) or args.vname
if args.log_fk:
args.log_fk = re.compile(args.log_fk)
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log)
self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
if args.cgen:
self.asrv.cgen()
if args.exit == "cfg":
sys.exit(0)
if args.ls:
self.asrv.dbg_ls()
@@ -166,10 +197,11 @@ class SvcHub(object):
self.log("root", "max clients: {}".format(self.args.nc))
if not self._process_config():
raise Exception("bad config")
self.tcpsrv = TcpSrv(self)
if not self.tcpsrv.srv and self.args.ign_ebind_all:
self.args.no_fastboot = True
self.up2k = Up2k(self)
decs = {k: 1 for k in self.args.th_dec.split(",")}
@@ -182,6 +214,7 @@ class SvcHub(object):
self.args.th_dec = list(decs.keys())
self.thumbsrv = None
want_ff = False
if not args.no_thumb:
t = ", ".join(self.args.th_dec) or "(None available)"
self.log("thumb", "decoder preference: {}".format(t))
@@ -193,8 +226,12 @@ class SvcHub(object):
if self.args.th_dec:
self.thumbsrv = ThumbSrv(self)
else:
want_ff = True
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
msg = msg.format(" " * 37, os.path.basename(sys.executable))
msg = msg.format(" " * 37, os.path.basename(pybin))
if EXE:
msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails"
self.log("thumb", msg, c=3)
if not args.no_acode and args.no_thumb:
@@ -206,6 +243,10 @@ class SvcHub(object):
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
self.log("thumb", msg, c=6)
args.no_acode = True
want_ff = True
if want_ff and ANYWIN:
self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
@@ -218,7 +259,8 @@ class SvcHub(object):
if args.ftp or args.ftps:
from .ftpd import Ftpd
self.ftpd = Ftpd(self)
self.ftpd: Optional[Ftpd] = None
Daemon(self.start_ftpd, "start_ftpd")
zms += "f" if args.ftp else "F"
if args.smb:
@@ -236,6 +278,7 @@ class SvcHub(object):
if not args.zms:
args.zms = zms
self.zc_ngen = 0
self.mdns: Optional["MDNS"] = None
self.ssdp: Optional["SSDPd"] = None
@@ -247,6 +290,28 @@ class SvcHub(object):
self.broker = Broker(self)
def start_ftpd(self) -> None:
# delayed fallback start for the ftp server; waits 30s so that
# restart_ftpd (triggered elsewhere) gets a chance to start it first
time.sleep(30)
if self.ftpd:
return
self.restart_ftpd()
def restart_ftpd(self) -> None:
# (re)start the FTP server; no-op if ftp was not enabled at startup
if not hasattr(self, "ftpd"):
return
from .ftpd import Ftpd
if self.ftpd:
return # todo
if not os.path.exists(self.args.cert):
# make sure a tls certificate exists before starting (presumably for ftps -- confirm)
ensure_cert(self.log, self.args)
self.ftpd = Ftpd(self)
self.log("root", "started FTPd")
def thr_httpsrv_up(self) -> None:
time.sleep(1 if self.args.ign_ebind_all else 5)
expected = self.broker.num_workers * self.tcpsrv.nsrv
@@ -278,12 +343,20 @@ class SvcHub(object):
if self.httpsrv_up != self.broker.num_workers:
return
time.sleep(0.1) # purely cosmetic dw
ar = self.args
for _ in range(10 if ar.ftp or ar.ftps else 0):
time.sleep(0.03)
if self.ftpd:
break
if self.tcpsrv.qr:
self.log("qr-code", self.tcpsrv.qr)
else:
self.log("root", "workers OK\n")
self.after_httpsrv_up()
def after_httpsrv_up(self) -> None:
self.up2k.init_vols()
Daemon(self.sd_notify, "sd-notify")
@@ -295,12 +368,25 @@ class SvcHub(object):
al.zs_on = al.zs_on or al.z_on
al.zm_off = al.zm_off or al.z_off
al.zs_off = al.zs_off or al.z_off
for n in ("zm_on", "zm_off", "zs_on", "zs_off"):
ns = "zm_on zm_off zs_on zs_off acao acam"
for n in ns.split(" "):
vs = getattr(al, n).split(",")
vs = [x.strip() for x in vs]
vs = [x for x in vs if x]
setattr(al, n, vs)
ns = "acao acam"
for n in ns.split(" "):
vs = getattr(al, n)
vd = {zs: 1 for zs in vs}
setattr(al, n, vd)
ns = "acao"
for n in ns.split(" "):
vs = getattr(al, n)
vs = [x.lower() for x in vs]
setattr(al, n, vs)
R = al.rp_loc
if "//" in R or ":" in R:
t = "found URL in --rp-loc; it should be just the location, for example /foo/bar"
@@ -309,6 +395,43 @@ class SvcHub(object):
al.R = R = R.strip("/")
al.SR = "/" + R if R else ""
al.RS = R + "/" if R else ""
al.SRS = "/" + R + "/" if R else "/"
if al.rsp_jtr:
al.rsp_slp = 0.000001
al.th_covers = set(al.th_covers.split(","))
for k in "c".split(" "):
vl = getattr(al, k)
if not vl:
continue
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
setattr(al, k, vl)
for k in "lo hist ssl_log".split(" "):
vs = getattr(al, k)
if vs and vs.startswith("~"):
setattr(al, k, os.path.expanduser(vs))
for k in "sus_urls nonsus_urls".split(" "):
vs = getattr(al, k)
if not vs or vs == "no":
setattr(al, k, None)
else:
setattr(al, k, re.compile(vs))
if not al.sus_urls:
al.ban_url = "no"
elif al.ban_url == "no":
al.sus_urls = None
if al.xff_src in ("any", "0", ""):
al.xff_re = None
else:
zs = al.xff_src.replace(" ", "").replace(".", "\\.").replace(",", "|")
al.xff_re = re.compile("^(?:" + zs + ")")
return True
@@ -364,6 +487,7 @@ class SvcHub(object):
def _setup_logfile(self, printed: str) -> None:
base_fn = fn = sel_fn = self._logname()
do_xz = fn.lower().endswith(".xz")
if fn != self.args.lo:
ctr = 0
# yup this is a race; if started sufficiently concurrently, two
@@ -375,7 +499,7 @@ class SvcHub(object):
fn = sel_fn
try:
if fn.lower().endswith(".xz"):
if do_xz:
import lzma
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
@@ -386,7 +510,7 @@ class SvcHub(object):
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
argv = [sys.executable] + self.argv
argv = [pybin] + self.argv
if hasattr(shlex, "quote"):
argv = [shlex.quote(x) for x in argv]
else:
@@ -402,24 +526,10 @@ class SvcHub(object):
def run(self) -> None:
self.tcpsrv.run()
if getattr(self.args, "zm", False):
try:
from .mdns import MDNS
self.mdns = MDNS(self)
Daemon(self.mdns.run, "mdns")
except:
self.log("root", "mdns startup failed;\n" + min_ex(), 3)
if getattr(self.args, "zs", False):
try:
from .ssdp import SSDPd
self.ssdp = SSDPd(self)
Daemon(self.ssdp.run, "ssdp")
except:
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
if getattr(self.args, "z_chk", 0) and (
getattr(self.args, "zm", False) or getattr(self.args, "zs", False)
):
Daemon(self.tcpsrv.netmon, "netmon")
Daemon(self.thr_httpsrv_up, "sig-hsrv-up2")
@@ -451,6 +561,33 @@ class SvcHub(object):
else:
self.stop_thr()
def start_zeroconf(self) -> None:
# (re)start the mdns + ssdp responders; each generation gets a
# unique id so old and new instances can be told apart in the logs
self.zc_ngen += 1
if getattr(self.args, "zm", False):
try:
from .mdns import MDNS
if self.mdns:
# stop the previous generation before replacing it
self.mdns.stop(True)
self.mdns = MDNS(self, self.zc_ngen)
Daemon(self.mdns.run, "mdns")
except:
self.log("root", "mdns startup failed;\n" + min_ex(), 3)
if getattr(self.args, "zs", False):
try:
from .ssdp import SSDPd
if self.ssdp:
self.ssdp.stop()
self.ssdp = SSDPd(self, self.zc_ngen)
Daemon(self.ssdp.run, "ssdp")
except:
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
def reload(self) -> str:
if self.reloading:
return "cannot reload; already in progress"
@@ -526,19 +663,25 @@ class SvcHub(object):
ret = 1
try:
self.pr("OPYTHAT")
tasks = []
slp = 0.0
if self.mdns:
Daemon(self.mdns.stop)
tasks.append(Daemon(self.mdns.stop, "mdns"))
slp = time.time() + 0.5
if self.ssdp:
Daemon(self.ssdp.stop)
tasks.append(Daemon(self.ssdp.stop, "ssdp"))
slp = time.time() + 0.5
self.broker.shutdown()
self.tcpsrv.shutdown()
self.up2k.shutdown()
if hasattr(self, "smbd"):
slp = max(slp, time.time() + 0.5)
tasks.append(Daemon(self.smbd.stop, "smbd"))
if self.thumbsrv:
self.thumbsrv.shutdown()
@@ -548,17 +691,19 @@ class SvcHub(object):
break
if n == 3:
self.pr("waiting for thumbsrv (10sec)...")
self.log("root", "waiting for thumbsrv (10sec)...")
if hasattr(self, "smbd"):
slp = max(slp, time.time() + 0.5)
Daemon(self.kill9, a=(1,))
Daemon(self.smbd.stop)
zf = max(time.time() - slp, 0)
Daemon(self.kill9, a=(zf + 0.5,))
while time.time() < slp:
time.sleep(0.1)
if not next((x for x in tasks if x.is_alive), None):
break
self.pr("nailed it", end="")
time.sleep(0.05)
self.log("root", "nailed it")
ret = self.retcode
except:
self.pr("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
@@ -568,7 +713,7 @@ class SvcHub(object):
print("\033]0;\033\\", file=sys.stderr, end="")
sys.stderr.flush()
self.pr("\033[0m")
self.pr("\033[0m", end="")
if self.logf:
self.logf.close()
@@ -580,8 +725,14 @@ class SvcHub(object):
return
with self.log_mutex:
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg))
zd = datetime.utcnow()
ts = self.log_dfmt % (
zd.year,
zd.month * 100 + zd.day,
(zd.hour * 100 + zd.minute) * 100 + zd.second,
zd.microsecond // self.log_div,
)
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
now = time.time()
if now >= self.next_day:
@@ -608,26 +759,36 @@ class SvcHub(object):
now = time.time()
if now >= self.next_day:
dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
zs = zs.format(dt.strftime("%Y-%m-%d"))
print(zs, end="")
self._set_next_day()
if self.logf:
self.logf.write(zs)
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100:
fmt = "{} {:21} {}\n"
fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
if self.no_ansi:
fmt = "%s %-21s %s\n"
if "\033" in msg:
msg = ansi_re.sub("", msg)
if "\033" in src:
src = ansi_re.sub("", src)
elif c:
if isinstance(c, int):
msg = "\033[3{}m{}\033[0m".format(c, msg)
msg = "\033[3%sm%s\033[0m" % (c, msg)
elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg)
msg = "\033[%sm%s\033[0m" % (c, msg)
else:
msg = "{}{}\033[0m".format(c, msg)
msg = "%s%s\033[0m" % (c, msg)
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
msg = fmt.format(ts, src, msg)
zd = datetime.utcfromtimestamp(now)
ts = self.log_efmt % (
zd.hour,
zd.minute,
zd.second,
zd.microsecond // self.log_div,
)
msg = fmt % (ts, src, msg)
try:
print(msg, end="")
except UnicodeEncodeError:
@@ -635,13 +796,20 @@ class SvcHub(object):
print(msg.encode("utf-8", "replace").decode(), end="")
except:
print(msg.encode("ascii", "replace").decode(), end="")
except OSError as ex:
if ex.errno != errno.EPIPE:
raise
if self.logf:
self.logf.write(msg)
def pr(self, *a: Any, **ka: Any) -> None:
with self.log_mutex:
print(*a, **ka)
try:
with self.log_mutex:
print(*a, **ka)
except OSError as ex:
if ex.errno != errno.EPIPE:
raise
def check_mp_support(self) -> str:
if MACOS:

View File

@@ -2,8 +2,8 @@
from __future__ import print_function, unicode_literals
import calendar
import time
import stat
import time
import zlib
from .bos import bos
@@ -221,6 +221,7 @@ class StreamZip(StreamArc):
fgen: Generator[dict[str, Any], None, None],
utf8: bool = False,
pre_crc: bool = False,
**kwargs: Any
) -> None:
super(StreamZip, self).__init__(log, fgen)
@@ -275,6 +276,7 @@ class StreamZip(StreamArc):
def gen(self) -> Generator[bytes, None, None]:
errf: dict[str, Any] = {}
errors = []
mbuf = b""
try:
for f in self.fgen:
if "err" in f:
@@ -283,13 +285,20 @@ class StreamZip(StreamArc):
try:
for x in self.ser(f):
yield x
mbuf += x
if len(mbuf) >= 16384:
yield mbuf
mbuf = b""
except GeneratorExit:
raise
except:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append((f["vp"], ex))
if mbuf:
yield mbuf
mbuf = b""
if errors:
errf, txt = errdesc(errors)
self.log("\n".join(([repr(errf)] + txt[1:])))
@@ -299,20 +308,23 @@ class StreamZip(StreamArc):
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
mbuf += self._ct(buf)
if len(mbuf) >= 16384:
yield mbuf
mbuf = b""
cdir_end = self.pos
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
if need_64:
ecdir64_pos = self.pos
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
yield self._ct(buf)
mbuf += self._ct(buf)
buf = gen_ecdr64_loc(ecdir64_pos)
yield self._ct(buf)
mbuf += self._ct(buf)
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
yield mbuf + self._ct(ecdr)
finally:
if errf:
bos.unlink(errf["ap"])

View File

@@ -5,14 +5,17 @@ import os
import re
import socket
import sys
import time
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
from .cert import gencert
from .stolen.qrcodegen import QrCode
from .util import (
E_ACCESS,
E_ADDR_IN_USE,
E_ADDR_NOT_AVAIL,
E_UNREACH,
IP6ALL,
Netdev,
min_ex,
sunpack,
@@ -28,6 +31,9 @@ if TYPE_CHECKING:
if not hasattr(socket, "IPPROTO_IPV6"):
setattr(socket, "IPPROTO_IPV6", 41)
if not hasattr(socket, "IP_FREEBIND"):
setattr(socket, "IP_FREEBIND", 15)
class TcpSrv(object):
"""
@@ -46,6 +52,8 @@ class TcpSrv(object):
self.stopping = False
self.srv: list[socket.socket] = []
self.bound: list[tuple[str, int]] = []
self.netdevs: dict[str, Netdev] = {}
self.netlist = ""
self.nsrv = 0
self.qr = ""
pad = False
@@ -221,8 +229,16 @@ class TcpSrv(object):
except:
pass # will create another ipv4 socket instead
if not ANYWIN and self.args.freebind:
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
try:
srv.bind((ip, port))
sport = srv.getsockname()[1]
if port != sport:
# linux 6.0.16 lets you bind a port which is in use
# except it just gives you a random port instead
raise OSError(E_ADDR_IN_USE[0], "")
self.srv.append(srv)
except (OSError, socket.error) as ex:
if ex.errno in E_ADDR_IN_USE:
@@ -239,8 +255,19 @@ class TcpSrv(object):
srvs: list[socket.socket] = []
for srv in self.srv:
ip, port = srv.getsockname()[:2]
if ip == IP6ALL:
ip = "::" # jython
try:
srv.listen(self.args.nc)
try:
ok = srv.getsockopt(socket.SOL_SOCKET, socket.SO_ACCEPTCONN)
except:
ok = 1 # macos
if not ok:
# some linux don't throw on listen(0.0.0.0) after listen(::)
raise Exception("failed to listen on {}".format(srv.getsockname()))
except:
if ip == "0.0.0.0" and ("::", port) in bound:
# dualstack
@@ -252,6 +279,8 @@ class TcpSrv(object):
srv.close()
continue
t = "\n\nERROR: could not open listening socket, probably because one of the server ports ({}) is busy on one of the requested interfaces ({}); avoid this issue by specifying a different port (-p 3939) and/or a specific interface to listen on (-i 192.168.56.1)\n"
self.log("tcpsrv", t.format(port, ip), 1)
raise
bound.append((ip, port))
@@ -268,7 +297,13 @@ class TcpSrv(object):
self.srv = srvs
self.bound = bound
self.nsrv = len(srvs)
self._distribute_netdevs()
def _distribute_netdevs(self):
self.hub.broker.say("set_netdevs", self.netdevs)
self.hub.start_zeroconf()
gencert(self.log, self.args, self.netdevs)
self.hub.restart_ftpd()
def shutdown(self) -> None:
self.stopping = True
@@ -280,6 +315,27 @@ class TcpSrv(object):
self.log("tcpsrv", "ok bye")
def netmon(self):
while not self.stopping:
time.sleep(self.args.z_chk)
netdevs = self.detect_interfaces(self.args.i)
if not netdevs:
continue
added = "nothing"
removed = "nothing"
for k, v in netdevs.items():
if k not in self.netdevs:
added = "{} = {}".format(k, v)
for k, v in self.netdevs.items():
if k not in netdevs:
removed = "{} = {}".format(k, v)
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
self.log("tcpsrv", t.format(added, removed), 3)
self.netdevs = netdevs
self._distribute_netdevs()
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
from .stolen.ifaddr import get_adapters
@@ -300,6 +356,12 @@ class TcpSrv(object):
except:
pass
netlist = str(sorted(eps.items()))
if netlist == self.netlist and self.netdevs:
return {}
self.netlist = netlist
if "0.0.0.0" not in listen_ips and "::" not in listen_ips:
eps = {k: v for k, v in eps.items() if k.split("/")[0] in listen_ips}
@@ -448,7 +510,7 @@ class TcpSrv(object):
zoom = 1
qr = qrc.render(zoom, pad)
if not VT100:
if self.args.no_ansi:
return "{}\n{}".format(txt, qr)
halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m"

View File

@@ -31,7 +31,7 @@ class ThumbCli(object):
if not c:
raise Exception()
except:
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
c = {k: set() for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
self.thumbable = c["thumbable"]
self.fmt_pil = c["pil"]
@@ -94,7 +94,7 @@ class ThumbCli(object):
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt)
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
tpaths = [tpath]
if fmt == "w":
# also check for jpg (maybe webp is unavailable)
@@ -108,6 +108,7 @@ class ThumbCli(object):
if st.st_size:
ret = tpath = tp
fmt = ret.rsplit(".")[1]
break
else:
abort = True
except:

View File

@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
import base64
import hashlib
import logging
import os
import shutil
import subprocess as sp
@@ -11,14 +12,17 @@ import time
from queue import Queue
from .__init__ import TYPE_CHECKING
from .__init__ import ANYWIN, TYPE_CHECKING
from .authsrv import VFS
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
from .util import (
FFMPEG_URL,
BytesIO,
Cooldown,
Daemon,
Pebkac,
afsenc,
fsenc,
min_ex,
runcmd,
@@ -33,14 +37,21 @@ if TYPE_CHECKING:
from .svchub import SvcHub
HAVE_PIL = False
HAVE_PILF = False
HAVE_HEIF = False
HAVE_AVIF = False
HAVE_WEBP = False
try:
from PIL import ExifTags, Image, ImageOps
from PIL import ExifTags, Image, ImageFont, ImageOps
HAVE_PIL = True
try:
ImageFont.load_default(size=16)
HAVE_PILF = True
except:
pass
try:
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
HAVE_WEBP = True
@@ -61,36 +72,47 @@ try:
HAVE_AVIF = True
except:
pass
logging.getLogger("PIL").setLevel(logging.WARNING)
except:
pass
try:
HAVE_VIPS = True
import pyvips
logging.getLogger("pyvips").setLevel(logging.WARNING)
except:
HAVE_VIPS = False
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
rd, fn = vsplit(rem)
if rd:
h = hashlib.sha512(fsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
else:
rd = "top"
if not rd:
rd = "\ntop"
# spectrograms are never cropped; strip fullsize flag
ext = rem.split(".")[-1].lower()
if ext in ffa and fmt in ("wf", "jf"):
fmt = fmt[:1]
rd += "\n" + fmt
h = hashlib.sha512(afsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
# could keep original filenames but this is safer re pathlen
h = hashlib.sha512(fsenc(fn)).digest()
h = hashlib.sha512(afsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
if fmt in ("opus", "caf"):
cat = "ac"
else:
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
fc = fmt[:1]
fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
cat = "th"
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
@@ -103,8 +125,6 @@ class ThumbSrv(object):
self.args = hub.args
self.log_func = hub.log
res = hub.args.th_size.split("x")
self.res = tuple([int(x) for x in res])
self.poke_cd = Cooldown(self.args.th_poke)
self.mutex = threading.Lock()
@@ -112,7 +132,7 @@ class ThumbSrv(object):
self.stopping = False
self.nthr = max(1, self.args.th_mt)
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
for n in range(self.nthr):
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
@@ -128,6 +148,8 @@ class ThumbSrv(object):
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=3)
if ANYWIN and self.args.no_acode:
self.log("download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
if self.args.th_clean:
Daemon(self.cleaner, "thumb.cln")
@@ -175,13 +197,17 @@ class ThumbSrv(object):
with self.mutex:
return not self.nthr
def getres(self, vn: VFS) -> tuple[int, int]:
w, h = vn.flags["thsize"].split("x")
return int(w), int(h)
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt)
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
abspath = os.path.join(ptop, rem)
cond = threading.Condition(self.mutex)
do_conv = False
@@ -191,19 +217,25 @@ class ThumbSrv(object):
self.log("wait {}".format(tpath))
except:
thdir = os.path.dirname(tpath)
bos.makedirs(thdir)
bos.makedirs(os.path.join(thdir, "w"))
inf_path = os.path.join(thdir, "dir.txt")
if not bos.path.exists(inf_path):
with open(inf_path, "wb") as f:
f.write(fsenc(os.path.dirname(abspath)))
f.write(afsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond]
do_conv = True
if do_conv:
self.q.put((abspath, tpath))
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
allvols = list(self.asrv.vfs.all_vols.values())
vn = next((x for x in allvols if x.realpath == ptop), None)
if not vn:
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
vn = self.asrv.vfs.all_aps[0][1]
self.q.put((abspath, tpath, fmt, vn))
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
while not self.stopping:
with self.mutex:
@@ -239,50 +271,62 @@ class ThumbSrv(object):
if not task:
break
abspath, tpath = task
abspath, tpath, fmt, vn = task
ext = abspath.split(".")[-1].lower()
png_ok = False
fun = None
funs = []
if not bos.path.exists(tpath):
for lib in self.args.th_dec:
if fun:
break
elif lib == "pil" and ext in self.fmt_pil:
fun = self.conv_pil
if lib == "pil" and ext in self.fmt_pil:
funs.append(self.conv_pil)
elif lib == "vips" and ext in self.fmt_vips:
fun = self.conv_vips
funs.append(self.conv_vips)
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
fun = self.conv_ffmpeg
funs.append(self.conv_ffmpeg)
elif lib == "ff" and ext in self.fmt_ffa:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
funs.append(self.conv_opus)
elif tpath.endswith(".png"):
fun = self.conv_waves
funs.append(self.conv_waves)
png_ok = True
else:
fun = self.conv_spec
funs.append(self.conv_spec)
if not png_ok and tpath.endswith(".png"):
raise Pebkac(400, "png only allowed for waveforms")
if fun:
tdir, tfn = os.path.split(tpath)
ttpath = os.path.join(tdir, "w", tfn)
try:
bos.unlink(ttpath)
except:
pass
for fun in funs:
try:
fun(abspath, tpath)
fun(abspath, ttpath, fmt, vn)
break
except Exception as ex:
msg = "{} could not create thumbnail of {}\n{}"
msg = msg.format(fun.__name__, abspath, min_ex())
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
self.log(msg, c)
if getattr(ex, "returncode", 0) != 321:
with open(tpath, "wb") as _:
pass
if fun == funs[-1]:
with open(ttpath, "wb") as _:
pass
else:
# ffmpeg may spawn empty files on windows
try:
os.unlink(tpath)
os.unlink(ttpath)
except:
pass
try:
bos.rename(ttpath, tpath)
except:
pass
with self.mutex:
subs = self.busy[tpath]
del self.busy[tpath]
@@ -294,9 +338,10 @@ class ThumbSrv(object):
with self.mutex:
self.nthr -= 1
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
# exif_transpose is expensive (loads full image + unconditional copy)
r = max(*self.res) * 2
res = self.getres(vn)
r = max(*res) * 2
im.thumbnail((r, r), resample=Image.LANCZOS)
try:
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
@@ -310,23 +355,23 @@ class ThumbSrv(object):
if rot in rots:
im = im.transpose(rots[rot])
if self.args.th_no_crop:
im.thumbnail(self.res, resample=Image.LANCZOS)
if fmt.endswith("f"):
im.thumbnail(res, resample=Image.LANCZOS)
else:
iw, ih = im.size
dw, dh = self.res
dw, dh = res
res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS)
return im
def conv_pil(self, abspath: str, tpath: str) -> None:
def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
with Image.open(fsenc(abspath)) as im:
try:
im = self.fancy_pillow(im)
im = self.fancy_pillow(im, fmt, vn)
except Exception as ex:
self.log("fancy_pillow {}".format(ex), "90")
im.thumbnail(self.res)
im.thumbnail(self.getres(vn))
fmts = ["RGB", "L"]
args = {"quality": 40}
@@ -349,12 +394,12 @@ class ThumbSrv(object):
im.save(tpath, **args)
def conv_vips(self, abspath: str, tpath: str) -> None:
def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
crops = ["centre", "none"]
if self.args.th_no_crop:
if fmt.endswith("f"):
crops = ["none"]
w, h = self.res
w, h = self.getres(vn)
kw = {"height": h, "size": "down", "intent": "relative"}
for c in crops:
@@ -363,12 +408,13 @@ class ThumbSrv(object):
img = pyvips.Image.thumbnail(abspath, w, **kw)
break
except:
pass
if c == crops[-1]:
raise
img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
if not ret:
return
@@ -380,12 +426,13 @@ class ThumbSrv(object):
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop:
if fmt.endswith("f"):
scale += "decrease,setsar=1:1"
else:
scale += "increase,crop={0}:{1},setsar=1:1"
bscale = scale.format(*list(self.res)).encode("utf-8")
res = self.getres(vn)
bscale = scale.format(*list(res)).encode("utf-8")
# fmt: off
cmd = [
b"ffmpeg",
@@ -417,11 +464,11 @@ class ThumbSrv(object):
]
cmd += [fsenc(tpath)]
self._run_ff(cmd)
self._run_ff(cmd, vn)
def _run_ff(self, cmd: list[bytes]) -> None:
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"])
if not ret:
return
@@ -464,8 +511,8 @@ class ThumbSrv(object):
self.log(t + txt, c=c)
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_waves(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
def conv_waves(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
if "ac" not in ret:
raise Exception("not audio")
@@ -490,10 +537,10 @@ class ThumbSrv(object):
# fmt: on
cmd += [fsenc(tpath)]
self._run_ff(cmd)
self._run_ff(cmd, vn)
def conv_spec(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
if "ac" not in ret:
raise Exception("not audio")
@@ -533,23 +580,34 @@ class ThumbSrv(object):
]
cmd += [fsenc(tpath)]
self._run_ff(cmd)
self._run_ff(cmd, vn)
def conv_opus(self, abspath: str, tpath: str) -> None:
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
if self.args.no_acode:
raise Exception("disabled in server config")
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
if "ac" not in ret:
raise Exception("not audio")
try:
dur = ret[".dur"][1]
except:
dur = 0
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
tmp_opus = tpath + ".opus"
try:
bos.unlink(tmp_opus)
except:
pass
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
caf_src = abspath if src_opus else tmp_opus
if not want_caf or not src_opus:
# fmt: off
cmd = [
b"ffmpeg",
@@ -564,9 +622,34 @@ class ThumbSrv(object):
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd)
self._run_ff(cmd, vn)
if want_caf:
# iOS fails to play some "insufficiently complex" files
# (average file shorter than 8 seconds), so of course we
# fix that by mixing in some inaudible pink noise :^)
# 6.3 sec seems like the cutoff so lets do 7, and
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-filter_complex", b"anoisesrc=a=0.001:d=7:c=pink,asplit[l][r]; [l][r]amerge[s]; [0:a:0][s]amix",
b"-map_metadata", b"-1",
b"-ac", b"2",
b"-c:a", b"libopus",
b"-b:a", b"128k",
b"-f", b"caf",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd, vn)
elif want_caf:
# simple remux should be safe
# fmt: off
cmd = [
b"ffmpeg",
@@ -581,7 +664,13 @@ class ThumbSrv(object):
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
self._run_ff(cmd, vn)
if tmp_opus != tpath:
try:
bos.unlink(tmp_opus)
except:
pass
def poke(self, tdir: str) -> None:
if not self.poke_cd.poke(tdir):

View File

@@ -34,14 +34,14 @@ if True: # pylint: disable=using-constant-test
from typing import Any, Optional, Union
if TYPE_CHECKING:
from .httpconn import HttpConn
from .httpsrv import HttpSrv
class U2idx(object):
def __init__(self, conn: "HttpConn") -> None:
self.log_func = conn.log_func
self.asrv = conn.asrv
self.args = conn.args
def __init__(self, hsrv: "HttpSrv") -> None:
self.log_func = hsrv.log
self.asrv = hsrv.asrv
self.args = hsrv.args
self.timeout = self.args.srch_time
if not HAVE_SQLITE3:
@@ -51,7 +51,7 @@ class U2idx(object):
self.active_id = ""
self.active_cur: Optional["sqlite3.Cursor"] = None
self.cur: dict[str, "sqlite3.Cursor"] = {}
self.mem_cur = sqlite3.connect(":memory:").cursor()
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
self.mem_cur.execute(r"create table a (b text)")
self.p_end = 0.0
@@ -69,7 +69,7 @@ class U2idx(object):
fsize = body["size"]
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?"
uv: list[Union[str, int]] = [wark[:16], wark]
@@ -101,7 +101,8 @@ class U2idx(object):
uri = ""
try:
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
cur = sqlite3.connect(uri, 2, uri=True).cursor()
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
cur = db.cursor()
cur.execute('pragma table_info("up")').fetchone()
self.log("ro: {}".format(db_path))
except:
@@ -112,7 +113,7 @@ class U2idx(object):
if not cur:
# on windows, this steals the write-lock from up2k.deferred_init --
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
cur = sqlite3.connect(db_path, 2).cursor()
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
self.log("opened {}".format(db_path))
self.cur[ptop] = cur
@@ -120,10 +121,10 @@ class U2idx(object):
def search(
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
) -> tuple[list[dict[str, Any]], list[str]]:
) -> tuple[list[dict[str, Any]], list[str], bool]:
"""search by query params"""
if not HAVE_SQLITE3:
return [], []
return [], [], False
q = ""
v: Union[str, int] = ""
@@ -275,7 +276,7 @@ class U2idx(object):
have_up: bool,
have_mt: bool,
lim: int,
) -> tuple[list[dict[str, Any]], list[str]]:
) -> tuple[list[dict[str, Any]], list[str], bool]:
done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
@@ -293,6 +294,7 @@ class U2idx(object):
self.log("qs: {!r} {!r}".format(uq, uv))
ret = []
seen_rps: set[str] = set()
lim = min(lim, int(self.args.srch_hits))
taglist = {}
for (vtop, ptop, flags) in vols:
@@ -311,34 +313,45 @@ class U2idx(object):
sret = []
fk = flags.get("fk")
dots = flags.get("dotsrch")
fk_alg = 2 if "fka" in flags else 1
c = cur.execute(uq, tuple(vuv))
for hit in c:
w, ts, sz, rd, fn, ip, at = hit[:7]
lim -= 1
if lim < 0:
break
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
if not dots and "/." in ("/" + rp):
continue
if rp in seen_rps:
continue
if not fk:
suf = ""
else:
try:
ap = absreal(os.path.join(ptop, rd, fn))
inf = bos.stat(ap)
ino = 0 if ANYWIN or fk_alg == 2 else bos.stat(ap).st_ino
except:
continue
suf = (
"?k="
+ gen_filekey(
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
)[:fk]
)
suf = "?k=" + gen_filekey(
fk_alg,
self.args.fk_salt,
ap,
sz,
ino,
)[:fk]
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
lim -= 1
if lim < 0:
break
seen_rps.add(rp)
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
for hit in sret:
w = hit["w"]
@@ -357,17 +370,9 @@ class U2idx(object):
done_flag.append(True)
self.active_id = ""
# undupe hits from multiple metadata keys
if len(ret) > 1:
ret = [ret[0]] + [
y
for x, y in zip(ret[:-1], ret[1:])
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
]
ret.sort(key=itemgetter("rp"))
return ret, list(taglist.keys())
return ret, list(taglist.keys()), lim < 0
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout):

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,7 @@ import contextlib
import errno
import hashlib
import hmac
import json
import logging
import math
import mimetypes
@@ -13,6 +14,7 @@ import os
import platform
import re
import select
import shutil
import signal
import socket
import stat
@@ -29,7 +31,7 @@ from email.utils import formatdate
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
from queue import Queue
from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__version__ import S_BUILD_DT, S_VERSION
from .stolen import surrogateescape
@@ -54,6 +56,8 @@ E_ADDR_IN_USE = _ens("EADDRINUSE WSAEADDRINUSE")
E_ACCESS = _ens("EACCES WSAEACCES")
E_UNREACH = _ens("EHOSTUNREACH WSAEHOSTUNREACH ENETUNREACH WSAENETUNREACH")
IP6ALL = "0:0:0:0:0:0:0:0"
try:
import ctypes
@@ -64,7 +68,9 @@ except:
try:
HAVE_SQLITE3 = True
import sqlite3 # pylint: disable=unused-import # typechk
import sqlite3
assert hasattr(sqlite3, "connect") # graalpy
except:
HAVE_SQLITE3 = False
@@ -142,6 +148,8 @@ SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
HTTPCODE = {
200: "OK",
201: "Created",
@@ -167,6 +175,7 @@ HTTPCODE = {
500: "Internal Server Error",
501: "Not Implemented",
503: "Service Unavailable",
999: "MissingNo",
}
@@ -228,6 +237,7 @@ application msi=x-ms-installer cab=vnd.ms-cab-compressed rpm=x-rpm crx=x-chrome-
application epub=epub+zip mobi=x-mobipocket-ebook lit=x-ms-reader rss=rss+xml atom=atom+xml torrent=x-bittorrent
application p7s=pkcs7-signature dcm=dicom shx=vnd.shx shp=vnd.shp dbf=x-dbf gml=gml+xml gpx=gpx+xml amf=x-amf
application swf=x-shockwave-flash m3u=vnd.apple.mpegurl db3=vnd.sqlite3 sqlite=vnd.sqlite3
text ass=plain ssa=plain
image jpg=jpeg xpm=x-xpixmap psd=vnd.adobe.photoshop jpf=jpx tif=tiff ico=x-icon djvu=vnd.djvu
image heic=heic-sequence heif=heif-sequence hdr=vnd.radiance svg=svg+xml
audio caf=x-caf mp3=mpeg m4a=mp4 mid=midi mpc=musepack aif=aiff au=basic qcp=qcelp
@@ -288,6 +298,27 @@ REKOBO_KEY = {
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
_exestr = "python3 python ffmpeg ffprobe cfssl cfssljson cfssl-certinfo"
CMD_EXEB = set(_exestr.encode("utf-8").split())
CMD_EXES = set(_exestr.split())
pybin = sys.executable or ""
if EXE:
pybin = ""
for zsg in "python3 python".split():
try:
if ANYWIN:
zsg += ".exe"
zsg = shutil.which(zsg)
if zsg:
pybin = zsg
break
except:
pass
def py_desc() -> str:
interp = platform.python_implementation()
py_ver = ".".join([str(x) for x in sys.version_info])
@@ -361,8 +392,11 @@ class Daemon(threading.Thread):
name: Optional[str] = None,
a: Optional[Iterable[Any]] = None,
r: bool = True,
ka: Optional[dict[Any, Any]] = None,
) -> None:
threading.Thread.__init__(self, target=target, name=name, args=a or ())
threading.Thread.__init__(
self, target=target, name=name, args=a or (), kwargs=ka
)
self.daemon = True
if r:
self.start()
@@ -378,6 +412,9 @@ class Netdev(object):
def __str__(self):
return "{}-{}{}".format(self.idx, self.name, self.desc)
def __repr__(self):
return "'{}-{}'".format(self.idx, self.name)
def __lt__(self, rhs):
return str(self) < str(rhs)
@@ -437,9 +474,7 @@ class HLog(logging.Handler):
else:
c = 1
if record.name.startswith("PIL") and lv < logging.WARNING:
return
elif record.name == "pyftpdlib":
if record.name == "pyftpdlib":
m = self.ptn_ftp.match(msg)
if m:
ip = m.group(1)
@@ -469,7 +504,7 @@ class NetMap(object):
)
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
ips = find_prefix(ips, netdevs)
self.cache: dict[str, str] = {}
self.b2sip: dict[bytes, str] = {}
@@ -515,7 +550,7 @@ class _Unrecv(object):
self.log = log
self.buf: bytes = b""
def recv(self, nbytes: int) -> bytes:
def recv(self, nbytes: int, spins: int = 1) -> bytes:
if self.buf:
ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:]
@@ -526,6 +561,10 @@ class _Unrecv(object):
ret = self.s.recv(nbytes)
break
except socket.timeout:
spins -= 1
if spins <= 0:
ret = b""
break
continue
except:
ret = b""
@@ -568,7 +607,7 @@ class _LUnrecv(object):
self.log = log
self.buf = b""
def recv(self, nbytes: int) -> bytes:
def recv(self, nbytes: int, spins: int) -> bytes:
if self.buf:
ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:]
@@ -587,7 +626,7 @@ class _LUnrecv(object):
def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
"""read an exact number of bytes"""
try:
ret = self.recv(nbytes)
ret = self.recv(nbytes, 1)
err = False
except:
ret = b""
@@ -595,7 +634,7 @@ class _LUnrecv(object):
while not err and len(ret) < nbytes:
try:
ret += self.recv(nbytes - len(ret))
ret += self.recv(nbytes - len(ret), 1)
except OSError:
err = True
@@ -646,6 +685,7 @@ class FHC(object):
def __init__(self) -> None:
self.cache: dict[str, FHC.CE] = {}
self.aps: set[str] = set()
def close(self, path: str) -> None:
try:
@@ -657,6 +697,7 @@ class FHC(object):
fh.close()
del self.cache[path]
self.aps.remove(path)
def clean(self) -> None:
if not self.cache:
@@ -677,6 +718,7 @@ class FHC(object):
return self.cache[path].fhs.pop()
def put(self, path: str, fh: typing.BinaryIO) -> None:
self.aps.add(path)
try:
ce = self.cache[path]
ce.fhs.append(fh)
@@ -896,7 +938,8 @@ class Magician(object):
class Garda(object):
"""ban clients for repeated offenses"""
def __init__(self, cfg: str) -> None:
def __init__(self, cfg: str, uniq: bool = True) -> None:
self.uniq = uniq
try:
a, b, c = cfg.strip().split(",")
self.lim = int(a)
@@ -942,7 +985,7 @@ class Garda(object):
# assume /64 clients; drop 4 groups
ip = IPv6Address(ip).exploded[:-20]
if prev:
if prev and self.uniq:
if self.prev.get(ip) == prev:
return 0, ip
@@ -1147,20 +1190,12 @@ def ren_open(
fun = kwargs.pop("fun", open)
fdir = kwargs.pop("fdir", None)
suffix = kwargs.pop("suffix", None)
overwrite = kwargs.pop("overwrite", None)
if fname == os.devnull:
with fun(fname, *args, **kwargs) as f:
yield {"orz": (f, fname)}
return
if overwrite:
assert fdir
fpath = os.path.join(fdir, fname)
with fun(fsenc(fpath), *args, **kwargs) as f:
yield {"orz": (f, fname)}
return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
@@ -1187,7 +1222,7 @@ def ren_open(
else:
fpath = fname
if suffix and os.path.exists(fsenc(fpath)):
if suffix and os.path.lexists(fsenc(fpath)):
fpath += suffix
fname += suffix
ext += suffix
@@ -1206,12 +1241,15 @@ def ren_open(
except OSError as ex_:
ex = ex_
if ex.errno == errno.EINVAL and not asciified:
# EPERM: android13
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
asciified = True
bname, fname = [
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
for zs in [bname, fname]
]
zsl = []
for zs in (bname, fname):
zs = zs.encode("ascii", "replace").decode("ascii")
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
zsl.append(zs)
bname, fname = zsl
continue
# ENOTSUP: zfs on ubuntu 20.04
@@ -1275,7 +1313,7 @@ class MultipartParser(object):
rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
(only the fallback non-js uploader relies on these filenames)
"""
for ln in read_header(self.sr):
for ln in read_header(self.sr, 2, 2592000):
self.log(ln)
m = self.re_ctype.match(ln)
@@ -1422,7 +1460,7 @@ class MultipartParser(object):
for buf in iterable:
ret += buf
if len(ret) > max_len:
raise Pebkac(400, "field length is too long")
raise Pebkac(422, "field length is too long")
return ret
@@ -1475,15 +1513,15 @@ def get_boundary(headers: dict[str, str]) -> str:
return m.group(2)
def read_header(sr: Unrecv) -> list[str]:
def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
t0 = time.time()
ret = b""
while True:
if time.time() - t0 > 120:
if time.time() - t0 >= t_tot:
return []
try:
ret += sr.recv(1024)
ret += sr.recv(1024, t_idle // 2)
except:
if not ret:
return []
@@ -1507,15 +1545,40 @@ def read_header(sr: Unrecv) -> list[str]:
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
return base64.urlsafe_b64encode(
hashlib.sha512(
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
).digest()
).decode("ascii")
def rand_name(fdir: str, fn: str, rnd: int) -> str:
ok = False
try:
ext = "." + fn.rsplit(".", 1)[1]
except:
ext = ""
for extra in range(16):
for _ in range(16):
if ok:
break
nc = rnd + extra
nb = int((6 + 6 * nc) / 8)
zb = os.urandom(nb)
zb = base64.urlsafe_b64encode(zb)
fn = zb[:nc].decode("utf-8") + ext
ok = not os.path.exists(fsenc(os.path.join(fdir, fn)))
return fn
def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
if alg == 1:
zs = "%s %s %s %s" % (salt, fspath, fsize, inode)
else:
zs = "%s %s" % (salt, fspath)
zb = zs.encode("utf-8", "replace")
return base64.urlsafe_b64encode(hashlib.sha512(zb).digest()).decode("ascii")
def gen_filekey_dbg(
alg: int,
salt: str,
fspath: str,
fsize: int,
@@ -1523,7 +1586,7 @@ def gen_filekey_dbg(
log: "NamedLogger",
log_ptn: Optional[Pattern[str]],
) -> str:
ret = gen_filekey(salt, fspath, fsize, inode)
ret = gen_filekey(alg, salt, fspath, fsize, inode)
assert log_ptn
if log_ptn.search(fspath):
@@ -1549,14 +1612,16 @@ def gen_filekey_dbg(
return ret
def gencookie(k: str, v: str, dur: Optional[int]) -> str:
v = v.replace(";", "")
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
v = v.replace("%", "%25").replace(";", "%3B")
if dur:
exp = formatdate(time.time() + dur, usegmt=True)
else:
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
k, v, r, exp, "; Secure" if tls else ""
)
def humansize(sz: float, terse: bool = False) -> str:
@@ -1581,7 +1646,12 @@ def unhumanize(sz: str) -> int:
pass
mc = sz[-1:].lower()
mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1)
mi = {
"k": 1024,
"m": 1024 * 1024,
"g": 1024 * 1024 * 1024,
"t": 1024 * 1024 * 1024 * 1024,
}.get(mc, 1)
return int(float(sz[:-1]) * mi)
@@ -1617,7 +1687,7 @@ def uncyg(path: str) -> str:
if len(path) > 2 and path[2] != "/":
return path
return "{}:\\{}".format(path[1], path[3:])
return "%s:\\%s" % (path[1], path[3:])
def undot(path: str) -> str:
@@ -1660,7 +1730,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
bad = ["con", "prn", "aux", "nul"]
for n in range(1, 10):
bad += "com{0} lpt{0}".format(n).split(" ")
bad += ("com%s lpt%s" % (n, n)).split(" ")
if fn.lower().split(".")[0] in bad:
fn = "_" + fn
@@ -1682,7 +1752,7 @@ def relchk(rp: str) -> str:
def absreal(fpath: str) -> str:
try:
return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
return fsdec(os.path.abspath(os.path.realpath(afsenc(fpath))))
except:
if not WINDOWS:
raise
@@ -1712,6 +1782,15 @@ def ipnorm(ip: str) -> str:
return ip
def find_prefix(ips: list[str], netdevs: dict[str, Netdev]) -> list[str]:
ret = []
for ip in ips:
hit = next((x for x in netdevs if x.startswith(ip + "/")), None)
if hit:
ret.append(hit)
return ret
def html_escape(s: str, quot: bool = False, crlf: bool = False) -> str:
"""html.escape but also newlines"""
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
@@ -1793,6 +1872,32 @@ def _w8enc3(txt: str) -> bytes:
return txt.encode(FS_ENCODING, "surrogateescape")
def _msdec(txt: bytes) -> str:
ret = txt.decode(FS_ENCODING, "surrogateescape")
return ret[4:] if ret.startswith("\\\\?\\") else ret
def _msaenc(txt: str) -> bytes:
return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape")
def _uncify(txt: str) -> str:
txt = txt.replace("/", "\\")
if ":" not in txt and not txt.startswith("\\\\"):
txt = absreal(txt)
return txt if txt.startswith("\\\\") else "\\\\?\\" + txt
def _msenc(txt: str) -> bytes:
txt = txt.replace("/", "\\")
if ":" not in txt and not txt.startswith("\\\\"):
txt = absreal(txt)
ret = txt.encode(FS_ENCODING, "surrogateescape")
return ret if ret.startswith(b"\\\\") else b"\\\\?\\" + ret
w8dec = _w8dec3 if not PY2 else _w8dec2
w8enc = _w8enc3 if not PY2 else _w8enc2
@@ -1807,9 +1912,16 @@ def w8b64enc(txt: str) -> str:
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
if not PY2 or not WINDOWS:
fsenc = w8enc
if not PY2 and WINDOWS:
sfsenc = w8enc
afsenc = _msaenc
fsenc = _msenc
fsdec = _msdec
uncify = _uncify
elif not PY2 or not WINDOWS:
fsenc = afsenc = sfsenc = w8enc
fsdec = w8dec
uncify = str
else:
# moonrunes become \x3f with bytestrings,
# losing mojibake support is worth
@@ -1819,8 +1931,9 @@ else:
def _not_actually_mbcs_dec(txt: bytes) -> str:
return txt
fsenc = _not_actually_mbcs_enc
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
fsdec = _not_actually_mbcs_dec
uncify = str
def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
@@ -1931,6 +2044,8 @@ def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> Non
sck.shutdown(socket.SHUT_RDWR)
except:
pass
except Exception as ex:
log("shut({}): {}".format(fd, ex), "90")
finally:
td = time.time() - t0
if td >= 1:
@@ -2008,10 +2123,24 @@ def read_socket_chunked(
raise Pebkac(400, t.format(x))
def list_ips() -> list[str]:
from .stolen.ifaddr import get_adapters
ret: set[str] = set()
for nic in get_adapters():
for ipo in nic.ips:
if len(ipo.ip) < 7:
ret.add(ipo.ip[0]) # ipv6 is (ip,0,0)
else:
ret.add(ipo.ip)
return list(ret)
def yieldfile(fn: str) -> Generator[bytes, None, None]:
with open(fsenc(fn), "rb", 512 * 1024) as f:
while True:
buf = f.read(64 * 1024)
buf = f.read(128 * 1024)
if not buf:
break
@@ -2168,7 +2297,7 @@ def rmdirs(
dirs = [os.path.join(top, x) for x in dirs]
ok = []
ng = []
for d in dirs[::-1]:
for d in reversed(dirs):
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
ok += a
ng += b
@@ -2183,18 +2312,21 @@ def rmdirs(
return ok, ng
def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:
"""rmdir on self, then all parents"""
if top == stop:
return [], [top]
try:
os.rmdir(fsenc(top))
except:
return [], [top]
par = os.path.dirname(top)
if not par:
if not par or par == stop:
return [top], []
ok, ng = rmdirs_up(par)
ok, ng = rmdirs_up(par, stop)
return [top] + ok, ng
@@ -2215,7 +2347,7 @@ def unescape_cookie(orig: str) -> str:
ret += chr(int(esc[1:], 16))
except:
ret += esc
esc = ""
esc = ""
else:
ret += ch
@@ -2315,7 +2447,7 @@ def killtree(root: int) -> None:
def runcmd(
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
) -> tuple[int, str, str]:
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
@@ -2329,6 +2461,14 @@ def runcmd(
bout: bytes
berr: bytes
if ANYWIN:
if isinstance(argv[0], (bytes, bytearray)):
if argv[0] in CMD_EXEB:
argv[0] += b".exe"
else:
if argv[0] in CMD_EXES:
argv[0] += ".exe"
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
if not timeout or PY2:
bout, berr = p.communicate(sin)
@@ -2368,7 +2508,7 @@ def chkcmd(argv: Union[list[bytes], list[str]], **ka: Any) -> tuple[str, str]:
return sout, serr
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: int = 10) -> None:
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: float = 10) -> None:
if PY2:
with open(os.devnull, "wb") as f:
rv = sp.call(argv, stdout=f, stderr=f)
@@ -2428,6 +2568,221 @@ def retchk(
raise Exception(t)
def _parsehook(
log: Optional["NamedLogger"], cmd: str
) -> tuple[bool, bool, bool, float, dict[str, Any], str]:
chk = False
fork = False
jtxt = False
wait = 0.0
tout = 0.0
kill = "t"
cap = 0
ocmd = cmd
while "," in cmd[:6]:
arg, cmd = cmd.split(",", 1)
if arg == "c":
chk = True
elif arg == "f":
fork = True
elif arg == "j":
jtxt = True
elif arg.startswith("w"):
wait = float(arg[1:])
elif arg.startswith("t"):
tout = float(arg[1:])
elif arg.startswith("c"):
cap = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
elif arg.startswith("k"):
kill = arg[1:] # [t]ree [m]ain [n]one
elif arg.startswith("i"):
pass
else:
t = "hook: invalid flag {} in {}"
(log or print)(t.format(arg, ocmd))
env = os.environ.copy()
try:
if EXE:
raise Exception()
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(zsl))
env["PYTHONPATH"] = pypath
except:
if not EXE:
raise
sp_ka = {
"env": env,
"timeout": tout,
"kill": kill,
"capture": cap,
}
if cmd.startswith("~"):
cmd = os.path.expanduser(cmd)
return chk, fork, jtxt, wait, sp_ka, cmd
def runihook(
log: Optional["NamedLogger"],
cmd: str,
vol: "VFS",
ups: list[tuple[str, int, int, str, str, str, int]],
) -> bool:
ocmd = cmd
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
bcmd = [sfsenc(cmd)]
if cmd.endswith(".py"):
bcmd = [sfsenc(pybin)] + bcmd
vps = [vjoin(*list(s3dec(x[3], x[4]))) for x in ups]
aps = [djoin(vol.realpath, x) for x in vps]
if jtxt:
# 0w 1mt 2sz 3rd 4fn 5ip 6at
ja = [
{
"ap": uncify(ap), # utf8 for json
"vp": vp,
"wark": x[0][:16],
"mt": x[1],
"sz": x[2],
"ip": x[5],
"at": x[6],
}
for x, vp, ap in zip(ups, vps, aps)
]
sp_ka["sin"] = json.dumps(ja).encode("utf-8", "replace")
else:
sp_ka["sin"] = b"\n".join(fsenc(x) for x in aps)
t0 = time.time()
if fork:
Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
else:
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
if chk and rc:
retchk(rc, bcmd, err, log, 5)
return False
wait -= time.time() - t0
if wait > 0:
time.sleep(wait)
return True
def _runhook(
log: Optional["NamedLogger"],
cmd: str,
ap: str,
vp: str,
host: str,
uname: str,
mt: float,
sz: int,
ip: str,
at: float,
txt: str,
) -> bool:
ocmd = cmd
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
if jtxt:
ja = {
"ap": ap,
"vp": vp,
"mt": mt,
"sz": sz,
"ip": ip,
"at": at or time.time(),
"host": host,
"user": uname,
"txt": txt,
}
arg = json.dumps(ja)
else:
arg = txt or ap
acmd = [cmd, arg]
if cmd.endswith(".py"):
acmd = [pybin] + acmd
bcmd = [fsenc(x) if x == ap else sfsenc(x) for x in acmd]
t0 = time.time()
if fork:
Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
else:
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
if chk and rc:
retchk(rc, bcmd, err, log, 5)
return False
wait -= time.time() - t0
if wait > 0:
time.sleep(wait)
return True
def runhook(
log: Optional["NamedLogger"],
cmds: list[str],
ap: str,
vp: str,
host: str,
uname: str,
mt: float,
sz: int,
ip: str,
at: float,
txt: str,
) -> bool:
vp = vp.replace("\\", "/")
for cmd in cmds:
try:
if not _runhook(log, cmd, ap, vp, host, uname, mt, sz, ip, at, txt):
return False
except Exception as ex:
(log or print)("hook: {}".format(ex))
if ",c," in "," + cmd:
return False
break
return True
def loadpy(ap: str, hot: bool) -> Any:
"""
a nice can of worms capable of causing all sorts of bugs
depending on what other inconveniently named files happen
to be in the same folder
"""
if ap.startswith("~"):
ap = os.path.expanduser(ap)
mdir, mfile = os.path.split(absreal(ap))
mname = mfile.rsplit(".", 1)[0]
sys.path.insert(0, mdir)
if PY2:
mod = __import__(mname)
if hot:
reload(mod)
else:
import importlib
mod = importlib.import_module(mname)
if hot:
importlib.reload(mod)
sys.path.remove(mdir)
return mod
def gzip_orig_sz(fn: str) -> int:
with open(fsenc(fn), "rb") as f:
f.seek(-4, 2)

View File

@@ -6,7 +6,7 @@ pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))
%.gz: %
pigz -11 -J 34 -I 5730 $<
pigz -11 -J 34 -I 573 $<
%.un: %
pigz -d $<

1
copyparty/web/a/u2c.py Symbolic link
View File

@@ -0,0 +1 @@
../../../bin/u2c.py

View File

@@ -1 +0,0 @@
../../../bin/up2k.py

View File

@@ -27,8 +27,8 @@ window.baguetteBox = (function () {
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
anims = ['slideIn', 'fadeIn', 'none'],
data = {}, // all galleries
imagesElements = [],
@@ -127,7 +127,7 @@ window.baguetteBox = (function () {
var gallery = [];
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
var imageElementClickHandler = function (e) {
if (ctrl(e))
if (ctrl(e) || e && e.shiftKey)
return true;
e.preventDefault ? e.preventDefault() : e.returnValue = false;
@@ -261,7 +261,7 @@ window.baguetteBox = (function () {
setloop(1);
else if (k == "BracketRight")
setloop(2);
else if (e.shiftKey)
else if (e.shiftKey && k != 'KeyR')
return;
else if (k == "ArrowLeft" || k == "KeyJ")
showPreviousImage();
@@ -277,8 +277,8 @@ window.baguetteBox = (function () {
playpause();
else if (k == "KeyU" || k == "KeyO")
relseek(k == "KeyU" ? -10 : 10);
else if (k.indexOf('Digit') === 0)
vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
else if (k.indexOf('Digit') === 0 && v)
v.currentTime = v.duration * parseInt(k.slice(-1)) * 0.1;
else if (k == "KeyM" && v) {
v.muted = vmute = !vmute;
mp_ctl();
@@ -310,7 +310,7 @@ window.baguetteBox = (function () {
options = {};
setOptions(o);
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function setVmode() {
@@ -356,7 +356,7 @@ window.baguetteBox = (function () {
setVmode();
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function findfile() {
@@ -376,7 +376,12 @@ window.baguetteBox = (function () {
else
(vid() || ebi('bbox-overlay')).requestFullscreen();
}
catch (ex) { alert(ex); }
catch (ex) {
if (IPHONE)
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
else
alert(ex);
}
}
function tglsel() {
@@ -519,7 +524,7 @@ window.baguetteBox = (function () {
options[item] = newOptions[item];
}
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
var an = options.animation = sread('ganim', anims) || anims[ANIM ? 0 : 2];
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
btnAnim.setAttribute('tt', 'animation: ' + an);
@@ -580,6 +585,7 @@ window.baguetteBox = (function () {
function hideOverlay(e) {
ev(e);
playvid(false);
removeFromCache('#files');
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
@@ -812,10 +818,16 @@ window.baguetteBox = (function () {
}
function vid() {
if (currentIndex >= imagesElements.length)
return;
return imagesElements[currentIndex].querySelector('video');
}
function vidimg() {
if (currentIndex >= imagesElements.length)
return;
return imagesElements[currentIndex].querySelector('img, video');
}
@@ -863,7 +875,7 @@ window.baguetteBox = (function () {
if (loopB !== null) {
timer.add(loopchk);
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
sethash(location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
}
}
@@ -961,7 +973,7 @@ window.baguetteBox = (function () {
clmod(btnPrev, 'off', 't');
clmod(btnNext, 'off', 't');
if (Date.now() - ctime <= 500)
if (Date.now() - ctime <= 500 && !IPHONE)
tglfull();
ctime = Date.now();

View File

@@ -55,6 +55,7 @@
--u2-sbtn-b1: #999;
--u2-txt-bg: var(--bg-u5);
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
--u2-tab-b1: rgba(128,128,128,0.8);
--u2-tab-1-fg: #fd7;
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
--u2-tab-1-b1: #7c5;
@@ -93,6 +94,7 @@
--g-fsel-bg: #d39;
--g-fsel-b1: #f4a;
--g-fsel-ts: #804;
--g-dfg: var(--srv-3);
--g-fg: var(--a-hil);
--g-bg: var(--bg-u2);
--g-b1: var(--bg-u4);
@@ -269,6 +271,7 @@ html.bz {
--btn-1h-fg: #000;
--txt-sh: a;
--u2-tab-b1: var(--bg-u5);
--u2-tab-1-fg: var(--fg-max);
--u2-tab-1-bg: var(--bg);
@@ -327,6 +330,8 @@ html.c {
}
html.cz {
--bgg: var(--bg-u2);
--srv-3: #fff;
--u2-tab-b1: var(--bg-d3);
}
html.cy {
--fg: #fff;
@@ -354,6 +359,7 @@ html.cy {
--chk-fg: #fd0;
--srv-1: #f00;
--srv-3: #fff;
--op-aa-bg: #fff;
--u2-b1-bg: #f00;
@@ -408,10 +414,11 @@ html.dz {
--op-aa-bg: var(--bg-d2);
--op-a-sh: rgba(0,0,0,0.5);
--u2-btn-b1: #999;
--u2-sbtn-b1: #999;
--u2-btn-b1: var(--fg-weak);
--u2-sbtn-b1: var(--fg-weak);
--u2-txt-bg: var(--bg-u5);
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
--u2-tab-b1: var(--fg-weak);
--u2-tab-1-fg: #fff;
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
--u2-tab-1-b1: #7c5;
@@ -420,6 +427,12 @@ html.dz {
--u2-b-fg: #fff;
--u2-b1-bg: #3a3;
--u2-b2-bg: #3a3;
--u2-o-bg: var(--btn-bg);
--u2-o-b1: var(--bg-u5);
--u2-o-h-bg: var(--fg-weak);
--u2-o-1-bg: var(--fg-weak);
--u2-o-1-b1: var(--a);
--u2-o-1h-bg: var(--a);
--u2-inf-bg: #07a;
--u2-inf-b1: #0be;
--u2-ok-bg: #380;
@@ -480,6 +493,7 @@ html.dz {
--err-ts: #500;
text-shadow: none;
font-family: 'scp', monospace, monospace;
}
html.dy {
--fg: #000;
@@ -572,6 +586,11 @@ html.dy {
* {
line-height: 1.2em;
}
::selection {
color: var(--bg-d1);
background: var(--fg);
text-shadow: none;
}
html,body,tr,th,td,#files,a {
color: inherit;
background: none;
@@ -709,18 +728,22 @@ a:hover {
html.y #files thead th {
box-shadow: 0 1px 0 rgba(0,0,0,0.12);
}
html #files.hhpick thead th {
color: #f7d;
background: #000;
box-shadow: .1em .2em 0 #f6c inset, -.1em -.1em 0 #f6c inset;
}
#files td {
margin: 0;
padding: .3em .5em;
background: var(--bg);
max-width: var(--file-td-w);
word-wrap: break-word;
overflow: hidden;
}
#files tr:nth-child(2n) td {
background: var(--row-alt);
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files td+td {
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
}
@@ -754,8 +777,9 @@ html.y #files thead th {
display: inline;
}
#path a {
margin: 0 0 0 -.2em;
padding: 0 0 0 .4em;
padding: 0 .35em;
position: relative;
z-index: 1;
/* ie: */
border-bottom: .1em solid #777\9;
margin-right: 1em\9;
@@ -763,18 +787,17 @@ html.y #files thead th {
#path a:first-child {
padding-left: .8em;
}
#path a:not(:last-child):after {
content: '';
#path i {
width: 1.05em;
height: 1.05em;
margin: -.2em .3em -.2em -.4em;
margin: -.5em .15em -.15em -.7em;
display: inline-block;
border: 1px solid rgba(255,224,192,0.3);
border-width: .05em .05em 0 0;
transform: rotate(45deg);
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
}
html.y #path a:not(:last-child)::after {
html.y #path i {
background: none;
border-color: rgba(0,0,0,0.2);
border-width: .1em .1em 0 0;
@@ -788,11 +811,31 @@ html.y #path a:hover {
}
.logue {
padding: .2em 0;
position: relative;
z-index: 1;
}
.logue.hidden,
.logue:empty {
display: none;
}
#doc>iframe,
.logue>iframe {
background: var(--bgg);
border: 1px solid var(--bgg);
border-width: 0 .3em 0 .3em;
border-radius: .5em;
visibility: hidden;
margin: 0 -.3em;
width: 100%;
height: 0;
}
#doc>iframe.focus,
.logue>iframe.focus {
box-shadow: 0 0 .1em .1em var(--a);
}
#pro.logue>iframe {
height: 100vh;
}
#pro.logue {
margin-bottom: .8em;
}
@@ -817,9 +860,13 @@ html.y #path a:hover {
.mdo {
max-width: 52em;
}
.mdo.sb,
#epi.logue.mdo>iframe {
max-width: 54em;
}
.mdo,
.mdo * {
line-height: 1.4em;
line-height: 1.5em;
}
#srv_info,
#srv_info2,
@@ -937,6 +984,9 @@ html.y #path a:hover {
#ggrid>a.dir:before {
content: '📂';
}
#ggrid>a.dir>span {
color: var(--g-dfg);
}
#ggrid>a.au:before {
content: '💾';
}
@@ -983,6 +1033,9 @@ html.np_open #ggrid>a.au:before {
background: var(--g-sel-bg);
border-color: var(--g-sel-b1);
}
#ggrid>a.sel>span {
color: var(--g-sel-fg);
}
#ggrid>a.sel,
#ggrid>a[tt].sel {
border-top: 1px solid var(--g-fsel-b1);
@@ -1036,6 +1089,9 @@ html.np_open #ggrid>a.au:before {
background: var(--bg-d3);
box-shadow: -.2em .2em 0 var(--f-sel-sh), -.2em -.2em 0 var(--f-sel-sh);
}
#player {
display: none;
}
#widget {
position: fixed;
font-size: 1.4em;
@@ -1118,10 +1174,10 @@ html.y #widget.open {
background: #fff;
background: var(--bg-u3);
}
#wfm, #wzip, #wnp {
#wfs, #wfm, #wzip, #wnp {
display: none;
}
#wzip, #wnp {
#wfs, #wzip, #wnp {
margin-right: .2em;
padding-right: .2em;
border: 1px solid var(--bg-u5);
@@ -1133,6 +1189,7 @@ html.y #widget.open {
padding-left: .2em;
border-left-width: .1em;
}
#wfs.act,
#wfm.act {
display: inline-block;
}
@@ -1156,6 +1213,13 @@ html.y #widget.open {
position: relative;
display: inline-block;
}
#wfs {
font-size: .36em;
text-align: right;
line-height: 1.3em;
padding: 0 .3em 0 0;
border-width: 0 .25em 0 0;
}
#wfm span,
#wnp span {
font-size: .6em;
@@ -1171,7 +1235,8 @@ html.y #widget.open {
#wfm a.hide {
display: none;
}
#files tbody tr.fcut td {
#files tbody tr.fcut td,
#ggrid>a.fcut {
animation: fcut .5s ease-out;
}
@keyframes fcut {
@@ -1294,6 +1359,10 @@ html.y #ops svg circle {
padding: .3em .6em;
white-space: nowrap;
}
#noie {
color: #b60;
margin: 0 0 0 .5em;
}
.opbox {
padding: .5em;
border-radius: 0 .3em .3em 0;
@@ -1334,6 +1403,9 @@ input[type="checkbox"]:checked+label {
color: #0e0;
color: var(--a);
}
html.dz input {
font-family: 'scp', monospace, monospace;
}
.opwide div>span>input+label {
padding: .3em 0 .3em .3em;
margin: 0 0 0 -.3em;
@@ -1514,6 +1586,7 @@ html.cz .btn {
border-bottom: .2em solid #709;
}
html.dz .btn {
font-size: 1em;
box-shadow: 0 0 0 .1em #080 inset;
}
html.dz .tgl.btn.on {
@@ -1557,6 +1630,12 @@ html.cz .tgl.btn.on {
list-style: none;
border-top: 1px solid var(--bg-u5);
}
#tree li.offline>a:first-child:before {
content: '❌';
position: absolute;
margin-left: -.25em;
z-index: 3;
}
#tree ul a.sel {
background: #000;
background: var(--bg-d3);
@@ -1698,6 +1777,8 @@ html.y #tree.nowrap .ntree a+a:hover {
display: none;
}
.ghead {
background: #fff;
background: var(--bg-u2);
border-radius: .3em;
padding: .2em .5em;
line-height: 2.3em;
@@ -1727,6 +1808,7 @@ html.y #tree.nowrap .ntree a+a:hover {
padding: 0;
}
#rui {
background: #fff;
background: var(--bg);
position: fixed;
top: 0;
@@ -1783,6 +1865,7 @@ html.y #tree.nowrap .ntree a+a:hover {
}
#doc {
overflow: visible;
background: #fff;
background: var(--bg);
margin: -1em 0 .5em 0;
padding: 1em 0 1em 0;
@@ -2037,12 +2120,12 @@ html.y #bbox-overlay figcaption a {
}
.bbox-btn,
#bbox-btns {
opacity: 1;
opacity: 1;
animation: opacity .2s infinite ease-in-out;
}
.bbox-btn.off,
#bbox-btns.off {
opacity: 0;
opacity: 0;
}
#bbox-overlay button {
cursor: pointer;
@@ -2312,7 +2395,7 @@ html.y #bbox-overlay figcaption a {
display: block;
}
#u2bm sup {
font-weight: bold;
font-weight: bold;
}
#u2notbtn {
display: none;
@@ -2411,7 +2494,7 @@ html.y #bbox-overlay figcaption a {
width: 21em;
}
#u2cards {
padding: 1em 1em .3em 1em;
padding: 1em 1em .42em 1em;
margin: 0 auto;
white-space: nowrap;
text-align: center;
@@ -2419,14 +2502,14 @@ html.y #bbox-overlay figcaption a {
min-width: 24em;
}
#u2cards.w {
width: 44em;
width: 48em;
text-align: left;
}
#u2cards.ww {
display: inline-block;
}
#u2etaw.w {
width: 52em;
width: 55em;
text-align: right;
margin: 2em auto -2.7em auto;
}
@@ -2436,7 +2519,8 @@ html.y #bbox-overlay figcaption a {
#u2cards a {
padding: .2em 1em;
background: var(--u2-tab-bg);
border: 1px solid rgba(128,128,128,0.8);
border: 1px solid #999;
border-color: var(--u2-tab-b1);
border-width: 0 0 1px 0;
}
#u2cards a:first-child {
@@ -2470,10 +2554,10 @@ html.y #bbox-overlay figcaption a {
width: 30em;
}
#u2conf.w {
width: 48em;
width: 51em;
}
#u2conf.ww {
width: 78em;
width: 82em;
}
#u2conf.ww #u2c3w {
width: 29em;
@@ -2557,7 +2641,6 @@ html.b #u2conf a.b:hover {
#u2conf input[type="checkbox"]:checked+label:hover {
background: var(--u2-o-1h-bg);
}
#op_up2k.srch #u2conf td:nth-child(1)>*,
#op_up2k.srch #u2conf td:nth-child(2)>*,
#op_up2k.srch #u2conf td:nth-child(3)>* {
background: #777;
@@ -2696,6 +2779,9 @@ html.c .opbox,
html.a .opbox {
margin: 1.5em 0 0 0;
}
html.dz .opview input.i {
width: calc(100% - 18em);
}
html.c #tree,
html.c #treeh,
html.a #tree,
@@ -2748,6 +2834,9 @@ html.a #u2btn {
html.ay #u2btn {
box-shadow: .4em .4em 0 #ccc;
}
html.dz #u2btn {
letter-spacing: -.033em;
}
html.c #u2conf.ww #u2btn,
html.a #u2conf.ww #u2btn {
margin: -2em .5em -3em 0;
@@ -2895,6 +2984,7 @@ html.b #treepar {
html.b #wrap {
margin-top: 2em;
}
html.by .ghead,
html.bz .ghead {
background: var(--bg);
padding: .2em 0;
@@ -2909,13 +2999,13 @@ html.b .btn {
top: -.1em;
}
html.b #op_up2k.srch sup {
color: #fc0;
color: #fc0;
}
html.by #u2btn sup {
color: #06b;
color: #06b;
}
html.by #op_up2k.srch sup {
color: #b70;
color: #b70;
}
html.bz #u2cards a.act {
box-shadow: 0 -.1em .2em var(--bg-d2);
@@ -2964,6 +3054,16 @@ html.d #treepar {
@media (max-width: 32em) {
#u2conf {
font-size: .9em;
}
}
@media (max-width: 28em) {
#u2conf {
font-size: .8em;
}
}
@media (min-width: 70em) {
#barpos,
#barbuf {

View File

@@ -11,7 +11,7 @@
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
{%- if css %}
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ css }}_={{ ts }}">
{%- endif %}
</head>
@@ -29,17 +29,17 @@
<div id="op_player" class="opview opbox opwide"></div>
<div id="op_bup" class="opview opbox act">
<div id="op_bup" class="opview opbox {% if not ls0 %}act{% endif %}">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload">
</form>
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
</div>
<div id="op_mkdir" class="opview opbox act">
<div id="op_mkdir" class="opview opbox {% if not ls0 %}act{% endif %}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
📂<input type="text" name="name" class="i" placeholder="awesome mix vol.1">
@@ -55,7 +55,7 @@
</form>
</div>
<div id="op_msg" class="opview opbox act">
<div id="op_msg" class="opview opbox {% if not ls0 %}act{% endif %}">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
📟<input type="text" name="msg" class="i" placeholder="lorem ipsum dolor sit amet">
<input type="submit" value="send msg to srv log">
@@ -85,7 +85,7 @@
<div id="bdoc"></div>
{%- endif %}
<div id="pro" class="logue">{{ logues[0] }}</div>
<div id="pro" class="logue">{{ "" if sb_lg else logues[0] }}</div>
<table id="files">
<thead>
@@ -119,9 +119,9 @@
</tbody>
</table>
<div id="epi" class="logue">{{ logues[1] }}</div>
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
<h2><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
<a href="#" id="repl">π</a>
@@ -135,38 +135,27 @@
<script>
var SR = {{ r|tojson }},
acct = "{{ acct }}",
perms = {{ perms }},
themes = {{ themes }},
CGV = {{ cgv|tojson }},
TS = "{{ ts }}",
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
s_name = "{{ s_name }}",
lang = "{{ lang }}",
dfavico = "{{ favico }}",
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_acode = {{ have_acode|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost }},
have_zip = {{ have_zip|tojson }},
lifetime = {{ lifetime }},
turbolvl = {{ turbolvl }},
u2sort = "{{ u2sort }}",
have_emp = {{ have_emp|tojson }},
sb_lg = "{{ sb_lg }}",
txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }},
logues = {{ logues|tojson if sb_lg else "[]" }},
ls0 = {{ ls0|tojson }};
document.documentElement.className = localStorage.theme || dtheme;
document.documentElement.className = localStorage.cpp_thm || dtheme;
</script>
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
{%- if js %}
<script src="{{ js }}?_={{ ts }}"></script>
<script src="{{ js }}_={{ ts }}"></script>
{%- endif %}
</body>

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
</head>

View File

@@ -31,7 +31,7 @@
<span id="lno">L#</span>
{%- else %}
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
<a href="{{ arg_base }}edit2" id="edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
<a href="{{ arg_base }}">view raw</a>
{%- endif %}
</div>

View File

@@ -52,7 +52,7 @@ var img_load = (function () {
var r = {};
r.callbacks = [];
function fire() {
var fire = function () {
for (var a = 0; a < r.callbacks.length; a++)
r.callbacks[a]();
}
@@ -212,6 +212,8 @@ function convert_markdown(md_text, dest_dom) {
try {
var md_html = marked.parse(md_text, marked_opts);
if (!have_emp)
md_html = DOMPurify.sanitize(md_html);
}
catch (ex) {
if (ext)
@@ -231,11 +233,11 @@ function convert_markdown(md_text, dest_dom) {
var nodes = md_dom.getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) {
var href = nodes[a].getAttribute('href');
var txt = nodes[a].textContent;
var txt = nodes[a].innerHTML;
if (!txt)
nodes[a].textContent = href;
else if (href !== txt)
else if (href !== txt && !nodes[a].className)
nodes[a].className = 'vis';
}
@@ -470,7 +472,7 @@ img_load.callbacks = [toc.refresh];
// scroll handler
var redraw = (function () {
var sbs = true;
function onresize() {
var onresize = function () {
if (window.matchMedia)
sbs = window.matchMedia('(min-width: 64em)').matches;
@@ -483,7 +485,7 @@ var redraw = (function () {
onscroll();
}
function onscroll() {
var onscroll = function () {
toc.refresh();
}
@@ -505,6 +507,13 @@ dom_navtgl.onclick = function () {
redraw();
};
if (!HTTPS && location.hostname != '127.0.0.1') try {
ebi('edit2').onclick = function (e) {
toast.err(0, "the fancy editor is only available over https");
return ev(e);
}
} catch (ex) { }
if (sread('hidenav') == 1)
dom_navtgl.onclick();

View File

@@ -92,7 +92,7 @@ var action_stack = null;
var nlines = 0;
var draw_md = (function () {
var delay = 1;
function draw_md() {
var draw_md = function () {
var t0 = Date.now();
var src = dom_src.value;
convert_markdown(src, dom_pre);
@@ -135,7 +135,7 @@ img_load.callbacks = [function () {
// resize handler
redraw = (function () {
function onresize() {
var onresize = function () {
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
dom_wrap.style.top = y;
dom_swrap.style.top = y;
@@ -143,12 +143,12 @@ redraw = (function () {
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
}
function setsbs() {
var setsbs = function () {
dom_wrap.className = '';
dom_swrap.className = '';
onresize();
}
function modetoggle() {
var modetoggle = function () {
var mode = dom_nsbs.innerHTML;
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
mode += ' single';
@@ -172,7 +172,7 @@ redraw = (function () {
(function () {
var skip_src = false, skip_pre = false;
function scroll(src, srcmap, dst, dstmap) {
var scroll = function (src, srcmap, dst, dstmap) {
var y = src.scrollTop;
if (y < 8) {
dst.scrollTop = 0;
@@ -278,6 +278,7 @@ function Modpoll() {
return;
var new_md = this.responseText,
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
server_ref = server_md.replace(/\r/g, ''),
server_now = new_md.replace(/\r/g, '');
@@ -285,6 +286,7 @@ function Modpoll() {
if (r.initial && server_ref != server_now)
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
dom_src.value = server_md = new_md;
last_modified = new_mt;
draw_md();
}, null);
@@ -898,12 +900,12 @@ var set_lno = (function () {
pv = null,
lno = ebi('lno');
function poke() {
var poke = function () {
clearTimeout(t);
t = setTimeout(fire, 20);
}
function fire() {
var fire = function () {
try {
clearTimeout(t);
@@ -928,9 +930,11 @@ var set_lno = (function () {
// hotkeys / toolbar
(function () {
function keydown(ev) {
var keydown = function (ev) {
ev = ev || window.event;
var kc = ev.code || ev.keyCode || ev.which;
var kc = ev.code || ev.keyCode || ev.which,
editing = document.activeElement == dom_src;
//console.log(ev.key, ev.code, ev.keyCode, ev.which);
if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
save();
@@ -941,12 +945,17 @@ var set_lno = (function () {
if (d)
d.click();
}
if (document.activeElement != dom_src)
return true;
set_lno();
if (editing)
set_lno();
if (ctrl(ev)) {
if (ev.code == "KeyE") {
dom_nsbs.click();
return false;
}
if (!editing)
return true;
if (ev.code == "KeyH" || kc == 72) {
md_header(ev.shiftKey);
return false;
@@ -971,10 +980,6 @@ var set_lno = (function () {
iter_uni();
return false;
}
if (ev.code == "KeyE") {
dom_nsbs.click();
return false;
}
var up = ev.code == "ArrowUp" || kc == 38;
var dn = ev.code == "ArrowDown" || kc == 40;
if (up || dn) {
@@ -987,6 +992,9 @@ var set_lno = (function () {
}
}
else {
if (!editing)
return true;
if (ev.code == "Tab" || kc == 9) {
md_indent(ev.shiftKey);
return false;
@@ -1050,7 +1058,7 @@ action_stack = (function () {
var ignore = false;
var ref = dom_src.value;
function diff(from, to, cpos) {
var diff = function (from, to, cpos) {
if (from === to)
return null;
@@ -1081,14 +1089,14 @@ action_stack = (function () {
};
}
function undiff(from, change) {
var undiff = function (from, change) {
return {
txt: from.substring(0, change.car) + change.txt + from.substring(change.cdr),
cpos: change.cpos
};
}
function apply(src, dst) {
var apply = function (src, dst) {
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (src.length === 0)
@@ -1112,7 +1120,7 @@ action_stack = (function () {
return true;
}
function schedule_push() {
var schedule_push = function () {
if (ignore) {
ignore = false;
return;
@@ -1123,7 +1131,7 @@ action_stack = (function () {
sched_timer = setTimeout(push, 500);
}
function undo() {
var undo = function () {
if (hist.re.length == 0) {
clearTimeout(sched_timer);
push();
@@ -1131,11 +1139,11 @@ action_stack = (function () {
return apply(hist.un, hist.re);
}
function redo() {
var redo = function () {
return apply(hist.re, hist.un);
}
function push() {
var push = function () {
var newtxt = dom_src.value;
var change = diff(ref, newtxt, sched_cpos);
if (change !== null)

Some files were not shown because too many files have changed in this diff Show More