Compare commits

...

100 Commits

Author SHA1 Message Date
ed
d0bb1ad141 v1.4.3 2022-09-26 22:37:54 +02:00
ed
b299aaed93 fix some cases of deleted files not being forgotten 2022-09-26 22:19:46 +02:00
ed
abb3224cc5 option to save a copy of corrupted uploads 2022-09-26 22:01:49 +02:00
ed
1c66d06702 cleanup versionchecks 2022-09-25 21:31:47 +02:00
ed
e00e80ae39 v1.4.2 2022-09-25 14:36:10 +02:00
ed
4f4f106c48 add ability to forget uploads by deleting the files 2022-09-25 14:24:01 +02:00
ed
a286cc9d55 fix printing big unicode messages 2022-09-25 14:04:35 +02:00
ed
53bb1c719b fix huge-filename trunc on ubuntu-20.04 zfs 2022-09-25 14:00:11 +02:00
ed
98d5aa17e2 notes on dealing with bitflips 2022-09-24 22:41:00 +02:00
ed
aaaa80e4b8 v1.4.1 2022-09-24 14:45:50 +02:00
ed
e70e926a40 support up2k uploads from old browsertabs 2022-09-24 14:35:51 +02:00
ed
e80c1f6d59 mention how ffmpeg was built 2022-09-24 00:05:47 +02:00
ed
24de360325 v1.4.0 2022-09-23 22:53:51 +02:00
ed
e0039bc1e6 syntax-hl: elixir, glsl, matlab, moonscript, nim, zig 2022-09-23 22:32:40 +02:00
ed
ae5c4a0109 update webdeps + isort + readme 2022-09-23 22:32:04 +02:00
ed
1d367a0da0 cleanup 2022-09-23 20:37:37 +02:00
ed
d285f7ee4a macos-safari support 2022-09-23 19:36:07 +02:00
ed
37c84021a2 up2k: folder-upload without drag/drop 2022-09-22 21:58:04 +02:00
ed
8ee9de4291 up2k: add separate sfx toggle 2022-09-22 20:12:25 +02:00
ed
249b63453b good api 2022-09-22 19:20:33 +02:00
ed
1c0017d763 up2k: upload-complete notification 2022-09-21 23:39:36 +02:00
ed
df51e23639 playing next folder makes no sense in search results 2022-09-21 22:30:31 +02:00
ed
32e71a43b8 reinvent fail2ban 2022-09-21 22:27:20 +02:00
ed
47a1e6ddfa avoid windows funk 2022-09-21 08:25:44 +02:00
ed
c5f41457bb add ffmpeg build notes 2022-09-21 08:17:26 +02:00
ed
f1e0c44bdd better autocorrect for poor ffmpeg builds 2022-09-20 23:25:35 +02:00
ed
9d2e390b6a shrink the exe + add errorhandler 2022-09-20 21:40:56 +02:00
ed
75a58b435d reject anon ftp if anon has no read/write 2022-09-20 21:40:21 +02:00
ed
f5474d34ac embed licenses 2022-09-20 20:11:38 +02:00
ed
c962d2544f ux 2022-09-20 20:07:02 +02:00
ed
0b87a4a810 allow setting lifetimes from up2k ui 2022-09-19 23:49:07 +02:00
ed
1882afb8b6 whoops 2022-09-19 02:10:14 +02:00
ed
2270c8737a and audio seekpoints got floored to ints 2022-09-19 01:30:59 +02:00
ed
d6794955a4 playback position covered up the waveform 2022-09-19 01:23:40 +02:00
ed
f5520f45ef add pyinstaller 2022-09-19 00:59:54 +02:00
ed
9401b5ae13 add filetype detection for nameless uploads 2022-09-18 17:30:57 +02:00
ed
df64a62a03 patch popen on windows-python <3.8 2022-09-18 15:09:41 +02:00
ed
09cea66aa8 add ability to set lifetime per-file during upload 2022-09-18 13:12:38 +02:00
ed
13cc33e0a5 support random filenames in bup too 2022-09-18 01:03:38 +02:00
ed
ab36c8c9de fix tests 2022-09-18 00:16:40 +02:00
ed
f85d4ce82f support alpine's ffmpeg 2022-09-17 23:56:32 +02:00
ed
6bec4c28ba add waveform seekbar 2022-09-17 23:40:37 +02:00
ed
fad1449259 drop the redundant request for folders on navigation 2022-09-17 21:39:44 +02:00
ed
86b3b57137 smaller optimizations 2022-09-17 20:39:08 +02:00
ed
b235037dd3 5x faster rendering of huge tagsets 2022-09-17 20:17:24 +02:00
ed
3108139d51 30% faster tags listing 2022-09-17 19:36:42 +02:00
ed
2ae99ecfa0 new upload modifiers:
* terse upload responser
* randomize filenames
2022-09-17 14:48:53 +02:00
ed
e8ab53c270 fix read-only search positioning 2022-09-17 13:45:41 +02:00
ed
5e9bc1127d fix windows symlink creation 2022-09-17 13:27:54 +02:00
ed
415e61c3c9 prevent blanks from skipping ahead in the queue 2022-09-16 23:51:55 +02:00
ed
5152f37ec8 fix sfx keepalive across unix users 2022-09-16 22:19:59 +02:00
ed
0dbeb010cf fix symlinked filekeys 2022-09-16 21:41:17 +02:00
ed
17c465bed7 lazyload big folders; closes #11 2022-09-15 23:43:40 +02:00
ed
add04478e5 multiprocessing: fix listening-socket config 2022-09-15 22:25:11 +02:00
ed
6db72d7166 optimizations / cleanup 2022-09-15 01:18:19 +02:00
ed
868103a9c5 more flexible --stackmon 2022-09-14 02:06:34 +02:00
ed
0f37718671 improve error messages 2022-09-14 01:56:16 +02:00
icxes
fa1445df86 align grid items to left if there's not enough to fill a row 2022-09-12 00:58:54 +02:00
icxes
a783e7071e add small margin to grid 2022-09-12 00:58:54 +02:00
icxes
a9919df5af change justify-content depending on whether sidebar is open 2022-09-12 00:58:54 +02:00
icxes
b0af31ac35 fix indentation? 2022-09-12 00:58:54 +02:00
icxes
c4c964a685 simplify style and make gaps equal size 2022-09-12 00:58:54 +02:00
icxes
348ec71398 make grid items scale properly at different zoom levels 2022-09-12 00:58:54 +02:00
exci
a257ccc8b3 try using grids for the.. grids 2022-09-12 00:58:54 +02:00
ed
fcc4296040 mention the upcoming bugfix in chrome 2022-09-11 22:31:36 +02:00
ed
1684d05d49 dont crash chrome with too many unique SVGs 2022-09-11 11:47:26 +02:00
ed
0006f933a2 hmac uploader-ip when avoiding filename collisions 2022-09-11 08:27:45 +02:00
ed
0484f97c9c stop writing upload-summary textfiles,
can be reenabled with --write-uplog
2022-09-10 22:07:10 +02:00
ed
e430b2567a add pyoxidizer (windows-only) 2022-09-10 17:33:04 +02:00
ed
fbc8ee15da make firefox stop complaining 2022-09-08 19:22:51 +02:00
ed
68a9c05947 load eq ui early 2022-09-08 18:47:30 +02:00
ed
0a81aba899 fix wrong ETA after failed handshakes +
tooltip-hint positioning on bottom-most elements
2022-09-07 23:34:43 +02:00
ed
d2ae822e15 more socket cleanup fiddling 2022-09-07 23:06:12 +02:00
ed
fac4b08526 firefox may forget FDs during upload; warn about it 2022-09-07 23:03:48 +02:00
ed
3a7b43c663 dodge firefox race (thx exci) 2022-09-07 21:27:36 +02:00
ed
8fcb2d1554 defer actx until needed (audioplayer, uploads) and
try to be less reliant on the actx speedhack for upload performance
2022-09-07 21:08:09 +02:00
ed
590c763659 add unforgetti beam 2022-09-07 08:09:32 +02:00
ed
11d1267f8c option to keep files in index when deleted 2022-09-07 01:07:21 +02:00
ed
8f5bae95ce fix visual glitches in upload ui 2022-09-07 00:38:19 +02:00
ed
e6b12ef14c hide warnings when they are remedied 2022-09-07 00:29:26 +02:00
ed
b65674618b fix ui bug on upload-queues >= 1 TiB large 2022-09-06 23:24:58 +02:00
ed
20dca2bea5 mtp: add guestbook reader 2022-09-05 20:23:59 +02:00
ed
059e93cdcf u2cli: fix py3.5 support + better deps warning 2022-09-05 18:24:18 +02:00
ed
635ab25013 up2k.js: defer worker startup until needed 2022-09-05 00:55:52 +02:00
ed
995cd10df8 bump timeouts for zfs / bursty filesystems 2022-09-04 21:21:54 +02:00
ed
50f3820a6d downgrade severity of some transient errors 2022-09-04 12:53:49 +02:00
ed
617f3ea861 up2k-hook-ytid: discover related files in subfolders 2022-09-04 12:20:40 +02:00
ed
788db47b95 option to let mtp's keep stdout/stderr 2022-09-04 01:42:28 +02:00
ed
5fa8aaabb9 up2k-hook-ytid: comment-field example 2022-09-04 00:06:42 +02:00
ed
89d1af7f33 this actually serves a purpose but please dont ask 2022-09-03 20:19:16 +02:00
ed
799cf27c5d restore .bin-suffix for nameless PUT/POSTs
disappeared in v1.0.11
2022-09-03 19:59:59 +02:00
ed
c930d8f773 add mtp debug mode 2022-09-03 19:58:10 +02:00
ed
a7f921abb9 up2k-hook-ytid: support tiny files 2022-09-03 15:08:08 +02:00
ed
bc6234e032 parallel socket shutdown 2022-08-31 08:38:34 +02:00
ed
558bfa4e1e siocoutq-based shutdown 2022-08-31 01:16:09 +02:00
ed
5d19f23372 accurate num.cores detection 2022-08-29 19:24:48 +02:00
ed
27f08cdbfa better isNaN + fade + fix preload seek:
* use Number.isFinite or shim it, rejecting strings
* fade-in/out was too quick on volumes < 100%
* fades (especially -out) was too slow on chrome
* seek to start if playing into the previously played file
* and let π raise if it wants to
2022-08-29 19:23:23 +02:00
ed
993213e2c0 mtp/vidchk: support stuff like rag-prep 2022-08-24 23:25:03 +02:00
ed
49470c05fa well that was dumb 2022-08-23 00:03:04 +02:00
ed
ee0a060b79 mention the chrome gc bugs 2022-08-20 09:25:29 +02:00
57 changed files with 3409 additions and 604 deletions

View File

@@ -49,6 +49,7 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
* [uploading](#uploading) - drag files/folders into the web-browser to upload
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
* [unpost](#unpost) - undo/delete accidental uploads
* [self-destruct](#self-destruct) - uploads can be given a lifetime
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [markdown viewer](#markdown-viewer) - and there are *two* editors
@@ -62,6 +63,7 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
* [periodic rescan](#periodic-rescan) - filesystem monitoring
* [upload rules](#upload-rules) - set upload rules using volflags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [other flags](#other-flags)
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
@@ -167,6 +169,7 @@ feature summary
* ☑ [up2k](#uploading): js, resumable, multithreaded
* ☑ stash: simple PUT filedropper
* ☑ [unpost](#unpost): undo/delete accidental uploads
* ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
@@ -238,7 +241,6 @@ some improvement ideas
# bugs
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
@@ -259,7 +261,13 @@ some improvement ideas
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
* [Chrome issue 1354816](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816) -- chrome may eat all RAM uploading over plaintext http with `mt` enabled
* more amusingly, [Chrome issue 1354800](https://bugs.chromium.org/p/chromium/issues/detail?id=1354800) -- chrome may eat all RAM uploading in general (altho you probably won't run into this one)
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive and likely to run into the above gc bugs)
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- sometimes forgets to close filedescriptors during upload so the browser can crash after ~4000 files
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
@@ -274,6 +282,9 @@ some improvement ideas
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
* due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
# FAQ
@@ -322,12 +333,14 @@ examples:
* `u1` can upload files, browse the folder, and see the generated accesskeys
* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
anyone trying to bruteforce a password gets banned according to `--ban-pw`; default is 24h ban for 9 failed attempts in 1 hour
# the browser
accessing a copyparty server using a web-browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/129635359-d6dd9b07-8079-4020-ad77-2bfdb9ebd8d5.png)
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/192042695-522b3ec7-6845-494a-abdb-d1c0d0e23801.png)
## tabs
@@ -486,7 +499,7 @@ see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion
the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
@@ -534,6 +547,17 @@ undo/delete accidental uploads
you can unpost even if you don't have regular move/delete access, however only for files uploaded within the past `--unpost` seconds (default 12 hours) and the server must be running with `-e2d`
### self-destruct
uploads can be given a lifetime, after which they expire / self-destruct
the feature must be enabled per-volume with the `lifetime` [upload rule](#upload-rules) which sets the upper limit for how long a file gets to stay on the server
clients can specify a shorter expiration time using the [up2k ui](#uploading) -- the relevant options become visible upon navigating into a folder with `lifetimes` enabled -- or by using the `life` [upload modifier](#write)
specifying a custom expiration time client-side will affect the timespan in which unposts are permitted, so keep an eye on the estimates in the up2k ui
## file manager
cut/paste, rename, and delete files/folders (if you have permission)
@@ -691,6 +715,7 @@ note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
### exclude-patterns
@@ -767,6 +792,11 @@ some examples,
allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4`
## other flags
* `:c,magic` enables filetype detection for nameless uploads, same as `--magic`
## database location
in-volume (`.hist/up2k.db`, default) or somewhere else
@@ -806,12 +836,14 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
* avoids pulling any GPL code into copyparty
* more importantly runs FFprobe on incoming files which is bad if your FFmpeg has a cve
`--mtag-to` sets the tag-scan timeout; very high default (60 sec) to cater for zfs and other randomly-freezing filesystems. Lower values like 10 are usually safe, allowing for faster processing of tricky files
## file parser plugins
provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volflag), there is a default timeout of 30sec, and only files which contain audio get analyzed by default (see ay/an/ad below)
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volflag), there is a default timeout of 60sec, and only files which contain audio get analyzed by default (see ay/an/ad below)
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
@@ -822,8 +854,11 @@ copyparty can invoke external programs to collect additional metadata for files
* "audio file" also means videos btw, as long as there is an audio stream
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
you can control how the parser is killed if it times out with option `kt` killing the entire process tree (default), `km` just the main process, or `kn` let it continue running until copyparty is terminated
* if you want to daisychain parsers, use the `p` flag to set processing order
* `-mtp foo=p1,~/a.py` runs before `-mtp foo=p2,~/b.py` and will forward all the tags detected so far as json to the stdin of b.py
* option `c0` disables capturing of stdout/stderr, so copyparty will not receive any tags from the process at all -- instead the invoked program is free to print whatever to the console, just using copyparty as a launcher
* `c1` captures stdout only, `c2` only stderr, and `c3` (default) captures both
* you can control how the parser is killed if it times out with option `kt` killing the entire process tree (default), `km` just the main process, or `kn` let it continue running until copyparty is terminated
if something doesn't work, try `--mtag-v` for verbose error messages
@@ -842,7 +877,7 @@ that'll run the command `notify-send` with the path to the uploaded file as the
note that it will only trigger on new unique files, not dupes
and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
and it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
if this becomes popular maybe there should be a less janky way to do it actually
@@ -953,7 +988,9 @@ interact with copyparty using non-browser clients
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
* `post(){ curl -F act=bput -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
`post movie.mkv`
* `post(){ curl -b cppwd=wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
`post movie.mkv`
* `post(){ wget --header='Cookie: cppwd=wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
`post movie.mkv`
@@ -979,7 +1016,9 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv
you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
you can provide passwords using cookie `cppwd=hunter2`, as a url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)
NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
# up2k
@@ -997,7 +1036,9 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
up2k has saved a few uploads from becoming corrupted in-transfer already;
* caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
* also stopped someone from uploading because their ram was bad
regarding the frequent server log message during uploads;
`6.0M 106M/s 2.77G 102.9M/s n948 thank 4/0/3/1 10042/7198 00:01:09`
@@ -1033,6 +1074,7 @@ below are some tweaks roughly ordered by usefulness:
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `--no-htp --hash-mt=0 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
* huge amount of short-lived connections
* really heavy traffic (downloads/uploads)
@@ -1072,6 +1114,8 @@ some notes on hardening
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
* however note if you edit one file it will also affect the other copies
* `--vague-403` returns a "404 not found" instead of "403 forbidden" which is a common enterprise meme
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
* **NB:** will ban anyone who enables up2k turbo
* `--nih` removes the server hostname from directory listings
* option `-sss` is a shortcut for the above plus:
@@ -1172,7 +1216,17 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
server behavior of `msg` can be reconfigured with `--urlform`
upload modifiers:
| http-header | url-param | effect |
|--|--|--|
| `Accept: url` | `want=url` | return just the file URL |
| `Rand: 4` | `rand=4` | generate random filename with 4 characters |
| `Life: 30` | `life=30` | delete file after 30 seconds |
* `life` only has an effect if the volume has a lifetime, and the volume lifetime must be greater than the file's
* server behavior of `msg` can be reconfigured with `--urlform`
## admin

61
bin/mtag/guestbook-read.py Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python3
"""
fetch latest msg from guestbook and return as tag

example copyparty config to use this:
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=guestbook=t10,ad,p,bin/mtag/guestbook-read.py:mte=+guestbook

explained:
for realpath srv/hello (served at /hello), write-only for everyone,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/guestbook-read.py" to provide metadata tag "guestbook",
do this on all uploads regardless of extension,
t10 = 10 seconds timeout for each download,
ad = parse file regardless if FFmpeg thinks it is audio or not
p = request upload info as json on stdin (need ip)
mte=+guestbook enables indexing of that tag for this volume

PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""

import json
import os
import sqlite3
import sys

# set 0 to allow infinite msgs from one IP,
# other values delete older messages to make space,
# so 1 only keeps latest msg
# NOTE: only the writer (guestbook.py) enforces this; kept here for parity
NUM_MSGS_TO_KEEP = 1


def main() -> None:
    """Print the newest guestbook message stored for the uploader's IP."""
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)  # unused unless os.chdir(fdir) below is enabled

    # upload info arrives as json on stdin (mtp flag "p")
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)
    ip = md["up_ip"]

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)
    db = sqlite3.connect("guestbook.db3")
    try:
        with db:
            t = "select msg from gb where ip = ? order by ts desc"
            r = db.execute(t, (ip,)).fetchone()
            if r:
                # stdout becomes the "guestbook" tag value
                print(r[0])
    finally:
        # close explicitly instead of leaking the handle until interpreter exit
        db.close()


if __name__ == "__main__":
    main()

111
bin/mtag/guestbook.py Normal file
View File

@@ -0,0 +1,111 @@
#!/usr/bin/env python3
"""
store messages from users in an sqlite database
which can be read from another mtp for example

takes input from application/x-www-form-urlencoded POSTs,
for example using the message/pager function on the website

example copyparty config to use this:
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=xgb=ebin,t10,ad,p,bin/mtag/guestbook.py:mte=+xgb

explained:
for realpath srv/hello (served at /hello), write-only for everyone,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/guestbook.py" to provide metadata tag "xgb",
do this on all uploads with the file extension "bin",
t10 = 10 seconds timeout for each download,
ad = parse file regardless if FFmpeg thinks it is audio or not
p = request upload info as json on stdin
mte=+xgb enables indexing of that tag for this volume

PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""

import json
import os
import sqlite3
import sys

from urllib.parse import unquote_to_bytes as unquote

# set 0 to allow infinite msgs from one IP,
# other values delete older messages to make space,
# so 1 only keeps latest msg
NUM_MSGS_TO_KEEP = 1


def main() -> None:
    """Store the urlencoded `msg=` POST body (file at argv[1]) in guestbook.db3, keyed by uploader IP."""
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)  # unused unless os.chdir(fdir) below is enabled
    fname = os.path.basename(fp)
    if not fname.startswith("put-") or not fname.endswith(".bin"):
        raise Exception("not a post file")

    # upload info arrives as json on stdin (mtp flag "p")
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    # read the POST body; a guestbook msg should never exceed 4 KiB
    buf = b""
    with open(fp, "rb") as f:
        while True:
            b = f.read(4096)
            buf += b
            if len(buf) > 4096:
                raise Exception("too big")

            if not b:
                break

    if not buf:
        raise Exception("file is empty")

    # x-www-form-urlencoded: "+" means space, %xx is percent-encoding
    buf = unquote(buf.replace(b"+", b" "))
    txt = buf.decode("utf-8")

    if not txt.startswith("msg="):
        raise Exception("does not start with msg=")

    ip = md["up_ip"]
    ts = md["up_at"]
    txt = txt[4:]

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)
    db = sqlite3.connect("guestbook.db3")
    try:
        # bootstrap the schema on first run
        try:
            db.execute("select 1 from gb").fetchone()
        except Exception:
            with db:
                db.execute("create table gb (ip text, ts real, msg text)")
                db.execute("create index gb_ip on gb(ip)")

        with db:
            if NUM_MSGS_TO_KEEP == 1:
                # fast path: drop everything from this ip before inserting
                t = "delete from gb where ip = ?"
                db.execute(t, (ip,))

            t = "insert into gb values (?,?,?)"
            db.execute(t, (ip, ts, txt))

            if NUM_MSGS_TO_KEEP > 1:
                # keep only the N newest messages from this ip
                t = "select ts from gb where ip = ? order by ts desc"
                hits = db.execute(t, (ip,)).fetchall()

                if len(hits) > NUM_MSGS_TO_KEEP:
                    lim = hits[NUM_MSGS_TO_KEEP][0]
                    t = "delete from gb where ip = ? and ts <= ?"
                    db.execute(t, (ip, lim))
    finally:
        # close explicitly instead of leaking the handle until interpreter exit
        db.close()

    # stdout becomes the "xgb" tag value
    print(txt)


if __name__ == "__main__":
    main()

View File

@@ -16,7 +16,7 @@ goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,bin/mtag/very-bad-idea.py
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin

View File

@@ -2,6 +2,7 @@
import json
import re
import os
import sys
import subprocess as sp
@@ -36,14 +37,21 @@ FAST = True # parse entire file at container level
# warnings to ignore
harmless = re.compile("^Unsupported codec with id ")
harmless = re.compile(
r"Unsupported codec with id |Could not find codec parameters.*Attachment:|analyzeduration"
+ r"|timescale not set"
)
def wfilter(lines):
return [x for x in lines if not harmless.search(x)]
return [x for x in lines if x.strip() and not harmless.search(x)]
def errchk(so, se, rc):
def errchk(so, se, rc, dbg):
if dbg:
with open(dbg, "wb") as f:
f.write(b"so:\n" + so + b"\nse:\n" + se + b"\n")
if rc:
err = (so + se).decode("utf-8", "replace").split("\n", 1)
err = wfilter(err) or err
@@ -64,6 +72,11 @@ def main():
zs = zb.decode("utf-8", "replace")
md = json.loads(zs)
fdir = os.path.dirname(os.path.realpath(fp))
flag = os.path.join(fdir, ".processed")
if os.path.exists(flag):
return "already processed"
try:
w, h = [int(x) for x in md["res"].split("x")]
if not w + h:
@@ -87,11 +100,11 @@ def main():
with open(fsenc(f"{fp}.ff.json"), "wb") as f:
f.write(so)
err = errchk(so, se, p.returncode)
err = errchk(so, se, p.returncode, f"{fp}.vidchk")
if err:
return err
if min(w, h) < 1080:
if max(w, h) < 1280 and min(w, h) < 720:
return "resolution too small"
zs = (
@@ -111,7 +124,7 @@ def main():
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
so, se = p.communicate()
return errchk(so, se, p.returncode)
return errchk(so, se, p.returncode, f"{fp}.vidchk")
if __name__ == "__main__":

99
bin/unforget.py Executable file
View File

@@ -0,0 +1,99 @@
#!/usr/bin/env python3
"""
unforget.py: rebuild db from logfiles
2022-09-07, v0.1, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/unforget.py
only makes sense if running copyparty with --no-forget
(e.g. immediately shifting uploads to other storage)
usage:
xz -d < log | ./unforget.py .hist/up2k.db
"""
import re
import sys
import json
import base64
import sqlite3
import argparse
FS_ENCODING = sys.getfilesystemencoding()


class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    """argparse formatter combining default-values display with raw description text."""

    pass


# scratch in-memory cursor used only to probe whether a string
# can be bound as sqlite text (surrogateescape names cannot)
mem_cur = sqlite3.connect(":memory:").cursor()
mem_cur.execute(r"create table a (b text)")


def s3enc(rd: str, fn: str) -> tuple[str, str]:
    """Encode a (directory, filename) pair for storage in the up2k db.

    Values that sqlite can store as text pass through unchanged; values that
    cannot (e.g. containing surrogateescape bytes from the filesystem) are
    replaced by "//" + urlsafe-base64 of their original bytes.
    """
    ret: list[str] = []
    for v in (rd, fn):
        try:
            # binding succeeds iff the value is representable as sqlite text
            mem_cur.execute("select * from a where b = ?", (v,))
            ret.append(v)
        except Exception:
            # was a bare `except:`; Exception still covers both the
            # UnicodeEncodeError from binding surrogates and any sqlite3 error,
            # without swallowing KeyboardInterrupt/SystemExit
            wtf8 = v.encode(FS_ENCODING, "surrogateescape")
            ret.append("//" + base64.urlsafe_b64encode(wtf8).decode("ascii"))

    return ret[0], ret[1]
def main() -> None:
    """Rebuild up2k db rows from copyparty log lines read on stdin.

    Scans for completed-upload json lines ('"hash": []'), pairs each with the
    most recent upload-finished timestamp seen, and inserts any wark not
    already present into the `up` table of the db given as argv[1].
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("db")
    ar = ap.parse_args()

    db = sqlite3.connect(ar.db).cursor()
    ptn_times = re.compile(r"no more chunks, setting times \(([0-9]+)")
    at = 0
    ctr = 0

    for ln in [x.decode("utf-8", "replace").rstrip() for x in sys.stdin.buffer]:
        if "no more chunks, setting times (" in ln:
            m = ptn_times.search(ln)
            if m:
                # remember the completion time for the next json line
                at = int(m.group(1))

        if '"hash": []' in ln:
            try:
                ofs = ln.find("{")
                j = json.loads(ln[ofs:])
            except Exception:
                # BUGFIX: was `pass`, which fell through to `j["wark"]`
                # with `j` unbound and crashed on unparseable lines
                continue

            w = j["wark"]
            if db.execute("select w from up where w = ?", (w,)).fetchone():
                continue  # already in the db

            # example input and resulting row:
            # PYTHONPATH=/home/ed/dev/copyparty/ python3 -m copyparty -e2dsa -v foo:foo:rwmd,ed -aed:wark --no-forget
            # 05:34:43.845 127.0.0.1 42496 no more chunks, setting times (1662528883, 1658001882)
            # 05:34:43.863 127.0.0.1 42496 {"name": "f\"2", "purl": "/foo/bar/baz/", "size": 1674, "lmod": 1658001882, "sprs": true, "hash": [], "wark": "LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg"}
            # | w                                            | mt         | sz   | rd      | fn  | ip        | at         |
            # | LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg | 1658001882 | 1674 | bar/baz | f"2 | 127.0.0.1 | 1662528883 |

            rd, fn = s3enc(j["purl"].strip("/"), j["name"])
            # 2nd space-separated field is the client ip;
            # split("m")[-1] strips a leading ansi color code, presumably -- verify against log format
            ip = ln.split(" ")[1].split("m")[-1]

            q = "insert into up values (?,?,?,?,?,?,?)"
            v = (w, int(j["lmod"]), int(j["size"]), rd, fn, ip, at)
            db.execute(q, v)
            ctr += 1
            if ctr % 1024 == 1023:
                # periodic commit so a crash doesn't lose everything
                print(f"{ctr} commit...")
                db.connection.commit()

    if ctr:
        db.connection.commit()

    print(f"unforgot {ctr} files")


if __name__ == "__main__":
    main()

View File

@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals
"""
up2k.py: upload to copyparty
2022-08-13, v0.18, ed <irc.rizon.net>, MIT-Licensed
2022-09-05, v0.19, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests
@@ -35,20 +35,20 @@ except:
try:
import requests
except:
except ImportError:
if sys.version_info > (2, 7):
m = "\n ERROR: need 'requests'; run this:\n python -m pip install --user requests\n"
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
else:
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
print(m)
raise
print(m.format(sys.executable))
sys.exit(1)
# from copyparty/__init__.py
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
if PY2:
from Queue import Queue
from urllib import unquote
@@ -344,7 +344,7 @@ def _lsd(err, top):
err.append((abspath, str(ex)))
if hasattr(os, "scandir"):
if hasattr(os, "scandir") and sys.version_info > (3, 6):
statdir = _scd
else:
statdir = _lsd
@@ -929,7 +929,7 @@ def main():
if not VT100:
os.system("rem") # enables colors
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
# fmt: off

View File

@@ -14,6 +14,8 @@ function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { });
}
// ebi('op_up2k').appendChild(mknod('input','unick'));
function bstrpos(buf, ptn) {
var ofs = 0,
ch0 = ptn[0],
@@ -44,7 +46,11 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
md_only = [], // `${id} ${fn}` where ID was only found in metadata
mofs = 0,
mnchk = 0,
mfile = '';
mfile = '',
myid = localStorage.getItem('ytid_t0');
if (!myid)
localStorage.setItem('ytid_t0', myid = Date.now());
for (var a = 0; a < good_files.length; a++) {
var [fobj, name] = good_files[a],
@@ -87,6 +93,8 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
aspan = id_ok ? 128 : 512; // MiB
aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz;
if (!aspan)
aspan = Math.min(sz, chunksz);
for (var side = 0; side < 2; side++) {
var ofs = side ? Math.max(0, sz - aspan) : 0,
@@ -161,6 +169,16 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
return hooks[0]([], [], [], hooks.slice(1));
}
var el = ebi('unick'), unick = el ? el.value : '';
if (unick) {
console.log(`sending uploader nickname [${unick}]`);
fetch(document.location, {
method: 'POST',
headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' },
body: 'msg=' + encodeURIComponent(unick)
});
}
toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`);
var xhr = new XHR();
@@ -177,20 +195,29 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
function process_id_list(txt) {
var wanted_ids = new Set(txt.trim().split('\n')),
name_id = {},
wanted_names = new Set(), // basenames with a wanted ID
wanted_names = new Set(), // basenames with a wanted ID -- not including relpath
wanted_names_scoped = {}, // basenames with a wanted ID -> list of dirs to search under
wanted_files = new Set(); // filedrops
for (var a = 0; a < good_files.length; a++) {
var name = good_files[a][1];
for (var b = 0; b < file_ids[a].length; b++)
if (wanted_ids.has(file_ids[a][b])) {
wanted_files.add(good_files[a]);
// let the next stage handle this to prevent dupes
//wanted_files.add(good_files[a]);
var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
if (!m)
continue;
wanted_names.add(m[1]);
var [rd, fn] = vsplit(m[1]);
if (fn in wanted_names_scoped)
wanted_names_scoped[fn].push(rd);
else
wanted_names_scoped[fn] = [rd];
wanted_names.add(fn);
name_id[m[1]] = file_ids[a][b];
break;
@@ -200,16 +227,35 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
// add all files with the same basename as each explicitly wanted file
// (infojson/chatlog/etc when ID was discovered from metadata)
for (var a = 0; a < good_files.length; a++) {
var name = good_files[a][1];
var [rd, name] = vsplit(good_files[a][1]);
for (var b = 0; b < 3; b++) {
name = name.replace(/\.[^\.]+$/, '');
if (wanted_names.has(name)) {
wanted_files.add(good_files[a]);
if (!wanted_names.has(name))
continue;
var subdir = `${name_id[name]}-${Date.now()}-${a}`;
good_files[a][1] = subdir + '/' + good_files[a][1].split(/\//g).pop();
var vid_fp = false;
for (var c of wanted_names_scoped[name])
if (rd.startsWith(c))
vid_fp = c + name;
if (!vid_fp)
continue;
var subdir = name_id[vid_fp];
subdir = `v${subdir.slice(0, 1)}/${subdir}-${myid}`;
var newpath = subdir + '/' + good_files[a][1].split(/\//g).pop();
// check if this file is a dupe
for (var c of good_files)
if (c[1] == newpath)
newpath = null;
if (!newpath)
break;
}
good_files[a][1] = newpath;
wanted_files.add(good_files[a]);
break;
}
}
@@ -237,3 +283,15 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
up2k_hooks.push(function () {
up2k.gotallfiles.unshift(up2k_namefilter);
});
// persist/restore nickname field if present
setInterval(function () {
var o = ebi('unick');
if (!o || document.activeElement == o)
return;
o.oninput = function () {
localStorage.setItem('unick', o.value);
};
o.value = localStorage.getItem('unick') || '';
}, 1000);

View File

@@ -7,13 +7,11 @@ import sys
import time
try:
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
except:
TYPE_CHECKING = False
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
if PY2:
sys.dont_write_bytecode = True
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
@@ -33,57 +31,18 @@ ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
MACOS = platform.system() == "Darwin"
def get_unixdir() -> str:
paths: list[tuple[Callable[..., str], str]] = [
(os.environ.get, "XDG_CONFIG_HOME"),
(os.path.expanduser, "~/.config"),
(os.environ.get, "TMPDIR"),
(os.environ.get, "TEMP"),
(os.environ.get, "TMP"),
(unicode, "/tmp"),
]
for chk in [os.listdir, os.mkdir]:
for pf, pa in paths:
try:
p = pf(pa)
# print(chk.__name__, p, pa)
if not p or p.startswith("~"):
continue
p = os.path.normpath(p)
chk(p) # type: ignore
p = os.path.join(p, "copyparty")
if not os.path.isdir(p):
os.mkdir(p)
return p
except:
pass
raise Exception("could not find a writable path for config")
try:
CORES = len(os.sched_getaffinity(0))
except:
CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
class EnvParams(object):
def __init__(self) -> None:
self.t0 = time.time()
self.mod = os.path.dirname(os.path.realpath(__file__))
if self.mod.endswith("__init__"):
self.mod = os.path.dirname(self.mod)
if sys.platform == "win32":
self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else:
self.cfg = get_unixdir()
self.cfg = self.cfg.replace("\\", "/")
try:
os.makedirs(self.cfg)
except:
if not os.path.isdir(self.cfg):
raise
self.mod = None
self.cfg = None
self.ox = getattr(sys, "oxidized", None)
E = EnvParams()

123
copyparty/__main__.py Normal file → Executable file
View File

@@ -20,7 +20,7 @@ import time
import traceback
from textwrap import dedent
from .__init__ import ANYWIN, PY2, VT100, WINDOWS, E, unicode
from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .authsrv import re_vol
from .svchub import SvcHub
@@ -38,6 +38,7 @@ from .util import (
)
try:
from collections.abc import Callable
from types import FrameType
from typing import Any, Optional
@@ -131,6 +132,79 @@ def warn(msg: str) -> None:
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def init_E(E: EnvParams) -> None:
# __init__ runs 18 times when oxidized; do expensive stuff here
def get_unixdir() -> str:
paths: list[tuple[Callable[..., str], str]] = [
(os.environ.get, "XDG_CONFIG_HOME"),
(os.path.expanduser, "~/.config"),
(os.environ.get, "TMPDIR"),
(os.environ.get, "TEMP"),
(os.environ.get, "TMP"),
(unicode, "/tmp"),
]
for chk in [os.listdir, os.mkdir]:
for pf, pa in paths:
try:
p = pf(pa)
# print(chk.__name__, p, pa)
if not p or p.startswith("~"):
continue
p = os.path.normpath(p)
chk(p) # type: ignore
p = os.path.join(p, "copyparty")
if not os.path.isdir(p):
os.mkdir(p)
return p
except:
pass
raise Exception("could not find a writable path for config")
def _unpack() -> str:
import atexit
import tarfile
import tempfile
from importlib.resources import open_binary
td = tempfile.TemporaryDirectory(prefix="")
atexit.register(td.cleanup)
tdn = td.name
with open_binary("copyparty", "z.tar") as tgz:
with tarfile.open(fileobj=tgz) as tf:
tf.extractall(tdn)
return tdn
try:
E.mod = os.path.dirname(os.path.realpath(__file__))
if E.mod.endswith("__init__"):
E.mod = os.path.dirname(E.mod)
except:
if not E.ox:
raise
E.mod = _unpack()
if sys.platform == "win32":
E.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
elif sys.platform == "darwin":
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else:
E.cfg = get_unixdir()
E.cfg = E.cfg.replace("\\", "/")
try:
os.makedirs(E.cfg)
except:
if not os.path.isdir(E.cfg):
raise
def ensure_locale() -> None:
for x in [
"en_US.UTF-8",
@@ -323,6 +397,16 @@ def disable_quickedit() -> None:
cmode(True, mode | 4)
def showlic() -> None:
p = os.path.join(E.mod, "res", "COPYING.txt")
if not os.path.exists(p):
print("no relevant license info to display")
return
with open(p, "rb") as f:
print(f.read().decode("utf-8", "replace"))
def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Namespace:
ap = argparse.ArgumentParser(
formatter_class=formatter,
@@ -335,8 +419,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
except:
fk_salt = "hunter2"
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 4
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
hcores = min(CORES, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
sects = [
[
@@ -388,6 +471,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
@@ -414,6 +498,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mnoforget$\033[35m don't forget files when deleted from disk
\033[36mxdev\033[35m do not descend into other filesystems
\033[36mxvol\033[35m skip symlinks leaving the volume root
@@ -488,9 +573,12 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
ap2.add_argument("--version", action="store_true", help="show versions and exit")
ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
@@ -498,10 +586,12 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads")
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; s=smallest-first, n=alphabetical, fs=force-s, fn=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -538,7 +628,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
@@ -551,6 +641,8 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity (0.0028=10sec, 0.1=6min, 24=day, 168=week, 720=month, 8760=year)")
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than N wrong passwords in W minutes = ban for B minutes (disable with \"no\")")
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than N 404's in W minutes = ban for B minutes (disabled by default since turbo-up2k counts as 404s)")
ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -576,7 +668,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
@@ -612,12 +704,13 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice")
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...)")
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root")
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
ap2 = ap.add_argument_group('metadata db options')
@@ -626,8 +719,10 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for ffprobe tag-scan")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
@@ -652,9 +747,12 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=./st/%%Y-%%m/%%d/%%H%%M.xz,60")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; '.' (a single dot) = all files")
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
# fmt: on
ap2 = ap.add_argument_group("help sections")
@@ -677,6 +775,7 @@ def main(argv: Optional[list[str]] = None) -> None:
if WINDOWS:
os.system("rem") # enables colors
init_E(E)
if argv is None:
argv = sys.argv
@@ -692,6 +791,13 @@ def main(argv: Optional[list[str]] = None) -> None:
)
lprint(f)
if "--version" in argv:
sys.exit(0)
if "--license" in argv:
showlic()
sys.exit(0)
ensure_locale()
if HAVE_SSL:
ensure_cert()
@@ -730,6 +836,7 @@ def main(argv: Optional[list[str]] = None) -> None:
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
assert al
al.E = E # __init__ is not shared when oxidized
if WINDOWS and not al.keep_qem:
try:

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 3, 16)
CODENAME = "god dag"
BUILD_DT = (2022, 8, 18)
VERSION = (1, 4, 3)
CODENAME = "mostly reliable"
BUILD_DT = (2022, 9, 26)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -1071,6 +1071,10 @@ class AuthSrv(object):
if getattr(self.args, k):
vol.flags[k] = True
for ga, vf in [["no_forget", "noforget"], ["magic", "magic"]]:
if getattr(self.args, ga):
vol.flags[vf] = True
for k1, k2 in IMPLICATIONS:
if k1 in vol.flags:
vol.flags[k2] = True
@@ -1117,6 +1121,16 @@ class AuthSrv(object):
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
ints = ["lifetime"]
for k in list(vol.flags):
if k in ints:
vol.flags[k] = int(vol.flags[k])
if "lifetime" in vol.flags and "e2d" not in vol.flags:
t = 'removing lifetime config from volume "/{}" because e2d is disabled'
self.log(t.format(vol.vpath), 1)
del vol.flags["lifetime"]
# verify tags mentioned by -mt[mp] are used by -mte
local_mtp = {}
local_only_mtp = {}

View File

@@ -6,7 +6,7 @@ import time
import queue
from .__init__ import TYPE_CHECKING
from .__init__ import CORES, TYPE_CHECKING
from .broker_mpw import MpWorker
from .broker_util import try_exec
from .util import mp
@@ -44,7 +44,7 @@ class BrokerMp(object):
self.procs = []
self.mutex = threading.Lock()
self.num_workers = self.args.j or mp.cpu_count()
self.num_workers = self.args.j or CORES
self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1):
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)

View File

@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import argparse
import os
import signal
import sys
import threading
@@ -11,7 +12,7 @@ import queue
from .authsrv import AuthSrv
from .broker_util import BrokerCli, ExceptionalQueue
from .httpsrv import HttpSrv
from .util import FAKE_MP
from .util import FAKE_MP, HMaccas
try:
from types import FrameType
@@ -54,6 +55,7 @@ class MpWorker(BrokerCli):
self.asrv = AuthSrv(args, None, False)
# instantiate all services here (TODO: inheritance?)
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
self.httpsrv = HttpSrv(self, n)
# on winxp and some other platforms,

View File

@@ -1,11 +1,13 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import threading
from .__init__ import TYPE_CHECKING
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
from .httpsrv import HttpSrv
from .util import HMaccas
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -31,6 +33,7 @@ class BrokerThr(BrokerCli):
self.num_workers = 1
# instantiate all services here (TODO: inheritance?)
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
self.httpsrv = HttpSrv(self, None)
self.reload = self.noop

View File

@@ -8,7 +8,7 @@ from queue import Queue
from .__init__ import TYPE_CHECKING
from .authsrv import AuthSrv
from .util import Pebkac
from .util import HMaccas, Pebkac
try:
from typing import Any, Optional, Union
@@ -46,6 +46,7 @@ class BrokerCli(object):
self.args: argparse.Namespace = None
self.asrv: AuthSrv = None
self.httpsrv: "HttpSrv" = None
self.iphash: HMaccas = None
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
return ExceptionalQueue(1)

View File

@@ -56,7 +56,9 @@ class FtpAuth(DummyAuthorizer):
handler.username = uname
if password and not uname:
if (password and not uname) or not (
asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)
):
raise AuthenticationFailed("Authentication failed.")
def get_home_dir(self, username: str) -> str:
@@ -356,7 +358,7 @@ class Ftpd(object):
print(t.format(sys.executable))
sys.exit(1)
h1.certfile = os.path.join(E.cfg, "cert.pem")
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
h1.tls_control_required = True
h1.tls_data_required = True

View File

@@ -24,7 +24,12 @@ try:
except:
pass
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E, unicode
try:
from ipaddress import IPv6Address
except:
pass
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, EnvParams, unicode
from .authsrv import VFS # typechk
from .bos import bos
from .star import StreamTar
@@ -69,6 +74,7 @@ from .util import (
unescape_cookie,
unquote,
unquotep,
vjoin,
vol_san,
vsplit,
yieldfile,
@@ -76,6 +82,7 @@ from .util import (
try:
from typing import Any, Generator, Match, Optional, Pattern, Type, Union
import typing
except:
pass
@@ -103,12 +110,14 @@ class HttpCli(object):
self.ip = conn.addr[0]
self.addr: tuple[str, int] = conn.addr
self.args = conn.args # mypy404
self.E: EnvParams = self.args.E
self.asrv = conn.asrv # mypy404
self.ico = conn.ico # mypy404
self.thumbcli = conn.thumbcli # mypy404
self.u2fh = conn.u2fh # mypy404
self.log_func = conn.log_func # mypy404
self.log_src = conn.log_src # mypy404
self.bans = conn.hsrv.bans
self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey
self.tls: bool = hasattr(self.s, "cipher")
@@ -122,7 +131,6 @@ class HttpCli(object):
self.ua = " "
self.is_rclone = False
self.is_ancient = False
self.dip = " "
self.ouparam: dict[str, str] = {}
self.uparam: dict[str, str] = {}
self.cookies: dict[str, str] = {}
@@ -263,7 +271,20 @@ class HttpCli(object):
self.log_src = self.conn.set_rproxy(self.ip)
self.dip = self.ip.replace(":", ".")
if self.bans:
ip = self.ip
if ":" in ip and not PY2:
ip = IPv6Address(ip).exploded[:-20]
if ip in self.bans:
ban = self.bans[ip] - time.time()
if ban < 0:
self.log("client unbanned", 3)
del self.bans[ip]
else:
self.log("banned for {:.0f} sec".format(ban), 6)
self.reply(b"thank you for playing", 403)
return False
if self.args.ihead:
keys = self.args.ihead
@@ -402,13 +423,19 @@ class HttpCli(object):
except Pebkac:
return False
def dip(self) -> str:
if self.args.plain_ip:
return self.ip.replace(":", ".")
else:
return self.conn.iphash.s(self.ip)
def permit_caching(self) -> None:
cache = self.uparam.get("cache")
if cache is None:
self.out_headers.update(NO_CACHE)
return
n = "604800" if cache == "i" else cache or "69"
n = "604869" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n
def k304(self) -> bool:
@@ -461,7 +488,13 @@ class HttpCli(object):
headers: Optional[dict[str, str]] = None,
volsan: bool = False,
) -> bytes:
# TODO something to reply with user-supplied values safely
if status == 404:
g = self.conn.hsrv.g404
if g.lim:
bonk, ip = g.bonk(self.ip, self.vpath)
if bonk:
self.log("client banned: 404s", 1)
self.conn.hsrv.bans[ip] = bonk
if volsan:
vols = list(self.asrv.vfs.all_vols.values())
@@ -553,7 +586,7 @@ class HttpCli(object):
if self.vpath.startswith(".cpr/ico/"):
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
static_path = os.path.join(self.E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
if "cf_challenge" in self.uparam:
@@ -678,7 +711,13 @@ class HttpCli(object):
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()
for buf in reader:
buf = b""
for rbuf in reader:
buf += rbuf
if not rbuf or len(buf) >= 32768:
break
if buf:
orig = buf.decode("utf-8", "replace")
t = "urlform_raw {} @ {}\n {}\n"
self.log(t.format(len(orig), self.vpath, orig))
@@ -715,6 +754,7 @@ class HttpCli(object):
# post_sz, sha_hex, sha_b64, remains, path, url
reader, remains = self.get_body_reader()
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
rnd, want_url, lifetime = self.upload_flags(vfs)
lim = vfs.get_dbv(rem)[0].lim
fdir = vfs.canonical(rem)
if lim:
@@ -777,16 +817,22 @@ class HttpCli(object):
else:
self.log("fallthrough? thats a bug", 1)
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
suffix = "-{:.6f}-{}".format(time.time(), self.dip())
nameless = not fn
if nameless:
suffix += ".bin"
fn = "put" + suffix
params = {"suffix": suffix, "fdir": fdir}
if self.args.nw:
params = {}
fn = os.devnull
params.update(open_ka)
assert fn
if not fn:
fn = "put" + suffix
if rnd and not self.args.nw:
fn = self.rand_name(fdir, fn, rnd)
with ren_open(fn, *open_a, **params) as zfw:
f, fn = zfw["orz"]
@@ -805,6 +851,28 @@ class HttpCli(object):
if self.args.nw:
return post_sz, sha_hex, sha_b64, remains, path, ""
if nameless and "magic" in vfs.flags:
try:
ext = self.conn.hsrv.magician.ext(path)
except Exception as ex:
self.log("filetype detection failed for [{}]: {}".format(path, ex), 6)
ext = None
if ext:
if rnd:
fn2 = self.rand_name(fdir, "a." + ext, rnd)
else:
fn2 = fn.rsplit(".", 1)[0] + "." + ext
params["suffix"] = suffix[:-4]
with ren_open(fn, *open_a, **params) as zfw:
f, fn = zfw["orz"]
path2 = os.path.join(fdir, fn2)
atomic_move(path, path2)
fn = fn2
path = path2
vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.say(
"up2k.hash_file",
@@ -813,7 +881,7 @@ class HttpCli(object):
rem,
fn,
self.ip,
time.time(),
time.time() - lifetime,
)
vsuf = ""
@@ -841,10 +909,71 @@ class HttpCli(object):
spd = self._spd(post_sz)
t = "{} wrote {}/{} bytes to {} # {}"
self.log(t.format(spd, post_sz, remains, path, sha_b64[:28])) # 21
t = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
ac = self.uparam.get(
"want", self.headers.get("accept", "").lower().split(";")[-1]
)
if ac == "url":
t = url
else:
t = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
self.reply(t.encode("utf-8"))
return True
def bakflip(self, f: typing.BinaryIO, ofs: int, sz: int, sha: str) -> None:
if not self.args.bak_flips or self.args.nw:
return
sdir = self.args.bf_dir
fp = os.path.join(sdir, sha)
if bos.path.exists(fp):
return self.log("no bakflip; have it", 6)
if not bos.path.isdir(sdir):
bos.makedirs(sdir)
if len(bos.listdir(sdir)) >= self.args.bf_nc:
return self.log("no bakflip; too many", 3)
nrem = sz
f.seek(ofs)
with open(fp, "wb") as fo:
while nrem:
buf = f.read(min(nrem, 512 * 1024))
if not buf:
break
nrem -= len(buf)
fo.write(buf)
if nrem:
self.log("bakflip truncated; {} remains".format(nrem), 1)
atomic_move(fp, fp + ".trunc")
else:
self.log("bakflip ok", 2)
def rand_name(self, fdir: str, fn: str, rnd: int) -> str:
ok = False
try:
ext = "." + fn.rsplit(".", 1)[1]
except:
ext = ""
for extra in range(16):
for _ in range(16):
if ok:
break
nc = rnd + extra
nb = int((6 + 6 * nc) / 8)
zb = os.urandom(nb)
zb = base64.urlsafe_b64encode(zb)
fn = zb[:nc].decode("utf-8") + ext
ok = not bos.path.exists(os.path.join(fdir, fn))
return fn
def _spd(self, nbytes: int, add: bool = True) -> str:
if add:
self.conn.nbyte += nbytes
@@ -1081,6 +1210,11 @@ class HttpCli(object):
post_sz, _, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
if sha_b64 != chash:
try:
self.bakflip(f, cstart[0], post_sz, sha_b64)
except:
self.log("bakflip failed: " + min_ex())
t = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
raise Pebkac(400, t.format(post_sz, chash, sha_b64))
@@ -1168,6 +1302,14 @@ class HttpCli(object):
msg = "login ok"
dur = int(60 * 60 * self.args.logout)
else:
self.log("invalid password: {}".format(pwd), 3)
g = self.conn.hsrv.gpwd
if g.lim:
bonk, ip = g.bonk(self.ip, pwd)
if bonk:
self.log("client banned: invalid passwords", 1)
self.conn.hsrv.bans[ip] = bonk
msg = "naw dude"
pwd = "x" # nosec
dur = None
@@ -1242,6 +1384,22 @@ class HttpCli(object):
self.redirect(vpath, "?edit")
return True
def upload_flags(self, vfs: VFS) -> tuple[int, bool, int]:
srnd = self.uparam.get("rand", self.headers.get("rand", ""))
rnd = int(srnd) if srnd and not self.args.nw else 0
ac = self.uparam.get(
"want", self.headers.get("accept", "").lower().split(";")[-1]
)
want_url = ac == "url"
zs = self.uparam.get("life", self.headers.get("life", ""))
if zs:
vlife = vfs.flags.get("lifetime") or 0
lifetime = max(0, int(vlife - int(zs)))
else:
lifetime = 0
return rnd, want_url, lifetime
def handle_plain_upload(self) -> bool:
assert self.parser
nullwrite = self.args.nw
@@ -1257,9 +1415,12 @@ class HttpCli(object):
if not nullwrite:
bos.makedirs(fdir_base)
rnd, want_url, lifetime = self.upload_flags(vfs)
files: list[tuple[int, str, str, str, str, str]] = []
# sz, sha_hex, sha_b64, p_file, fname, abspath
errmsg = ""
dip = self.dip()
t0 = time.time()
try:
assert self.parser.gen
@@ -1273,10 +1434,13 @@ class HttpCli(object):
p_file or "", "", [".prologue.html", ".epilogue.html"]
)
if p_file and not nullwrite:
if rnd:
fname = self.rand_name(fdir, fname, rnd)
if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist")
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
suffix = "-{:.6f}-{}".format(time.time(), dip)
open_args = {"fdir": fdir, "suffix": suffix}
# reserve destination filename
@@ -1343,7 +1507,7 @@ class HttpCli(object):
vrem,
fname,
self.ip,
time.time(),
time.time() - lifetime,
)
self.conn.nbyte += sz
@@ -1419,21 +1583,31 @@ class HttpCli(object):
vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg))
if not nullwrite:
log_fn = "up.{:.6f}.txt".format(t0)
with open(log_fn, "wb") as f:
ft = "{}:{}".format(self.ip, self.addr[1])
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8"))
suf = ""
if not nullwrite and self.args.write_uplog:
try:
log_fn = "up.{:.6f}.txt".format(t0)
with open(log_fn, "wb") as f:
ft = "{}:{}".format(self.ip, self.addr[1])
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8"))
except Exception as ex:
suf = "\nfailed to write the upload report: {}".format(ex)
sc = 400 if errmsg else 200
if "j" in self.uparam:
if want_url:
msg = "\n".join([x["url"] for x in jmsg["files"]])
if errmsg:
msg += "\n" + errmsg
self.reply(msg.encode("utf-8", "replace"), status=sc)
elif "j" in self.uparam:
jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
self.reply(jtxt, mime="application/json", status=sc)
else:
self.redirect(
self.vpath,
msg=msg,
msg=msg + suf,
flavor="return to",
click=False,
status=sc,
@@ -1713,7 +1887,7 @@ class HttpCli(object):
# send reply
if is_compressed:
self.out_headers["Cache-Control"] = "max-age=573"
self.out_headers["Cache-Control"] = "max-age=604869"
else:
self.permit_caching()
@@ -1836,9 +2010,11 @@ class HttpCli(object):
if len(ext) > 11:
ext = "" + ext[-9:]
mime, ico = self.ico.get(ext, not exact)
# chrome cannot handle more than ~2000 unique SVGs
chrome = " rv:" not in self.ua
mime, ico = self.ico.get(ext, not exact, chrome)
dt = datetime.utcfromtimestamp(E.t0)
dt = datetime.utcfromtimestamp(self.E.t0)
lm = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
return True
@@ -1851,7 +2027,7 @@ class HttpCli(object):
return self.tx_404(True)
tpl = "mde" if "edit2" in self.uparam else "md"
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
html_path = os.path.join(self.E.mod, "web", "{}.html".format(tpl))
template = self.j2j(tpl)
st = bos.stat(fs_path)
@@ -1866,7 +2042,7 @@ class HttpCli(object):
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
file_ts = max(ts_md, ts_html, E.t0)
file_ts = max(ts_md, ts_html, self.E.t0)
file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod
self.out_headers.update(NO_CACHE)
@@ -2318,8 +2494,9 @@ class HttpCli(object):
if not is_dir and (self.can_read or self.can_get):
if not self.can_read and "fk" in vn.flags:
vabs = vjoin(vn.realpath, rem)
correct = self.gen_fk(
self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
self.args.fk_salt, vabs, st.st_size, 0 if ANYWIN else st.st_ino
)[: vn.flags["fk"]]
got = self.uparam.get("k")
if got != correct:
@@ -2398,6 +2575,7 @@ class HttpCli(object):
"srvinf": srv_infot,
"acct": self.uname,
"idx": ("e2d" in vn.flags),
"lifetime": vn.flags.get("lifetime") or 0,
"perms": perms,
"logues": logues,
"readme": readme,
@@ -2409,6 +2587,7 @@ class HttpCli(object):
"ls0": None,
"acct": self.uname,
"perms": json.dumps(perms),
"lifetime": ls_ret["lifetime"],
"taglist": [],
"def_hcols": [],
"have_emp": self.args.emp,
@@ -2418,7 +2597,7 @@ class HttpCli(object):
"have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip),
"have_unpost": (self.args.unpost > 0),
"have_unpost": int(self.args.unpost),
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
"url_suf": url_suf,
"logues": logues,
@@ -2570,43 +2749,28 @@ class HttpCli(object):
rd = fe["rd"]
del fe["rd"]
if not icur:
break
continue
if vn != dbv:
_, rd = vn.get_dbv(rd)
q = "select w from up where rd = ? and fn = ?"
r = None
q = "select mt.k, mt.v from up inner join mt on mt.w = substr(up.w,1,16) where up.rd = ? and up.fn = ? and +mt.k != 'x'"
try:
r = icur.execute(q, (rd, fn)).fetchone()
r = icur.execute(q, (rd, fn))
except Exception as ex:
if "database is locked" in str(ex):
break
try:
args = s3enc(idx.mem_cur, rd, fn)
r = icur.execute(q, args).fetchone()
r = icur.execute(q, args)
except:
t = "tag list error, {}/{}\n{}"
t = "tag read error, {}/{}\n{}"
self.log(t.format(rd, fn, min_ex()))
break
tags: dict[str, Any] = {}
fe["tags"] = tags
if not r:
continue
w = r[0][:16]
q = "select k, v from mt where w = ? and +k != 'x'"
try:
for k, v in icur.execute(q, (w,)):
tagset.add(k)
tags[k] = v
except:
t = "tag read error, {}/{} [{}]:\n{}"
self.log(t.format(rd, fn, w, min_ex()))
break
fe["tags"] = {k: v for k, v in r}
_ = [tagset.add(k) for k in fe["tags"]]
if icur:
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in tagset]

View File

@@ -15,7 +15,7 @@ except:
HAVE_SSL = False
from . import util as Util
from .__init__ import TYPE_CHECKING, E
from .__init__ import TYPE_CHECKING, EnvParams
from .authsrv import AuthSrv # typechk
from .httpcli import HttpCli
from .ico import Ico
@@ -23,6 +23,7 @@ from .mtag import HAVE_FFMPEG
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL, HAVE_VIPS
from .u2idx import U2idx
from .util import HMaccas, shut_socket
try:
from typing import Optional, Pattern, Union
@@ -49,9 +50,11 @@ class HttpConn(object):
self.mutex: threading.Lock = hsrv.mutex # mypy404
self.args: argparse.Namespace = hsrv.args # mypy404
self.E: EnvParams = self.args.E
self.asrv: AuthSrv = hsrv.asrv # mypy404
self.cert_path = hsrv.cert_path
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
self.iphash: HMaccas = hsrv.broker.iphash
enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb
self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None # mypy404
@@ -72,8 +75,7 @@ class HttpConn(object):
def shutdown(self) -> None:
self.stopping = True
try:
self.s.shutdown(socket.SHUT_RDWR)
self.s.close()
shut_socket(self.log, self.s, 1)
except:
pass
@@ -91,7 +93,7 @@ class HttpConn(object):
return self.log_src
def respath(self, res_name: str) -> str:
return os.path.join(E.mod, "web", res_name)
return os.path.join(self.E.mod, "web", res_name)
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func(self.log_src, msg, c)
@@ -189,11 +191,7 @@ class HttpConn(object):
except Exception as ex:
em = str(ex)
if "ALERT_BAD_CERTIFICATE" in em:
# firefox-linux if there is no exception yet
self.log("client rejected our certificate (nice)")
elif "ALERT_CERTIFICATE_UNKNOWN" in em:
if "ALERT_CERTIFICATE_UNKNOWN" in em:
# android-chrome keeps doing this
pass

View File

@@ -28,10 +28,19 @@ except ImportError:
)
sys.exit(1)
from .__init__ import MACOS, TYPE_CHECKING, E
from .__init__ import MACOS, TYPE_CHECKING, EnvParams
from .bos import bos
from .httpconn import HttpConn
from .util import FHC, min_ex, spack, start_log_thrs, start_stackmon
from .util import (
FHC,
Garda,
Magician,
min_ex,
shut_socket,
spack,
start_log_thrs,
start_stackmon,
)
if TYPE_CHECKING:
from .broker_util import BrokerCli
@@ -52,10 +61,18 @@ class HttpSrv(object):
self.broker = broker
self.nid = nid
self.args = broker.args
self.E: EnvParams = self.args.E
self.log = broker.log
self.asrv = broker.asrv
# redefine in case of multiprocessing
socket.setdefaulttimeout(120)
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.magician = Magician()
self.bans: dict[str, int] = {}
self.gpwd = Garda(self.args.ban_pw)
self.g404 = Garda(self.args.ban_404)
self.name = "hsrv" + nsuf
self.mutex = threading.Lock()
@@ -78,14 +95,15 @@ class HttpSrv(object):
self.cb_v = ""
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
self.j2 = {
x: env.get_template(x + ".html")
for x in ["splash", "browser", "browser2", "msg", "md", "mde", "cf"]
}
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
self.prism = os.path.exists(zs)
cert_path = os.path.join(E.cfg, "cert.pem")
cert_path = os.path.join(self.E.cfg, "cert.pem")
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
@@ -150,6 +168,12 @@ class HttpSrv(object):
return
def listen(self, sck: socket.socket, nlisteners: int) -> None:
if self.args.j != 1:
# lost in the pickle; redefine
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sck.settimeout(None) # < does not inherit, ^ does
ip, port = sck.getsockname()
self.srvs.append(sck)
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
@@ -275,12 +299,12 @@ class HttpSrv(object):
except:
pass
thrs = []
clients = list(self.clients)
for cli in clients:
try:
cli.shutdown()
except:
pass
t = threading.Thread(target=cli.shutdown)
thrs.append(t)
t.start()
if self.tp_q:
self.stop_threads(self.tp_nthr)
@@ -289,12 +313,13 @@ class HttpSrv(object):
if self.tp_q.empty():
break
for t in thrs:
t.join()
self.log(self.name, "ok bye")
def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None:
"""thread managing one tcp client"""
sck.settimeout(120)
cli = HttpConn(sck, addr, self)
with self.mutex:
self.clients.add(cli)
@@ -321,8 +346,7 @@ class HttpSrv(object):
try:
fno = sck.fileno()
sck.shutdown(socket.SHUT_RDWR)
sck.close()
shut_socket(cli.log, sck)
except (OSError, socket.error) as ex:
if not MACOS:
self.log(
@@ -351,9 +375,9 @@ class HttpSrv(object):
if time.time() - self.cb_ts < 1:
return self.cb_v
v = E.t0
v = self.E.t0
try:
with os.scandir(os.path.join(E.mod, "web")) as dh:
with os.scandir(os.path.join(self.E.mod, "web")) as dh:
for fh in dh:
inf = fh.stat()
v = max(v, inf.st_mtime)

View File

@@ -6,13 +6,15 @@ import colorsys
import hashlib
from .__init__ import PY2
from .th_srv import HAVE_PIL
from .util import BytesIO
class Ico(object):
def __init__(self, args: argparse.Namespace) -> None:
self.args = args
def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]:
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break"""
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
@@ -24,10 +26,44 @@ class Ico(object):
ci = [int(x * 255) for x in list(c1) + list(c2)]
c = "".join(["{:02x}".format(x) for x in ci])
w = 100
h = 30
if not self.args.th_no_crop and as_thumb:
w, h = self.args.th_size.split("x")
h = int(100 / (float(w) / float(h)))
sw, sh = self.args.th_size.split("x")
h = int(100 / (float(sw) / float(sh)))
w = 100
if chrome and as_thumb:
# cannot handle more than ~2000 unique SVGs
if HAVE_PIL:
# svg: 3s, cache: 6s, this: 8s
from PIL import Image, ImageDraw
h = int(64 * h / w)
w = 64
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)
tw, th = pb.textsize(ext)
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
img = img.resize((w * 3, h * 3), Image.NEAREST)
buf = BytesIO()
img.save(buf, format="PNG", compress_level=1)
return "image/png", buf.getvalue()
elif False:
# 48s, too slow
import pyvips
h = int(192 * h / w)
w = 192
img = pyvips.Image.text(
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
)
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
buf = img.write_to_buffer(".png[compression=1]")
return "image/png", buf
svg = """\
<?xml version="1.0" encoding="UTF-8"?>

View File

@@ -8,7 +8,7 @@ import shutil
import subprocess as sp
import sys
from .__init__ import PY2, WINDOWS, unicode
from .__init__ import PY2, WINDOWS, E, unicode
from .bos import bos
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
@@ -42,9 +42,10 @@ class MParser(object):
self.tag, args = cmdline.split("=", 1)
self.tags = self.tag.split(",")
self.timeout = 30
self.timeout = 60
self.force = False
self.kill = "t" # tree; all children recursively
self.capture = 3 # outputs to consume
self.audio = "y"
self.pri = 0 # priority; higher = later
self.ext = []
@@ -72,6 +73,10 @@ class MParser(object):
self.kill = arg[1:] # [t]ree [m]ain [n]one
continue
if arg.startswith("c"):
self.capture = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
continue
if arg == "f":
self.force = True
continue
@@ -92,7 +97,7 @@ class MParser(object):
def ffprobe(
abspath: str, timeout: int = 10
abspath: str, timeout: int = 60
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
cmd = [
b"ffprobe",
@@ -257,11 +262,7 @@ class MTag(object):
self.usable = True
self.prefer_mt = not args.no_mtag_ff
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
self.can_ffprobe = (
HAVE_FFPROBE
and not args.no_mtag_ff
and (not WINDOWS or sys.version_info >= (3, 8))
)
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
mappings = args.mtm
or_ffprobe = " or FFprobe"
@@ -285,11 +286,6 @@ class MTag(object):
msg = "found FFprobe but it was disabled by --no-mtag-ff"
self.log(msg, c=3)
elif WINDOWS and sys.version_info < (3, 8):
or_ffprobe = " or python >= 3.8"
msg = "found FFprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1)
if not self.usable:
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
pybin = os.path.basename(sys.executable)
@@ -497,7 +493,7 @@ class MTag(object):
if not bos.path.isfile(abspath):
return {}
ret, md = ffprobe(abspath)
ret, md = ffprobe(abspath, self.args.mtag_to)
return self.normalize_tags(ret, md)
def get_bin(
@@ -506,11 +502,15 @@ class MTag(object):
if not bos.path.isfile(abspath):
return {}
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(zsl))
env = os.environ.copy()
env["PYTHONPATH"] = pypath
try:
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(zsl))
env["PYTHONPATH"] = pypath
except:
if not E.ox:
raise
ret: dict[str, Any] = {}
for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
@@ -519,7 +519,12 @@ class MTag(object):
if parser.bin.endswith(".py"):
cmd = [sys.executable] + cmd
args = {"env": env, "timeout": parser.timeout, "kill": parser.kill}
args = {
"env": env,
"timeout": parser.timeout,
"kill": parser.kill,
"capture": parser.capture,
}
if parser.pri:
zd = oth_tags.copy()

View File

@@ -16,7 +16,7 @@ import codecs
import platform
import sys
PY3 = sys.version_info[0] > 2
PY3 = sys.version_info > (3,)
WINDOWS = platform.system() == "Windows"
FS_ERRORS = "surrogateescape"
@@ -26,20 +26,6 @@ except:
pass
def u(text: Any) -> str:
if PY3:
return text
else:
return text.decode("unicode_escape")
def b(data: Any) -> bytes:
if PY3:
return data.encode("latin1")
else:
return data
if PY3:
_unichr = chr
bytes_chr = lambda code: bytes((code,))
@@ -171,9 +157,6 @@ def decodefilename(fn: bytes) -> str:
FS_ENCODING = sys.getfilesystemencoding()
# FS_ENCODING = "ascii"; fn = b("[abc\xff]"); encoded = u("[abc\udcff]")
# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]')
# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]')
if WINDOWS and not PY3:

View File

@@ -24,7 +24,7 @@ try:
except:
pass
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, E, unicode
from .__init__ import ANYWIN, MACOS, VT100, EnvParams, unicode
from .authsrv import AuthSrv
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
@@ -32,6 +32,7 @@ from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k
from .util import (
VERSIONS,
HMaccas,
alltrace,
ansi_re,
min_ex,
@@ -55,6 +56,7 @@ class SvcHub(object):
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
self.args = args
self.argv = argv
self.E: EnvParams = args.E
self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = ""
self.stop_req = False
@@ -71,6 +73,8 @@ class SvcHub(object):
self.next_day = 0
self.tstack = 0.0
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
if args.sss or args.s >= 3:
args.ss = True
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
@@ -85,6 +89,7 @@ class SvcHub(object):
args.no_mv = True
args.hardlink = True
args.vague_403 = True
args.ban_404 = "50,60,1440"
args.nih = True
if args.s:
@@ -264,7 +269,7 @@ class SvcHub(object):
msg = "[+] opened logfile [{}]\n".format(fn)
printed += msg
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(self.E.t0, " ".join(argv), printed))
self.logf = lh
self.logf_base_fn = base_fn
print(msg, end="")
@@ -475,17 +480,10 @@ class SvcHub(object):
print(*a, **ka)
def check_mp_support(self) -> str:
vmin = sys.version_info[1]
if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;"
if PY2 or vmin < 3:
return msg
elif MACOS:
if MACOS:
return "multiprocessing is wonky on mac osx;"
else:
msg = "need python 3.3+ for multiprocessing;"
if PY2 or vmin < 3:
return msg
elif sys.version_info < (3, 3):
return "need python 3.3 or newer for multiprocessing;"
try:
x: mp.Queue[tuple[str, str]] = mp.Queue(1)
@@ -501,7 +499,10 @@ class SvcHub(object):
if self.args.j == 1:
return False
if mp.cpu_count() <= 1:
try:
if mp.cpu_count() <= 1:
raise Exception()
except:
self.log("svchub", "only one CPU detected; multiprocessing disabled")
return False

View File

@@ -24,8 +24,10 @@ class TcpSrv(object):
self.args = hub.args
self.log = hub.log
self.stopping = False
# mp-safe since issue6056
socket.setdefaulttimeout(120)
self.stopping = False
self.srv: list[socket.socket] = []
self.nsrv = 0
ok: dict[str, list[int]] = {}
@@ -112,6 +114,7 @@ class TcpSrv(object):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
srv.settimeout(None) # < does not inherit, ^ does
try:
srv.bind((ip, port))
self.srv.append(srv)

View File

@@ -75,7 +75,7 @@ class ThumbCli(object):
preferred = self.args.th_dec[0] if self.args.th_dec else ""
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
return os.path.join(ptop, rem)
if fmt == "j" and self.args.th_no_jpg:

View File

@@ -14,7 +14,7 @@ from queue import Queue
from .__init__ import TYPE_CHECKING
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
from .util import BytesIO, Cooldown, fsenc, min_ex, runcmd, statdir, vsplit
from .util import BytesIO, Cooldown, Pebkac, fsenc, min_ex, runcmd, statdir, vsplit
try:
from typing import Optional, Union
@@ -82,7 +82,7 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
if fmt in ("opus", "caf"):
cat = "ac"
else:
fmt = "webp" if fmt == "w" else "jpg"
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
cat = "th"
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
@@ -239,6 +239,7 @@ class ThumbSrv(object):
abspath, tpath = task
ext = abspath.split(".")[-1].lower()
png_ok = False
fun = None
if not bos.path.exists(tpath):
for lib in self.args.th_dec:
@@ -253,19 +254,32 @@ class ThumbSrv(object):
elif lib == "ff" and ext in self.fmt_ffa:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
elif tpath.endswith(".png"):
fun = self.conv_waves
png_ok = True
else:
fun = self.conv_spec
if not png_ok and tpath.endswith(".png"):
raise Pebkac(400, "png only allowed for waveforms")
if fun:
try:
fun(abspath, tpath)
except:
except Exception as ex:
msg = "{} could not create thumbnail of {}\n{}"
msg = msg.format(fun.__name__, abspath, min_ex())
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "1;30"
self.log(msg, c)
with open(tpath, "wb") as _:
pass
if getattr(ex, "returncode", 0) != 321:
with open(tpath, "wb") as _:
pass
else:
# ffmpeg may spawn empty files on windows
try:
os.unlink(tpath)
except:
pass
with self.mutex:
subs = self.busy[tpath]
@@ -352,7 +366,7 @@ class ThumbSrv(object):
img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath)
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
if not ret:
return
@@ -411,21 +425,30 @@ class ThumbSrv(object):
c: Union[str, int] = "1;30"
t = "FFmpeg failed (probably a corrupt video file):\n"
if cmd[-1].lower().endswith(b".webp") and (
"Error selecting an encoder" in serr
or "Automatic encoder selection failed" in serr
or "Default encoder for format webp" in serr
or "Please choose an encoder manually" in serr
if (
(not self.args.th_ff_jpg or time.time() - int(self.args.th_ff_jpg) < 60)
and cmd[-1].lower().endswith(b".webp")
and (
"Error selecting an encoder" in serr
or "Automatic encoder selection failed" in serr
or "Default encoder for format webp" in serr
or "Please choose an encoder manually" in serr
)
):
self.args.th_ff_jpg = True
self.args.th_ff_jpg = time.time()
t = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
ret = 321
c = 1
if (
not self.args.th_ff_swr or time.time() - int(self.args.th_ff_swr) < 60
) and (
"Requested resampling engine is unavailable" in serr
or "output pad on Parsed_aresample_" in serr
):
t = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
self.args.th_ff_swr = time.time()
t = "FFmpeg failed because it was compiled without libsox; enabling --th-ff-swr to force swr resampling:\n"
ret = 321
c = 1
lines = serr.strip("\n").split("\n")
@@ -439,8 +462,36 @@ class ThumbSrv(object):
self.log(t + txt, c=c)
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_waves(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
if "ac" not in ret:
raise Exception("not audio")
flt = (
b"[0:a:0]"
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
b",volume=2"
b",showwavespic=s=2048x64:colors=white"
b",convolution=1 1 1 1 1 1 1 1 1:1 1 1 1 1 1 1 1 1:1 1 1 1 1 1 1 1 1:1 -1 1 -1 5 -1 1 -1 1" # idk what im doing but it looks ok
)
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-filter_complex", flt,
b"-frames:v", b"1",
]
# fmt: on
cmd += [fsenc(tpath)]
self._run_ff(cmd)
def conv_spec(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath)
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
if "ac" not in ret:
raise Exception("not audio")
@@ -461,7 +512,8 @@ class ThumbSrv(object):
b"-hide_banner",
b"-i", fsenc(abspath),
b"-filter_complex", fc.encode("utf-8"),
b"-map", b"[o]"
b"-map", b"[o]",
b"-frames:v", b"1",
]
# fmt: on
@@ -485,7 +537,7 @@ class ThumbSrv(object):
if self.args.no_acode:
raise Exception("disabled in server config")
ret, _ = ffprobe(abspath)
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
if "ac" not in ret:
raise Exception("not audio")

View File

@@ -45,6 +45,7 @@ from .util import (
s3dec,
s3enc,
sanitize_fn,
spack,
statdir,
vjoin,
vsplit,
@@ -135,7 +136,7 @@ class Up2k(object):
self.mem_cur = None
self.sqlite_ver = None
self.no_expr_idx = False
self.timeout = int(max(self.args.srch_time, 5) * 1.2) + 1
self.timeout = int(max(self.args.srch_time, 50) * 1.2) + 1
self.spools: set[tempfile.SpooledTemporaryFile[bytes]] = set()
if HAVE_SQLITE3:
# mojibake detector
@@ -265,22 +266,29 @@ class Up2k(object):
def _sched_rescan(self) -> None:
volage = {}
cooldown = 0.0
timeout = time.time() + 3
cooldown = timeout = time.time() + 3.0
while True:
now = time.time()
timeout = max(timeout, cooldown)
wait = max(0.1, timeout + 0.1 - time.time())
wait = timeout - time.time()
# self.log("SR in {:.2f}".format(wait), 5)
with self.rescan_cond:
self.rescan_cond.wait(wait)
now = time.time()
if now < cooldown:
# self.log("SR: cd - now = {:.2f}".format(cooldown - now), 5)
timeout = cooldown # wakeup means stuff to do, forget timeout
continue
if self.pp:
# self.log("SR: pp; cd := 1", 5)
cooldown = now + 1
continue
cooldown = now + 5
# self.log("SR", 5)
if self.args.no_lifetime:
timeout = now + 9001
else:
@@ -302,7 +310,7 @@ class Up2k(object):
timeout = min(timeout, deadline)
if self.db_act > now - self.args.db_act:
if self.db_act > now - self.args.db_act and self.need_rescan:
# recent db activity; defer volume rescan
act_timeout = self.db_act + self.args.db_act
if self.need_rescan:
@@ -344,11 +352,9 @@ class Up2k(object):
if not cur:
continue
lifetime = int(lifetime)
timeout = min(timeout, now + lifetime)
nrm = 0
deadline = time.time() - lifetime
timeout = min(timeout, now + lifetime)
q = "select rd, fn from up where at > 0 and at < ? limit 100"
while True:
with self.mutex:
@@ -683,10 +689,9 @@ class Up2k(object):
top = vol.realpath
rei = vol.flags.get("noidx")
reh = vol.flags.get("nohash")
dev = 0
if vol.flags.get("xdev"):
dev = bos.stat(top).st_dev
n4g = bool(vol.flags.get("noforget"))
cst = bos.stat(top)
dev = cst.st_dev if vol.flags.get("xdev") else 0
with self.mutex:
reg = self.register_vpath(top, vol.flags)
@@ -720,11 +725,14 @@ class Up2k(object):
rtop,
rei,
reh,
n4g,
[],
cst,
dev,
bool(vol.flags.get("xvol")),
)
n_rm = self._drop_lost(db.c, top, excl)
if not n4g:
n_rm = self._drop_lost(db.c, top, excl)
except Exception as ex:
t = "failed to index volume [{}]:\n{}"
self.log(t.format(top, min_ex()), c=1)
@@ -754,7 +762,9 @@ class Up2k(object):
rcdir: str,
rei: Optional[Pattern[str]],
reh: Optional[Pattern[str]],
n4g: bool,
seen: list[str],
cst: os.stat_result,
dev: int,
xvol: bool,
) -> int:
@@ -809,7 +819,7 @@ class Up2k(object):
# self.log(" dir: {}".format(abspath))
try:
ret += self._build_dir(
db, top, excl, abspath, rap, rei, reh, seen, dev, xvol
db, top, excl, abspath, rap, rei, reh, n4g, seen, inf, dev, xvol
)
except:
t = "failed to index subdir [{}]:\n{}"
@@ -842,6 +852,7 @@ class Up2k(object):
zh = hashlib.sha1()
_ = [zh.update(str(x).encode("utf-8", "replace")) for x in files]
zh.update(spack(b"<d", cst.st_mtime))
dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
sql = "select d from dh where d = ? and h = ?"
try:
@@ -932,27 +943,30 @@ class Up2k(object):
return -1
# drop shadowed folders
for rd in unreg:
for sh_rd in unreg:
n = 0
q = "select count(w) from up where (rd = ? or rd like ?||'%') and at == 0"
for erd in [rd, "//" + w8b64enc(rd)]:
for sh_erd in [sh_rd, "//" + w8b64enc(sh_rd)]:
try:
n = db.c.execute(q, (erd, erd + "/")).fetchone()[0]
n = db.c.execute(q, (sh_erd, sh_erd + "/")).fetchone()[0]
break
except:
pass
if n:
t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
self.log(t.format(n, top, rd))
self.log(t.format(n, top, sh_rd))
q = "delete from dh where (d = ? or d like ?||'%')"
db.c.execute(q, (erd, erd + "/"))
db.c.execute(q, (sh_erd, sh_erd + "/"))
q = "delete from up where (rd = ? or rd like ?||'%') and at == 0"
db.c.execute(q, (erd, erd + "/"))
db.c.execute(q, (sh_erd, sh_erd + "/"))
ret += n
if n4g:
return ret
# drop missing files
q = "select fn from up where rd = ?"
try:
@@ -1320,7 +1334,7 @@ class Up2k(object):
nq -= 1
td = time.time() - last_write
if n_buf >= 4096 or td >= max(1, self.timeout - 1):
if n_buf >= 4096 or td >= self.timeout / 2:
self.log("commit {} new tags".format(n_buf))
with self.mutex:
cur.connection.commit()
@@ -1424,6 +1438,10 @@ class Up2k(object):
if tag in parser.tags:
parsers[parser.tag] = parser
if self.args.mtag_vv:
t = "parsers for {}: \033[0m{}"
self.log(t.format(ptop, list(parsers.keys())), "1;30")
self.mtp_parsers[ptop] = parsers
q = "select count(w) from mt where k = 't:mtp'"
@@ -1552,6 +1570,8 @@ class Up2k(object):
try:
all_parsers = self.mtp_parsers[ptop]
except:
if self.args.mtag_vv:
self.log("no mtp defined for {}".format(ptop), "1;30")
return {}
entags = self.entags[ptop]
@@ -1561,9 +1581,15 @@ class Up2k(object):
if "ac" in have or ".aq" in have:
# is audio, require non-audio?
if v.audio == "n":
if self.args.mtag_vv:
t = "skip mtp {}; is no-audio, have audio"
self.log(t.format(k), "1;30")
continue
# is not audio, require audio?
elif v.audio == "y":
if self.args.mtag_vv:
t = "skip mtp {}; is audio, have no-audio"
self.log(t.format(k), "1;30")
continue
if v.ext:
@@ -1574,6 +1600,9 @@ class Up2k(object):
break
if not match:
if self.args.mtag_vv:
t = "skip mtp {}; need file-ext {}, have {}"
self.log(t.format(k, v.ext, abspath.rsplit(".")[-1]), "1;30")
continue
parsers[k] = v
@@ -1619,8 +1648,16 @@ class Up2k(object):
try:
if not qe.mtp:
if self.args.mtag_vv:
t = "tag-thr: {}({})"
self.log(t.format(self.mtag.backend, qe.abspath), "1;30")
tags = self.mtag.get(qe.abspath)
else:
if self.args.mtag_vv:
t = "tag-thr: {}({})"
self.log(t.format(list(qe.mtp.keys()), qe.abspath), "1;30")
tags = self.mtag.get_bin(qe.mtp, qe.abspath, qe.oth_tags)
vtags = [
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
@@ -1651,7 +1688,7 @@ class Up2k(object):
wark: str,
abspath: str,
ip: str,
at: float
at: float,
) -> int:
"""will mutex"""
assert self.mtag
@@ -1887,6 +1924,9 @@ class Up2k(object):
with self.mutex:
cur = self.cur.get(cj["ptop"])
reg = self.registry[cj["ptop"]]
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
n4g = vfs.flags.get("noforget")
lost: list[tuple[str, str]] = []
if cur:
if self.no_expr_idx:
q = r"select * from up where w = ?"
@@ -1908,7 +1948,11 @@ class Up2k(object):
# broken symlink
raise Exception()
except:
continue
if n4g:
st = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
else:
lost.append((dp_dir, dp_fn))
continue
j = {
"name": dp_fn,
@@ -1924,6 +1968,10 @@ class Up2k(object):
"need": [],
"busy": {},
}
for k in ["life"]:
if k in cj:
j[k] = cj[k]
score = (
(3 if st.st_dev == dev else 0)
+ (2 if dp_dir == cj["prel"] else 0)
@@ -1936,6 +1984,12 @@ class Up2k(object):
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
del reg[wark]
if lost:
for dp_dir, dp_fn in lost:
self.db_rm(cur, dp_dir, dp_fn)
cur.connection.commit()
if job or wark in reg:
job = job or reg[wark]
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
@@ -1949,7 +2003,7 @@ class Up2k(object):
break
except:
# missing; restart
if not self.args.nw:
if not self.args.nw and not n4g:
job = None
break
else:
@@ -1991,7 +2045,11 @@ class Up2k(object):
dst = os.path.join(job["ptop"], job["prel"], job["name"])
if not self.args.nw:
bos.unlink(dst) # TODO ed pls
self._symlink(src, dst, lmod=cj["lmod"])
try:
self._symlink(src, dst, lmod=cj["lmod"])
except:
if not n4g:
raise
if cur:
a = [cj[x] for x in "prel name lmod size addr".split()]
@@ -2000,7 +2058,6 @@ class Up2k(object):
cur.connection.commit()
if not job:
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
if vfs.lim:
ap1 = djoin(cj["ptop"], cj["prel"])
ap2, cj["prel"] = vfs.lim.all(
@@ -2031,6 +2088,10 @@ class Up2k(object):
]:
job[k] = cj[k]
for k in ["life"]:
if k in cj:
job[k] = cj[k]
# one chunk may occur multiple times in a file;
# filter to unique values for the list of missing chunks
# (preserve order to reduce disk thrashing)
@@ -2063,9 +2124,12 @@ class Up2k(object):
if self.args.nw:
return fname
# TODO broker which avoid this race and
# provides a new filename if taken (same as bup)
suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
if self.args.plain_ip:
dip = ip.replace(":", ".")
else:
dip = self.hub.iphash.s(ip)
suffix = "-{:.6f}-{}".format(ts, dip)
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as zfw:
return zfw["orz"][1]
@@ -2092,13 +2156,9 @@ class Up2k(object):
raise OSError(38, "filesystem does not have st_dev")
elif fs1 == fs2:
# same fs; make symlink as relative as possible
v = []
for p in [src, dst]:
if WINDOWS:
p = p.replace("\\", "/")
v.append(p.split("/"))
nsrc, ndst = v
spl = r"[\\/]" if WINDOWS else "/"
nsrc = re.split(spl, src)
ndst = re.split(spl, dst)
nc = 0
for a, b in zip(nsrc, ndst):
if a != b:
@@ -2109,6 +2169,10 @@ class Up2k(object):
hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(zsl)
if WINDOWS:
lsrc = lsrc.replace("/", "\\")
ldst = ldst.replace("/", "\\")
try:
if self.args.hardlink:
os.link(fsenc(src), fsenc(dst))
@@ -2245,12 +2309,30 @@ class Up2k(object):
pass
z2 = [job[x] for x in "ptop wark prel name lmod size addr".split()]
z2 += [job.get("at") or time.time()]
upt = job.get("at") or time.time()
wake_sr = False
try:
flt = job["life"]
vfs = self.asrv.vfs.all_vols[job["vtop"]]
vlt = vfs.flags["lifetime"]
if vlt and flt < vlt:
upt -= vlt - flt
wake_sr = True
t = "using client lifetime; at={:.0f} ({}-{})"
self.log(t.format(upt, vlt, flt))
except:
pass
z2 += [upt]
if self.idx_wark(*z2):
del self.registry[ptop][wark]
else:
self.regdrop(ptop, wark)
if wake_sr:
with self.rescan_cond:
self.rescan_cond.notify_all()
dupes = self.dupesched.pop(dst, [])
if not dupes:
return
@@ -2804,7 +2886,11 @@ class Up2k(object):
del self.registry[job["ptop"]][job["wark"]]
return
dip = job["addr"].replace(":", ".")
if self.args.plain_ip:
dip = job["addr"].replace(":", ".")
else:
dip = self.hub.iphash.s(job["addr"])
suffix = "-{:.6f}-{}".format(job["t0"], dip)
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as zfw:
f, job["tnam"] = zfw["orz"]
@@ -2899,8 +2985,18 @@ class Up2k(object):
for x in reg.values()
if x["need"] and now - x["poke"] > self.snap_discard_interval
]
if rm:
t = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
lost = [
x
for x in reg.values()
if x["need"]
and not bos.path.exists(os.path.join(x["ptop"], x["prel"], x["name"]))
]
if rm or lost:
t = "dropping {} abandoned, {} deleted uploads in {}"
t = t.format(len(rm), len(lost), ptop)
rm.extend(lost)
vis = [self._vis_job_progress(x) for x in rm]
self.log("\n".join([t] + vis))
for job in rm:
@@ -2910,7 +3006,10 @@ class Up2k(object):
path = os.path.join(job["ptop"], job["prel"], job["name"])
if bos.path.getsize(path) == 0:
bos.unlink(path)
except:
pass
try:
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
@@ -2966,6 +3065,12 @@ class Up2k(object):
tags = self.mtag.get(abspath)
ntags1 = len(tags)
parsers = self._get_parsers(ptop, tags, abspath)
if self.args.mtag_vv:
t = "parsers({}): {}\n{} {} tags: {}".format(
ptop, list(parsers.keys()), ntags1, self.mtag.backend, tags
)
self.log(t)
if parsers:
tags["up_ip"] = ip
tags["up_at"] = at
@@ -3022,6 +3127,10 @@ class Up2k(object):
with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size, ip, at)
if at and time.time() - at > 30:
with self.rescan_cond:
self.rescan_cond.notify_all()
def hash_file(
self, ptop: str, flags: dict[str, Any], rd: str, fn: str, ip: str, at: float
) -> None:

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import base64
import contextlib
import hashlib
import hmac
import math
import mimetypes
import os
@@ -24,12 +25,19 @@ from datetime import datetime
from queue import Queue
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__version__ import S_BUILD_DT, S_VERSION
from .stolen import surrogateescape
try:
import ctypes
import fcntl
import termios
except:
pass
try:
from ipaddress import IPv6Address
except:
pass
@@ -64,8 +72,9 @@ except:
pass
if TYPE_CHECKING:
from .authsrv import VFS
import magic
from .authsrv import VFS
FAKE_MP = False
@@ -111,7 +120,7 @@ else:
FS_ENCODING = sys.getfilesystemencoding()
SYMTIME = sys.version_info >= (3, 6) and os.utime in os.supports_follow_symlinks
SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
@@ -151,22 +160,18 @@ IMPLICATIONS = [
MIMES = {
"md": "text/plain",
"txt": "text/plain",
"js": "text/javascript",
"opus": "audio/ogg; codecs=opus",
"caf": "audio/x-caf",
"mp3": "audio/mpeg",
"m4a": "audio/mp4",
"jpg": "image/jpeg",
}
def _add_mimes() -> None:
# `mimetypes` is woefully unpopulated on windows
# but will be used as fallback on linux
for ln in """text css html csv
application json wasm xml pdf rtf zip
image webp jpeg png gif bmp
audio aac ogg wav
application json wasm xml pdf rtf zip jar fits wasm
image webp jpeg png gif bmp jxl jp2 jxs jxr tiff bpg heic heif avif
audio aac ogg wav flac ape amr
video webm mp4 mpeg
font woff woff2 otf ttf
""".splitlines():
@@ -174,10 +179,35 @@ font woff woff2 otf ttf
for v in vs.strip().split():
MIMES[v] = "{}/{}".format(k, v)
for ln in """text md=plain txt=plain js=javascript
application 7z=x-7z-compressed tar=x-tar bz2=x-bzip2 gz=gzip rar=x-rar-compressed zst=zstd xz=x-xz lz=lzip cpio=x-cpio
application msi=x-ms-installer cab=vnd.ms-cab-compressed rpm=x-rpm crx=x-chrome-extension
application epub=epub+zip mobi=x-mobipocket-ebook lit=x-ms-reader rss=rss+xml atom=atom+xml torrent=x-bittorrent
application p7s=pkcs7-signature dcm=dicom shx=vnd.shx shp=vnd.shp dbf=x-dbf gml=gml+xml gpx=gpx+xml amf=x-amf
application swf=x-shockwave-flash m3u=vnd.apple.mpegurl db3=vnd.sqlite3 sqlite=vnd.sqlite3
image jpg=jpeg xpm=x-xpixmap psd=vnd.adobe.photoshop jpf=jpx tif=tiff ico=x-icon djvu=vnd.djvu
image heic=heic-sequence heif=heif-sequence hdr=vnd.radiance svg=svg+xml
audio caf=x-caf mp3=mpeg m4a=mp4 mid=midi mpc=musepack aif=aiff au=basic qcp=qcelp
video mkv=x-matroska mov=quicktime avi=x-msvideo m4v=x-m4v ts=mp2t
video asf=x-ms-asf flv=x-flv 3gp=3gpp 3g2=3gpp2 rmvb=vnd.rn-realmedia-vbr
font ttc=collection
""".splitlines():
k, ems = ln.split(" ", 1)
for em in ems.strip().split():
ext, mime = em.split("=")
MIMES[ext] = "{}/{}".format(k, mime)
_add_mimes()
EXTS: dict[str, str] = {v: k for k, v in MIMES.items()}
EXTS["vnd.mozilla.apng"] = "png"
MAGIC_MAP = {"jpeg": "jpg"}
REKOBO_KEY = {
v: ln.split(" ", 1)[0]
for ln in """
@@ -595,6 +625,162 @@ class MTHash(object):
return nch, udig, ofs0, chunk_sz
class HMaccas(object):
    """Deterministic short identifiers via HMAC-SHA512.

    Loads a persistent 64-byte secret from *keypath*; if the file is
    missing or not exactly 64 bytes, a fresh key is generated with
    os.urandom and written back.  Messages are mapped to urlsafe-base64
    digests truncated to *retlen* characters, memoized in an in-process
    cache (NOTE: the cache is unbounded; callers feed it a bounded set
    of messages such as client IPs).
    """

    def __init__(self, keypath: str, retlen: int) -> None:
        self.retlen = retlen
        self.cache: dict[bytes, str] = {}
        try:
            with open(keypath, "rb") as f:
                self.key = f.read()
                if len(self.key) != 64:
                    raise Exception()
        except Exception:
            # missing/truncated keyfile: generate and persist a new key
            # (narrowed from bare except so ^C is not swallowed here)
            self.key = os.urandom(64)
            with open(keypath, "wb") as f:
                f.write(self.key)

    def b(self, msg: bytes) -> str:
        """Return the truncated urlsafe-b64 HMAC of *msg* (cached)."""
        try:
            return self.cache[msg]
        except KeyError:
            zb = hmac.new(self.key, msg, hashlib.sha512).digest()
            zs = base64.urlsafe_b64encode(zb)[: self.retlen].decode("utf-8")
            self.cache[msg] = zs
            return zs

    def s(self, msg: str) -> str:
        """Return the truncated HMAC of a text message (utf-8, lossy)."""
        return self.b(msg.encode("utf-8", "replace"))
class Magician(object):
    """best-effort file-extension detection via libmagic (python-magic)"""

    def __init__(self) -> None:
        # set once magic.Magic() construction fails; disables magic forever
        self.bad_magic = False
        self.mutex = threading.Lock()
        self.magic: Optional["magic.Magic"] = None

    def ext(self, fpath: str) -> str:
        """guess a file extension for fpath; raises if nothing matched

        tries libmagic's extension mode first, then falls back to its
        mime-type mode mapped through EXTS/mimetypes
        """
        import magic

        try:
            if self.bad_magic:
                raise Exception()

            if not self.magic:
                try:
                    # checked outside and again inside the lock so only
                    # one thread constructs the Magic instance
                    with self.mutex:
                        if not self.magic:
                            self.magic = magic.Magic(uncompress=False, extension=True)
                except:
                    self.bad_magic = True
                    raise

            with self.mutex:
                ret = self.magic.from_file(fpath)
        except:
            ret = "?"

        # libmagic may return several slash-separated candidates; take the first
        ret = ret.split("/")[0]
        ret = MAGIC_MAP.get(ret, ret)
        if "?" not in ret:
            return ret

        # extension-mode was inconclusive; retry by mime-type
        mime = magic.from_file(fpath, mime=True)
        mime = re.split("[; ]", mime, 1)[0]
        try:
            return EXTS[mime]
        except:
            pass

        mg = mimetypes.guess_extension(mime)
        if mg:
            return mg[1:]  # drop the leading dot
        else:
            raise Exception()
class Garda(object):
    """ban clients for repeated offenses (a minimal fail2ban)

    cfg is "limit,window-minutes,penalty-minutes"; an unparseable or
    empty cfg disables banning entirely (lim == 0)
    """

    def __init__(self, cfg: str) -> None:
        try:
            a, b, c = cfg.strip().split(",")
            self.lim = int(a)
            self.win = int(b) * 60
            self.pen = int(c) * 60
        except (AttributeError, ValueError):
            # bad/empty config: disable (narrowed from bare except)
            self.lim = self.win = self.pen = 0

        # ip -> offense timestamps (appended in order, so oldest first)
        self.ct: dict[str, list[int]] = {}
        # ip -> signature of the last offense, to avoid double-counting retries
        self.prev: dict[str, str] = {}
        self.last_cln = 0

    def cln(self, ip: str) -> None:
        """drop expired offenses for ip; forget the ip entirely if none remain"""
        n = 0
        ok = int(time.time() - self.win)
        for v in self.ct[ip]:
            if v < ok:
                n += 1
            else:
                break  # timestamps are ordered; the rest are still fresh

        if n:
            te = self.ct[ip][n:]
            if te:
                self.ct[ip] = te
            else:
                del self.ct[ip]
                # fully forgotten; drop the dedup signature too
                self.prev.pop(ip, None)

    def allcln(self) -> None:
        """periodic housekeeping across every tracked ip"""
        for k in list(self.ct):
            self.cln(k)

        self.last_cln = int(time.time())

    def bonk(self, ip: str, prev: str) -> tuple[int, str]:
        """register an offense; returns (ban-expiry-unixtime or 0, effective-ip)"""
        if not self.lim:
            return 0, ip

        if ":" in ip and not PY2:
            # assume /64 clients; drop 4 groups
            ip = IPv6Address(ip).exploded[:-20]

        if prev:
            if self.prev.get(ip) == prev:
                # same offense signature as last time; don't double-count
                return 0, ip

            self.prev[ip] = prev

        now = int(time.time())
        self.ct.setdefault(ip, []).append(now)

        if now - self.last_cln > 300:
            self.allcln()
        else:
            self.cln(ip)

        if len(self.ct[ip]) >= self.lim:
            return now + self.pen, ip

        return 0, ip
if WINDOWS and sys.version_info < (3, 8):
    # NOTE(review): decodes any bytes in argv before handing off to the real
    # Popen — presumably a workaround for bytes-argv handling on old
    # windows-python; confirm against the commit that introduced it
    _popen = sp.Popen

    def _spopen(c, *a, **ka):
        enc = sys.getfilesystemencoding()
        c = [x.decode(enc, "replace") if hasattr(x, "decode") else x for x in c]
        return _popen(c, *a, **ka)

    sp.Popen = _spopen
def uprint(msg: str) -> None:
try:
print(msg, end="")
@@ -677,12 +863,43 @@ def start_stackmon(arg_str: str, nid: int) -> None:
def stackmon(fp: str, ival: float, suffix: str) -> None:
ctr = 0
fp0 = fp
while True:
ctr += 1
fp = fp0
time.sleep(ival)
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
buf = st.encode("utf-8", "replace")
if fp.endswith(".gz"):
import gzip
# 2459b 2304b 2241b 2202b 2194b 2191b lv3..8
# 0.06s 0.08s 0.11s 0.13s 0.16s 0.19s
buf = gzip.compress(buf, compresslevel=6)
elif fp.endswith(".xz"):
import lzma
# 2276b 2216b 2200b 2192b 2168b lv0..4
# 0.04s 0.10s 0.22s 0.41s 0.70s
buf = lzma.compress(buf, preset=0)
if "%" in fp:
dt = datetime.utcnow()
for fs in "YmdHMS":
fs = "%" + fs
if fs in fp:
fp = fp.replace(fs, dt.strftime(fs))
if "/" in fp:
try:
os.makedirs(fp.rsplit("/", 1)[0])
except:
pass
with open(fp + suffix, "wb") as f:
f.write(st.encode("utf-8", "replace"))
f.write(buf)
def start_log_thrs(
@@ -805,7 +1022,7 @@ def ren_open(
]
continue
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
if ex.errno not in [36, 63, 95] and (not WINDOWS or ex.errno != 22):
raise
if not b64:
@@ -1403,7 +1620,7 @@ def db_ex_chk(log: "NamedLogger", ex: Exception, db_path: str) -> bool:
def lsof(log: "NamedLogger", abspath: str) -> None:
try:
rc, so, se = runcmd([b"lsof", b"-R", fsenc(abspath)], timeout=5)
rc, so, se = runcmd([b"lsof", b"-R", fsenc(abspath)], timeout=45)
zs = (so.strip() + "\n" + se.strip()).strip()
log("lsof {} = {}\n{}".format(abspath, rc, zs), 3)
except:
@@ -1440,6 +1657,55 @@ def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
return (None, None)
if not ANYWIN and not MACOS:

    def siocoutq(sck: socket.socket) -> int:
        """bytes still queued in the socket's kernel send-buffer (linux)"""
        # SIOCOUTQ^sockios.h == TIOCOUTQ^ioctl.h
        try:
            zb = fcntl.ioctl(sck.fileno(), termios.TIOCOUTQ, b"AAAA")
            return sunpack(b"I", zb)[0]  # type: ignore
        except:
            # ioctl failed; report nonzero so callers never assume a flushed queue
            return 1

else:
    # macos: getsockopt(fd, SOL_SOCKET, SO_NWRITE, ...)
    # windows: TcpConnectionEstatsSendBuff
    def siocoutq(sck: socket.socket) -> int:
        """stub for platforms where the queue is not read; always nonzero"""
        return 1
def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> None:
    """graceful socket shutdown: half-close, wait for tx-drain, then close

    half-closes the write side, then polls siocoutq() until the kernel
    send-queue is empty (or timeout), draining rx in the meantime;
    logs a note if the whole dance took a second or more
    """
    t0 = time.time()
    fd = sck.fileno()
    if fd == -1:
        # already closed/detached; nothing to drain
        sck.close()
        return

    try:
        sck.settimeout(timeout)
        sck.shutdown(socket.SHUT_WR)
        try:
            while time.time() - t0 < timeout:
                if not siocoutq(sck):
                    # kernel says tx queue empty, we good
                    break

                # on windows in particular, drain rx until client shuts
                if not sck.recv(32 * 1024):
                    break

            sck.shutdown(socket.SHUT_RDWR)
        except:
            pass
    finally:
        td = time.time() - t0
        if td >= 1:
            log("shut({}) in {:.3f} sec".format(fd, td), "1;30")

        sck.close()
def read_socket(sr: Unrecv, total_size: int) -> Generator[bytes, None, None]:
remains = total_size
while remains > 0:
@@ -1520,7 +1786,7 @@ def yieldfile(fn: str) -> Generator[bytes, None, None]:
def hashcopy(
fin: Union[typing.BinaryIO, Generator[bytes, None, None]],
fin: Generator[bytes, None, None],
fout: Union[typing.BinaryIO, typing.IO[Any]],
slp: int = 0,
max_sz: int = 0,
@@ -1818,12 +2084,16 @@ def runcmd(
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
) -> tuple[int, str, str]:
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
sin = ka.pop("sin", None)
if sin:
ka["stdin"] = sp.PIPE
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, **ka)
cout = sp.PIPE if capture in [1, 3] else None
cerr = sp.PIPE if capture in [2, 3] else None
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
if not timeout or PY2:
stdout, stderr = p.communicate(sin)
else:
@@ -1843,8 +2113,8 @@ def runcmd(
stdout = b""
stderr = b""
stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8", "replace")
stdout = stdout.decode("utf-8", "replace") if cout else b""
stderr = stderr.decode("utf-8", "replace") if cerr else b""
rc = p.returncode
if rc is None:
@@ -2030,10 +2300,7 @@ def termsize() -> tuple[int, int]:
def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
try:
import fcntl
import termios
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"1234"))
cr = sunpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
return int(cr[1]), int(cr[0])
except:
return None

View File

@@ -88,10 +88,10 @@
--g-sel-fg: #fff;
--g-sel-bg: #925;
--g-sel-b1: #c37;
--g-sel-b1: #e39;
--g-sel-sh: #b36;
--g-fsel-bg: #d39;
--g-fsel-b1: #d48;
--g-fsel-b1: #f4a;
--g-fsel-ts: #804;
--g-fg: var(--a-hil);
--g-bg: var(--bg-u2);
@@ -246,6 +246,7 @@ html.b {
--u2-o-1h-bg: var(--a-hil);
--f-sh1: 0.1;
--mp-b-bg: transparent;
}
html.bz {
--fg: #cce;
@@ -278,6 +279,7 @@ html.bz {
--f-h-b1: #34384e;
--mp-sh: #11121d;
/*--mp-b-bg: #2c3044;*/
}
html.by {
--bg: #f2f2f2;
@@ -321,6 +323,7 @@ html.c {
--u2-o-1-bg: #4cf;
--srv-1: #ea0;
--mp-b-bg: transparent;
}
html.cz {
--bgg: var(--bg-u2);
@@ -470,7 +473,6 @@ html.dz {
--fm-off: #f6c;
--mp-sh: var(--bg-d3);
--mp-b-bg: rgba(0,0,0,0.2);
--err-fg: #fff;
--err-bg: #a20;
@@ -664,7 +666,7 @@ a:hover {
background: var(--bg-d3);
text-decoration: underline;
}
#files thead {
#files thead th {
position: sticky;
top: -1px;
}
@@ -1261,8 +1263,12 @@ html.y #ops svg circle {
max-width: min(41em, calc(100% - 2.6em));
}
.opbox input {
position: relative;
margin: .5em;
}
#op_cfg input[type=text] {
top: -.3em;
}
.opview input[type=text] {
color: var(--fg);
background: var(--txt-bg);
@@ -1454,7 +1460,7 @@ html {
margin: .2em;
white-space: pre;
position: relative;
top: -.2em;
top: -.12em;
}
html.c .btn,
html.a .btn {
@@ -1581,11 +1587,27 @@ html.y #tree.nowrap .ntree a+a:hover {
}
#files>thead>tr>th.min,
#files td.min {
display: none;
display: none;
}
#files td:nth-child(2n) {
color: var(--tab-alt);
}
#plazy {
width: 1px;
height: 1px;
overflow: hidden;
white-space: nowrap;
}
#blazy {
text-align: center;
font-size: 1.2em;
margin: 1em 0;
}
#blazy code,
#blazy a {
font-size: 1.1em;
padding: 0 .2em;
}
.opwide,
#op_unpost,
#srch_form {
@@ -2383,6 +2405,9 @@ html.y #bbox-overlay figcaption a {
#u2conf.ww #u2c3w {
width: 29em;
}
#u2conf.ww #u2c3w.s {
width: 39em;
}
#u2conf .c,
#u2conf .c * {
text-align: center;
@@ -2475,13 +2500,34 @@ html.b #u2conf a.b:hover {
text-align: center;
border: .2em dashed rgba(128, 128, 128, 0.3);
}
#u2foot {
#u2foot,
#u2life {
color: var(--fg-max);
font-style: italic;
text-align: center;
font-size: 1.2em;
margin: .8em 0;
}
#u2life {
margin: 2.5em 0;
line-height: 1.5em;
}
#u2life div {
display: inline-block;
white-space: nowrap;
margin: 0 2em;
}
#u2life div:first-child {
margin-bottom: .2em;
}
#u2life small {
opacity: .6;
}
#lifew {
border-bottom: 1px dotted var(--fg-max);
}
#u2foot {
font-size: 1.2em;
font-style: italic;
}
#u2foot .warn {
font-size: 1.2em;
padding: .5em .8em;
@@ -2500,6 +2546,10 @@ html.b #u2conf a.b:hover {
#u2foot>*+* {
margin-top: 1.5em;
}
#u2life input {
width: 4em;
text-align: right;
}
.prog {
font-family: 'scp', monospace, monospace;
}
@@ -2720,7 +2770,6 @@ html.b #barpos,
html.b #barbuf,
html.b #pvol {
border-radius: .2em;
background: none;
}
html.b #barpos {
box-shadow: 0 0 0 1px rgba(0,0,0,0.4);
@@ -2762,6 +2811,7 @@ html.b #tree li {
html.b #tree li {
margin-left: .8em;
}
html.b #docul a,
html.b .ntree a {
padding: .6em .2em;
}
@@ -2893,3 +2943,28 @@ html.d #treepar {
margin-top: 1.7em;
}
}
@supports (display: grid) {
#ggrid {
display: grid;
margin: 0em 0.25em;
padding: unset;
grid-template-columns: repeat(auto-fit,var(--grid-sz));
justify-content: center;
gap: 1em;
}
html.b #ggrid {
padding: 0 2em 2em 0;
gap: .5em 3em;
}
#ggrid > a {
margin: unset;
padding: unset;
}
#ggrid>a>span {
text-align: center;
padding: 0.2em;
}
}

View File

@@ -146,8 +146,9 @@
have_acode = {{ have_acode|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }},
have_unpost = {{ have_unpost }},
have_zip = {{ have_zip|tojson }},
lifetime = {{ lifetime }},
turbolvl = {{ turbolvl }},
u2sort = "{{ u2sort }}",
have_emp = {{ have_emp|tojson }},

File diff suppressed because it is too large Load Diff

View File

@@ -40,7 +40,7 @@ var dbg = function () { };
link += parts[a] + (a < aa ? '/' : '');
o = mknod('a');
o.setAttribute('href', link);
o.textContent = uricom_dec(parts[a])[0] || 'top';
o.textContent = uricom_dec(parts[a]) || 'top';
dom_nav.appendChild(o);
}
})();
@@ -395,7 +395,7 @@ function init_toc() {
// collect vertical position of all toc items (headers in document)
function freshen_offsets() {
var top = window.pageYOffset || document.documentElement.scrollTop;
var top = yscroll();
for (var a = anchors.length - 1; a >= 0; a--) {
var y = top + anchors[a].elm.getBoundingClientRect().top;
y = Math.round(y * 10.0) / 10;
@@ -411,7 +411,7 @@ function init_toc() {
if (anchors.length == 0)
return;
var ptop = window.pageYOffset || document.documentElement.scrollTop;
var ptop = yscroll();
var hit = anchors.length - 1;
for (var a = 0; a < anchors.length; a++) {
if (anchors[a].y >= ptop - 8) { //???

View File

@@ -15,7 +15,7 @@ var dom_md = ebi('mt');
if (a > 0)
loc.push(n[a]);
var dec = uricom_dec(n[a])[0].replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
var dec = uricom_dec(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}

View File

@@ -236,6 +236,10 @@ html.y #tth {
max-height: 30em;
overflow: auto;
}
#modalc td {
text-align: unset;
padding: .2em;
}
@media (min-width: 40em) {
#modalc {
min-width: 30em;

View File

@@ -267,6 +267,9 @@ function U2pvis(act, btns, uc, st) {
fo.cb[nchunk] = cbd;
fo.bd += delta;
if (!fo.bd)
return;
var p = r.perc(fo.bd, fo.bd0, fo.bt, fobj.t_uploading);
fo.hp = f2f(p[0], 2) + '%, ' + p[1] + ', ' + f2f(p[2], 2) + ' MB/s';
@@ -612,6 +615,9 @@ function Donut(uc, st) {
var r = this,
el = null,
psvg = null,
tenstrobe = null,
tstrober = null,
strobes = [],
o = 20 * 2 * Math.PI,
optab = QS('#ops a[data-dest="up2k"]');
@@ -661,11 +667,15 @@ function Donut(uc, st) {
r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
el = QS('#ops a .donut');
clearTimeout(tenstrobe);
if (!ya) {
favico.upd();
wintitle();
if (document.visibilityState == 'hidden')
tenstrobe = setTimeout(enstrobe, 500); //debounce
}
};
r.do = function () {
if (!el)
return;
@@ -698,6 +708,54 @@ function Donut(uc, st) {
r.fc = 0;
}
};
function enstrobe() {
strobes = ['████████████████', '________________', '████████████████'];
tstrober = setInterval(strobe, 300);
if (uc.upsfx && actx && actx.state != 'suspended')
sfx();
// firefox may forget that filedrops are user-gestures so it can skip this:
if (uc.upnag && window.Notification && Notification.permission == 'granted')
new Notification(uc.nagtxt);
}
function strobe() {
var txt = strobes.pop();
wintitle(txt);
if (!txt)
clearInterval(tstrober);
}
function sfx() {
var osc = actx.createOscillator(),
gain = actx.createGain(),
gg = gain.gain,
ft = [660, 880, 440, 660, 880],
ofs = 0;
osc.connect(gain);
gain.connect(actx.destination);
var ct = actx.currentTime + 0.03;
osc.type = 'triangle';
while (ft.length)
osc.frequency.setTargetAtTime(
ft.shift(), ct + (ofs += 0.05), 0.001);
gg.value = 0.15;
gg.setTargetAtTime(0.8, ct, 0.01);
gg.setTargetAtTime(0.3, ct + 0.13, 0.01);
gg.setTargetAtTime(0, ct + ofs + 0.05, 0.02);
osc.start();
setTimeout(function () {
osc.stop();
osc.disconnect();
gain.disconnect();
}, 500);
}
}
@@ -719,12 +777,10 @@ function up2k_init(subtle) {
"gotallfiles": [gotallfiles] // hooks
};
if (window.WebAssembly) {
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
hws.push(new Worker('/.cpr/w.hash.js'));
console.log(hws.length + " hashers ready");
}
setTimeout(function () {
if (window.WebAssembly && !hws.length)
fetch('/.cpr/w.hash.js' + CB);
}, 1000);
function showmodal(msg) {
ebi('u2notbtn').innerHTML = msg;
@@ -803,12 +859,15 @@ function up2k_init(subtle) {
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo, false);
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null, false);
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort, false);
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!HTTPS || !CHROME || MOBILE), set_hashw, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE), set_hashw);
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
bcfg_bind(uc, 'upsfx', 'upsfx', false);
var st = {
"files": [],
@@ -869,11 +928,7 @@ function up2k_init(subtle) {
r.st = st;
r.uc = uc;
var bobslice = null;
if (window.File)
bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
if (!bobslice || !window.FileReader || !window.FileList)
if (!window.File || !File.prototype.slice || !window.FileReader || !window.FileList)
return un2k(L.u_ever);
var flag = false;
@@ -881,7 +936,17 @@ function up2k_init(subtle) {
set_fsearch();
function nav() {
ebi('file' + fdom_ctr).click();
start_actx();
// too buggy on chrome <= 72
var m = / Chrome\/([0-9]+)\./.exec(navigator.userAgent);
if (m && parseInt(m[1]) < 73)
return ebi('file' + fdom_ctr).click();
modal.confirm(L.u_nav_m,
function () { ebi('file' + fdom_ctr).click(); },
function () { ebi('dir' + fdom_ctr).click(); },
null, L.u_nav_b);
}
ebi('u2btn').onclick = nav;
@@ -974,6 +1039,36 @@ function up2k_init(subtle) {
}
ebi('drops').onclick = offdrag; // old ff
function gotdir(e) {
ev(e);
var good_files = [],
nil_files = [],
bad_files = [];
for (var a = 0, aa = e.target.files.length; a < aa; a++) {
var fobj = e.target.files[a],
name = fobj.webkitRelativePath,
dst = good_files;
try {
if (!name)
throw 1;
if (fobj.size < 1)
dst = nil_files;
}
catch (ex) {
dst = bad_files;
}
dst.push([fobj, name]);
}
if (!good_files.length && bad_files.length)
return toast.err(30, "that's not a folder!\n\nyour browser is too old,\nplease try dragdrop instead");
return read_dirs(null, [], [], good_files, nil_files, bad_files);
}
function gotfile(e) {
ev(e);
nenters = 0;
@@ -1043,9 +1138,8 @@ function up2k_init(subtle) {
}
dst.push([fobj, fobj.name]);
}
if (dirs) {
return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
}
start_actx(); // good enough for chrome; not firefox
return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
}
function rd_flatten(pf, dirs) {
@@ -1150,6 +1244,7 @@ function up2k_init(subtle) {
msg += L.u_just1;
return modal.alert(msg, function () {
start_actx();
gotallfiles(good_files, nil_files, []);
});
}
@@ -1161,8 +1256,10 @@ function up2k_init(subtle) {
msg += L.u_just1;
return modal.confirm(msg, function () {
start_actx();
gotallfiles(good_files.concat(nil_files), [], []);
}, function () {
start_actx();
gotallfiles(good_files, [], []);
});
}
@@ -1173,23 +1270,40 @@ function up2k_init(subtle) {
return a < b ? -1 : a > b ? 1 : 0;
});
var msg = [L.u_asku.format(good_files.length, esc(get_vpath())) + '<ul>'];
var msg = [];
if (lifetime)
msg.push('<b>' + L.u_up_life.format(lhumantime(st.lifetime || lifetime)) + '</b>\n\n');
if (FIREFOX && good_files.length > 3000)
msg.push(L.u_ff_many + "\n\n");
msg.push(L.u_asku.format(good_files.length, esc(get_vpath())) + '<ul>');
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
if (uc.ask_up && !uc.fsearch)
return modal.confirm(msg.join('') + '</ul>', function () {
start_actx();
up_them(good_files);
toast.inf(15, L.u_unpt);
toast.inf(15, L.u_unpt, L.u_unpt);
}, null);
up_them(good_files);
}
function up_them(good_files) {
start_actx();
var evpath = get_evpath(),
draw_each = good_files.length < 50;
if (window.WebAssembly && !hws.length) {
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
hws.push(new Worker('/.cpr/w.hash.js' + CB));
console.log(hws.length + " hashers");
}
if (!uc.az)
good_files.sort(function (a, b) {
a = a[0].size;
@@ -1249,9 +1363,7 @@ function up2k_init(subtle) {
st.bytes.total += entry.size;
st.files.push(entry);
if (!entry.size)
push_t(st.todo.handshake, entry);
else if (uc.turbo)
if (uc.turbo)
push_t(st.todo.head, entry);
else
push_t(st.todo.hash, entry);
@@ -1261,14 +1373,25 @@ function up2k_init(subtle) {
pvis.changecard(pvis.act);
}
ebi('u2tabw').className = 'ye';
setTimeout(function () {
if (!actx || actx.state != 'suspended' || toast.tag == L.u_unpt)
return;
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
}, 500);
}
function more_one_file() {
fdom_ctr++;
var elm = mknod('div');
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" tabindex="-1" />'.format(fdom_ctr);
elm.innerHTML = (
'<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" tabindex="-1" />' +
'<input id="dir{0}" type="file" name="dir{0}[]" multiple="multiple" tabindex="-1" webkitdirectory />'
).format(fdom_ctr);
ebi('u2form').appendChild(elm);
ebi('file' + fdom_ctr).onchange = gotfile;
ebi('dir' + fdom_ctr).onchange = gotdir;
}
more_one_file();
@@ -1372,7 +1495,7 @@ function up2k_init(subtle) {
st.oserr = true;
var msg = HTTPS ? L.u_emtleak3 : L.u_emtleak2.format((window.location + '').replace(':', 's:'));
modal.alert(L.u_emtleak1 + msg + L.u_emtleak4);
modal.alert(L.u_emtleak1 + msg + L.u_emtleak4 + (CHROME ? L.u_emtleakc : FIREFOX ? L.u_emtleakf : ''));
}
/////
@@ -1479,6 +1602,7 @@ function up2k_init(subtle) {
if (!is_busy) {
uptoast();
//throw console.hist.join('\n');
}
else {
timer.add(donut.do);
@@ -1598,15 +1722,15 @@ function up2k_init(subtle) {
console.log('toast', ok, ng);
if (ok && ng)
toast.warn(t, (sr ? L.ur_sm : L.ur_um).format(ok, ng));
toast.warn(t, uc.nagtxt = (sr ? L.ur_sm : L.ur_um).format(ok, ng));
else if (ok > 1)
toast.ok(t, (sr ? L.ur_aso : L.ur_auo).format(ok));
toast.ok(t, uc.nagtxt = (sr ? L.ur_aso : L.ur_auo).format(ok));
else if (ok)
toast.ok(t, sr ? L.ur_1so : L.ur_1uo);
toast.ok(t, uc.nagtxt = sr ? L.ur_1so : L.ur_1uo);
else if (ng > 1)
toast.err(t, (sr ? L.ur_asn : L.ur_aun).format(ng));
toast.err(t, uc.nagtxt = (sr ? L.ur_asn : L.ur_aun).format(ng));
else if (ng)
toast.err(t, sr ? L.ur_1sn : L.ur_1un);
toast.err(t, uc.nagtxt = sr ? L.ur_1sn : L.ur_1un);
timer.rm(etafun);
timer.rm(donut.do);
@@ -1687,6 +1811,9 @@ function up2k_init(subtle) {
function exec_hash() {
var t = st.todo.hash.shift();
if (!t.size)
return st.todo.handshake.push(t);
st.busy.hash.push(t);
st.nfile.hash = t.n;
t.t_hashing = Date.now();
@@ -1700,7 +1827,8 @@ function up2k_init(subtle) {
pvis.setab(t.n, nchunks);
pvis.move(t.n, 'bz');
if (nchunks > 1 && hws.length && uc.hashw)
if (hws.length && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
// resolving subtle.digest w/o worker takes 1sec on blur if the actx hack breaks
return wexec_hash(t, chunksize, nchunks);
var segm_next = function () {
@@ -1746,8 +1874,7 @@ function up2k_init(subtle) {
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
};
bpend++;
reader.readAsArrayBuffer(
bobslice.call(t.fobj, car, cdr));
reader.readAsArrayBuffer(t.fobj.slice(car, cdr));
return true;
};
@@ -1921,7 +2048,10 @@ function up2k_init(subtle) {
var xhr = new XMLHttpRequest();
xhr.onerror = function () {
console.log('head onerror, retrying', t);
console.log('head onerror, retrying', t.name, t);
if (!toast.visible)
toast.warn(9.98, L.u_enethd + "\n\nfile: " + t.name, t);
apop(st.busy.head, t);
st.todo.head.unshift(t);
};
@@ -1977,22 +2107,25 @@ function up2k_init(subtle) {
t.t_busied = me;
if (keepalive)
console.log("sending keepalive handshake", t);
console.log("sending keepalive handshake", t.name, t);
var xhr = new XMLHttpRequest();
xhr.onerror = function () {
if (t.t_busied != me) {
console.log('zombie handshake onerror,', t);
console.log('zombie handshake onerror,', t.name, t);
return;
}
console.log('handshake onerror, retrying', t);
if (!toast.visible)
toast.warn(9.98, L.u_eneths + "\n\nfile: " + t.name, t);
console.log('handshake onerror, retrying', t.name, t);
apop(st.busy.handshake, t);
st.todo.handshake.unshift(t);
t.keepalive = keepalive;
};
function orz(e) {
if (t.t_busied != me) {
console.log('zombie handshake onload,', t);
console.log('zombie handshake onload,', t.name, t);
return;
}
if (xhr.status == 200) {
@@ -2002,6 +2135,9 @@ function up2k_init(subtle) {
return;
}
if (toast.tag === t)
toast.ok(5, L.u_fixed);
var response = JSON.parse(xhr.responseText);
if (!response.name) {
var msg = '',
@@ -2087,7 +2223,7 @@ function up2k_init(subtle) {
'npart': t.postlist[a]
});
msg = L.u_upping;
msg = null;
done = false;
if (sort)
@@ -2097,7 +2233,10 @@ function up2k_init(subtle) {
a.npart < b.npart ? -1 : 1;
});
}
pvis.seth(t.n, 1, msg);
if (msg)
pvis.seth(t.n, 1, msg);
apop(st.busy.handshake, t);
if (done) {
@@ -2108,7 +2247,7 @@ function up2k_init(subtle) {
spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
pvis.seth(t.n, 2, 'hash {0}, up {1} MB/s'.format(
f2f(spd1, 2), isNaN(spd2) ? '--' : f2f(spd2, 2)));
f2f(spd1, 2), !isNum(spd2) ? '--' : f2f(spd2, 2)));
pvis.move(t.n, 'ok');
if (!pvis.ctr.bz && !pvis.ctr.q)
@@ -2124,7 +2263,7 @@ function up2k_init(subtle) {
}
else {
pvis.seth(t.n, 1, "ERROR");
pvis.seth(t.n, 2, L.u_ehstmp);
pvis.seth(t.n, 2, L.u_ehstmp, t);
var err = "",
rsp = (xhr.responseText + ''),
@@ -2145,7 +2284,6 @@ function up2k_init(subtle) {
return;
}
st.bytes.finished += t.size;
var err_pend = rsp.indexOf('partial upload exists') + 1,
err_dupe = rsp.indexOf('file already exists') + 1;
@@ -2164,6 +2302,9 @@ function up2k_init(subtle) {
return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
if (err != "") {
if (!t.t_uploading)
st.bytes.finished += t.size;
pvis.seth(t.n, 1, "ERROR");
pvis.seth(t.n, 2, err);
pvis.move(t.n, 'ng');
@@ -2173,7 +2314,7 @@ function up2k_init(subtle) {
return;
}
err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
xhrchk(xhr, err + ";\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found");
xhrchk(xhr, err + "\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found", "warn", t);
}
}
xhr.onload = function (e) {
@@ -2184,6 +2325,7 @@ function up2k_init(subtle) {
"name": t.name,
"size": t.size,
"lmod": t.lmod,
"life": st.lifetime,
"hash": t.hash
};
if (t.srch)
@@ -2201,7 +2343,14 @@ function up2k_init(subtle) {
function can_upload_next() {
var upt = st.todo.upload[0],
upf = st.files[upt.nfile];
upf = st.files[upt.nfile],
now = Date.now();
for (var a = 0, aa = st.busy.handshake.length; a < aa; a++) {
var hs = st.busy.handshake[a];
if (hs.n < upt.nfile && hs.t_busied > now - 10 * 1000 && !st.files[hs.n].bytes_uploaded)
return false; // handshake race; wait for lexically first
}
if (upf.sprs)
return true;
@@ -2244,10 +2393,10 @@ function up2k_init(subtle) {
}
else if (txt.indexOf('already got that') + 1 ||
txt.indexOf('already being written') + 1) {
console.log("ignoring dupe-segment error", t);
console.log("ignoring dupe-segment error", t.name, t);
}
else {
xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)");
xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
chill(t);
}
@@ -2279,9 +2428,9 @@ function up2k_init(subtle) {
return;
if (!toast.visible)
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name));
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
console.log('chunkpit onerror,', ++tries, t);
console.log('chunkpit onerror,', ++tries, t.name, t);
orz2(xhr);
};
xhr.open('POST', t.purl, true);
@@ -2295,7 +2444,7 @@ function up2k_init(subtle) {
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
xhr.responseType = 'text';
xhr.send(bobslice.call(t.fobj, car, cdr));
xhr.send(t.fobj.slice(car, cdr));
}
do_send();
}
@@ -2311,9 +2460,8 @@ function up2k_init(subtle) {
wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx,
write = has(perms, 'write'),
wide = write && wem > 54 ? 'w' : '',
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
wide = wem > 54 ? 'w' : '',
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn');
if (btn.parentNode !== parent) {
@@ -2321,8 +2469,8 @@ function up2k_init(subtle) {
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
}
wide = write && wem > 82 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
wide = wem > 82 ? 'ww' : wide;
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) {
ebi('u2conf').className = wide;
@@ -2411,19 +2559,83 @@ function up2k_init(subtle) {
}
draw_turbo();
// build / refresh the upload-lifetime ("self-destruct") UI panel;
// hides the panel entirely when the volume has no lifetime limit
function draw_life() {
var el = ebi('u2life');
if (!lifetime) {
// volume does not support upload lifetimes; reset state and hide
el.style.display = 'none';
el.innerHTML = '';
st.lifetime = 0;
return;
}
// lifetime is irrelevant in file-search mode, so hide it there
el.style.display = uc.fsearch ? 'none' : '';
el.innerHTML = '<div>' + L.u_life_cfg + '</div><div>' + L.u_life_est + '</div><div id="undor"></div>';
// restore the user's saved preference, clamped to the volume maximum
set_life(Math.min(lifetime, icfg_get('lifetime', lifetime)));
ebi('lifem').oninput = ebi('lifeh').oninput = mod_life;
tt.att(ebi('u2life'));
}
draw_life();
// oninput handler for the lifetime number-fields;
// converts the entered value to seconds, clamps it, and persists it
function mod_life(e) {
var el = e.target,
// seconds-per-unit read from the field's "p" attribute
// (presumably 60 for the minutes box, 3600 for hours -- set in markup; TODO confirm)
pow = parseInt(el.getAttribute('p')),
v = parseInt(el.value);
if (!isNum(v))
return;
// hide a previous out-of-range warning once the user edits again
if (toast.tag == mod_life)
toast.hide();
v *= pow;
if (v > lifetime) {
// clamp to the volume's maximum lifetime and tell the user
v = lifetime;
toast.warn(20, L.u_life_max.format(lhumantime(lifetime)), mod_life);
}
swrite('lifetime', v);
set_life(v);
}
// apply lifetime v (seconds): sync the minute/hour input fields,
// update the unpost-window hint, store into upload state, refresh ETA
function set_life(v) {
//ebi('lifes').value = v;
ebi('lifem').value = parseInt(v / 60);
ebi('lifeh').value = parseInt(v / 3600);
// remaining unpost (undo) window after the file expires;
// non-positive means undo will no longer be possible
var undo = have_unpost - (v || lifetime);
ebi('undor').innerHTML = undo <= 0 ?
L.u_unp_ng : L.u_unp_ok.format(lhumantime(undo));
st.lifetime = v;
rel_life();
}
// render the absolute (local-time) expiry timestamp for the chosen lifetime
function rel_life() {
if (!lifetime)
return;
try {
// shift the epoch by the timezone offset so unix2iso prints local time
ebi('lifew').innerHTML = unix2iso((st.lifetime || lifetime) +
Date.now() / 1000 - new Date().getTimezoneOffset() * 60
).replace(' ', ', ').slice(0, -3);
}
catch (ex) { }
}
// keep the displayed expiry time fresh as the clock advances
setInterval(rel_life, 9000);
// toggle potato (minimal-rendering) mode and refresh dependent UI state
function set_potato() {
pvis.potato();
set_fsearch();
}
function set_fsearch(new_state) {
var fixed = false;
var fixed = false,
can_write = false;
if (!ebi('fsearch')) {
new_state = false;
}
else if (perms.length) {
if (!has(perms, 'write')) {
if (!(can_write = has(perms, 'write'))) {
new_state = true;
fixed = true;
}
@@ -2433,12 +2645,11 @@ function up2k_init(subtle) {
}
}
if (new_state !== undefined) {
uc.fsearch = new_state;
bcfg_set('fsearch', uc.fsearch);
}
if (new_state !== undefined)
bcfg_set('fsearch', uc.fsearch = new_state);
try {
clmod(ebi('u2c3w'), 's', !can_write);
QS('label[for="fsearch"]').style.display = QS('#fsearch').style.display = fixed ? 'none' : '';
}
catch (ex) { }
@@ -2459,6 +2670,7 @@ function up2k_init(subtle) {
ebi('u2mu').style.display = potato ? '' : 'none';
draw_turbo();
draw_life();
onresize();
}
@@ -2469,7 +2681,7 @@ function up2k_init(subtle) {
}
catch (ex) {
toast.err(5, "not supported on your browser:\n" + esc(basenames(ex)));
bcfg_set('flag_en', false);
bcfg_set('flag_en', uc.flag_en = false);
}
}
else if (!uc.flag_en && flag) {
@@ -2491,11 +2703,32 @@ function up2k_init(subtle) {
function set_hashw() {
if (!window.WebAssembly) {
bcfg_set('hashw', false);
bcfg_set('hashw', uc.hashw = false);
toast.err(10, L.u_nowork);
}
}
// toggle desktop-notifications for upload completion;
// requires https and the browser Notification permission
function set_upnag(en) {
function nopenag() {
// cannot nag; force the toggle off and tell the user why
bcfg_set('upnag', uc.upnag = false);
toast.err(10, "https only");
}
function chknag() {
// permission prompt was answered; disable the toggle if not granted
if (Notification.permission != 'granted')
nopenag();
}
if (!window.Notification || !HTTPS)
return nopenag();
if (en && Notification.permission == 'default')
// ask for permission; both resolve and reject paths re-check the outcome
Notification.requestPermission().then(chknag, chknag);
}
// drop a stale saved preference if notifications are unavailable or denied
if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
bcfg_set('upnag', uc.upnag = false);
ebi('nthread_add').onclick = function (e) {
ev(e);
bumpthread(1);

View File

@@ -7,16 +7,19 @@ if (!window['console'])
var wah = '',
CB = '?_=' + Date.now(),
HALFMAX = 8192 * 8192 * 8192 * 8192,
HTTPS = (window.location + '').indexOf('https:') === 0,
TOUCH = 'ontouchstart' in window,
MOBILE = TOUCH,
CHROME = !!window.chrome,
FIREFOX = ('netscape' in window) && / rv:/.test(navigator.userAgent),
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
try {
CB = '?' + document.currentScript.src.split('?').pop();
if (navigator.userAgentData.mobile)
MOBILE = true;
@@ -108,7 +111,7 @@ catch (ex) {
console.log = console.stdlog;
console.log('console capture failed', ex);
}
var crashed = false, ignexd = {};
var crashed = false, ignexd = {}, evalex_fatal = false;
function vis_exh(msg, url, lineNo, columnNo, error) {
if ((msg + '').indexOf('ResizeObserver') + 1)
return; // chrome issue 809574 (benign, from <video>)
@@ -119,7 +122,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
if (!/\.js($|\?)/.exec('' + url))
return; // chrome debugger
if ((url + '').indexOf(' > eval') + 1)
if ((url + '').indexOf(' > eval') + 1 && !evalex_fatal)
return; // md timer
var ekey = url + '\n' + lineNo + '\n' + msg;
@@ -132,7 +135,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
'<h1>you hit a bug!</h1>',
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
];
@@ -232,7 +235,7 @@ function ev(e) {
return;
if (e.preventDefault)
e.preventDefault()
e.preventDefault();
if (e.stopPropagation)
e.stopPropagation();
@@ -387,6 +390,21 @@ if (window.matchMedia) {
}
// vertical scroll position of the page; once a working source is
// detected (nonzero value), replaces itself with a direct reader
function yscroll() {
var de = document.documentElement;
if (de.scrollTop) {
// documentElement works in this browser; memoize a fast accessor
window.yscroll = function () { return de.scrollTop; };
return window.yscroll();
}
if (window.pageYOffset) {
window.yscroll = function () { return window.pageYOffset; };
return window.yscroll();
}
return 0; // not scrolled yet (or cannot tell); probe again next call
}
function showsort(tab) {
var v, vn, v1, v2, th = tab.tHead,
sopts = jread('fsort', [["href", 1, ""]]);
@@ -526,7 +544,7 @@ function linksplit(rp, id) {
link = rp.slice(0, ofs + 1);
rp = rp.slice(ofs + 1);
}
var vlink = esc(uricom_dec(link)[0]);
var vlink = esc(uricom_dec(link));
if (link.indexOf('/') !== -1) {
vlink = vlink.slice(0, -1) + '<span>/</span>';
@@ -584,6 +602,17 @@ function url_enc(txt) {
function uricom_dec(txt) {
try {
return decodeURIComponent(txt);
}
catch (ex) {
console.log("ucd-err [" + txt + "]");
return txt;
}
}
function uricom_sdec(txt) {
try {
return [decodeURIComponent(txt), true];
}
@@ -597,7 +626,7 @@ function uricom_dec(txt) {
function uricom_adec(arr, li) {
var ret = [];
for (var a = 0; a < arr.length; a++) {
var txt = uricom_dec(arr[a])[0];
var txt = uricom_dec(arr[a]);
ret.push(li ? '<li>' + esc(txt) + '</li>' : txt);
}
@@ -619,7 +648,7 @@ function get_evpath() {
function get_vpath() {
return uricom_dec(get_evpath())[0];
return uricom_dec(get_evpath());
}
@@ -649,6 +678,14 @@ function s2ms(s) {
}
// true if v is a finite number (rejects numeric strings);
// prefers the native Number.isFinite, falls back to an es5 shim
var isNum = function (v) {
var f = parseFloat(v);
return f === v && !isNaN(v - f);
};
if (window.Number && Number.isFinite)
isNum = Number.isFinite;
function f2f(val, nd) {
// 10.toFixed(1) returns 10.00 for certain values of 10
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
@@ -657,12 +694,13 @@ function f2f(val, nd) {
function humansize(b, terse) {
var i = 0, u = terse ? ['B', 'K', 'M', 'G'] : ['B', 'KB', 'MB', 'GB'];
while (b >= 1000 && i < u.length) {
var i = 0, u = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
while (b >= 1000 && i < u.length - 1) {
b /= 1024;
i += 1;
}
return f2f(b, b >= 100 ? 0 : b >= 10 ? 1 : 2) + ' ' + u[i];
return (f2f(b, b >= 100 ? 0 : b >= 10 ? 1 : 2) +
' ' + (terse ? u[i].charAt(0) : u[i]));
}
@@ -679,7 +717,7 @@ function humantime(v) {
}
function shumantime(v) {
function shumantime(v, long) {
if (v < 10)
return f2f(v, 2) + 's';
if (v < 60)
@@ -699,11 +737,27 @@ function shumantime(v) {
var v1 = parseInt(v / m1),
v2 = ('0' + parseInt((v % m1) / m2)).slice(-2);
return v1 + ch + (v1 >= 10 ? '' : v2);
return v1 + ch + (v1 >= 10 || v2 == '00' ? '' : v2 + (
long && a < st.length - 1 ? st[a + 1][2] : ''));
}
}
// long human-readable duration: expands shumantime's compact form
// (e.g. "1h30m") into localized words using the L.ht_* translations
function lhumantime(v) {
var t = shumantime(v, 1),
// space out each unit letter, then split into [num, unit, num, unit, ...]
tp = t.replace(/([a-z])/g, " $1 ").split(/ /g).slice(0, -1);
// keep the compact form when no translations are loaded, there is only a
// single component, or the unit looks like a placeholder ('$') -- TODO confirm
if (!window.L || tp.length < 2 || tp[1].indexOf('$') + 1)
return t;
var ret = '';
for (var a = 0; a < tp.length; a += 2)
// L['ht_'+unit] presumably holds "singular!plural-suffix"; the regex
// strips the marker (and the suffix when the count is 1)
ret += tp[a] + ' ' + L['ht_' + tp[a + 1]].replace(tp[a] == 1 ? /!.*/ : /!/, '') + L.ht_and;
// drop the trailing " and " separator
return ret.slice(0, -L.ht_and.length);
}
function clamp(v, a, b) {
return Math.min(Math.max(v, a), b);
}
@@ -774,7 +828,7 @@ function fcfg_get(name, defval) {
var o = ebi(name),
val = parseFloat(sread(name));
if (isNaN(val))
if (!isNum(val))
return parseFloat(o ? o.value : defval);
if (o)
@@ -865,14 +919,18 @@ function scfg_bind(obj, oname, cname, defval, cb) {
function hist_push(url) {
console.log("h-push " + url);
if (window.history && history.pushState)
try {
history.pushState(url, url, url);
}
catch (ex) { }
}
function hist_replace(url) {
console.log("h-repl " + url);
if (window.history && history.replaceState)
try {
history.replaceState(url, url, url);
}
catch (ex) { } // ff "The operation is insecure." on rapid switches
}
function sethash(hv) {
@@ -951,7 +1009,7 @@ var tt = (function () {
prev = this;
};
var tev;
var tev, vh;
r.dshow = function (e) {
clearTimeout(tev);
if (!r.getmsg(this))
@@ -964,6 +1022,7 @@ var tt = (function () {
if (TOUCH)
return;
vh = window.innerHeight;
this.addEventListener('mousemove', r.move);
clmod(r.th, 'act', 1);
r.move(e);
@@ -1025,7 +1084,7 @@ var tt = (function () {
};
r.hide = function (e) {
ev(e);
//ev(e); // eats checkbox-label clicks
clearTimeout(tev);
window.removeEventListener('scroll', r.hide);
@@ -1042,8 +1101,9 @@ var tt = (function () {
};
r.move = function (e) {
var sy = e.clientY + 128 > vh ? -1 : 1;
r.th.style.left = (e.pageX + 12) + 'px';
r.th.style.top = (e.pageY + 12) + 'px';
r.th.style.top = (e.pageY + 12 * sy) + 'px';
};
if (IPHONE) {
@@ -1113,6 +1173,7 @@ var toast = (function () {
document.body.appendChild(obj);
r.visible = false;
r.txt = null;
r.tag = obj; // filler value (null is scary)
function scrollchk() {
if (scrolling)
@@ -1141,9 +1202,10 @@ var toast = (function () {
clearTimeout(te);
clmod(obj, 'vis');
r.visible = false;
r.tag = obj;
};
r.show = function (cl, sec, txt) {
r.show = function (cl, sec, txt, tag) {
clearTimeout(te);
if (sec)
te = setTimeout(r.hide, sec * 1000);
@@ -1159,19 +1221,20 @@ var toast = (function () {
timer.add(scrollchk);
r.visible = true;
r.txt = txt;
r.tag = tag;
};
r.ok = function (sec, txt) {
r.show('ok', sec, txt);
r.ok = function (sec, txt, tag) {
r.show('ok', sec, txt, tag);
};
r.inf = function (sec, txt) {
r.show('inf', sec, txt);
r.inf = function (sec, txt, tag) {
r.show('inf', sec, txt, tag);
};
r.warn = function (sec, txt) {
r.show('warn', sec, txt);
r.warn = function (sec, txt, tag) {
r.show('warn', sec, txt, tag);
};
r.err = function (sec, txt) {
r.show('err', sec, txt);
r.err = function (sec, txt, tag) {
r.show('err', sec, txt, tag);
};
return r;
@@ -1185,9 +1248,16 @@ var modal = (function () {
cb_up = null,
cb_ok = null,
cb_ng = null,
prim = '<a href="#" id="modal-ok">OK</a>',
sec = '<a href="#" id="modal-ng">Cancel</a>',
tok, tng, prim, sec, ok_cancel;
r.load = function () {
tok = (window.L && L.m_ok) || 'OK';
tng = (window.L && L.m_ng) || 'Cancel';
prim = '<a href="#" id="modal-ok">' + tok + '</a>';
sec = '<a href="#" id="modal-ng">' + tng + '</a>';
ok_cancel = WINDOWS ? prim + sec : sec + prim;
};
r.load();
r.busy = false;
@@ -1294,17 +1364,17 @@ var modal = (function () {
r.show(html);
}
r.confirm = function (html, cok, cng, fun) {
r.confirm = function (html, cok, cng, fun, btns) {
q.push(function () {
_confirm(lf2br(html), cok, cng, fun);
_confirm(lf2br(html), cok, cng, fun, btns);
});
next();
}
function _confirm(html, cok, cng, fun) {
function _confirm(html, cok, cng, fun, btns) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : cng;
cb_up = fun;
html += '<div id="modalb">' + ok_cancel + '</div>';
html += '<div id="modalb">' + (btns || ok_cancel) + '</div>';
r.show(html);
}
@@ -1406,8 +1476,10 @@ function repl(e) {
if (!cmd)
return toast.inf(3, 'eval aborted');
if (cmd.startsWith(','))
return modal.alert(esc(eval(cmd.slice(1)) + ''))
if (cmd.startsWith(',')) {
evalex_fatal = true;
return modal.alert(esc(eval(cmd.slice(1)) + ''));
}
try {
modal.alert(esc(eval(cmd) + ''));
@@ -1535,18 +1607,18 @@ var favico = (function () {
var cf_cha_t = 0;
function xhrchk(xhr, prefix, e404) {
function xhrchk(xhr, prefix, e404, lvl, tag) {
if (xhr.status < 400 && xhr.status >= 200)
return true;
if (xhr.status == 403)
return toast.err(0, prefix + (window.L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"));
return toast.err(0, prefix + (window.L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
if (xhr.status == 404)
return toast.err(0, prefix + e404);
return toast.err(0, prefix + e404, tag);
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
fun = toast.err;
fun = toast[lvl || 'err'];
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
var now = Date.now(), td = now - cf_cha_t;
@@ -1563,5 +1635,5 @@ function xhrchk(xhr, prefix, e404) {
document.body.appendChild(fr);
}
return fun(0, prefix + xhr.status + ": " + errtxt);
return fun(0, prefix + xhr.status + ": " + errtxt, tag);
}

View File

@@ -51,6 +51,7 @@ onmessage = (d) => {
hash_calc(gc1);
}
catch (ex) {
busy = false;
postMessage(["panic", ex + '']);
}
};
@@ -67,8 +68,7 @@ onmessage = (d) => {
};
//console.log('[ w] %d read bgin', nchunk);
busy = true;
reader.readAsArrayBuffer(
File.prototype.slice.call(fobj, car, cdr));
reader.readAsArrayBuffer(fobj.slice(car, cdr));
var hash_calc = function (buf) {

View File

@@ -1,3 +1,143 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0925-1236 `v1.4.2` fuhgeddaboudit
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
* forget incoming uploads by deleting the name-reservation
* (the zerobyte file with the actual filename, not the .PARTIAL)
* can take 5min to kick in
## bugfixes
* zfs on ubuntu 20.04 would reject files with big unicode names such as `148. Профессор Лебединский, Виктор Бондарюк, Дмитрий Нагиев - Я её хой (Я танцую пьяный на столе) (feat. Виктор Бондарюк & Дмитрий Нагиев).mp3`
* usually not a problem since copyparty truncates names to fit filesystem limits, except zfs uses a nonstandard errorcode
* in the "print-message-to-serverlog" feature, a unicode message larger than one tcp-frame could decode incorrectly
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0924-1245 `v1.4.1` fix api compat
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## bugfixes
* [v1.4.0](https://github.com/9001/copyparty/releases/tag/v1.4.0) accidentally required all clients to use the new up2k.js to continue uploading; support the old js too
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0923-2053 `v1.4.0` mostly reliable
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
* huge folders are lazily rendered for a massive speedup, #11
* also reduces the number of `?tree` requests; helps a tiny bit on server load
* [selfdestruct timer](https://github.com/9001/copyparty#self-destruct) on uploaded files -- see link for howto and side-effects
* ban clients trying to bruteforce passwords
* arg `--ban-pw`, default `9,60,1440`, bans for 1440min after 9 wrong passwords in 60min
* clients repeatedly trying the same password (due to a bug or whatever) are not counted
* does a `/64` range-ban for IPv6 offenders
* arg `--ban-404`, disabled by default, bans for excessive 404s / directory-scanning
* but that breaks up2k turbo-mode and probably some other eccentric usecases
* waveform seekbar [(screenshot)](https://user-images.githubusercontent.com/241032/192042695-522b3ec7-6845-494a-abdb-d1c0d0e23801.png)
* the up2k upload button can do folders recursively now
* but only a single folder can be selected at a time, making drag-drop the obvious choice still
* gridview is now less jank, #12
* togglebuttons for desktop-notifications and audio-jingle when upload completes
* stop exposing uploader IPs when avoiding filename collisions
* IPs are now HMAC'ed with urandom stored at `~/.config/copyparty/iphash`
* stop crashing chrome; generate PNGs rather than SVGs for filetype icons
* terminate connections with SHUT_WR and flush with siocoutq
* makes buggy enterprise proxies behave less buggy
* do a read-spin on windows for almost the same effect
* improved upload scheduling
* unfortunately removes the `0.0%, NaN:aN, N.aN MB/s` easteregg
* arg `--magic` enables filetype detection on nameless uploads based on libmagic
* mtp modifiers to let tagparsers keep their stdout/stderr instead of capturing
* `c0` disables all capturing, `c1` captures stdout only, `c2` only stderr, and `c3` (default) captures both
* arg `--write-uplog` enables the old default of writing upload reports on POSTs
* kinda pointless and was causing issues in prisonparty
* [upload modifiers](https://github.com/9001/copyparty#write) for terse replies and to randomize filenames
* other optimizations
* 30% faster tag collection on directory listings
* 8x faster rendering of huge tagsets
* new mtps [guestbook](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/guestbook.py) and [guestbook-read](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/guestbook-read.py), for example for comment-fields on uploads
* arg `--stackmon` now takes dateformat filenames to produce multiple files
* arg `--mtag-vv` to debug tagparser configs
* arg `--version` shows copyparty version and exits
* arg `--license` shows a list of embedded dependencies + their licenses
* arg `--no-forget` and volflag `:c,noforget` keeps deleted files in the up2k db/index
* useful if you're shuffling uploads to s3/gdrive/etc and still want deduplication
## bugfixes
* upload deduplication using symlinks on windows
* increase timeouts to run better on servers with extremely overloaded HDDs
* arg `--mtag-to` (default 60 sec, was 10) can be reduced for faster tag scanning
* incorrect filekeys for files symlinked into another volume
* playback could start mid-song if skipping back and forth between songs
* use affinity mask to determine how many CPU cores are available
* restore .bin-suffix for nameless PUT/POSTs (disappeared in v1.0.11)
* fix glitch in uploader-UI when upload queue is bigger than 1 TiB
* avoid a firefox race-condition accessing the navigation history
* sfx tmpdir keepalive when flipflopping between unix users
* reject anon ftp if anon has no read/write
* improved autocorrect for poor ffmpeg builds
* patch popen on older pythons so collecting tags on windows is always possible
* misc ui/ux fixes
* filesearch layout in read-only folders
* more comfy fadein/fadeout on play/pause
* total-ETA going crazy when an overloaded server drops requests
* stop trying to play into the next folder while in search results
* improve warnings/errors in the uploader ui
* some errors which should have been warnings are now warnings
* autohide warnings/errors when they are remedied
* delay starting the audiocontext until necessary
* reduces cpu-load by 0.2% and fixes chrome claiming the tab is playing audio
# copyparty.exe
now introducing [copyparty.exe](https://github.com/9001/copyparty/releases/download/v1.4.0/copyparty.exe)!   only suitable for the rainiest of days ™
[first thing you'll see](https://user-images.githubusercontent.com/241032/192070274-bfe0bfef-2293-40fc-8852-fcf4f7a90043.png) when you run it is a warning to **«please use the [python-sfx](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead»**,
* `copyparty.exe` was compiled using 32bit python3.7 to support windows7, meaning it won't receive any security patches
* `copyparty-sfx.py` uses your system libraries instead so it'll stay safe for much longer while also having better performance
so the exe might be super useful in a pinch on a secluded LAN but otherwise *Absolutely Not Recommended*
you can download [ffmpeg](https://ocv.me/stuff/bin/ffmpeg.exe) and [ffprobe](https://ocv.me/stuff/bin/ffprobe.exe) into the same folder if you want multimedia-info, audio-transcoding or thumbnails/spectrograms/waveforms -- those binaries were [built](https://github.com/9001/copyparty/tree/hovudstraum/scripts/pyinstaller#ffmpeg) with just enough features to cover what copyparty wants, but much like copyparty.exe itself (so due to security reasons) it is strongly recommended to instead grab a [recent official build](https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip) every once in a while
## and finally some good news
* the chrome memory leak will be [fixed in v107](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816)
* and firefox may fix the crash in [v106 or so](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500)
* and the release title / this season's codename stems from a cpp instance recently being slammed with terabytes of uploads running on a struggling server mostly without breaking a sweat 👍
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0818-1724 `v1.3.16` gc kiting
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## bugfixes
* found a janky workaround for [the remaining chrome wasm gc bug](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816)
* worker-global typedarray holding on to the first and last byte of the filereader output while wasm chews on it
* overhead is small enough, slows down firefox by 2~3%
* seems to work on many chrome versions but no guarantees
* still OOM's some 93 and 97 betas, probably way more
## other changes
* disable `mt` by default on https-desktop-chrome
* avoids the gc bug entirely (except for plaintext-http and phones)
* chrome [doesn't parallelize](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) `crypto.subtle.digest` anyways
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0817-2302 `v1.3.15` pls let me stop finding chrome bugs

63
docs/lics.txt Normal file
View File

@@ -0,0 +1,63 @@
--- server-side --- software ---
https://github.com/9001/copyparty/
C: 2019 ed
L: MIT
https://github.com/pallets/jinja/
C: 2007 Pallets
L: BSD 3-Clause
https://github.com/pallets/markupsafe/
C: 2010 Pallets
L: BSD 3-Clause
https://github.com/giampaolo/pyftpdlib/
C: 2007 Giampaolo Rodola'
L: MIT
https://github.com/python/cpython/blob/3.10/Lib/asyncore.py
C: 1996 Sam Rushing
L: ISC
https://github.com/ahupp/python-magic/
C: 2001-2014 Adam Hupp
L: MIT
--- client-side --- software ---
https://github.com/Daninet/hash-wasm/
C: 2020 Dani Biró
L: MIT
https://github.com/openpgpjs/asmcrypto.js/
C: 2013 Artem S Vybornov
L: MIT
https://github.com/feimosi/baguetteBox.js/
C: 2017 Marek Grzybek
L: MIT
https://github.com/markedjs/marked/
C: 2018+, MarkedJS
C: 2011-2018, Christopher Jeffrey (https://github.com/chjj/)
L: MIT
https://github.com/codemirror/codemirror5/
C: 2017 Marijn Haverbeke <marijnh@gmail.com> and others
L: MIT
https://github.com/Ionaru/easy-markdown-editor/
C: 2015 Sparksuite, Inc.
C: 2017 Jeroen Akkerman.
L: MIT
--- client-side --- fonts ---
https://github.com/adobe-fonts/source-code-pro/
C: 2010-2019 Adobe
L: SIL OFL 1.1
https://github.com/FortAwesome/Font-Awesome/
C: 2022 Fonticons, Inc.
L: SIL OFL 1.1

View File

@@ -54,6 +54,11 @@ cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !p{a=t;p=t;r=0;next} t-p>1{printf "%.3f += %.3f - %.3f (%.3f) # %.3f -> %.3f\n",r,p,a,p-a,p,t;r+=p-a;a=t} {p=t} END {print r+p-a}'
##
## find uploads blocked by slow i/o or maybe deadlocks
awk '/^.\+. opened logfile/{print;next} {sub(/.$/,"")} !/^..36m[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3} /{next} !/0m(POST|writing) /{next} {c=0;p=$3} /0mPOST/{c=1} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");s=60*(60*$1+$2)+$3} c{t[p]=s;next} {d=s-t[p]} d>10{print $0 " # " d}'
##
## bad filenames
@@ -138,6 +143,31 @@ sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
##
## tracking bitflips
l=log.tmux-1662316902 # your logfile (tmux-capture or decompressed -lo)
# grab handshakes to a smaller logfile
tr -d '\r' <$l | awk '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} !d{next} /"purl": "/{t=substr($1,6);sub(/[^ ]+ /,"");sub(/ .\[34m[0-9]+ /," ");printf("%s %s %s %s\n",d,t,ip,$0)}' | while read d t ip f; do u=$(date +%s --date="${d}T${t}Z"); printf '%s\n' "$u $ip $f"; done > handshakes
# quick list of affected files
grep 'your chunk got corrupted somehow' -A1 $l | tr -d '\r' | grep -E '^[a-zA-Z0-9_-]{44}$' | sort | uniq | while IFS= read -r x; do grep -F "$x" handshakes | head -c 200; echo; done | sed -r 's/.*"name": "//' | sort | uniq -cw20
# find all cases of corrupt chunks and print their respective handshakes (if any),
# timestamps are when the corrupted chunk was received (and also the order they are displayed),
# first checksum is the expected value from the handshake, second is what got uploaded
awk <$l '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} /your chunk got corrupted somehow/{n=2;t=substr($1,6);next} !n{next} {n--;sub(/\r$/,"")} n{a=$0;next} {sub(/.\[0m,.*/,"");printf "%s %s %s %s\n",d,t,a,$0}' |
while read d t h1 h2; do printf '%s %s\n' $d $t; (
printf ' %s [%s]\n' $h1 "$(grep -F $h1 <handshakes | head -n 1)"
printf ' %s [%s]\n' $h2 "$(grep -F $h2 <handshakes | head -n 1)"
) | sed 's/, "sprs":.*//'; done | less -R
# notes; TODO clean up and put in the readme maybe --
# quickest way to drop the bad files (if a client generated bad hashes for the initial handshake) is shutting down copyparty and moving aside the unfinished file (both the .PARTIAL and the empty placeholder)
# BUT the clients will immediately re-handshake the upload with the same bitflipped hashes, so the uploaders have to refresh their browsers before you do that,
# so maybe just ask them to refresh and do nothing for 6 hours so the timeout kicks in, which deletes the placeholders/name-reservations and you can then manually delete the .PARTIALs at some point later
##
## media

52
docs/pyoxidizer.txt Normal file
View File

@@ -0,0 +1,52 @@
pyoxidizer doesn't crosscompile yet so need to build in a windows vm,
luckily possible to do mostly airgapped (https-proxy for crates)
none of this is version-specific but doing absolute links just in case
(only exception is py3.8 which is the final win7 ver)
# deps (download on linux host):
https://www.python.org/ftp/python/3.10.7/python-3.10.7-amd64.exe
https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.22.0/pyoxidizer-0.22.0-x86_64-pc-windows-msvc.zip
https://github.com/upx/upx/releases/download/v3.96/upx-3.96-win64.zip
https://static.rust-lang.org/dist/rust-1.61.0-x86_64-pc-windows-msvc.msi
https://github.com/indygreg/python-build-standalone/releases/download/20220528/cpython-3.8.13%2B20220528-i686-pc-windows-msvc-static-noopt-full.tar.zst
# need cl.exe, prefer 2017 -- download on linux host:
https://visualstudio.microsoft.com/downloads/?q=build+tools
https://docs.microsoft.com/en-us/visualstudio/releases/2022/release-history#release-dates-and-build-numbers
https://aka.ms/vs/15/release/vs_buildtools.exe # 2017
https://aka.ms/vs/16/release/vs_buildtools.exe # 2019
https://aka.ms/vs/17/release/vs_buildtools.exe # 2022
https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-build-tools?view=vs-2017
# use disposable w10 vm to prep offline installer; xfer to linux host with firefox to copyparty
vs_buildtools-2017.exe --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --layout c:\msbt2017 --lang en-us
# need two proxies on host; s5s or ssh for msys2(socks5), and tinyproxy for rust(http)
UP=- python3 socks5server.py 192.168.123.1 4321
ssh -vND 192.168.123.1:4321 localhost
git clone https://github.com/tinyproxy/tinyproxy.git
./autogen.sh
./configure --prefix=/home/ed/pe/tinyproxy
make -j24 install
printf '%s\n' >cfg "Port 4380" "Listen 192.168.123.1"
./tinyproxy -dccfg
https://github.com/msys2/msys2-installer/releases/download/2022-09-04/msys2-x86_64-20220904.exe
export all_proxy=socks5h://192.168.123.1:4321
# if chat dies after auth (2 messages) it probably failed dns, note the h in socks5h to tunnel dns
pacman -Syuu
pacman -S git patch mingw64/mingw-w64-x86_64-zopfli
cd /c && curl -k https://192.168.123.1:3923/ro/ox/msbt2017/?tar | tar -xv
first install certs from msbt/certificates then admin-cmd `vs_buildtools.exe --noweb`,
default selection (vc++2017-v15.9-v14.16, vc++redist, vc++bt-core) += win10sdk (for io.h)
install rust without documentation, python 3.10, put upx and pyoxidizer into ~/bin,
[cmd.exe] python -m pip install --user -U wheel-0.37.1.tar.gz strip-hints-0.1.10.tar.gz
p=192.168.123.1:4380; export https_proxy=$p; export http_proxy=$p
# and with all of the one-time-setup out of the way,
mkdir /c/d; cd /c/d && curl -k https://192.168.123.1:3923/cpp/gb?pw=wark > gb && git clone gb copyparty
cd /c/d/copyparty/ && curl -k https://192.168.123.1:3923/cpp/patch?pw=wark | patch -p1
cd /c/d/copyparty/scripts && CARGO_HTTP_CHECK_REVOKE=false PATH=/c/Users/$USER/AppData/Local/Programs/Python/Python310:/c/Users/$USER/bin:"$(cygpath "C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Tools\MSVC\14.16.27023\bin\Hostx86\x86"):$PATH" ./make-sfx.sh ox ultra

48
pyoxidizer.bzl Normal file
View File

@@ -0,0 +1,48 @@
# builds win7-i386 exe on win10-ltsc-1809(17763.316)
# see docs/pyoxidizer.txt


def make_exe():
    # build the standalone-static copyparty executable;
    # py3.8 because that is the final python version supporting win7
    dist = default_python_distribution(flavor="standalone_static", python_version="3.8")

    policy = dist.make_python_packaging_policy()
    policy.allow_files = True
    policy.allow_in_memory_shared_library_loading = True
    #policy.bytecode_optimize_level_zero = True
    #policy.include_distribution_sources = False  # error instantiating embedded Python interpreter: during initializing Python main: init_fs_encoding: failed to get the Python codec of the filesystem encoding
    policy.include_distribution_resources = False
    policy.include_non_distribution_sources = False
    policy.include_test = False

    python_config = dist.make_python_interpreter_config()
    #python_config.module_search_paths = ["$ORIGIN/lib"]
    # launch copyparty directly instead of dropping into a REPL
    python_config.run_module = "copyparty"

    exe = dist.to_python_executable(
        name="copyparty",
        config=python_config,
        packaging_policy=policy,
    )
    exe.windows_runtime_dlls_mode = "never"
    exe.windows_subsystem = "console"

    # bundle copyparty and its vendored deps from the sfx tempdir
    exe.add_python_resources(exe.read_package_root(
        path="sfx",
        packages=[
            "copyparty",
            "jinja2",
            "markupsafe",
            "pyftpdlib",
            "python-magic",
        ]
    ))
    return exe
def make_embedded_resources(exe):
    # resources baked into the exe itself (no external files)
    return exe.to_embedded_resources()
def make_install(exe):
    # layout for the install target: just the exe in the manifest root
    files = FileManifest()
    files.add_python_resource("copyparty", exe)
    return files
# wire up the pyoxidizer build targets; "install" is the default,
# "resources" is the default build-script target
register_target("exe", make_exe)
register_target("resources", make_embedded_resources, depends=["exe"], default_build_script=True)
register_target("install", make_install, depends=["exe"], default=True)

resolve_targets()

View File

@@ -1,10 +1,10 @@
FROM alpine:3.16
FROM alpine:3
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \
ver_marked=4.0.18 \
ver_mde=2.16.1 \
ver_codemirror=5.65.7 \
ver_mde=2.18.0 \
ver_codemirror=5.65.9 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
@@ -17,7 +17,7 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
&& wget https://github.com/codemirror/codemirror5/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \

View File

@@ -12,6 +12,8 @@ help() { exec cat <<'EOF'
# `re` does a repack of an sfx which you already executed once
# (grabs files from the sfx-created tempdir), overrides `clean`
#
# `ox` builds a pyoxidizer exe instead of py
#
# `gz` creates a gzip-compressed python sfx instead of bzip2
#
# `lang` limits which languages/translations to include,
@@ -56,6 +58,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
gawk=$(command -v gawk || command -v gnuawk || command -v awk)
awk() { $gawk "$@"; }
targs=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
targs=()
pybin=$(command -v python3 || command -v python) || {
echo need python
exit 1
@@ -79,12 +85,14 @@ while [ ! -z "$1" ]; do
case $1 in
clean) clean=1 ; ;;
re) repack=1 ; ;;
ox) use_ox=1 ; ;;
gz) use_gz=1 ; ;;
no-fnt) no_fnt=1 ; ;;
no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
fast) zopf= ; ;;
ultra) ultra=1 ; ;;
lang) shift;langs="$1"; ;;
*) help ; ;;
esac
@@ -112,7 +120,7 @@ tmpdir="$(
)"
[ $repack ] && {
old="$tmpdir/pe-copyparty"
old="$tmpdir/pe-copyparty.$(id -u)"
echo "repack of files in $old"
cp -pR "$old/"*{py2,j2,ftp,copyparty} .
}
@@ -162,8 +170,25 @@ tmpdir="$(
wget -O$f "$url" || curl -L "$url" >$f)
done
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
echo collecting python-magic
v=0.4.27
f="../build/python-magic-$v.tar.gz"
[ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mkdir magic
mv python-magic-*/magic .
rm -rf python-magic-*
rm magic/compat.py
f=magic/__init__.py
awk '/^def _add_compat/{o=1} !o; /^_add_compat/{o=0}' <$f >t
tmv "$f"
mv magic ftp/ # doesn't provide a version label anyways
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
true || (
echo collecting strip-hints
f=../build/strip-hints-0.1.10.tar.gz
@@ -199,6 +224,47 @@ tmpdir="$(
# remove type hints before build instead
(cd copyparty; "$pybin" ../../scripts/strip_hints/a.py; rm uh)
f=../build/mit.txt
[ -e $f ] ||
curl https://opensource.org/licenses/MIT |
awk '/div>/{o=0}o>1;o{o++}/;COPYRIGHT HOLDER/{o=1}' |
awk '{gsub(/<[^>]+>/,"")};1' >$f
f=../build/isc.txt
[ -e $f ] ||
curl https://opensource.org/licenses/ISC |
awk '/div>/{o=0}o>2;o{o++}/;OWNER/{o=1}' |
awk '{gsub(/<[^>]+>/,"")};/./{b=0}!/./{b++}b>1{next}1' >$f
f=../build/3bsd.txt
[ -e $f ] ||
curl https://opensource.org/licenses/BSD-3-Clause |
awk '/div>/{o=0}o>1;o{o++}/HOLDER/{o=1}' |
awk '{gsub(/<[^>]+>/,"")};1' >$f
f=../build/ofl.txt
[ -e $f ] ||
curl https://opensource.org/licenses/OFL-1.1 |
awk '/PREAMBLE/{o=1}/sil\.org/{o=0}!o{next}/./{printf "%s ",$0;next}{print"\n"}' |
awk '{gsub(/<[^>]+>/,"");gsub(/^\s+/,"");gsub(/&amp;/,"\\&")}/./{b=0}!/./{b++}b>1{next}1' >$f
(sed -r 's/^L: /License: /;s/^C: /Copyright (c) /' <../docs/lics.txt
printf '\n\n--- MIT License ---\n\n'; cat ../build/mit.txt
printf '\n\n--- ISC License ---\n\n'; cat ../build/isc.txt
printf '\n\n--- BSD 3-Clause License ---\n\n'; cat ../build/3bsd.txt
printf '\n\n--- SIL Open Font License v1.1 ---\n\n'; cat ../build/ofl.txt
) |
while IFS= read -r x; do
[ "${x:0:4}" = "--- " ] || {
printf '%s\n' "$x"
continue
}
n=${#x}
p=$(( (80-n)/2 ))
printf "%${p}s\033[07m%s\033[0m\n" "" "$x"
done > copyparty/res/COPYING.txt
}
ver=
@@ -303,8 +369,8 @@ rm have
tmv "$f"
done
[ $repack ] || {
# uncomment
[ ! $repack ] && [ ! $use_ox ] && {
# uncomment; oxidized drops 45 KiB but becomes undebuggable
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
@@ -318,6 +384,7 @@ rm have
f=j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"
rm -f j2/jinja2/async*
grep -rLE '^#[^a-z]*coding: utf-8' j2 |
while IFS= read -r f; do
@@ -348,9 +415,9 @@ find | grep -E '\.(js|html)$' | while IFS= read -r f; do
done
gzres() {
command -v pigz && [ $zopf ] &&
pk="pigz -11 -I $zopf" ||
pk='gzip'
[ $zopf ] && command -v zopfli && pk="zopfli --i$zopf"
[ $zopf ] && command -v pigz && pk="pigz -11 -I $zopf"
[ -z "$pk" ] && pk='gzip'
np=$(nproc)
echo "$pk #$np"
@@ -399,6 +466,33 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
}
[ $use_ox ] && {
tgt=x86_64-pc-windows-msvc
tgt=i686-pc-windows-msvc # 2M smaller (770k after upx)
bdir=build/$tgt/release/install/copyparty
t="res web"
(printf "\n\n\nBUT WAIT! THERE'S MORE!!\n\n";
cat ../$bdir/COPYING.txt) >> copyparty/res/COPYING.txt ||
echo "copying.txt 404 pls rebuild"
mv ftp/* j2/* copyparty/vend/* .
rm -rf ftp j2 py2 copyparty/vend
(cd copyparty; tar -cvf z.tar $t; rm -rf $t)
cd ..
pyoxidizer build --release --target-triple $tgt
mv $bdir/copyparty.exe dist/
cp -pv "$(for d in '/c/Program Files (x86)/Microsoft Visual Studio/'*'/BuildTools/VC/Redist/MSVC'; do
find "$d" -name vcruntime140.dll; done | sort | grep -vE '/x64/|/onecore/' | head -n 1)" dist/
dist/copyparty.exe --version
cp -pv dist/copyparty{,.orig}.exe
[ $ultra ] && a="--best --lzma" || a=-1
/bin/time -f %es upx $a dist/copyparty.exe >/dev/null
ls -al dist/copyparty{,.orig}.exe
exit 0
}
echo gen tarlist
for d in copyparty j2 ftp py2; do find $d -type f; done | # strip_hints
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
@@ -414,11 +508,7 @@ done
[ $n -eq 50 ] && exit
echo creating tar
args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
args=()
tar -cf tar "${args[@]}" --numeric-owner -T list
tar -cf tar "${targs[@]}" --numeric-owner -T list
pc=bzip2
pe=bz2

View File

@@ -0,0 +1,12 @@
builds a fully standalone copyparty.exe compatible with 32bit win7-sp1 and later
requires a win7 vm which has never been connected to the internet and a host-only network with the linux host at 192.168.123.1
first-time setup steps in notes.txt
run build.sh in the vm to fetch src + compile + push a new exe to the linux host for manual publishing
## ffmpeg
built with [ffmpeg-windows-build-helpers](https://github.com/rdp/ffmpeg-windows-build-helpers) and [this patch](./ffmpeg.patch) using [these steps](./ffmpeg.txt)

View File

@@ -0,0 +1,64 @@
#!/bin/bash
set -e

# self-update: fetch the canonical build script from the lan host,
# diff against this copy, and offer to swap + re-exec on mismatch
curl http://192.168.123.1:3923/cpp/scripts/pyinstaller/build.sh |
tee build2.sh | cmp build.sh && rm build2.sh || {
	echo "new build script; upgrade y/n:"
	while true; do read -u1 -n1 -r r; [[ $r =~ [yYnN] ]] && break; done
	[[ $r =~ [yY] ]] && mv build{2,}.sh && exec ./build.sh
}

# download helper; -k because the host serves a self-signed cert
dl() { curl -fkLO "$1"; }

cd ~/Downloads

# grab the sfx plus the pyinstaller loader files
dl https://192.168.123.1:3923/cpp/dist/copyparty-sfx.py
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/loader.ico
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/loader.py
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/loader.rc

# running the sfx unpacks it into $TEMP; start from a clean slate
rm -rf $TEMP/pe-copyparty*
python copyparty-sfx.py --version

# collect copyparty + vendored deps from the sfx tempdir into ./mods
rm -rf mods; mkdir mods
cp -pR $TEMP/pe-copyparty/copyparty/ $TEMP/pe-copyparty/{ftp,j2}/* mods/

# af: in-place awk filter on a file
af() { awk "$1" <$2 >tf; mv tf "$2"; }

# strip unused functionality from the bundled deps to shrink the exe
rm -rf mods/magic/
sed -ri /pickle/d mods/jinja2/_compat.py
sed -ri '/(bccache|PackageLoader)/d' mods/jinja2/__init__.py
af '/^class/{s=0}/^class PackageLoader/{s=1}!s' mods/jinja2/loaders.py
sed -ri /fork_process/d mods/pyftpdlib/servers.py
af '/^class _Base/{s=1}!s' mods/pyftpdlib/authorizers.py

# parse the copyparty VERSION tuple into a b c (d pads to 4 fields)
read a b c d _ < <(
	grep -E '^VERSION =' mods/copyparty/__version__.py |
	tail -n 1 |
	sed -r 's/[^0-9]+//;s/[" )]//g;s/[-,]/ /g;s/$/ 0/'
)
# patch the real version over the 1,2,3,0 / 1.2.3 placeholders in loader.rc
sed -r 's/1,2,3,0/'$a,$b,$c,$d'/;s/1\.2\.3/'$a.$b.$c/ <loader.rc >loader.rc2

$APPDATA/python/python37/scripts/pyinstaller \
	-y --clean -p mods --upx-dir=. \
	--exclude-module copyparty.broker_mp \
	--exclude-module copyparty.broker_mpw \
	--exclude-module curses \
	--exclude-module ctypes.macholib \
	--exclude-module multiprocessing \
	--exclude-module pdb \
	--exclude-module pickle \
	--exclude-module pyftpdlib.prefork \
	--exclude-module urllib.request \
	--exclude-module urllib.response \
	--exclude-module urllib.robotparser \
	--exclude-module zipfile \
	--version-file loader.rc2 -i loader.ico -n copyparty -c -F loader.py \
	--add-data 'mods/copyparty/res;copyparty/res' \
	--add-data 'mods/copyparty/web;copyparty/web'

# ./upx.exe --best --ultra-brute --lzma -k dist/copyparty.exe

# push the finished exe back to the lan host
curl -fkT dist/copyparty.exe -b cppwd=wark https://192.168.123.1:3923/

View File

@@ -0,0 +1,228 @@
diff --git a/cross_compile_ffmpeg.sh b/cross_compile_ffmpeg.sh
index 45c4ef8..f9bc83a 100755
--- a/cross_compile_ffmpeg.sh
+++ b/cross_compile_ffmpeg.sh
@@ -2287,15 +2287,8 @@ build_ffmpeg() {
else
local output_dir=$3
fi
- if [[ "$non_free" = "y" ]]; then
- output_dir+="_with_fdk_aac"
- fi
- if [[ $build_intel_qsv == "n" ]]; then
- output_dir+="_xp_compat"
- fi
- if [[ $enable_gpl == 'n' ]]; then
- output_dir+="_lgpl"
- fi
+ output_dir+="_xp_compat"
+ output_dir+="_lgpl"
if [[ ! -z $ffmpeg_git_checkout_version ]]; then
local output_branch_sanitized=$(echo ${ffmpeg_git_checkout_version} | sed "s/\//_/g") # release/4.3 to release_4.3
@@ -2354,9 +2347,9 @@ build_ffmpeg() {
init_options+=" --disable-schannel"
# Fix WinXP incompatibility by disabling Microsoft's Secure Channel, because Windows XP doesn't support TLS 1.1 and 1.2, but with GnuTLS or OpenSSL it does. XP compat!
fi
- config_options="$init_options --enable-libcaca --enable-gray --enable-libtesseract --enable-fontconfig --enable-gmp --enable-gnutls --enable-libass --enable-libbluray --enable-libbs2b --enable-libflite --enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvo-amrwbenc --enable-libvorbis --enable-libwebp --enable-libzimg --enable-libzvbi --enable-libmysofa --enable-libopenjpeg --enable-libopenh264 --enable-libvmaf --enable-libsrt --enable-libxml2 --enable-opengl --enable-libdav1d --enable-cuda-llvm"
+ config_options="$init_options --enable-gray --enable-libopus --enable-libvorbis --enable-libwebp --enable-libopenjpeg"
- if [[ $build_svt = y ]]; then
+ if [[ '' ]]; then
if [ "$bits_target" != "32" ]; then
# SVT-VP9 see comments below
@@ -2379,40 +2372,13 @@ build_ffmpeg() {
config_options+=" --enable-libvpx"
fi # else doesn't work/matter with 32 bit
fi
- config_options+=" --enable-libaom"
-
- if [[ $compiler_flavors != "native" ]]; then
- config_options+=" --enable-nvenc --enable-nvdec" # don't work OS X
- fi
- config_options+=" --extra-libs=-lharfbuzz" # grr...needed for pre x264 build???
config_options+=" --extra-libs=-lm" # libflite seemed to need this linux native...and have no .pc file huh?
config_options+=" --extra-libs=-lshlwapi" # lame needed this, no .pc file?
- config_options+=" --extra-libs=-lmpg123" # ditto
config_options+=" --extra-libs=-lpthread" # for some reason various and sundry needed this linux native
- config_options+=" --extra-cflags=-DLIBTWOLAME_STATIC --extra-cflags=-DMODPLUG_STATIC --extra-cflags=-DCACA_STATIC" # if we ever do a git pull then it nukes changes, which overrides manual changes to configure, so just use these for now :|
- if [[ $build_amd_amf = n ]]; then
- config_options+=" --disable-amf" # Since its autodetected we have to disable it if we do not want it. #unless we define no autodetection but.. we don't.
- else
- config_options+=" --enable-amf" # This is actually autodetected but for consistency.. we might as well set it.
- fi
-
- if [[ $build_intel_qsv = y && $compiler_flavors != "native" ]]; then # Broken for native builds right now: https://github.com/lu-zero/mfx_dispatch/issues/71
- config_options+=" --enable-libmfx"
- else
- config_options+=" --disable-libmfx"
- fi
- if [[ $enable_gpl == 'y' ]]; then
- config_options+=" --enable-gpl --enable-frei0r --enable-librubberband --enable-libvidstab --enable-libx264 --enable-libx265 --enable-avisynth --enable-libaribb24"
- config_options+=" --enable-libxvid --enable-libdavs2"
- if [[ $host_target != 'i686-w64-mingw32' ]]; then
- config_options+=" --enable-libxavs2"
- fi
- if [[ $compiler_flavors != "native" ]]; then
- config_options+=" --enable-libxavs" # don't compile OS X
- fi
- fi
+ config_options+=" --disable-amf" # Since its autodetected we have to disable it if we do not want it. #unless we define no autodetection but.. we don't.
+ config_options+=" --disable-libmfx"
local licensed_gpl=n # lgpl build with libx264 included for those with "commercial" license :)
if [[ $licensed_gpl == 'y' ]]; then
apply_patch file://$patch_dir/x264_non_gpl.diff -p1
@@ -2427,7 +2393,7 @@ build_ffmpeg() {
config_options+=" $postpend_configure_opts"
- if [[ "$non_free" = "y" ]]; then
+ if [[ '' ]]; then
config_options+=" --enable-nonfree --enable-libfdk-aac"
if [[ $compiler_flavors != "native" ]]; then
@@ -2436,6 +2402,17 @@ build_ffmpeg() {
# other possible options: --enable-openssl [unneeded since we already use gnutls]
fi
+ config_options+=" --disable-indevs --disable-outdevs --disable-protocols --disable-hwaccels --disable-schannel --disable-mediafoundation" # 8032256
+ config_options+=" --disable-muxers --enable-muxer=image2 --enable-muxer=mjpeg --enable-muxer=opus --enable-muxer=webp" # 7927296
+ config_options+=" --disable-encoders --enable-encoder=libopus --enable-encoder=libopenjpeg --enable-encoder=libwebp --enable-encoder=ljpeg --enable-encoder=png" # 6776320
+ config_options+=" --enable-small" # 5409792
+ #config_options+=" --disable-runtime-cpudetect" # 5416448
+ config_options+=" --disable-bsfs --disable-filters --enable-filter=scale --enable-filter=compand --enable-filter=volume --enable-filter=showwavespic --enable-filter=convolution --enable-filter=aresample --enable-filter=showspectrumpic --enable-filter=crop" # 4647424
+ config_options+=" --disable-network" # 4585984
+ #config_options+=" --disable-pthreads --disable-w32threads" # kills ffmpeg
+ config_options+=" --enable-protocol=cache --enable-protocol=file --enable-protocol=pipe"
+
+
do_debug_build=n # if you need one for backtraces/examining segfaults using gdb.exe ... change this to y :) XXXX make it affect x264 too...and make it real param :)
if [[ "$do_debug_build" = "y" ]]; then
# not sure how many of these are actually needed/useful...possibly none LOL
@@ -2561,36 +2538,16 @@ build_ffmpeg_dependencies() {
build_meson_cross
build_mingw_std_threads
build_zlib # Zlib in FFmpeg is autodetected.
- build_libcaca # Uses zlib and dlfcn (on windows).
build_bzip2 # Bzlib (bzip2) in FFmpeg is autodetected.
build_liblzma # Lzma in FFmpeg is autodetected. Uses dlfcn.
build_iconv # Iconv in FFmpeg is autodetected. Uses dlfcn.
- build_sdl2 # Sdl2 in FFmpeg is autodetected. Needed to build FFPlay. Uses iconv and dlfcn.
- if [[ $build_amd_amf = y ]]; then
- build_amd_amf_headers
- fi
- if [[ $build_intel_qsv = y && $compiler_flavors != "native" ]]; then # Broken for native builds right now: https://github.com/lu-zero/mfx_dispatch/issues/71
- build_intel_quicksync_mfx
- fi
- build_nv_headers
build_libzimg # Uses dlfcn.
build_libopenjpeg
- build_glew
- build_glfw
#build_libjpeg_turbo # mplayer can use this, VLC qt might need it? [replaces libjpeg] (ffmpeg seems to not need it so commented out here)
build_libpng # Needs zlib >= 1.0.4. Uses dlfcn.
build_libwebp # Uses dlfcn.
- build_harfbuzz
# harf does now include build_freetype # Uses zlib, bzip2, and libpng.
- build_libxml2 # Uses zlib, liblzma, iconv and dlfcn.
- build_libvmaf
- build_fontconfig # Needs freetype and libxml >= 2.6. Uses iconv and dlfcn.
- build_gmp # For rtmp support configure FFmpeg with '--enable-gmp'. Uses dlfcn.
#build_librtmfp # mainline ffmpeg doesn't use it yet
- build_libnettle # Needs gmp >= 3.0. Uses dlfcn.
- build_unistring
- build_libidn2 # needs iconv and unistring
- build_gnutls # Needs nettle >= 3.1, hogweed (nettle) >= 3.1. Uses libidn2, unistring, zlib, and dlfcn.
#if [[ "$non_free" = "y" ]]; then
# build_openssl-1.0.2 # Nonfree alternative to GnuTLS. 'build_openssl-1.0.2 "dllonly"' to build shared libraries only.
# build_openssl-1.1.1 # Nonfree alternative to GnuTLS. Can't be used with LibRTMP. 'build_openssl-1.1.1 "dllonly"' to build shared libraries only.
@@ -2598,86 +2555,13 @@ build_ffmpeg_dependencies() {
build_libogg # Uses dlfcn.
build_libvorbis # Needs libogg >= 1.0. Uses dlfcn.
build_libopus # Uses dlfcn.
- build_libspeexdsp # Needs libogg for examples. Uses dlfcn.
- build_libspeex # Uses libspeexdsp and dlfcn.
- build_libtheora # Needs libogg >= 1.1. Needs libvorbis >= 1.0.1, sdl and libpng for test, programs and examples [disabled]. Uses dlfcn.
- build_libsndfile "install-libgsm" # Needs libogg >= 1.1.3 and libvorbis >= 1.2.3 for external support [disabled]. Uses dlfcn. 'build_libsndfile "install-libgsm"' to install the included LibGSM 6.10.
- build_mpg123
- build_lame # Uses dlfcn, mpg123
- build_twolame # Uses libsndfile >= 1.0.0 and dlfcn.
- build_libopencore # Uses dlfcn.
- build_libilbc # Uses dlfcn.
- build_libmodplug # Uses dlfcn.
- build_libgme
- build_libbluray # Needs libxml >= 2.6, freetype, fontconfig. Uses dlfcn.
- build_libbs2b # Needs libsndfile. Uses dlfcn.
- build_libsoxr
- build_libflite
- build_libsnappy # Uses zlib (only for unittests [disabled]) and dlfcn.
- build_vamp_plugin # Needs libsndfile for 'vamp-simple-host.exe' [disabled].
build_fftw # Uses dlfcn.
- build_libsamplerate # Needs libsndfile >= 1.0.6 and fftw >= 0.15.0 for tests. Uses dlfcn.
- build_librubberband # Needs libsamplerate, libsndfile, fftw and vamp_plugin. 'configure' will fail otherwise. Eventhough librubberband doesn't necessarily need them (libsndfile only for 'rubberband.exe' and vamp_plugin only for "Vamp audio analysis plugin"). How to use the bundled libraries '-DUSE_SPEEX' and '-DUSE_KISSFFT'?
- build_frei0r # Needs dlfcn. could use opencv...
- if [[ "$bits_target" != "32" && $build_svt = "y" ]]; then
- build_svt-hevc
- build_svt-av1
- build_svt-vp9
- fi
- build_vidstab
- #build_facebooktransform360 # needs modified ffmpeg to use it so not typically useful
- build_libmysofa # Needed for FFmpeg's SOFAlizer filter (https://ffmpeg.org/ffmpeg-filters.html#sofalizer). Uses dlfcn.
- if [[ "$non_free" = "y" ]]; then
- build_fdk-aac # Uses dlfcn.
- if [[ $compiler_flavors != "native" ]]; then
- build_libdecklink # Error finding rpc.h in native builds even if it's available
- fi
- fi
- build_zvbi # Uses iconv, libpng and dlfcn.
- build_fribidi # Uses dlfcn.
- build_libass # Needs freetype >= 9.10.3 (see https://bugs.launchpad.net/ubuntu/+source/freetype1/+bug/78573 o_O) and fribidi >= 0.19.0. Uses fontconfig >= 2.10.92, iconv and dlfcn.
-
- build_libxvid # FFmpeg now has native support, but libxvid still provides a better image.
- build_libsrt # requires gnutls, mingw-std-threads
- build_libaribb24
- build_libtesseract
- build_lensfun # requires png, zlib, iconv
- # build_libtensorflow # broken
- build_libvpx
- build_libx265
- build_libopenh264
- build_libaom
- build_dav1d
- build_avisynth
- build_libx264 # at bottom as it might internally build a copy of ffmpeg (which needs all the above deps...
}
build_apps() {
- if [[ $build_dvbtee = "y" ]]; then
- build_dvbtee_app
- fi
- # now the things that use the dependencies...
- if [[ $build_libmxf = "y" ]]; then
- build_libMXF
- fi
- if [[ $build_mp4box = "y" ]]; then
- build_mp4box
- fi
- if [[ $build_mplayer = "y" ]]; then
- build_mplayer
- fi
if [[ $build_ffmpeg_static = "y" ]]; then
build_ffmpeg static
fi
- if [[ $build_ffmpeg_shared = "y" ]]; then
- build_ffmpeg shared
- fi
- if [[ $build_vlc = "y" ]]; then
- build_vlc
- fi
- if [[ $build_lsw = "y" ]]; then
- build_lsw
- fi
}
# set some parameters initial values

View File

@@ -0,0 +1,13 @@
apt install subversion ragel curl texinfo ed bison flex cvs yasm automake libtool cmake git make pkg-config pax nasm gperf autogen bzip2 autoconf-archive p7zip-full meson clang libtool-bin ed python-is-python3
git clone https://github.com/rdp/ffmpeg-windows-build-helpers
# commit 3d88e2b6aedfbb5b8fed19dd24621e5dd7fc5519 (HEAD -> master, origin/master, origin/HEAD)
# Merge: b0bd70c 9905dd7
# Author: Roger Pack <rogerpack2005@gmail.com>
# Date: Fri Aug 19 23:36:35 2022 -0600
cd ffmpeg-windows-build-helpers/
vim cross_compile_ffmpeg.sh
(cd ./sandbox/win32/ffmpeg_git_xp_compat_lgpl/ ; git reset --hard ; git clean -fx )
./cross_compile_ffmpeg.sh
for f in sandbox/win32/ffmpeg_git_xp_compat_lgpl/ff{mpeg,probe}.exe; do upx --best --ultra-brute -k $f; mv $f ~/dev; done

27
scripts/pyinstaller/icon.sh Executable file
View File

@@ -0,0 +1,27 @@
#!/bin/bash
set -e

# builds loader.ico (the exe icon) from the PartyUP app icon;
# imagemagick png compression is broken, use pillow instead
convert ~/AndroidStudioProjects/PartyUP/metadata/en-US/images/icon.png a.bmp
#convert a.bmp -trim -resize '48x48!' -strip a.png

# crop to content, downscale to 48x48, flatten alpha onto itself
python3 <<'EOF'
from PIL import Image
i = Image.open('a.bmp')
i = i.crop(i.getbbox())
i = i.resize((48,48), Image.BICUBIC)
i = Image.alpha_composite(i,i)
i.save('a.png')
EOF

# lossy palette reduction to shrink the icon
pngquant --strip --quality 30 a.png
mv a-*.png a.png

# wrap the png into a single-size .ico
python3 <<'EOF'
from PIL import Image
Image.open('a.png').save('loader.ico',sizes=[(48,48)])
EOF

rm a.{bmp,png}
ls -al
exit 0

View File

@@ -0,0 +1,94 @@
# coding: utf-8

# pyinstaller loader stub for the win7-compatible copyparty.exe;
# shows an EOL warning banner before handing off to copyparty

v = r"""
this is the EXE edition of copyparty, compatible with Windows7-SP1
and later. To make this possible, the EXE was compiled with Python
3.7.9, which is EOL and does not receive security patches anymore.
if possible, for performance and security reasons, please use this instead:
https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
"""
# prefix each banner line with a block-glyph gutter
print(v.replace("\n", "\n▒▌ ")[1:] + "\n")

import re
import os
import sys
import shutil
import traceback
import subprocess as sp
def confirm(rv):
    """Print the return code (or the active traceback when rv is falsy),
    wait for the user to hit enter, then exit with rv (or 1)."""
    detail = rv if rv else traceback.format_exc()
    print()
    print("retcode", detail)
    print("*** hit enter to exit ***")
    try:
        input()
    except BaseException:
        # stdin may be closed/detached; exit regardless
        pass
    sys.exit(rv or 1)
def meicln(mod):
    """Delete abandoned pyinstaller _MEI* extraction dirs which sit next to
    *mod* and contain a copyparty/web folder (i.e. belong to copyparty)."""
    parent, own = os.path.split(mod)

    stale = []
    for name in os.listdir(parent):
        if not name.startswith("_MEI") or name == own:
            continue
        path = os.path.join(parent, name)
        if os.path.isdir(os.path.join(path, "copyparty", "web")):
            stale.append(path)

    if not stale:
        return

    print("deleting abandoned SFX dirs:")
    for path in stale:
        print(path)
        # windows may keep files busy for a moment; retry a few times
        for _ in range(9):
            try:
                shutil.rmtree(path)
                break
            except:
                pass

    print()
def meichk():
    # type: () -> None
    # scans the windows process list for copyparty instances and, when
    # running from a pyinstaller _MEI extraction dir, triggers cleanup
    # of abandoned extraction dirs via meicln

    # match processes by exe name; fall back to our own basename if the
    # interpreter path doesn't contain "copyparty"
    filt = "copyparty"
    if filt not in sys.executable:
        filt = os.path.basename(sys.executable)

    # tasklist output: first column is the image name, second is the PID
    pids = []
    ptn = re.compile(r"^([^\s]+)\s+([0-9]+)")
    procs = sp.check_output("tasklist").decode("utf-8", "replace")
    for ln in procs.splitlines():
        m = ptn.match(ln)
        if m and filt in m.group(1).lower():
            pids.append(int(m.group(2)))

    mod = os.path.dirname(os.path.realpath(__file__))
    # len(pids) == 2: presumably just this process plus one other
    # copyparty instance -- TODO confirm why exactly two is the trigger
    if os.path.basename(mod).startswith("_MEI") and len(pids) == 2:
        meicln(mod)
# tidy up stale extraction dirs, then run copyparty and keep the
# console window open on unexpected errors
meichk()

from copyparty.__main__ import main

try:
    main()
except SystemExit as ex:
    c = ex.code
    # 0 is a clean exit; -15 presumably SIGTERM -- anything else is
    # worth pausing on so the user can read the message
    if c not in [0, -15]:
        confirm(ex.code)
except KeyboardInterrupt:
    pass
except:
    # unexpected crash: show the traceback and wait for enter
    confirm(0)

View File

@@ -0,0 +1,29 @@
# UTF-8
#
# pyinstaller version-resource template for copyparty.exe;
# the 1,2,3,0 / '1.2.3' placeholders are overwritten with the real
# version by build.sh (sed on loader.rc -> loader.rc2) before building
VSVersionInfo(
  ffi=FixedFileInfo(
    # placeholder version tuples, patched by build.sh
    filevers=(1,2,3,0),
    prodvers=(1,2,3,0),
    mask=0x3f,
    flags=0x0,
    OS=0x4,
    fileType=0x1,
    subtype=0x0,
    date=(0, 0)
    ),
  kids=[
    StringFileInfo(
      [
      StringTable(
        '000004b0',
        [StringStruct('CompanyName', 'ocv.me'),
        StringStruct('FileDescription', 'copyparty'),
        # placeholder version strings, patched by build.sh
        StringStruct('FileVersion', '1.2.3'),
        StringStruct('InternalName', 'copyparty'),
        StringStruct('LegalCopyright', '2019, ed'),
        StringStruct('OriginalFilename', 'copyparty.exe'),
        StringStruct('ProductName', 'copyparty'),
        StringStruct('ProductVersion', '1.2.3')])
      ]),
    VarFileInfo([VarStruct('Translation', [0, 1200])])
  ]
)

View File

@@ -0,0 +1,58 @@
run ./build.sh in git-bash to build + upload the exe
## ============================================================
## first-time setup on a stock win7x32sp1 vm:
##
download + install git:
http://192.168.123.1:3923/ro/pyi/Git-2.37.3-32-bit.exe
<git-bash>
dl() { curl -fkLOC- "$1"; }
cd ~/Downloads &&
dl https://192.168.123.1:3923/ro/pyi/upx-3.96-win32.zip &&
dl https://192.168.123.1:3923/ro/pyi/KB2533623/Windows6.1-KB2533623-x86.msu &&
dl https://192.168.123.1:3923/ro/pyi/python-3.7.9.exe &&
dl https://192.168.123.1:3923/ro/pyi/pip-22.2.2-py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/altgraph-0.17.2-py2.py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/future-0.18.2.tar.gz &&
dl https://192.168.123.1:3923/ro/pyi/importlib_metadata-4.12.0-py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/pefile-2022.5.30.tar.gz &&
dl https://192.168.123.1:3923/ro/pyi/pyinstaller-5.4.1-py3-none-win32.whl &&
dl https://192.168.123.1:3923/ro/pyi/pyinstaller_hooks_contrib-2022.10-py2.py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/pywin32_ctypes-0.2.0-py2.py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/typing_extensions-4.3.0-py3-none-any.whl &&
dl https://192.168.123.1:3923/ro/pyi/zipp-3.8.1-py3-none-any.whl &&
echo ok
manually install:
windows6.1-kb2533623-x86.msu + reboot
python-3.7.9.exe
<git-bash>
cd ~/Downloads &&
unzip -j upx-3.96-win32.zip upx-3.96-win32/upx.exe &&
python -m ensurepip &&
python -m pip install --user -U pip-22.2.2-py3-none-any.whl &&
python -m pip install --user -U pyinstaller-5.4.1-py3-none-win32.whl pefile-2022.5.30.tar.gz pywin32_ctypes-0.2.0-py2.py3-none-any.whl pyinstaller_hooks_contrib-2022.10-py2.py3-none-any.whl altgraph-0.17.2-py2.py3-none-any.whl future-0.18.2.tar.gz importlib_metadata-4.12.0-py3-none-any.whl typing_extensions-4.3.0-py3-none-any.whl zipp-3.8.1-py3-none-any.whl &&
echo ok
# python -m pip install --user -U Pillow-9.2.0-cp37-cp37m-win32.whl
# sed -ri 's/, bestopt, /]+bestopt+[/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
# sed -ri 's/(^\s+bestopt = ).*/\1["--best","--lzma","--ultra-brute"]/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
## ============================================================
## notes
##
size t-unpack virustotal cmnt
8059k 0m0.375s 5/70 generic-only, sandbox-ok no-upx
7095k 0m0.563s 4/70 generic-only, sandbox-ok standard-upx
6958k 0m0.578s 7/70 generic-only, sandbox-ok upx+upx
use python 3.7 since 3.8 onwards requires KB2533623 on target
generate loader.rc template:
%appdata%\python\python37\scripts\pyi-grab_version C:\Users\ed\AppData\Local\Programs\Python\Python37\python.exe

View File

@@ -17,8 +17,10 @@ for py in python{2,3}; do
pids+=($!)
done
python3 ../scripts/test/smoketest.py &
pids+=($!)
[ "$1" ] || {
python3 ../scripts/test/smoketest.py &
pids+=($!)
}
for pid in ${pids[@]}; do
wait $pid

View File

@@ -19,6 +19,7 @@ copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mtag.py,
copyparty/res,
copyparty/res/COPYING.txt,
copyparty/res/insecure.pem,
copyparty/star.py,
copyparty/stolen,

View File

@@ -27,7 +27,7 @@ SIZE = None
CKSUM = None
STAMP = None
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
WINDOWS = sys.platform in ["win32", "msys"]
sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__))
@@ -222,7 +222,12 @@ def hashfile(fn):
def unpack():
"""unpacks the tar yielded by `data`"""
name = "pe-copyparty"
name = "pe-copyparty."
try:
name += str(os.geteuid())
except:
pass
tag = "v" + str(STAMP)
top = tempfile.gettempdir()
opj = os.path.join

View File

@@ -30,8 +30,12 @@ if MACOS:
# 25% faster; until any tests do symlink stuff
from copyparty.__init__ import E
from copyparty.__main__ import init_E
from copyparty.util import Unrecv, FHC
init_E(E)
def runcmd(argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
@@ -94,13 +98,13 @@ class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ka = {}
ex = "e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp xdev xvol ed emp force_js ihead no_acode no_athumb no_del no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nid nih nw"
ex = "e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp xdev xvol ed emp force_js ihead magic no_acode no_athumb no_del no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nid nih nw"
ka.update(**{k: False for k in ex.split()})
ex = "no_rescan no_sendfile no_voldump"
ex = "no_rescan no_sendfile no_voldump plain_ip"
ka.update(**{k: True for k in ex.split()})
ex = "css_browser hist js_browser no_hash no_idx"
ex = "css_browser hist js_browser no_hash no_idx no_forget"
ka.update(**{k: None for k in ex.split()})
ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df"
@@ -113,6 +117,7 @@ class Cfg(Namespace):
a=a or [],
v=v or [],
c=c,
E=E,
s_wr_sz=512 * 1024,
unpost=600,
u2sort="s",
@@ -155,6 +160,7 @@ class VHttpSrv(object):
def __init__(self):
self.broker = NullBroker()
self.prism = None
self.bans = {}
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases}