Compare commits

...

165 Commits

Author SHA1 Message Date
ed
ed69d42005 v1.1.11 2022-01-14 22:25:06 +01:00
ed
0b47ee306b bump marked.js to 4.0.10 2022-01-14 20:42:23 +01:00
ed
e4e63619d4 linkable maintabs 2022-01-14 19:26:07 +01:00
ed
f32cca292a propagate sort-order to thegrid 2022-01-14 18:28:49 +01:00
ed
e87ea19ff1 return file URL in PUT response 2022-01-11 22:59:19 +01:00
ed
0214793740 fix garbage in markdown output 2022-01-05 18:57:05 +01:00
ed
fc9dd5d743 meadup changes 2022-01-03 01:16:27 +01:00
ed
9e6d5dd2b9 vbi: add onscreen qrcode 2021-12-28 20:57:11 +01:00
ed
bdad197e2c make it even worse 2021-12-27 00:04:38 +01:00
ed
7e139288a6 add very bad idea 2021-12-26 23:32:46 +01:00
ed
6e7935abaf repaint cut/paste buttons when permissions change 2021-12-24 00:50:52 +01:00
ed
3ba0cc20f1 v1.1.10 2021-12-17 00:05:17 +01:00
ed
dd28de1796 sendfile: handle eagain 2021-12-17 00:04:19 +01:00
ed
9eecc9e19a v1.1.9 2021-12-16 22:54:44 +01:00
ed
6530cb6b05 shut socket on tx error 2021-12-16 22:51:24 +01:00
ed
41ce613379 add multisearch 2021-12-12 20:11:07 +01:00
ed
5e2785caba more aggressively try ffmpeg when mutagen fails 2021-12-11 20:31:04 +01:00
ed
d7cc000976 v1.1.8 2021-12-10 02:44:48 +01:00
ed
50d8ff95ae good stuff 2021-12-10 02:21:56 +01:00
ed
b2de1459b6 quick backports to the alternative fuse client 2021-12-10 01:59:45 +01:00
ed
f0ffbea0b2 add breadcrumbs to the textfile tree 2021-12-10 00:44:47 +01:00
ed
199ccca0fe v1.1.7 2021-12-07 19:19:35 +01:00
ed
1d9b355743 fix search ui after b265e59 broke it 2021-12-07 19:12:36 +01:00
ed
f0437fbb07 cleanup the windowtitle a bit 2021-12-07 19:09:24 +01:00
ed
abc404a5b7 v1.1.6 2021-12-07 01:17:56 +01:00
ed
04b9e21330 update web-deps 2021-12-07 01:12:32 +01:00
ed
1044aa071b deal with consecutive dupes even without sqlite 2021-12-06 23:51:44 +01:00
ed
4c3192c8cc set window-title to listening ip 2021-12-06 23:08:04 +01:00
ed
689e77a025 option to set a custom servicename 2021-12-06 22:24:25 +01:00
ed
3bd89403d2 apply per-volume index config to ui 2021-12-06 22:04:24 +01:00
ed
b4800d9bcb option to disable onboot-scans per-volume 2021-12-06 20:54:13 +01:00
ed
05485e8539 md: smaller indent on outermost list 2021-12-06 20:17:12 +01:00
ed
0e03dc0868 and fix the markdown breadcrumbs too 2021-12-06 19:51:47 +01:00
ed
352b1ed10a generate correct links when trailing slash missing 2021-12-06 19:49:14 +01:00
ed
0db1244d04 also consider TMPDIR and friends 2021-12-06 09:47:39 +01:00
ed
ece08b8179 create ~/.config if /tmp is readonly 2021-12-06 02:02:44 +01:00
ed
b8945ae233 fix tests and readme 2021-12-04 18:52:14 +01:00
ed
dcaf7b0a20 v1.1.5 2021-12-04 03:33:57 +01:00
ed
f982cdc178 spa gridview 2021-12-04 03:31:12 +01:00
ed
b265e59834 spa filetab 2021-12-04 03:25:28 +01:00
ed
4a843a6624 unflicker navpane + add client state escape hatch 2021-12-04 02:46:00 +01:00
ed
241ef5b99d preserve mtimes when juggling symlinks 2021-12-04 01:58:04 +01:00
ed
f39f575a9c sort-order indicators 2021-12-03 23:53:41 +01:00
ed
1521307f1e use preferred sort on initial render, fixes #8 2021-12-03 02:07:08 +01:00
ed
dd122111e6 v1.1.4 2021-11-28 04:22:05 +01:00
ed
00c177fa74 show upload eta in window title 2021-11-28 04:05:16 +01:00
ed
f6c7e49eb8 u2cli: better error messages 2021-11-28 03:38:57 +01:00
ed
1a8dc3d18a add workaround for #7 after all since it was trivial 2021-11-28 00:12:19 +01:00
ed
38a163a09a better dropzone for extremely slow browsers 2021-11-28 00:11:21 +01:00
ed
8f031246d2 disable windows quickedit to avoid accidental lockups 2021-11-27 21:43:19 +01:00
ed
8f3d97dde7 indicate onclick action for audio files in grid view 2021-11-24 22:10:59 +01:00
ed
4acaf24d65 remember if media controls were open or not 2021-11-24 21:49:41 +01:00
ed
9a8dbbbcf8 another accesskey fix 2021-11-22 21:57:29 +01:00
ed
a3efc4c726 encode quoted queries into raw 2021-11-22 21:53:23 +01:00
ed
0278bf328f support raw-queries with quotes 2021-11-22 20:59:07 +01:00
ed
17ddd96cc6 up2k list wasnt centered anymore 2021-11-21 22:44:11 +01:00
ed
0e82e79aea mention the eq fixing gapless albums 2021-11-20 19:33:56 +01:00
ed
30f124c061 fix forcing compression levels 2021-11-20 18:51:15 +01:00
ed
e19d90fcfc add missing examples 2021-11-20 18:50:55 +01:00
ed
184bbdd23d legalese rephrasing 2021-11-20 17:58:37 +01:00
ed
30b50aec95 mention mtp readme 2021-11-20 17:51:49 +01:00
ed
c3c3d81db1 add mtp plugin for exif stripping 2021-11-20 17:45:56 +01:00
ed
49b7231283 fix mojibake support in misc mtp plugins 2021-11-20 17:33:24 +01:00
ed
edbedcdad3 v1.1.3 2021-11-20 02:27:09 +01:00
ed
e4ae5f74e6 add tooltip indicator 2021-11-20 01:47:16 +01:00
ed
2c7ffe08d7 include sha512 as both hex and b64 in responses 2021-11-20 01:03:32 +01:00
ed
3ca46bae46 good oneliner 2021-11-20 00:20:34 +01:00
ed
7e82aaf843 simplify/improve up2k ui debounce 2021-11-20 00:03:15 +01:00
ed
315bd71adf limit turbo runahead 2021-11-20 00:01:14 +01:00
ed
2c612c9aeb ux 2021-11-19 21:31:05 +01:00
ed
36aee085f7 add timeouts to FFmpeg things 2021-11-16 22:22:09 +01:00
ed
d01bb69a9c u2cli: option to ignore inaccessible files 2021-11-16 21:53:00 +01:00
ed
c9b1c48c72 sizelimit registry + persist without e2d 2021-11-16 21:31:24 +01:00
ed
aea3843cf2 this is just noise 2021-11-16 21:28:50 +01:00
ed
131b6f4b9a workaround chrome rendering bug 2021-11-16 21:28:36 +01:00
ed
6efb8b735a better handling of python builds without sqlite3 2021-11-16 01:13:04 +01:00
ed
223b7af2ce more iOS jank 2021-11-16 00:05:35 +01:00
ed
e72c2a6982 add fastpath for using the eq as a pure gain control 2021-11-15 23:19:43 +01:00
ed
dd9b93970e autoenable aac transcoding when codec missing 2021-11-15 23:18:52 +01:00
ed
e4c7cd81a9 update readme 2021-11-15 20:28:53 +01:00
ed
12b3a62586 fix dumb mistakes 2021-11-15 20:13:16 +01:00
ed
2da3bdcd47 delay tooltips, fix #6 2021-11-15 03:56:17 +01:00
ed
c1dccbe0ba trick iphones into preloading natively 2021-11-15 03:01:11 +01:00
ed
9629fcde68 optionally enable seeking through os controls 2021-11-15 02:47:42 +01:00
ed
cae436b566 add client-option to disconnect on HTTP 304 2021-11-15 02:45:18 +01:00
ed
01714700ae more gapless fixes 2021-11-14 20:25:28 +01:00
ed
51e6c4852b retire ogvjs 2021-11-14 19:28:44 +01:00
ed
b206c5d64e handle multiple simultaneous uploads of the same file 2021-11-14 15:03:11 +01:00
ed
62c3272351 add option to simulate latency 2021-11-14 15:01:20 +01:00
ed
c5d822c70a v1.1.2 2021-11-12 23:08:24 +01:00
ed
9c09b4061a prefer fpool on linux as well 2021-11-12 22:57:36 +01:00
ed
c26fb43ced more cleanup 2021-11-12 22:30:23 +01:00
ed
deb8f20db6 misc cleanup/unjank 2021-11-12 20:48:26 +01:00
ed
50e18ed8ff fix up2k layout in readonly folders 2021-11-12 19:18:52 +01:00
ed
31f3895f40 close misc views on escape 2021-11-12 19:18:29 +01:00
ed
615929268a cache monet 2021-11-12 02:00:44 +01:00
ed
b8b15814cf add traffic shaping, bump speeds on https/windows 2021-11-12 01:34:56 +01:00
ed
7766fffe83 mostly fix ogvjs preloading 2021-11-12 01:09:01 +01:00
ed
2a16c150d1 general preload improvements 2021-11-12 01:04:31 +01:00
ed
418c2166cc add cursed doubleclick-handler in gridsel mode 2021-11-11 01:03:14 +01:00
ed
a4dd44f648 textviewer initiable through hotkeys 2021-11-11 00:18:34 +01:00
ed
5352f7cda7 fix ctrl-a fencing in codeblocks 2021-11-11 00:11:29 +01:00
ed
5533b47099 handle crc collisions 2021-11-10 23:59:07 +01:00
ed
e9b14464ee terminate preloader if it can't finish in time 2021-11-10 22:53:02 +01:00
ed
4e986e5cd1 xhr preload is not gapless 2021-11-10 22:00:24 +01:00
ed
8a59b40c53 better clientside upload dedup 2021-11-10 20:57:45 +01:00
ed
391caca043 v1.1.1 2021-11-08 22:39:00 +01:00
ed
171ce348d6 improve swr 2021-11-08 22:25:35 +01:00
ed
c2cc729135 update sfx sizes 2021-11-08 21:11:10 +01:00
ed
e7e71b76f0 add alternative preloader for spotty connections 2021-11-08 20:46:40 +01:00
ed
a2af61cf6f fix clipboard sharing on recent firefox versions 2021-11-08 20:43:26 +01:00
ed
e111edd5e4 v1.1.0 2021-11-06 23:27:48 +01:00
ed
3375377371 update tests 2021-11-06 23:27:21 +01:00
ed
0ced020c67 update readme 2021-11-06 22:15:37 +01:00
ed
c0d7aa9e4a add file selection from text viewer 2021-11-06 22:02:43 +01:00
ed
e5b3d2a312 dont hilight huge files 2021-11-06 20:56:23 +01:00
ed
7b4a794981 systemd-service: add reload 2021-11-06 20:33:15 +01:00
ed
86a859de17 navpane default on if 60em viewport 2021-11-06 20:32:43 +01:00
ed
b3aaa7bd0f fence ctrl-a within documents and codeblocks 2021-11-06 19:37:19 +01:00
ed
a90586e6a8 add reload api 2021-11-06 19:05:58 +01:00
ed
807f272895 missed one 2021-11-06 18:33:32 +01:00
ed
f050647b43 rescan volumes on sigusr1 2021-11-06 18:20:31 +01:00
ed
73baebbd16 initial sigusr1 acc/vol reload 2021-11-06 07:15:04 +01:00
ed
f327f698b9 finally drop the -e2s compat 2021-11-06 03:19:57 +01:00
ed
8164910fe8 support setting argv from config files 2021-11-06 03:11:21 +01:00
ed
3498644055 fix permission parser so it matches the documentation 2021-11-06 03:09:03 +01:00
ed
d31116b54c spaghetti unraveling 2021-11-06 02:07:13 +01:00
ed
aced110cdf bump preload window wrt opus transcoding 2021-11-06 01:02:22 +01:00
ed
e9ab6aec77 allow full mime override 2021-11-06 00:50:20 +01:00
ed
15b261c861 help windows a little 2021-11-06 00:45:42 +01:00
ed
970badce66 positioning + optimization 2021-11-06 00:06:14 +01:00
ed
64304a9d65 make it optional 2021-11-06 00:06:05 +01:00
ed
d1983553d2 add click handlers 2021-11-06 00:04:45 +01:00
ed
6b15df3bcd fix wordwrap not being set initially 2021-11-06 00:00:35 +01:00
ed
730b1fff71 hilight parents of current folder 2021-11-06 00:00:04 +01:00
ed
c3add751e5 oh 2021-11-05 02:12:25 +01:00
ed
9da2dbdc1c rough attempt at docked navpane context 2021-11-05 02:03:35 +01:00
ed
977f09c470 .txt.gz is not actually .txt 2021-11-05 00:29:25 +01:00
ed
4d0c6a8802 ensure selected item visible when toggling navpane mode 2021-11-05 00:13:09 +01:00
ed
5345565037 a 2021-11-04 23:34:00 +01:00
ed
be38c27c64 thxci 2021-11-04 22:33:10 +01:00
ed
82a0401099 at some point firefox became case-sensitive 2021-11-04 22:10:45 +01:00
ed
33bea1b663 navpane mode-toggle button and hotkey 2021-11-04 22:04:32 +01:00
ed
f083acd46d let client force plaintext response content-type 2021-11-04 22:02:39 +01:00
ed
5aacd15272 ux 2021-11-04 03:38:09 +01:00
ed
cb7674b091 make prism optional 2021-11-04 03:10:13 +01:00
ed
3899c7ad56 golfimize 2021-11-04 02:36:21 +01:00
ed
d2debced09 navigation history support 2021-11-04 02:29:24 +01:00
ed
b86c0ddc48 optimize 2021-11-04 02:06:55 +01:00
ed
ba36f33bd8 add textfile viewer 2021-11-04 01:40:03 +01:00
ed
49368a10ba navpane enabled by default on non-touch devices 2021-11-04 01:35:05 +01:00
ed
ac1568cacf golf elm removal 2021-11-04 01:33:40 +01:00
ed
862ca3439d proactive opus cache expiration 2021-11-02 20:39:08 +01:00
ed
fdd4f9f2aa dirlist alignment 2021-11-02 18:59:34 +01:00
ed
aa2dc49ebe trailing newline for plaintext folder listings 2021-11-02 18:48:32 +01:00
ed
cc23b7ee74 better user-feedback when transcoding is unavailable 2021-11-02 03:22:39 +01:00
ed
f6f9fc5a45 add audio transcoder 2021-11-02 02:59:37 +01:00
ed
26c8589399 Merge branch 'hovudstraum' of github.com:9001/copyparty into hovudstraum 2021-11-02 00:26:54 +01:00
ed
c2469935cb add audio spectrogram thumbnails 2021-11-02 00:26:51 +01:00
kipukun
5e7c20955e contrib: describe rc script 2021-10-31 19:25:22 +01:00
kipukun
967fa38108 contrib: add freebsd rc script 2021-10-31 19:25:22 +01:00
ed
280fe8e36b document some of the api 2021-10-31 15:30:09 +01:00
ed
03ca96ccc3 performance tips 2021-10-31 06:24:11 +01:00
ed
b5b8a2c9d5 why are there https warnings when https checking is disabled 2021-10-31 03:37:31 +01:00
ed
0008832730 update repacker 2021-10-31 02:22:14 +02:00
72 changed files with 4252 additions and 1203 deletions

204
README.md
View File

@@ -16,6 +16,13 @@ turn your phone or raspi into a portable file server with resumable uploads/down
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support) 📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
## get the app
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
(basic upload client, nothing fancy yet)
## readme toc ## readme toc
* top * top
@@ -46,13 +53,13 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [markdown viewer](#markdown-viewer) - and there are *two* editors * [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks) * [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ... * [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config) * [server config](#server-config) - using arguments or config files, or a mix of both
* [file indexing](#file-indexing) * [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags * [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload * [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
* [upload events](#upload-events) - trigger a script/program on each upload * [upload events](#upload-events) - trigger a script/program on each upload
* [complete examples](#complete-examples) * [complete examples](#complete-examples)
* [browser support](#browser-support) - TLDR: yes * [browser support](#browser-support) - TLDR: yes
@@ -60,11 +67,17 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client * [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right? * [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [client-side](#client-side) - when uploading files
* [security](#security) - some notes on hardening * [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected * [gotchas](#gotchas) - behavior that might be unexpected
* [recovering from crashes](#recovering-from-crashes) * [recovering from crashes](#recovering-from-crashes)
* [client crashes](#client-crashes) * [client crashes](#client-crashes)
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads * [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
* [HTTP API](#HTTP-API)
* [read](#read)
* [write](#write)
* [admin](#admin)
* [general](#general)
* [dependencies](#dependencies) - mandatory deps * [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features * [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps) * [install recommended deps](#install-recommended-deps)
@@ -72,6 +85,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [sfx](#sfx) - there are two self-contained "binaries" * [sfx](#sfx) - there are two self-contained "binaries"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features * [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android) * [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [building](#building) * [building](#building)
* [dev env setup](#dev-env-setup) * [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx) * [just the sfx](#just-the-sfx)
@@ -153,14 +167,15 @@ feature summary
* browser * browser
* ☑ [navpane](#navpane) (directory tree sidebar) * ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename)) * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls) * ☑ audio player (with OS media controls and opus transcoding)
* ☑ image gallery with webm player * ☑ image gallery with webm player
* ☑ textfile browser with syntax hilighting
* ☑ [thumbnails](#thumbnails) * ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow * ☑ ...of images using Pillow
* ☑ ...of videos using FFmpeg * ☑ ...of videos using FFmpeg
* ☑ ...of audio (spectrograms) using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually) * ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading) * ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
* server indexing * server indexing
* ☑ [locate files by contents](#file-search) * ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size * ☑ search by name/path/date/size
@@ -218,6 +233,7 @@ some improvement ideas
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d` * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake * Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux) * `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
## general bugs ## general bugs
@@ -226,6 +242,10 @@ some improvement ideas
## not my bugs ## not my bugs
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
* Windows: folders cannot be accessed if the name ends with `.` * Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug * python or windows bug
@@ -242,6 +262,7 @@ some improvement ideas
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside? * is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there * yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
* can I make copyparty download a file to my server if I give it a URL? * can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible * not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
@@ -249,7 +270,10 @@ some improvement ideas
# accounts and volumes # accounts and volumes
per-folder, per-user permissions per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd` * `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone * `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set * the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
@@ -304,8 +328,10 @@ the browser has the following hotkeys (always qwerty)
* `B` toggle breadcrumbs / [navpane](#navpane) * `B` toggle breadcrumbs / [navpane](#navpane)
* `I/K` prev/next folder * `I/K` prev/next folder
* `M` parent folder (or unexpand current) * `M` parent folder (or unexpand current)
* `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails) * `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons * `T` toggle thumbnails / icons
* `ESC` close various things
* `ctrl-X` cut selected files/folders * `ctrl-X` cut selected files/folders
* `ctrl-V` paste * `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder * `F2` [rename](#batch-rename) selected file/folder
@@ -315,6 +341,10 @@ the browser has the following hotkeys (always qwerty)
* ctrl+`Up/Down` move cursor and scroll viewport * ctrl+`Up/Down` move cursor and scroll viewport
* `Space` toggle file selection * `Space` toggle file selection
* `Ctrl-A` toggle select all * `Ctrl-A` toggle select all
* when a textfile is open:
* `I/K` prev/next textfile
* `S` toggle selection of open file
* `M` close textfile
* when playing audio: * when playing audio:
* `J/L` prev/next song * `J/L` prev/next song
* `U/O` skip 10sec back/forward * `U/O` skip 10sec back/forward
@@ -353,9 +383,13 @@ switching between breadcrumbs or navpane
click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing) click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing)
* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size * `[+]` and `[-]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder * `[🎯]` jumps to the currently open folder
* `[📃]` toggles between showing folders and textfiles
* `[📌]` shows the name of all parent folders in a docked panel
* `[a]` toggles automatic widening as you go deeper * `[a]` toggles automatic widening as you go deeper
* `[↵]` toggles wordwrap
* `[👀]` show full name on hover (if wordwrap is off)
## thumbnails ## thumbnails
@@ -366,9 +400,12 @@ press `g` to toggle grid-view instead of the file listing, and `t` toggles icon
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg` images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾
## zip downloads ## zip downloads
@@ -419,7 +456,9 @@ see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png) ![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png) **protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
the up2k UI is the epitome of polished intuitive experiences: the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time * "parallel uploads" specifies how many chunks to upload at the same time
@@ -457,8 +496,6 @@ the files will be hashed on the client-side, and each hash is sent to the server
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]` files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much * the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
### unpost ### unpost
@@ -546,6 +583,8 @@ and there are *two* editors
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab` * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* enabling the audio equalizer can help make gapless albums fully gapless in some browsers (chrome), so consider leaving it on with all the values at zero
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals) * get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1 * if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
@@ -576,6 +615,12 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings
# server config # server config
using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
* or click the `[reload cfg]` button in the control-panel when logged in as admin
## file indexing ## file indexing
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both. file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
@@ -588,10 +633,12 @@ through arguments:
* `-e2ts` also scans for tags in all files that don't have tags yet * `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex * `-e2tsr` also deletes all existing tags, doing a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling: the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup * `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on * `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*` * `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
* `-v ~/music::r:c,d2ts` same except only affecting tags
note: note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr` * the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
@@ -612,7 +659,7 @@ if you set `--no-hash [...]` globally, you can enable hashing for specific volum
set upload rules using volume flags, some examples: set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g) * `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`: * `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1) * `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format * `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -646,6 +693,12 @@ things to note,
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected * the files will be indexed after compression, so dupe-detection and file-search will not work as expected
some examples, some examples,
* `-v inc:inc:w:c,pk=xz,0`
folder named inc, shared at inc, write-only for everyone, forces xz compression at level 0
* `-v inc:inc:w:c,pk`
same write-only inc, but forces gz compression (default) instead of xz
* `-v inc:inc:w:c,gz`
allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4`
## database location ## database location
@@ -690,7 +743,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
## file parser plugins ## file parser plugins
provide custom parsers to index additional tags provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
@@ -761,7 +814,7 @@ TLDR: yes
* internet explorer 6 to 8 behave the same * internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions * firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`) * `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*3` using a wasm decoder which consumes a bit more power * `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
quick summary of more eccentric web-browsers trying to view a directory index: quick summary of more eccentric web-browsers trying to view a directory index:
@@ -781,8 +834,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
interact with copyparty using non-browser clients interact with copyparty using non-browser clients
* javascript: dump some state into a file (two separate examples) * javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});` * `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');` * `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin) * curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}` * `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
@@ -811,7 +864,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;} b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv b512 <movie.mkv
you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password) you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
# up2k # up2k
@@ -860,6 +913,21 @@ below are some tweaks roughly ordered by usefulness:
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u ...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
## client-side
when uploading files,
* chrome is recommended, at least compared to firefox:
* up to 90% faster when hashing, especially on SSDs
* up to 40% faster when uploading over extremely fast internets
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again
* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
* switching to another browser-tab also works, the favicon will update every 10 seconds in that case
* unlikely to be a problem, but can happen when uploading many small files, or your internet is too fast, or PC too slow
# security # security
some notes on hardening some notes on hardening
@@ -903,6 +971,84 @@ however you can hit `F12` in the up2k tab and use the devtools to see how far yo
`await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})` `await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
# HTTP API
* table-column `params` = URL parameters; `?foo=bar&qux=...`
* table-column `body` = POST payload
* method `jPOST` = json post
* method `mPOST` = multipart post
* method `uPOST` = url-encoded post
* `FILE` = conventional HTTP file upload entry (rfc1867 et al, filename in `Content-Disposition`)
authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
## read
| method | params | result |
|--|--|--|
| GET | `?ls` | list files/folders at URL as JSON |
| GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
| GET | `?ls=t` | list files/folders at URL as plaintext |
| GET | `?ls=v` | list files/folders at URL, terminal-formatted |
| GET | `?b` | list files/folders at URL as simplified HTML |
| GET | `?tree=.` | list one level of subdirectories inside URL |
| GET | `?tree` | list one level of subdirectories for each level until URL |
| GET | `?tar` | download everything below URL as a tar file |
| GET | `?zip=utf-8` | download everything below URL as a zip file |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?raw` | get markdown file at URL as plaintext |
| GET | `?txt` | get file at URL as plaintext |
| GET | `?txt=iso-8859-1` | ...with specific charset |
| GET | `?th` | get image/video at URL as thumbnail |
| GET | `?th=opus` | convert audio file to 128kbps opus |
| GET | `?th=caf` | ...in the iOS-proprietary container |
| method | body | result |
|--|--|--|
| jPOST | `{"q":"foo"}` | do a server-wide search; see the `[🔎]` search tab `raw` field for syntax |
| method | params | body | result |
|--|--|--|--|
| jPOST | `?tar` | `["foo","bar"]` | download folders `foo` and `bar` inside URL as a tar file |
## write
| method | params | result |
|--|--|--|
| GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| GET | `?delete` | | delete URL recursively |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
server behavior of `msg` can be reconfigured with `--urlform`
## admin
| method | params | result |
|--|--|--|
| GET | `?reload=cfg` | reload config files and rescan volumes |
| GET | `?scan` | initiate a rescan of the volume which provides URL |
| GET | `?stack` | show a stacktrace of all threads |
## general
| method | params | result |
|--|--|--|
| GET | `?pw=x` | logout |
# dependencies # dependencies
mandatory deps: mandatory deps:
@@ -919,7 +1065,7 @@ enable music tags:
enable [thumbnails](#thumbnails) of... enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` (requires py2.7 or py3.5+) * **images:** `Pillow` (requires py2.7 or py3.5+)
* **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH` * **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler) * **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pillow-avif-plugin` * **AVIF pictures:** `pillow-avif-plugin`
@@ -953,19 +1099,19 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
reduce the size of an sfx by removing features reduce the size of an sfx by removing features
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL) if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `525k` size of original sfx.py as of v0.11.30 * `393k` size of original sfx.py as of v1.1.3
* `315k` after `./scripts/make-sfx.sh re no-ogv` * `310k` after `./scripts/make-sfx.sh re no-cm`
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm` * `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
the features you can opt to drop are the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k * `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `cm`/easymde, the "fancy" markdown editor, saves ~92k * `hl`, prism, the syntax hilighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k * `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k * `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
for the `re`pack to work, first run one of the sfx'es once to unpack it for the `re`pack to work, first run one of the sfx'es once to unpack it
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL) **note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL)
# install on android # install on android
@@ -979,6 +1125,16 @@ echo $?
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# reporting bugs
ideas for context to include in bug reports
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
```
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
```
# building # building
## dev env setup ## dev env setup

View File

@@ -2,9 +2,14 @@
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads * file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers * faster than browsers
* early beta, if something breaks just restart it * if something breaks just restart it
# [`partyjournal.py`](partyjournal.py)
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
* outputs a standalone html file
* optional mapping from IP-addresses to nicknames
# [`copyparty-fuse.py`](copyparty-fuse.py) # [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only) * mount a copyparty server as a local filesystem (read-only)

View File

@@ -11,14 +11,18 @@ import re
import os import os
import sys import sys
import time import time
import json
import stat import stat
import errno import errno
import struct import struct
import codecs
import platform
import threading import threading
import http.client # py2: httplib import http.client # py2: httplib
import urllib.parse import urllib.parse
from datetime import datetime from datetime import datetime
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
try: try:
import fuse import fuse
@@ -38,7 +42,7 @@ except:
mount a copyparty server (local or remote) as a filesystem mount a copyparty server (local or remote) as a filesystem
usage: usage:
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
dependencies: dependencies:
sudo apk add fuse-dev python3-dev sudo apk add fuse-dev python3-dev
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
""" """
WINDOWS = sys.platform == "win32"
MACOS = platform.system() == "Darwin"
def threadless_log(msg): def threadless_log(msg):
print(msg + "\n", end="") print(msg + "\n", end="")
@@ -93,6 +101,41 @@ def html_dec(txt):
) )
def register_wtf8():
def wtf8_enc(text):
return str(text).encode("utf-8", "surrogateescape"), len(text)
def wtf8_dec(binary):
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
def wtf8_search(encoding_name):
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
codecs.register(wtf8_search)
# translation tables between characters that are illegal in windows
# filenames and their private-use-area stand-ins; populated in main()
bad_good = {}
good_bad = {}


def enwin(txt):
    """map problematic characters to their private-use replacements
    (identity until the tables are populated)"""
    # NOTE: the original body had unreachable statements after this
    # return (a leftover replace-loop); removed as dead code
    return "".join([bad_good.get(x, x) for x in txt])


def dewin(txt):
    """inverse of enwin: restore the original characters"""
    return "".join([good_bad.get(x, x) for x in txt])
class CacheNode(object): class CacheNode(object):
def __init__(self, tag, data): def __init__(self, tag, data):
self.tag = tag self.tag = tag
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
class Gateway(object): class Gateway(object):
def __init__(self, base_url): def __init__(self, base_url, pw):
self.base_url = base_url self.base_url = base_url
self.pw = pw
ui = urllib.parse.urlparse(base_url) ui = urllib.parse.urlparse(base_url)
self.web_root = ui.path.strip("/") self.web_root = ui.path.strip("/")
@@ -135,8 +179,7 @@ class Gateway(object):
self.conns = {} self.conns = {}
def quotep(self, path): def quotep(self, path):
# TODO: mojibake support path = path.encode("wtf-8")
path = path.encode("utf-8", "ignore")
return quote(path, safe="/") return quote(path, safe="/")
def getconn(self, tid=None): def getconn(self, tid=None):
@@ -159,20 +202,29 @@ class Gateway(object):
except: except:
pass pass
def sendreq(self, *args, **kwargs): def sendreq(self, *args, **ka):
tid = get_tid() tid = get_tid()
if self.pw:
ck = "cppwd=" + self.pw
try:
ka["headers"]["Cookie"] = ck
except:
ka["headers"] = {"Cookie": ck}
try: try:
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), **kwargs) c.request(*list(args), **ka)
return c.getresponse() return c.getresponse()
except: except:
self.closeconn(tid) self.closeconn(tid)
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), **kwargs) c.request(*list(args), **ka)
return c.getresponse() return c.getresponse()
def listdir(self, path): def listdir(self, path):
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots" if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path) r = self.sendreq("GET", web_path)
if r.status != 200: if r.status != 200:
self.closeconn() self.closeconn()
@@ -182,9 +234,12 @@ class Gateway(object):
) )
) )
return self.parse_html(r) return self.parse_jls(r)
def download_file_range(self, path, ofs1, ofs2): def download_file_range(self, path, ofs1, ofs2):
if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw" web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1) hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
log("downloading {}".format(hdr_range)) log("downloading {}".format(hdr_range))
@@ -200,40 +255,27 @@ class Gateway(object):
return r.read() return r.read()
def parse_html(self, datasrc): def parse_jls(self, datasrc):
ret = [] rsp = b""
remainder = b""
ptn = re.compile(
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
)
while True: while True:
buf = remainder + datasrc.read(4096) buf = datasrc.read(1024 * 32)
# print('[{}]'.format(buf.decode('utf-8')))
if not buf: if not buf:
break break
remainder = b"" rsp += buf
endpos = buf.rfind(b"\n")
if endpos >= 0:
remainder = buf[endpos + 1 :]
buf = buf[:endpos]
lines = buf.decode("utf-8").split("\n") rsp = json.loads(rsp.decode("utf-8"))
for line in lines: ret = []
m = ptn.match(line) for statfun, nodes in [
if not m: [self.stat_dir, rsp["dirs"]],
# print(line) [self.stat_file, rsp["files"]],
continue ]:
for n in nodes:
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
if bad_good:
fname = enwin(fname)
ftype, fname, fsize, fdate = m.groups() ret.append([fname, statfun(n["ts"], n["sz"]), 0])
fname = html_dec(fname)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
sz = int(fsize)
if ftype == "-":
ret.append([fname, self.stat_file(ts, sz), 0])
else:
ret.append([fname, self.stat_dir(ts, sz), 0])
return ret return ret
@@ -262,6 +304,7 @@ class CPPF(Fuse):
Fuse.__init__(self, *args, **kwargs) Fuse.__init__(self, *args, **kwargs)
self.url = None self.url = None
self.pw = None
self.dircache = [] self.dircache = []
self.dircache_mtx = threading.Lock() self.dircache_mtx = threading.Lock()
@@ -271,7 +314,7 @@ class CPPF(Fuse):
def init2(self): def init2(self):
# TODO figure out how python-fuse wanted this to go # TODO figure out how python-fuse wanted this to go
self.gw = Gateway(self.url) # .decode('utf-8')) self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
info("up") info("up")
def clean_dircache(self): def clean_dircache(self):
@@ -536,6 +579,8 @@ class CPPF(Fuse):
def getattr(self, path): def getattr(self, path):
log("getattr [{}]".format(path)) log("getattr [{}]".format(path))
if WINDOWS:
path = enwin(path) # windows occasionally decodes f0xx to xx
path = path.strip("/") path = path.strip("/")
try: try:
@@ -568,9 +613,25 @@ class CPPF(Fuse):
def main(): def main():
time.strptime("19970815", "%Y%m%d") # python#7980 time.strptime("19970815", "%Y%m%d") # python#7980
register_wtf8()
if WINDOWS:
os.system("rem")
for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx
# (e000 to f8ff is basic-plane private-use)
bad_good[ch] = chr(ord(ch) + 0xF000)
for n in range(0, 0x100):
# map surrogateescape to another private-use area
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
for k, v in bad_good.items():
good_bad[v] = k
server = CPPF() server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None) server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
server.parse(values=server, errex=1) server.parse(values=server, errex=1)
if not server.url or not str(server.url).startswith("http"): if not server.url or not str(server.url).startswith("http"):
print("\nerror:") print("\nerror:")
@@ -578,7 +639,7 @@ def main():
print(" need argument: mount-path") print(" need argument: mount-path")
print("example:") print("example:")
print( print(
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas" " ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
) )
sys.exit(1) sys.exit(1)

View File

@@ -6,9 +6,13 @@ some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2 * [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3 * [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
these do not have any problematic dependencies: these invoke standalone programs which are GPL or similar, so is legally fine for most purposes:
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)
these do not have any problematic dependencies at all:
* [cksum.py](./cksum.py) computes various checksums * [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser) * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)

View File

@@ -19,18 +19,18 @@ dep: ffmpeg
def det(tf): def det(tf):
# fmt: off # fmt: off
sp.check_call([ sp.check_call([
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-ss", "13", b"-ss", b"13",
"-y", "-i", fsenc(sys.argv[1]), b"-y", b"-i", fsenc(sys.argv[1]),
"-map", "0:a:0", b"-map", b"0:a:0",
"-ac", "1", b"-ac", b"1",
"-ar", "22050", b"-ar", b"22050",
"-t", "300", b"-t", b"300",
"-f", "f32le", b"-f", b"f32le",
tf fsenc(tf)
]) ])
# fmt: on # fmt: on

View File

@@ -23,15 +23,15 @@ dep: ffmpeg
def det(tf): def det(tf):
# fmt: off # fmt: off
sp.check_call([ sp.check_call([
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-y", "-i", fsenc(sys.argv[1]), b"-y", b"-i", fsenc(sys.argv[1]),
"-map", "0:a:0", b"-map", b"0:a:0",
"-t", "300", b"-t", b"300",
"-sample_fmt", "s16", b"-sample_fmt", b"s16",
tf fsenc(tf)
]) ])
# fmt: on # fmt: on

93
bin/mtag/image-noexif.py Normal file
View File

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
remove exif tags from uploaded images
dependencies:
exiftool
about:
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
if the original image is modified in-place, then copyparty will keep the original
hash in up2k.db for a while (until the next volume rescan), so if the image is
reuploaded after a rescan then the upload will be renamed and kept as a dupe
alternatively you could switch the logic around, making a copy of the original
image into a subfolder named "exif" and modify the original in-place, but then
up2k.db will be out of sync until the next rescan, so any additional uploads
of the same image will get symlinked (deduplicated) to the modified copy
instead of the original in "exif"
or maybe delete the original image after processing, that would kinda work too
example copyparty config to use this:
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
explained:
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
append "noexif" to the list of known tags (mtp),
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
do this on all uploads with the file extension "jpg" or "jpeg",
ad = parse file regardless if FFmpeg thinks it is audio or not
PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
and your python must have sqlite3 support compiled in
"""
import os
import sys
import time
import filecmp
import subprocess as sp
try:
    from copyparty.util import fsenc
except ImportError:
    # standalone fallback for running the plugin outside the server;
    # was a bare "except:" which would also hide unrelated errors
    def fsenc(p):
        return p.encode("utf-8")
def main():
    """strip exif/iptc/xmp tags from the uploaded image (argv[1]) into a
    "noexif" sibling folder; prints "clean", "exif" or "failed"."""
    folder, name = os.path.split(sys.argv[1])

    # guard: never re-process files that already live in a noexif folder
    if os.path.basename(folder) == "noexif":
        return

    os.chdir(folder)
    src = fsenc(name)
    dst = os.path.join(b"noexif", src)

    # -P preserves the file's mtime; -o writes the cleaned copy into noexif/
    sp.check_output([
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-o",
        b"noexif/",
        b"--",
        src,
    ])

    if not os.path.exists(dst):
        print("failed")
        return

    # byte-compare to report whether the original actually had metadata
    verdict = "clean" if filecmp.cmp(src, dst, shallow=False) else "exif"
    print(verdict)

    # lastmod = os.path.getmtime(f1)
    # times = (int(time.time()), int(lastmod))
    # os.utime(f2, times)
if __name__ == "__main__":
main()

View File

@@ -13,7 +13,7 @@ try:
except: except:
def fsenc(p): def fsenc(p):
return p return p.encode("utf-8")
""" """
@@ -24,13 +24,13 @@ dep: ffmpeg
def det(): def det():
# fmt: off # fmt: off
cmd = [ cmd = [
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-i", fsenc(sys.argv[1]), b"-i", fsenc(sys.argv[1]),
"-f", "framemd5", b"-f", b"framemd5",
"-" b"-"
] ]
# fmt: on # fmt: on

View File

@@ -0,0 +1,21 @@
// ==UserScript==
// @name twitter-unmute
// @namespace http://ocv.me/
// @version 0.1
// @description memes
// @author ed <irc.rizon.net>
// @match https://twitter.com/*
// @icon https://www.google.com/s2/favicons?domain=twitter.com
// @grant GM_addStyle
// ==/UserScript==
// keep the twitter video unmuted by re-asserting muted=false every 200ms
// (the page itself is what re-mutes it, hence the polling loop)
function grunnur() {
    setInterval(function () {
        //document.querySelector('div[aria-label="Unmute"]').click();
        document.querySelector('video').muted = false;
    }, 200);
}

// userscripts run in a sandbox, so inject the function into the page's own
// context by serializing its source (grunnur.toString()) into a script tag;
// the code text itself is what executes, so keep it self-contained
var scr = document.createElement('script');
scr.textContent = '(' + grunnur.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);

139
bin/mtag/very-bad-idea.py Executable file
View File

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
"""
use copyparty as a chromecast replacement:
* post a URL and it will open in the default browser
* upload a file and it will open in the default application
* the `key` command simulates keyboard input
* the `x` command executes other xdotool commands
* the `c` command executes arbitrary unix commands
the android app makes it a breeze to post pics and links:
https://github.com/9001/party-up/releases
(iOS devices have to rely on the web-UI)
goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
and you probably want `twitter-unmute.user.js` from the res folder
-----------------------------------------------------------------------
-- startup script:
-----------------------------------------------------------------------
#!/bin/bash
set -e
# create qr code
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
/usr/bin/feh -x /dev/shm/cpp-qr.png &
# reposition and make topmost (with janky raspbian support)
( sleep 0.5
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
wmctrl -r :ACTIVE: -b toggle,above || true
ps aux | grep -E 'sleep[ ]7\.27' ||
while true; do
w=$(xdotool getactivewindow)
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
xdotool windowactivate $w
xdotool windowfocus $w
sleep 7.27 || break
done &
xeyes # distraction window to prevent ^w from closing the qr-code
) &
# bail if copyparty is already running
ps aux | grep -E '[3] copy[p]arty' && exit 0
# dumb chrome wrapper to allow autoplay
cat >/usr/local/bin/chromium-browser <<'EOF'
#!/bin/bash
set -e
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
EOF
chmod 755 /usr/local/bin/chromium-browser
# start the server (note: replace `-v.::rw:` with `-v.::r:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py
"""
import os
import sys
import time
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
def main():
    """entrypoint: argv[1] is the uploaded file; urlform posts (files
    starting with "msg=") are dispatched as commands, anything else is
    opened by its path"""
    path = os.path.abspath(sys.argv[1])
    with open(path, "rb") as f:
        head = f.read(4096)

    if head.startswith(b"msg="):
        open_post(head)
    else:
        open_url(path)
def open_post(txt):
    """handle an urlform post (raw "msg=..." bytes): either a command
    ("key ...", "x ...", "c ...") or a plain URL/text to open"""
    txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
    try:
        k, v = txt.split(" ", 1)
    except ValueError:
        # no space means there is no command verb; open as a URL.
        # must return here -- the original fell through with k/v
        # unbound, raising NameError on the comparison below
        return open_url(txt)

    if k == "key":
        # simulate keyboard input
        sp.call(["xdotool", "key"] + v.split(" "))
    elif k == "x":
        # arbitrary xdotool subcommand
        sp.call(["xdotool"] + v.split(" "))
    elif k == "c":
        # arbitrary unix command; leading VAR=value words become
        # environment variables for the child process
        env = os.environ.copy()
        while " " in v:
            v1, v2 = v.split(" ", 1)
            if "=" not in v1:
                break

            ek, ev = v1.split("=", 1)
            env[ek] = ev
            v = v2

        sp.call(v.split(" "), env=env)
    else:
        open_url(txt)
def open_url(txt):
    """notify, clean up any running viewers/players, then hand txt
    (file path or URL) to xdg-open"""
    ext = txt.rsplit(".")[-1].lower()
    sp.call(["notify-send", "--", txt])

    is_pic = ext in ["jpg", "jpeg", "png", "gif", "webp"]
    if not is_pic:
        # kill media players and mash ctrl+w to close browser tabs
        # sp.call(["wmctrl", "-c", ":ACTIVE:"])  # closes the active window correctly
        for player in ("vlc", "mpv", "feh"):
            sp.call(["killall", player])

        time.sleep(0.5)
        for _ in range(20):
            sp.call(["xdotool", "key", "ctrl+w"])  # closes the open tab correctly
    # else:
    #   sp.call(["xdotool", "getactivewindow", "windowminimize"])  # minimizes the focused window

    # close any error messages:
    sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
    # sp.call(["xdotool", "key", "ctrl+alt+d"])  # doesnt work at all
    # sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
    # sp.call(["xdotool", "keyup", "ctrl+alt+d"])
    sp.call(["xdg-open", txt])
main()

177
bin/partyjournal.py Executable file
View File

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
"""
partyjournal.py: chronological history of uploads
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
produces a chronological list of all uploads,
by collecting info from up2k databases and the filesystem
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
affecting all successive mappings
usage:
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
"""
import sys
import base64
import sqlite3
import argparse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
FS_ENCODING = sys.getfilesystemencoding()
# argparse formatter that both shows option defaults and keeps the
# module docstring's manual linebreaks in --help output
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    pass
##
## snibbed from copyparty
def s3dec(v):
    """undo copyparty's "s3" filename encoding: values prefixed with
    "//" carry urlsafe-base64 of the raw filesystem bytes; anything
    else is returned unchanged"""
    if not v.startswith("//"):
        return v

    raw = base64.urlsafe_b64decode(v.encode("ascii")[2:])
    return raw.decode(FS_ENCODING, "replace")
def quotep(txt):
    """percent-encode txt for use in a URL path, keeping "/" literal
    and turning any remaining spaces into "+" """
    raw = txt.encode("utf-8", "replace")
    encoded = quote(raw, safe=b"/").encode("ascii")
    return encoded.replace(b" ", b"+").decode("utf-8", "replace")
def html_escape(s, quote=False, crlf=False):
    """html.escape but also newlines"""
    # "&" must be first so later entities are not double-escaped
    pairs = [("&", "&amp;"), ("<", "&lt;"), (">", "&gt;")]
    if quote:
        pairs += [('"', "&quot;"), ("'", "&#x27;")]
    if crlf:
        pairs += [("\r", "&#13;"), ("\n", "&#10;")]

    for plain, entity in pairs:
        s = s.replace(plain, entity)
    return s
## end snibs
##
def main():
    """read ./.hist/up2k.db and print the upload history as an html table on stdout

    cli args build an ip -> nickname map: ".=PREFIX" sets the subnet prefix
    applied to all following "nick=hostpart" mappings (see module docstring)
    """
    ap = argparse.ArgumentParser(formatter_class=APF)
    ap.add_argument("who", nargs="*")
    ar = ap.parse_args()

    imap = {}
    subnet = ""
    for v in ar.who:
        if "=" not in v:
            raise Exception("bad who: " + v)

        # split on the first '=' only, so values containing '=' survive
        k, v = v.split("=", 1)
        if k == ".":
            subnet = v
            continue

        imap["{}{}".format(subnet, v)] = k

    print(repr(imap), file=sys.stderr)

    print(
        """\
<!DOCTYPE html>
<html lang="en">
<head><meta charset="utf-8"><style>
html, body {
color: #ccc;
background: #222;
font-family: sans-serif;
}
a {
color: #fc5;
}
td, th {
padding: .2em .5em;
border: 1px solid #999;
border-width: 0 1px 1px 0;
white-space: nowrap;
}
td:nth-child(1),
td:nth-child(2),
td:nth-child(3) {
font-family: monospace, monospace;
text-align: right;
}
tr:first-child {
position: sticky;
top: -1px;
}
th {
background: #222;
text-align: left;
}
</style></head><body><table><tr>
<th>wark</th>
<th>time</th>
<th>size</th>
<th>who</th>
<th>link</th>
</tr>"""
    )

    # NOTE(review): db path is relative to cwd; run this from the volume root
    db_path = ".hist/up2k.db"
    conn = sqlite3.connect(db_path)
    q = r"pragma table_info(up)"
    inf = conn.execute(q).fetchall()
    cols = [x[1] for x in inf]
    print("<!-- " + str(cols) + " -->")
    # ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']

    # sort by upload-time if known, falling back to file modification time
    q = r"select * from up order by case when at > 0 then at else mt end"
    for w, mt, sz, rd, fn, ip, at in conn.execute(q):
        link = "/".join([s3dec(x) for x in [rd, fn] if x])
        if fn.startswith("put-") and sz < 4096:
            # small "put-*" files are message-uploads; replace the link
            # with the message target if the payload is a msg= form
            try:
                with open(link, "rb") as f:
                    txt = f.read().decode("utf-8", "replace")
            except:
                continue

            if txt.startswith("msg="):
                txt = txt.encode("utf-8", "replace")
                txt = unquote(txt.replace(b"+", b" "))
                link = txt.decode("utf-8")[4:]

        sz = "{:,}".format(sz)
        v = [
            w[:16],
            datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
                "%Y-%m-%d %H:%M:%S"
            ),
            sz,
            imap.get(ip, ip),
        ]
        row = "<tr>\n "
        # bugfix: cells previously closed with </th> instead of </td>
        row += "\n ".join(["<td>{}</td>".format(x) for x in v])
        # bugfix: escape the href as an attribute value (quote=True),
        # otherwise a '"' or '&' in a filename breaks the markup
        row += '\n <td><a href="{}">{}</a></td>'.format(
            html_escape(link, quote=True), html_escape(link)
        )
        row += "\n</tr>"
        print(row)

    conn.close()
    print("</table></body></html>")
if __name__ == "__main__":
    main()  # emits the html report to stdout; redirect to a file

View File

@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals
""" """
up2k.py: upload to copyparty up2k.py: upload to copyparty
2021-10-29, v0.10, ed <irc.rizon.net>, MIT-Licensed 2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests - dependencies: requests
@@ -224,29 +224,47 @@ class CTermsize(object):
ss = CTermsize() ss = CTermsize()
def statdir(top): def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info""" """non-recursive listing of directory contents, along with stat() info"""
if hasattr(os, "scandir"):
with os.scandir(top) as dh: with os.scandir(top) as dh:
for fh in dh: for fh in dh:
yield [os.path.join(top, fh.name), fh.stat()] abspath = os.path.join(top, fh.name)
else: try:
yield [abspath, fh.stat()]
except:
err.append(abspath)
def _lsd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
for name in os.listdir(top): for name in os.listdir(top):
abspath = os.path.join(top, name) abspath = os.path.join(top, name)
try:
yield [abspath, os.stat(abspath)] yield [abspath, os.stat(abspath)]
except:
err.append(abspath)
def walkdir(top): if hasattr(os, "scandir"):
statdir = _scd
else:
statdir = _lsd
def walkdir(err, top):
"""recursive statdir""" """recursive statdir"""
for ap, inf in sorted(statdir(top)): for ap, inf in sorted(statdir(err, top)):
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
for x in walkdir(ap): try:
for x in walkdir(err, ap):
yield x yield x
except:
err.append(ap)
else: else:
yield ap, inf yield ap, inf
def walkdirs(tops): def walkdirs(err, tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]""" """recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii") sep = "{0}".format(os.sep).encode("ascii")
for top in tops: for top in tops:
@@ -256,7 +274,7 @@ def walkdirs(tops):
stop = os.path.dirname(top) stop = os.path.dirname(top)
if os.path.isdir(top): if os.path.isdir(top):
for ap, inf in walkdir(top): for ap, inf in walkdir(err, top):
yield stop, ap[len(stop) :].lstrip(sep), inf yield stop, ap[len(stop) :].lstrip(sep), inf
else: else:
d, n = top.rsplit(sep, 1) d, n = top.rsplit(sep, 1)
@@ -372,7 +390,7 @@ def handshake(req_ses, url, file, pw, search):
r = req_ses.post(url, headers=headers, json=req) r = req_ses.post(url, headers=headers, json=req)
break break
except: except:
eprint("handshake failed, retry...\n") eprint("handshake failed, retrying: {0}\n".format(file.name))
time.sleep(1) time.sleep(1)
try: try:
@@ -446,20 +464,32 @@ class Ctl(object):
nfiles = 0 nfiles = 0
nbytes = 0 nbytes = 0
for _, _, inf in walkdirs(ar.files): err = []
for _, _, inf in walkdirs(err, ar.files):
nfiles += 1 nfiles += 1
nbytes += inf.st_size nbytes += inf.st_size
if err:
eprint("\n# failed to access {0} paths:\n".format(len(err)))
for x in err:
eprint(x.decode("utf-8", "replace") + "\n")
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
if not ar.ok:
eprint("aborting because --ok is not set\n")
return
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes))) eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles self.nfiles = nfiles
self.nbytes = nbytes self.nbytes = nbytes
if ar.td: if ar.td:
requests.packages.urllib3.disable_warnings()
req_ses.verify = False req_ses.verify = False
if ar.te: if ar.te:
req_ses.verify = ar.te req_ses.verify = ar.te
self.filegen = walkdirs(ar.files) self.filegen = walkdirs([], ar.files)
if ar.safe: if ar.safe:
self.safe() self.safe()
else: else:
@@ -475,7 +505,7 @@ class Ctl(object):
print("{0} {1}\n hash...".format(self.nfiles - nf, upath)) print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
get_hashlist(file, None) get_hashlist(file, None)
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/" burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
while True: while True:
print(" hs...") print(" hs...")
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search) hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
@@ -743,7 +773,7 @@ class Ctl(object):
try: try:
upload(req_ses, file, cid, self.ar.a) upload(req_ses, file, cid, self.ar.a)
except: except:
eprint("upload failed, retry...\n") eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
pass # handshake will fix it pass # handshake will fix it
with self.mutex: with self.mutex:
@@ -782,6 +812,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process") ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-a", metavar="PASSWORD", help="password") ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)") ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap = app.add_argument_group("performance tweaks") ap = app.add_argument_group("performance tweaks")
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections") ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading") ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")

View File

@@ -1,3 +1,6 @@
### [`plugins/`](plugins/)
* example extensions
### [`copyparty.bat`](copyparty.bat) ### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder) * launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH * intended for windows machines with no python.exe in PATH
@@ -30,6 +33,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
# OS integration # OS integration
init-scripts to start copyparty as a service init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally * [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
* [`rc/copyparty`](rc/copyparty) runs sfx normally on freebsd, create a `copyparty` user
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot * [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
* [`openrc/copyparty`](openrc/copyparty) * [`openrc/copyparty`](openrc/copyparty)

View File

@@ -13,7 +13,7 @@
upstream cpp { upstream cpp {
server 127.0.0.1:3923; server 127.0.0.1:3923;
keepalive 120; keepalive 1;
} }
server { server {
listen 443 ssl; listen 443 ssl;

25
contrib/plugins/README.md Normal file
View File

@@ -0,0 +1,25 @@
# example resource files
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
## meadup.js
* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`

506
contrib/plugins/meadup.js Normal file
View File

@@ -0,0 +1,506 @@
// USAGE:
// place this file somewhere in the webroot and then
// python3 -m copyparty --js-browser /memes/meadup.js
//
// FEATURES:
// * adds an onscreen keyboard for operating a media center remotely,
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
// * adds an interactive anime girl (if you can find the dependencies)
// payloads for the hamburger key: one of these urls is sent
// to the server at random each time the key is pressed
var hambagas = [
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
];
// keybaord,
// onscreen keyboard by @steinuil
// render the onscreen keyboard into .keybaord-container and wire up
// the keys; each click POSTs a urlencoded "msg=key <combo>" form to
// BASE_URL (the server-side very-bad-idea.py handler feeds it to
// xdotool -- see the header comment). consoleLog/consoleError are
// caller-supplied feedback callbacks (here: copyparty toasts).
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
document.querySelector('.keybaord-container').innerHTML = `
<div class="keybaord-body">
<div class="keybaord-row keybaord-row-1">
<div class="keybaord-key" data-keybaord-key="Escape">
esc
</div>
<div class="keybaord-key" data-keybaord-key="F1">
F1
</div>
<div class="keybaord-key" data-keybaord-key="F2">
F2
</div>
<div class="keybaord-key" data-keybaord-key="F3">
F3
</div>
<div class="keybaord-key" data-keybaord-key="F4">
F4
</div>
<div class="keybaord-key" data-keybaord-key="F5">
F5
</div>
<div class="keybaord-key" data-keybaord-key="F6">
F6
</div>
<div class="keybaord-key" data-keybaord-key="F7">
F7
</div>
<div class="keybaord-key" data-keybaord-key="F8">
F8
</div>
<div class="keybaord-key" data-keybaord-key="F9">
F9
</div>
<div class="keybaord-key" data-keybaord-key="F10">
F10
</div>
<div class="keybaord-key" data-keybaord-key="F11">
F11
</div>
<div class="keybaord-key" data-keybaord-key="F12">
F12
</div>
<div class="keybaord-key" data-keybaord-key="Insert">
ins
</div>
<div class="keybaord-key" data-keybaord-key="Delete">
del
</div>
</div>
<div class="keybaord-row keybaord-row-2">
<div class="keybaord-key" data-keybaord-key="\`">
\`
</div>
<div class="keybaord-key" data-keybaord-key="1">
1
</div>
<div class="keybaord-key" data-keybaord-key="2">
2
</div>
<div class="keybaord-key" data-keybaord-key="3">
3
</div>
<div class="keybaord-key" data-keybaord-key="4">
4
</div>
<div class="keybaord-key" data-keybaord-key="5">
5
</div>
<div class="keybaord-key" data-keybaord-key="6">
6
</div>
<div class="keybaord-key" data-keybaord-key="7">
7
</div>
<div class="keybaord-key" data-keybaord-key="8">
8
</div>
<div class="keybaord-key" data-keybaord-key="9">
9
</div>
<div class="keybaord-key" data-keybaord-key="0">
0
</div>
<div class="keybaord-key" data-keybaord-key="-">
-
</div>
<div class="keybaord-key" data-keybaord-key="=">
=
</div>
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
backspace
</div>
</div>
<div class="keybaord-row keybaord-row-3">
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
tab
</div>
<div class="keybaord-key" data-keybaord-key="q">
q
</div>
<div class="keybaord-key" data-keybaord-key="w">
w
</div>
<div class="keybaord-key" data-keybaord-key="e">
e
</div>
<div class="keybaord-key" data-keybaord-key="r">
r
</div>
<div class="keybaord-key" data-keybaord-key="t">
t
</div>
<div class="keybaord-key" data-keybaord-key="y">
y
</div>
<div class="keybaord-key" data-keybaord-key="u">
u
</div>
<div class="keybaord-key" data-keybaord-key="i">
i
</div>
<div class="keybaord-key" data-keybaord-key="o">
o
</div>
<div class="keybaord-key" data-keybaord-key="p">
p
</div>
<div class="keybaord-key" data-keybaord-key="[">
[
</div>
<div class="keybaord-key" data-keybaord-key="]">
]
</div>
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
enter
</div>
</div>
<div class="keybaord-row keybaord-row-4">
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
🍔
</div>
<div class="keybaord-key" data-keybaord-key="a">
a
</div>
<div class="keybaord-key" data-keybaord-key="s">
s
</div>
<div class="keybaord-key" data-keybaord-key="d">
d
</div>
<div class="keybaord-key" data-keybaord-key="f">
f
</div>
<div class="keybaord-key" data-keybaord-key="g">
g
</div>
<div class="keybaord-key" data-keybaord-key="h">
h
</div>
<div class="keybaord-key" data-keybaord-key="j">
j
</div>
<div class="keybaord-key" data-keybaord-key="k">
k
</div>
<div class="keybaord-key" data-keybaord-key="l">
l
</div>
<div class="keybaord-key" data-keybaord-key=";">
;
</div>
<div class="keybaord-key" data-keybaord-key="'">
'
</div>
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
\\
</div>
</div>
<div class="keybaord-row keybaord-row-5">
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
shift
</div>
<div class="keybaord-key" data-keybaord-key="\\">
\\
</div>
<div class="keybaord-key" data-keybaord-key="z">
z
</div>
<div class="keybaord-key" data-keybaord-key="x">
x
</div>
<div class="keybaord-key" data-keybaord-key="c">
c
</div>
<div class="keybaord-key" data-keybaord-key="v">
v
</div>
<div class="keybaord-key" data-keybaord-key="b">
b
</div>
<div class="keybaord-key" data-keybaord-key="n">
n
</div>
<div class="keybaord-key" data-keybaord-key="m">
m
</div>
<div class="keybaord-key" data-keybaord-key=",">
,
</div>
<div class="keybaord-key" data-keybaord-key=".">
.
</div>
<div class="keybaord-key" data-keybaord-key="/">
/
</div>
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
shift
</div>
</div>
<div class="keybaord-row keybaord-row-6">
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
ctrl
</div>
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
win
</div>
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
alt
</div>
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
space
</div>
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
altgr
</div>
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
menu
</div>
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
ctrl
</div>
</div>
<div class="keybaord-row">
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
🔉
</div>
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
🔊
</div>
<div class="keybaord-key" data-keybaord-key="Left">
⬅️
</div>
<div class="keybaord-key" data-keybaord-key="Down">
⬇️
</div>
<div class="keybaord-key" data-keybaord-key="Up">
⬆️
</div>
<div class="keybaord-key" data-keybaord-key="Right">
➡️
</div>
<div class="keybaord-key" data-keybaord-key="Page_Up">
PgUp
</div>
<div class="keybaord-key" data-keybaord-key="Page_Down">
PgDn
</div>
<div class="keybaord-key" data-keybaord-key="Home">
🏠
</div>
<div class="keybaord-key" data-keybaord-key="End">
End
</div>
</div>
</div>
`;
// ^ bugfix: the container used to end with "<div>" instead of "</div>",
// leaving keybaord-body unclosed (browsers auto-closed it silently)

function arraySample(array) {
return array[Math.floor(Math.random() * array.length)];
}

// POST one message to the server as an urlencoded form
function sendMessage(msg) {
return fetch(BASE_URL, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
},
body: "msg=" + encodeURIComponent(msg),
}).then(
(r) => r.text(), // so the response body shows up in network tab
(err) => consoleError(err)
);
}

const MODIFIER_ON_CLASS = "keybaord-modifier-on";
const KEY_DATASET = "data-keybaord-key";
const KEY_CLASS = "keybaord-key";

// currently-latched modifier keys (shift/ctrl/alt/meta); they stay
// active until the next non-modifier key consumes them
const modifiers = new Set();

function toggleModifier(button, key) {
button.classList.toggle(MODIFIER_ON_CLASS);
if (modifiers.has(key)) {
modifiers.delete(key);
} else {
modifiers.add(key);
}
}

// drain the latched modifiers into an "a+b+" prefix string,
// clearing both the set and the highlighted buttons
function popModifiers() {
let modifierString = "";

modifiers.forEach((mod) => {
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
.classList.remove(MODIFIER_ON_CLASS);

modifierString += mod + "+";
});

modifiers.clear();

return modifierString;
}

Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
const key = button.dataset.keybaordKey;

button.addEventListener("click", (ev) => {
switch (key) {
case "HAMBAGA":
sendMessage(arraySample(HAMBAGA));
break;

case "Shift_L":
case "Shift_R":
case "Control_L":
case "Control_R":
case "Meta_L":
case "Alt_L":
case "Alt_R":
toggleModifier(button, key);
break;

default: {
const keyWithModifiers = popModifiers() + key;

consoleLog(keyWithModifiers);
sendMessage("key " + keyWithModifiers)
.then(() => consoleLog(keyWithModifiers + " OK"));
}
}
});
});
}
// keybaord integration
(function () {
// mount the keyboard inside copyparty's message tab;
// mknod/clmod/ebi/toast are helpers provided by the host page
var o = mknod('div');
clmod(o, 'keybaord-container', 1);
ebi('op_msg').appendChild(o);
// inject the keyboard layout styles
o = mknod('style');
o.innerHTML = `
.keybaord-body {
display: flex;
flex-flow: column nowrap;
margin: .6em 0;
}
.keybaord-row {
display: flex;
}
.keybaord-key {
border: 1px solid rgba(128,128,128,0.2);
width: 41px;
height: 40px;
display: flex;
justify-content: center;
align-items: center;
}
.keybaord-key:active {
background-color: lightgrey;
}
.keybaord-key.keybaord-modifier-on {
background-color: lightblue;
}
.keybaord-key.keybaord-backspace {
width: 82px;
}
.keybaord-key.keybaord-tab {
width: 55px;
}
.keybaord-key.keybaord-enter {
width: 69px;
}
.keybaord-key.keybaord-capslock {
width: 80px;
}
.keybaord-key.keybaord-backslash {
width: 88px;
}
.keybaord-key.keybaord-lshift {
width: 65px;
}
.keybaord-key.keybaord-rshift {
width: 103px;
}
.keybaord-key.keybaord-lctrl {
width: 55px;
}
.keybaord-key.keybaord-super {
width: 55px;
}
.keybaord-key.keybaord-alt {
width: 55px;
}
.keybaord-key.keybaord-altgr {
width: 55px;
}
.keybaord-key.keybaord-what {
width: 55px;
}
.keybaord-key.keybaord-rctrl {
width: 55px;
}
.keybaord-key.keybaord-spacebar {
width: 302px;
}
`;
document.head.appendChild(o);
// build the keyboard, posting to the webroot; log/error go to toasts
initKeybaord('/', hambagas,
(msg) => { toast.inf(2, msg.toString()) },
(msg) => { toast.err(30, msg.toString()) });
})();
// live2d (dumb pointless meme)
// dependencies for this part are not tracked in git
// so delete this section if you wanna use this file
// (or supply your own l2d model and js)
(function () {
// inject the live2d stylesheet, overlay styles, and canvas container
var o = mknod('link');
o.setAttribute('rel', 'stylesheet');
o.setAttribute('href', "/bad-memes/pio.css");
document.head.appendChild(o);
o = mknod('style');
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
document.head.appendChild(o);
o = mknod('div');
clmod(o, 'pio-container', 1);
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
document.body.appendChild(o);
// load the three scripts in parallel; only after the last one
// arrives do we boot the model
var remaining = 3;
for (var a of ['pio', 'l2d', 'fireworks']) {
import_js(`/bad-memes/${a}.js`, function () {
// "remaining --> 1" parses as "(remaining--) > 1":
// all but the final callback bail out here
if (remaining --> 1)
return;
o = mknod('script');
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
document.body.appendChild(o);
});
}
})();

View File

@@ -9,7 +9,7 @@
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */ #ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */ #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#srch_dz, #srch_zd, /* the filesearch dropzone */ #srch_dz, #srch_zd, /* the filesearch dropzone */

31
contrib/rc/copyparty Normal file
View File

@@ -0,0 +1,31 @@
#!/bin/sh
#
# FreeBSD rc(8) init script: supervises copyparty through daemon(8)
#
# PROVIDE: copyparty
# REQUIRE: networking
# KEYWORD:
. /etc/rc.subr
name="copyparty"
rcvar="copyparty_enable"
# runs as an unprivileged user; create it before enabling the service
copyparty_user="copyparty"
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
pidfile="/var/run/copyparty/${name}.pid"
# daemon(8) flags: -P writes the supervisor pid, -r restarts on crash,
# -f detaches from the terminal
command="/usr/sbin/daemon"
command_args="-P ${pidfile} -r -f ${copyparty_command}"
stop_postcmd="copyparty_shutdown"
# terminate the supervising daemon(8) so it stops respawning copyparty
copyparty_shutdown()
{
    if [ -e "${pidfile}" ]; then
        echo "Stopping supervising daemon."
        kill -s TERM `cat ${pidfile}`
    fi
}
load_rc_config $name
: ${copyparty_enable:=no} # disabled by default; set copyparty_enable=YES in rc.conf
run_rc_command "$1"

View File

@@ -12,7 +12,6 @@
# change '/mnt::rw' to another location or permission-set # change '/mnt::rw' to another location or permission-set
# remove '-p 80,443,3923' to only listen on port 3923 # remove '-p 80,443,3923' to only listen on port 3923
# add '-i 127.0.0.1' to only allow local connections # add '-i 127.0.0.1' to only allow local connections
# add '--use-fpool' if uploading into nfs locations
# #
# with `Type=notify`, copyparty will signal systemd when it is ready to # with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty. # accept connections; correctly delaying units depending on copyparty.
@@ -20,11 +19,8 @@
# python disabling line-buffering, so messages are out-of-order: # python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png # https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
# #
# enable line-buffering for realtime logging (slight performance cost): # if you remove -q to enable logging, you may also want to remove the
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so: # following line to enable buffering (slightly better performance):
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
# but some systemd versions require this instead (higher performance cost):
# inside the [Service] block, add the following line:
# Environment=PYTHONUNBUFFERED=x # Environment=PYTHONUNBUFFERED=x
[Unit] [Unit]
@@ -33,8 +29,10 @@ Description=copyparty file server
[Service] [Service]
Type=notify Type=notify
SyslogIdentifier=copyparty SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw Environment=PYTHONUNBUFFERED=x
ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

View File

@@ -25,26 +25,34 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin" MACOS = platform.system() == "Darwin"
def get_unix_home(): def get_unixdir():
paths = [
(os.environ.get, "XDG_CONFIG_HOME"),
(os.path.expanduser, "~/.config"),
(os.environ.get, "TMPDIR"),
(os.environ.get, "TEMP"),
(os.environ.get, "TMP"),
(unicode, "/tmp"),
]
for chk in [os.listdir, os.mkdir]:
for pf, pa in paths:
try: try:
v = os.environ["XDG_CONFIG_HOME"] p = pf(pa)
if not v: # print(chk.__name__, p, pa)
raise Exception() if not p or p.startswith("~"):
ret = os.path.normpath(v) continue
os.listdir(ret)
return ret p = os.path.normpath(p)
chk(p)
p = os.path.join(p, "copyparty")
if not os.path.isdir(p):
os.mkdir(p)
return p
except: except:
pass pass
try: raise Exception("could not find a writable path for config")
v = os.path.expanduser("~/.config")
if v.startswith("~"):
raise Exception()
ret = os.path.normpath(v)
os.listdir(ret)
return ret
except:
return "/tmp"
class EnvParams(object): class EnvParams(object):
@@ -59,7 +67,7 @@ class EnvParams(object):
elif sys.platform == "darwin": elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty") self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else: else:
self.cfg = get_unix_home() + "/copyparty" self.cfg = get_unixdir()
self.cfg = self.cfg.replace("\\", "/") self.cfg = self.cfg.replace("\\", "/")
try: try:

View File

@@ -23,7 +23,7 @@ from textwrap import dedent
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re from .util import py_desc, align_tab, IMPLICATIONS, ansi_re, min_ex
from .authsrv import re_vol from .authsrv import re_vol
HAVE_SSL = True HAVE_SSL = True
@@ -186,6 +186,32 @@ def configure_ssl_ciphers(al):
sys.exit(0) sys.exit(0)
def args_from_cfg(cfg_path):
ret = []
skip = False
with open(cfg_path, "rb") as f:
for ln in [x.decode("utf-8").strip() for x in f]:
if not ln:
skip = False
continue
if ln.startswith("#"):
continue
if not ln.startswith("-"):
continue
if skip:
continue
try:
ret.extend(ln.split(" ", 1))
except:
ret.append(ln)
return ret
def sighandler(sig=None, frame=None): def sighandler(sig=None, frame=None):
msg = [""] * 5 msg = [""] * 5
for th in threading.enumerate(): for th in threading.enumerate():
@@ -196,6 +222,54 @@ def sighandler(sig=None, frame=None):
print("\n".join(msg)) print("\n".join(msg))
def disable_quickedit():
import ctypes
import atexit
from ctypes import wintypes
def ecb(ok, fun, args):
if not ok:
err = ctypes.get_last_error()
if err:
raise ctypes.WinError(err)
return args
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
k32.GetStdHandle.errcheck = ecb
k32.GetConsoleMode.errcheck = ecb
k32.SetConsoleMode.errcheck = ecb
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def cmode(out, mode=None):
h = k32.GetStdHandle(-11 if out else -10)
if mode:
return k32.SetConsoleMode(h, mode)
mode = wintypes.DWORD()
k32.GetConsoleMode(h, ctypes.byref(mode))
return mode.value
# disable quickedit
mode = orig_in = cmode(False)
quickedit = 0x40
extended = 0x80
mask = quickedit + extended
if mode & mask != extended:
atexit.register(cmode, False, orig_in)
cmode(False, mode & ~mask | extended)
# enable colors in case the os.system("rem") trick ever stops working
if VT100:
mode = orig_out = cmode(True)
if mode & 4 != 4:
atexit.register(cmode, True, orig_out)
cmode(True, mode | 4)
def run_argparse(argv, formatter): def run_argparse(argv, formatter):
ap = argparse.ArgumentParser( ap = argparse.ArgumentParser(
formatter_class=formatter, formatter_class=formatter,
@@ -276,6 +350,8 @@ def run_argparse(argv, formatter):
\033[0mdatabase, general: \033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags) \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t* \033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2* \033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso \033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
@@ -342,6 +418,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins") ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]") ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
ap2 = ap.add_argument_group('upload options') ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
@@ -350,11 +427,15 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload") ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without") ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead") ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d")
ap2 = ap.add_argument_group('network options') ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)") ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)") ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy") ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="response delay in seconds")
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
@@ -366,6 +447,7 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('opt-outs') ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations") ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations") ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname") ap2.add_argument("-nih", action="store_true", help="no info hostname")
@@ -397,23 +479,31 @@ def run_argparse(argv, formatter):
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching") ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2 = ap.add_argument_group('admin panel options') ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)") ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)") ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
ap2 = ap.add_argument_group('thumbnail options') ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails") ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails") ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res") ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails") ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs") ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds")
ap2 = ap.add_argument_group('general db options') ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database") ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
@@ -441,6 +531,8 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('ui options') ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2 = ap.add_argument_group('debug options') ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile") ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
@@ -483,7 +575,12 @@ def main(argv=None):
if HAVE_SSL: if HAVE_SSL:
ensure_cert() ensure_cert()
deprecated = [["-e2s", "-e2ds"]] for k, v in zip(argv[1:], argv[2:]):
if k == "-c":
supp = args_from_cfg(v)
argv.extend(supp)
deprecated = []
for dk, nk in deprecated: for dk, nk in deprecated:
try: try:
idx = argv.index(dk) idx = argv.index(dk)
@@ -506,6 +603,15 @@ def main(argv=None):
except AssertionError: except AssertionError:
al = run_argparse(argv, Dodge11874) al = run_argparse(argv, Dodge11874)
if WINDOWS and not al.keep_qem:
try:
disable_quickedit()
except:
print("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100:
al.wintitle = ""
nstrs = [] nstrs = []
anymod = False anymod = False
for ostr in al.v or []: for ostr in al.v or []:

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (1, 0, 14) VERSION = (1, 1, 11)
CODENAME = "sufficient" CODENAME = "opus"
BUILD_DT = (2021, 10, 30) BUILD_DT = (2022, 1, 14)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -546,6 +546,7 @@ class AuthSrv(object):
def _parse_config_file(self, fd, acct, daxs, mflags, mount): def _parse_config_file(self, fd, acct, daxs, mflags, mount):
# type: (any, str, dict[str, AXS], any, str) -> None # type: (any, str, dict[str, AXS], any, str) -> None
skip = False
vol_src = None vol_src = None
vol_dst = None vol_dst = None
self.line_ctr = 0 self.line_ctr = 0
@@ -555,6 +556,11 @@ class AuthSrv(object):
vol_src = None vol_src = None
vol_dst = None vol_dst = None
if skip:
if not ln:
skip = False
continue
if not ln or ln.startswith("#"): if not ln or ln.startswith("#"):
continue continue
@@ -562,6 +568,8 @@ class AuthSrv(object):
if ln.startswith("u "): if ln.startswith("u "):
u, p = ln[2:].split(":", 1) u, p = ln[2:].split(":", 1)
acct[u] = p acct[u] = p
elif ln.startswith("-"):
skip = True # argv
else: else:
vol_src = ln vol_src = ln
continue continue
@@ -613,7 +621,7 @@ class AuthSrv(object):
if uname == "": if uname == "":
uname = "*" uname = "*"
for un in uname.split(","): for un in uname.replace(",", " ").strip().split():
if "r" in lvl: if "r" in lvl:
axs.uread[un] = 1 axs.uread[un] = 1
@@ -918,6 +926,14 @@ class AuthSrv(object):
vol.flags["d2t"] = True vol.flags["d2t"] = True
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)} vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# d2ds drops all onboot scans for a volume
for grp, rm in [["d2ds", "e2ds"], ["d2ts", "e2ts"]]:
if not vol.flags.get(grp, False):
continue
vol.flags["d2ts"] = True
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# mt* needs e2t so drop those too # mt* needs e2t so drop those too
for grp, rm in [["e2t", "mt"]]: for grp, rm in [["e2t", "mt"]]:
if vol.flags.get(grp, False): if vol.flags.get(grp, False):

View File

@@ -2,7 +2,7 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
from ..util import fsenc, fsdec from ..util import fsenc, fsdec, SYMTIME
from . import path from . import path
@@ -55,5 +55,8 @@ def unlink(p):
return os.unlink(fsenc(p)) return os.unlink(fsenc(p))
def utime(p, times=None): def utime(p, times=None, follow_symlinks=True):
if SYMTIME:
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
else:
return os.utime(fsenc(p), times) return os.utime(fsenc(p), times)

View File

@@ -2,7 +2,7 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
from ..util import fsenc, fsdec from ..util import fsenc, fsdec, SYMTIME
def abspath(p): def abspath(p):
@@ -13,7 +13,10 @@ def exists(p):
return os.path.exists(fsenc(p)) return os.path.exists(fsenc(p))
def getmtime(p): def getmtime(p, follow_symlinks=True):
if not follow_symlinks and SYMTIME:
return os.lstat(fsenc(p)).st_mtime
else:
return os.path.getmtime(fsenc(p)) return os.path.getmtime(fsenc(p))

View File

@@ -62,6 +62,11 @@ class BrokerMp(object):
procs.pop() procs.pop()
def reload(self):
self.log("broker", "reloading")
for _, proc in enumerate(self.procs):
proc.q_pend.put([0, "reload", []])
def collector(self, proc): def collector(self, proc):
"""receive message from hub in other process""" """receive message from hub in other process"""
while True: while True:

View File

@@ -29,7 +29,7 @@ class MpWorker(object):
# we inherited signal_handler from parent, # we inherited signal_handler from parent,
# replace it with something harmless # replace it with something harmless
if not FAKE_MP: if not FAKE_MP:
for sig in [signal.SIGINT, signal.SIGTERM]: for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]:
signal.signal(sig, self.signal_handler) signal.signal(sig, self.signal_handler)
# starting to look like a good idea # starting to look like a good idea
@@ -69,6 +69,11 @@ class MpWorker(object):
sys.exit(0) sys.exit(0)
return return
elif dest == "reload":
self.logw("mpw.asrv reloading")
self.asrv.reload()
self.logw("mpw.asrv reloaded")
elif dest == "listen": elif dest == "listen":
self.httpsrv.listen(args[0], args[1]) self.httpsrv.listen(args[0], args[1])

View File

@@ -21,10 +21,13 @@ class BrokerThr(object):
# instantiate all services here (TODO: inheritance?) # instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self, None) self.httpsrv = HttpSrv(self, None)
self.reload = self.noop
def shutdown(self): def shutdown(self):
# self.log("broker", "shutting down") # self.log("broker", "shutting down")
self.httpsrv.shutdown() self.httpsrv.shutdown()
def noop(self):
pass pass
def put(self, want_retval, dest, *args): def put(self, want_retval, dest, *args):

View File

@@ -60,6 +60,7 @@ class HttpCli(object):
self.bufsz = 1024 * 32 self.bufsz = 1024 * 32
self.hint = None self.hint = None
self.trailing_slash = True self.trailing_slash = True
self.out_headerlist = []
self.out_headers = { self.out_headers = {
"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Origin": "*",
"Cache-Control": "no-store; max-age=0", "Cache-Control": "no-store; max-age=0",
@@ -91,6 +92,7 @@ class HttpCli(object):
tpl = self.conn.hsrv.j2[name] tpl = self.conn.hsrv.j2[name]
if ka: if ka:
ka["ts"] = self.conn.hsrv.cachebuster() ka["ts"] = self.conn.hsrv.cachebuster()
ka["svcname"] = self.args.doctitle
return tpl.render(**ka) return tpl.render(**ka)
return tpl return tpl
@@ -126,7 +128,8 @@ class HttpCli(object):
self.loud_reply(unicode(ex), status=ex.code, volsan=True) self.loud_reply(unicode(ex), status=ex.code, volsan=True)
return self.keepalive return self.keepalive
# time.sleep(0.4) if self.args.rsp_slp:
time.sleep(self.args.rsp_slp)
# normalize incoming headers to lowercase; # normalize incoming headers to lowercase;
# outgoing headers however are Correct-Case # outgoing headers however are Correct-Case
@@ -225,10 +228,10 @@ class HttpCli(object):
self.gvol = self.asrv.vfs.aget[self.uname] self.gvol = self.asrv.vfs.aget[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"): if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0] self.out_headerlist.append(("Set-Cookie", self.get_pwd_cookie(pwd)[0]))
ua = self.headers.get("user-agent", "") self.ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/") self.is_rclone = self.ua.startswith("rclone/")
if self.is_rclone: if self.is_rclone:
uparam["raw"] = False uparam["raw"] = False
uparam["dots"] = False uparam["dots"] = False
@@ -283,12 +286,19 @@ class HttpCli(object):
n = "604800" if cache == "i" else cache or "69" n = "604800" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n self.out_headers["Cache-Control"] = "max-age=" + n
def k304(self):
k304 = self.cookies.get("k304")
return k304 == "y" or ("; Trident/" in self.ua and not k304)
def send_headers(self, length, status=200, mime=None, headers=None): def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])] response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
if length is not None: if length is not None:
response.append("Content-Length: " + unicode(length)) response.append("Content-Length: " + unicode(length))
if status == 304 and self.k304():
self.keepalive = False
# close if unknown length, otherwise take client's preference # close if unknown length, otherwise take client's preference
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close")) response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
@@ -298,11 +308,11 @@ class HttpCli(object):
# default to utf8 html if no content-type is set # default to utf8 html if no content-type is set
if not mime: if not mime:
mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8") mime = self.out_headers.get("Content-Type", "text/html; charset=utf-8")
self.out_headers["Content-Type"] = mime self.out_headers["Content-Type"] = mime
for k, v in self.out_headers.items(): for k, v in list(self.out_headers.items()) + self.out_headerlist:
response.append("{}: {}".format(k, v)) response.append("{}: {}".format(k, v))
try: try:
@@ -419,12 +429,24 @@ class HttpCli(object):
return self.scanvol() return self.scanvol()
if not self.vpath: if not self.vpath:
if "reload" in self.uparam:
return self.handle_reload()
if "stack" in self.uparam: if "stack" in self.uparam:
return self.tx_stack() return self.tx_stack()
if "ups" in self.uparam: if "ups" in self.uparam:
return self.tx_ups() return self.tx_ups()
if "k304" in self.uparam:
return self.set_k304()
if "am_js" in self.uparam:
return self.set_am_js()
if "reset" in self.uparam:
return self.set_cfg_reset()
if "h" in self.uparam: if "h" in self.uparam:
return self.tx_mounts() return self.tx_mounts()
@@ -502,7 +524,7 @@ class HttpCli(object):
return self.handle_stash() return self.handle_stash()
if "save" in opt: if "save" in opt:
post_sz, _, _, path = self.dump_to_file() post_sz, _, _, _, path, _ = self.dump_to_file()
self.log("urlform: {} bytes, {}".format(post_sz, path)) self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt: elif "print" in opt:
reader, _ = self.get_body_reader() reader, _ = self.get_body_reader()
@@ -529,11 +551,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self): def get_body_reader(self):
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower() if "chunked" in self.headers.get("transfer-encoding", "").lower():
return read_socket_chunked(self.sr), -1
remains = int(self.headers.get("content-length", -1)) remains = int(self.headers.get("content-length", -1))
if chunked: if remains == -1:
return read_socket_chunked(self.sr), remains
elif remains == -1:
self.keepalive = False self.keepalive = False
return read_socket_unbounded(self.sr), remains return read_socket_unbounded(self.sr), remains
else: else:
@@ -587,8 +609,8 @@ class HttpCli(object):
alg = alg or "gz" # def.pk alg = alg or "gz" # def.pk
try: try:
# config-forced opts # config-forced opts
alg, lv = pk.split(",") alg, nlv = pk.split(",")
lv[alg] = int(lv) lv[alg] = int(nlv)
except: except:
pass pass
@@ -618,7 +640,7 @@ class HttpCli(object):
with ren_open(fn, *open_a, **params) as f: with ren_open(fn, *open_a, **params) as f:
f, fn = f["orz"] f, fn = f["orz"]
path = os.path.join(fdir, fn) path = os.path.join(fdir, fn)
post_sz, _, sha_b64 = hashcopy(reader, f) post_sz, sha_hex, sha_b64 = hashcopy(reader, f)
if lim: if lim:
lim.nup(self.ip) lim.nup(self.ip)
@@ -629,26 +651,46 @@ class HttpCli(object):
bos.unlink(path) bos.unlink(path)
raise raise
if not self.args.nw: if self.args.nw:
vfs, vrem = vfs.get_dbv(rem) return post_sz, sha_hex, sha_b64, remains, path, ""
vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put( self.conn.hsrv.broker.put(
False, False,
"up2k.hash_file", "up2k.hash_file",
vfs.realpath, vfs.realpath,
vfs.flags, vfs.flags,
vrem, rem,
fn, fn,
self.ip, self.ip,
time.time(), time.time(),
) )
return post_sz, sha_b64, remains, path if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey(
self.args.fk_salt,
path,
post_sz,
0 if ANYWIN else bos.stat(path).st_ino,
)[: vfs.flags["fk"]]
vpath = "/".join([x for x in [vfs.vpath, rem, fn] if x])
vpath = quotep(vpath)
url = "{}://{}/{}".format(
"https" if self.is_https else "http",
self.headers.get("host") or "{}:{}".format(*list(self.s.getsockname())),
vpath + vsuf,
)
return post_sz, sha_hex, sha_b64, remains, path, url
def handle_stash(self): def handle_stash(self):
post_sz, sha_b64, remains, path = self.dump_to_file() post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file()
spd = self._spd(post_sz) spd = self._spd(post_sz)
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path)) self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8")) m = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
self.reply(m.encode("utf-8"))
return True return True
def _spd(self, nbytes, add=True): def _spd(self, nbytes, add=True):
@@ -780,6 +822,10 @@ class HttpCli(object):
return True return True
def handle_search(self, body): def handle_search(self, body):
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot search")
vols = [] vols = []
seen = {} seen = {}
for vtop in self.rvol: for vtop in self.rvol:
@@ -791,7 +837,6 @@ class HttpCli(object):
seen[vfs] = True seen[vfs] = True
vols.append([vfs.vpath, vfs.realpath, vfs.flags]) vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx()
t0 = time.time() t0 = time.time()
if idx.p_end: if idx.p_end:
penalty = 0.7 penalty = 0.7
@@ -851,6 +896,7 @@ class HttpCli(object):
response = x.get() response = x.get()
chunksize, cstart, path, lastmod = response chunksize, cstart, path, lastmod = response
try:
if self.args.nw: if self.args.nw:
path = os.devnull path = os.devnull
@@ -877,12 +923,8 @@ class HttpCli(object):
post_sz, _, sha_b64 = hashcopy(reader, f) post_sz, _, sha_b64 = hashcopy(reader, f)
if sha_b64 != chash: if sha_b64 != chash:
raise Pebkac( m = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
400, raise Pebkac(400, m.format(post_sz, chash, sha_b64))
"your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}".format(
post_sz, chash, sha_b64
),
)
if len(cstart) > 1 and path != os.devnull: if len(cstart) > 1 and path != os.devnull:
self.log( self.log(
@@ -908,6 +950,9 @@ class HttpCli(object):
else: else:
with self.mutex: with self.mutex:
self.u2fh.put(path, f) self.u2fh.put(path, f)
finally:
x = self.conn.hsrv.broker.put(True, "up2k.release_chunk", ptop, wark, chash)
x.get() # block client until released
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash) x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
x = x.get() x = x.get()
@@ -954,15 +999,13 @@ class HttpCli(object):
def get_pwd_cookie(self, pwd): def get_pwd_cookie(self, pwd):
if pwd in self.asrv.iacct: if pwd in self.asrv.iacct:
msg = "login ok" msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365) dur = 60 * 60 * 24 * 365
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
else: else:
msg = "naw dude" msg = "naw dude"
pwd = "x" # nosec pwd = "x" # nosec
exp = "Fri, 15 Aug 1997 01:00:00 GMT" dur = None
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp) return [gencookie("cppwd", pwd, dur), msg]
return [ck, msg]
def handle_mkdir(self): def handle_mkdir(self):
new_dir = self.parser.require("name", 512) new_dir = self.parser.require("name", 512)
@@ -1070,7 +1113,7 @@ class HttpCli(object):
f, fname = f["orz"] f, fname = f["orz"]
abspath = os.path.join(fdir, fname) abspath = os.path.join(fdir, fname)
self.log("writing to {}".format(abspath)) self.log("writing to {}".format(abspath))
sz, sha512_hex, _ = hashcopy(p_data, f) sz, sha_hex, sha_b64 = hashcopy(p_data, f)
if sz == 0: if sz == 0:
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
@@ -1083,7 +1126,7 @@ class HttpCli(object):
bos.unlink(abspath) bos.unlink(abspath)
raise raise
files.append([sz, sha512_hex, p_file, fname, abspath]) files.append([sz, sha_hex, sha_b64, p_file, fname, abspath])
dbv, vrem = vfs.get_dbv(rem) dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put( self.conn.hsrv.broker.put(
False, False,
@@ -1135,7 +1178,7 @@ class HttpCli(object):
jmsg["error"] = errmsg jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn, ap in files: for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
vsuf = "" vsuf = ""
if self.can_read and "fk" in vfs.flags: if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey( vsuf = "?k=" + gen_filekey(
@@ -1146,22 +1189,30 @@ class HttpCli(object):
)[: vfs.flags["fk"]] )[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/") vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format( rel_url = quotep(vpath) + vsuf
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha_hex[:56],
sha_b64,
sz,
rel_url,
html_escape(ofn, crlf=True),
vsuf,
) )
# truncated SHA-512 prevents length extension attacks; # truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
jpart = { jpart = {
"url": "{}://{}/{}".format( "url": "{}://{}/{}".format(
"https" if self.is_https else "http", "https" if self.is_https else "http",
self.headers.get("host", "copyparty"), self.headers.get("host")
vpath + vsuf, or "{}:{}".format(*list(self.s.getsockname())),
rel_url,
), ),
"sha512": sha512[:56], "sha512": sha_hex[:56],
"sha_b64": sha_b64,
"sz": sz, "sz": sz,
"fn": lfn, "fn": lfn,
"fn_orig": ofn, "fn_orig": ofn,
"path": vpath + vsuf, "path": rel_url,
} }
jmsg["files"].append(jpart) jmsg["files"].append(jpart)
@@ -1337,6 +1388,9 @@ class HttpCli(object):
try: try:
fs_path = req_path + ext fs_path = req_path + ext
st = bos.stat(fs_path) st = bos.stat(fs_path)
if stat.S_ISDIR(st.st_mode):
continue
file_ts = max(file_ts, st.st_mtime) file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = [fs_path, st.st_size] editions[ext or "plain"] = [fs_path, st.st_size]
except: except:
@@ -1375,8 +1429,7 @@ class HttpCli(object):
if "gzip" not in supported_editions: if "gzip" not in supported_editions:
decompress = True decompress = True
else: else:
ua = self.headers.get("user-agent", "") if re.match(r"MSIE [4-6]\.", self.ua) and " SV1" not in self.ua:
if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua:
decompress = True decompress = True
if not decompress: if not decompress:
@@ -1461,12 +1514,15 @@ class HttpCli(object):
else: else:
self.permit_caching() self.permit_caching()
if "txt" in self.uparam:
mime = "text/plain; charset={}".format(self.uparam["txt"] or "utf-8")
elif "mime" in self.uparam:
mime = self.uparam.get("mime")
else:
mime = guess_mime(req_path)
self.out_headers["Accept-Ranges"] = "bytes" self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers( self.send_headers(length=upper - lower, status=status, mime=mime)
length=upper - lower,
status=status,
mime=guess_mime(req_path),
)
logmsg += unicode(status) + logtail logmsg += unicode(status) + logtail
@@ -1478,13 +1534,14 @@ class HttpCli(object):
ret = True ret = True
with open_func(*open_args) as f: with open_func(*open_args) as f:
if use_sendfile: sendfun = sendfile_kern if use_sendfile else sendfile_py
remains = sendfile_kern(lower, upper, f, self.s) remains = sendfun(
else: self.log, lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
remains = sendfile_py(lower, upper, f, self.s) )
if remains > 0: if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m" logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
self.keepalive = False
spd = self._spd((upper - lower) - remains) spd = self._spd((upper - lower) - remains)
if self.do_log: if self.do_log:
@@ -1686,10 +1743,28 @@ class HttpCli(object):
tagq=vs["tagq"], tagq=vs["tagq"],
mtpq=vs["mtpq"], mtpq=vs["mtpq"],
url_suf=suf, url_suf=suf,
k304=self.k304(),
) )
self.reply(html.encode("utf-8")) self.reply(html.encode("utf-8"))
return True return True
def set_k304(self):
ck = gencookie("k304", self.uparam["k304"], 60 * 60 * 24 * 365)
self.out_headerlist.append(("Set-Cookie", ck))
self.redirect("", "?h#cc")
def set_am_js(self):
v = "n" if self.uparam["am_js"] == "n" else "y"
ck = gencookie("js", v, 60 * 60 * 24 * 365)
self.out_headerlist.append(("Set-Cookie", ck))
self.reply(b"promoted\n")
def set_cfg_reset(self):
for k in ("k304", "js", "cppwd"):
self.out_headerlist.append(("Set-Cookie", gencookie(k, "x", None)))
self.redirect("", "?h#cc")
def tx_404(self, is_403=False): def tx_404(self, is_403=False):
if self.args.vague_403: if self.args.vague_403:
m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>' m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
@@ -1711,7 +1786,7 @@ class HttpCli(object):
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True) vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath]] args = [self.asrv.vfs.all_vols, [vn.vpath], False]
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args) x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
x = x.get() x = x.get()
@@ -1721,6 +1796,20 @@ class HttpCli(object):
raise Pebkac(500, x) raise Pebkac(500, x)
def handle_reload(self):
act = self.uparam.get("reload")
if act != "cfg":
raise Pebkac(400, "only config files ('cfg') can be reloaded rn")
if not [x for x in self.wvol if x in self.rvol]:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_reload:
raise Pebkac(403, "the reload feature is disabled in server config")
x = self.conn.hsrv.broker.put(True, "reload")
return self.redirect("", "?h", x.get(), "return to", False)
def tx_stack(self): def tx_stack(self):
if not [x for x in self.wvol if x in self.rvol]: if not [x for x in self.wvol if x in self.rvol]:
raise Pebkac(403, "not allowed for user " + self.uname) raise Pebkac(403, "not allowed for user " + self.uname)
@@ -1792,13 +1881,16 @@ class HttpCli(object):
if not self.args.unpost: if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config") raise Pebkac(400, "the unpost feature is disabled in server config")
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
filt = self.uparam.get("filter") filt = self.uparam.get("filter")
lm = "ups [{}]".format(filt) lm = "ups [{}]".format(filt)
self.log(lm) self.log(lm)
ret = [] ret = []
t0 = time.time() t0 = time.time()
idx = self.conn.get_u2idx()
lim = time.time() - self.args.unpost lim = time.time() - self.args.unpost
for vol in self.asrv.vfs.all_vols.values(): for vol in self.asrv.vfs.all_vols.values():
cur = idx.get_cur(vol.realpath) cur = idx.get_cur(vol.realpath)
@@ -1862,7 +1954,7 @@ class HttpCli(object):
arg = self.uparam["ls"] arg = self.uparam["ls"]
if arg in ["v", "t", "txt"]: if arg in ["v", "t", "txt"]:
try: try:
biggest = max(ls["files"], key=itemgetter("sz"))["sz"] biggest = max(ls["files"] + ls["dirs"], key=itemgetter("sz"))["sz"]
except: except:
biggest = 0 biggest = 0
@@ -1892,6 +1984,13 @@ class HttpCli(object):
fmt = "{{}} {{:{},}} {{}}" fmt = "{{}} {{:{},}} {{}}"
nfmt = "{:,}" nfmt = "{:,}"
for x in dirs:
n = x["name"] + "/"
if arg == "v":
n = "\033[94m" + n
x["name"] = n
fmt = fmt.format(len(nfmt.format(biggest))) fmt = fmt.format(len(nfmt.format(biggest)))
ret = [ ret = [
"# {}: {}".format(x, ls[x]) "# {}: {}".format(x, ls[x])
@@ -1904,14 +2003,14 @@ class HttpCli(object):
for x in y for x in y
] ]
ret = "\n".join(ret) ret = "\n".join(ret)
mime = "text/plain; encoding=utf-8" mime = "text/plain; charset=utf-8"
else: else:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y] [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = json.dumps(ls) ret = json.dumps(ls)
mime = "application/json" mime = "application/json"
self.reply(ret.encode("utf-8", "replace"), mime=mime) self.reply(ret.encode("utf-8", "replace") + b"\n", mime=mime)
return True return True
def tx_browser(self): def tx_browser(self):
@@ -2030,6 +2129,7 @@ class HttpCli(object):
url_suf = self.urlq({}, []) url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
is_js = self.cookies.get("js") == "y"
tpl = "browser" tpl = "browser"
if "b" in self.uparam: if "b" in self.uparam:
@@ -2058,6 +2158,7 @@ class HttpCli(object):
"taglist": [], "taglist": [],
"srvinf": srv_info, "srvinf": srv_info,
"acct": self.uname, "acct": self.uname,
"idx": ("e2d" in vn.flags),
"perms": perms, "perms": perms,
"logues": logues, "logues": logues,
"readme": readme, "readme": readme,
@@ -2066,12 +2167,14 @@ class HttpCli(object):
"vdir": quotep(self.vpath), "vdir": quotep(self.vpath),
"vpnodes": vpnodes, "vpnodes": vpnodes,
"files": [], "files": [],
"ls0": None,
"acct": self.uname, "acct": self.uname,
"perms": json.dumps(perms), "perms": json.dumps(perms),
"taglist": [], "taglist": [],
"def_hcols": [], "def_hcols": [],
"have_up2k_idx": ("e2d" in vn.flags), "have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags), "have_tags_idx": ("e2t" in vn.flags),
"have_acode": (not self.args.no_acode),
"have_mv": (not self.args.no_mv), "have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del), "have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip), "have_zip": (not self.args.no_zip),
@@ -2145,7 +2248,7 @@ class HttpCli(object):
for fn in vfs_ls: for fn in vfs_ls:
base = "" base = ""
href = fn href = fn
if not is_ls and not self.trailing_slash and vpath: if not is_ls and not is_js and not self.trailing_slash and vpath:
base = "/" + vpath + "/" base = "/" + vpath + "/"
href = base + fn href = base + fn
@@ -2267,14 +2370,36 @@ class HttpCli(object):
ls_ret["taglist"] = taglist ls_ret["taglist"] = taglist
return self.tx_ls(ls_ret) return self.tx_ls(ls_ret)
doc = self.uparam.get("doc") if self.can_read else None
if doc:
doc = unquotep(doc.replace("+", " "))
j2a["docname"] = doc
if next((x for x in files if x["name"] == doc), None):
with open(os.path.join(abspath, doc), "rb") as f:
doc = f.read().decode("utf-8", "replace")
else:
self.log("doc 404: [{}]".format(doc), c=6)
doc = "( textfile not found )"
j2a["doc"] = doc
if not self.conn.hsrv.prism:
j2a["no_prism"] = True
for d in dirs: for d in dirs:
d["name"] += "/" d["name"] += "/"
dirs.sort(key=itemgetter("name")) dirs.sort(key=itemgetter("name"))
if is_js:
j2a["ls0"] = {"dirs": dirs, "files": files, "taglist": taglist}
j2a["files"] = []
else:
j2a["files"] = dirs + files j2a["files"] = dirs + files
j2a["logues"] = logues j2a["logues"] = logues
j2a["taglist"] = taglist j2a["taglist"] = taglist
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")
if "mth" in vn.flags: if "mth" in vn.flags:
j2a["def_hcols"] = vn.flags["mth"].split(",") j2a["def_hcols"] = vn.flags["mth"].split(",")

View File

@@ -39,7 +39,7 @@ class HttpConn(object):
self.u2fh = hsrv.u2fh self.u2fh = hsrv.u2fh
enth = HAVE_PIL and not self.args.no_thumb enth = HAVE_PIL and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv.broker) if enth else None self.thumbcli = ThumbCli(hsrv) if enth else None
self.ico = Ico(self.args) self.ico = Ico(self.args)
self.t0 = time.time() self.t0 = time.time()

View File

@@ -76,6 +76,7 @@ class HttpSrv(object):
x: env.get_template(x + ".html") x: env.get_template(x + ".html")
for x in ["splash", "browser", "browser2", "msg", "md", "mde"] for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
} }
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
cert_path = os.path.join(E.cfg, "cert.pem") cert_path = os.path.join(E.cfg, "cert.pem")
if bos.path.exists(cert_path): if bos.path.exists(cert_path):

View File

@@ -8,7 +8,7 @@ import shutil
import subprocess as sp import subprocess as sp
from .__init__ import PY2, WINDOWS, unicode from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY
from .bos import bos from .bos import bos
@@ -73,7 +73,7 @@ class MParser(object):
raise Exception() raise Exception()
def ffprobe(abspath): def ffprobe(abspath, timeout=10):
cmd = [ cmd = [
b"ffprobe", b"ffprobe",
b"-hide_banner", b"-hide_banner",
@@ -82,10 +82,8 @@ def ffprobe(abspath):
b"--", b"--",
fsenc(abspath), fsenc(abspath),
] ]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) rc = runcmd(cmd, timeout=timeout)
r = p.communicate() return parse_ffprobe(rc[1])
txt = r[0].decode("utf-8", "replace")
return parse_ffprobe(txt)
def parse_ffprobe(txt): def parse_ffprobe(txt):
@@ -420,7 +418,8 @@ class MTag(object):
try: try:
md = mutagen.File(fsenc(abspath), easy=True) md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length if not md.info.length and not md.info.codec:
raise Exception()
except Exception as ex: except Exception as ex:
return self.get_ffprobe(abspath) if self.can_ffprobe else {} return self.get_ffprobe(abspath) if self.can_ffprobe else {}

View File

@@ -18,6 +18,7 @@ from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
class SvcHub(object): class SvcHub(object):
@@ -36,7 +37,9 @@ class SvcHub(object):
self.argv = argv self.argv = argv
self.logf = None self.logf = None
self.stop_req = False self.stop_req = False
self.reload_req = False
self.stopping = False self.stopping = False
self.reloading = False
self.stop_cond = threading.Condition() self.stop_cond = threading.Condition()
self.retcode = 0 self.retcode = 0
self.httpsrv_up = 0 self.httpsrv_up = 0
@@ -54,8 +57,10 @@ class SvcHub(object):
if args.log_thrs: if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0) start_log_thrs(self.log, args.log_thrs, 0)
if not ANYWIN and not args.use_fpool: if not args.use_fpool and args.j != 1:
args.no_fpool = True args.no_fpool = True
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", m.format(args.j))
if not args.no_fpool and args.j != 1: if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior" m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
@@ -88,6 +93,18 @@ class SvcHub(object):
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3 "thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
) )
if not args.no_acode and args.no_thumb:
msg = "setting --no-acode because --no-thumb (sorry)"
self.log("thumb", msg, c=6)
args.no_acode = True
if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE):
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
self.log("thumb", msg, c=6)
args.no_acode = True
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
@@ -182,7 +199,11 @@ class SvcHub(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
for sig in [signal.SIGINT, signal.SIGTERM]: sigs = [signal.SIGINT, signal.SIGTERM]
if not ANYWIN:
sigs.append(signal.SIGUSR1)
for sig in sigs:
signal.signal(sig, self.signal_handler) signal.signal(sig, self.signal_handler)
# macos hangs after shutdown on sigterm with while-sleep, # macos hangs after shutdown on sigterm with while-sleep,
@@ -206,18 +227,45 @@ class SvcHub(object):
else: else:
self.stop_thr() self.stop_thr()
def reload(self):
if self.reloading:
return "cannot reload; already in progress"
self.reloading = True
t = threading.Thread(target=self._reload)
t.daemon = True
t.start()
return "reload initiated"
def _reload(self):
self.log("root", "reload scheduled")
with self.up2k.mutex:
self.asrv.reload()
self.up2k.reload()
self.broker.reload()
self.reloading = False
def stop_thr(self): def stop_thr(self):
while not self.stop_req: while not self.stop_req:
with self.stop_cond: with self.stop_cond:
self.stop_cond.wait(9001) self.stop_cond.wait(9001)
if self.reload_req:
self.reload_req = False
self.reload()
self.shutdown() self.shutdown()
def signal_handler(self, sig, frame): def signal_handler(self, sig, frame):
if self.stopping: if self.stopping:
return return
if sig == signal.SIGUSR1:
self.reload_req = True
else:
self.stop_req = True self.stop_req = True
with self.stop_cond: with self.stop_cond:
self.stop_cond.notify_all() self.stop_cond.notify_all()
@@ -254,6 +302,10 @@ class SvcHub(object):
print("nailed it", end="") print("nailed it", end="")
ret = self.retcode ret = self.retcode
finally: finally:
if self.args.wintitle:
print("\033]0;\033\\", file=sys.stderr, end="")
sys.stderr.flush()
print("\033[0m") print("\033[0m")
if self.logf: if self.logf:
self.logf.close() self.logf.close()
@@ -349,7 +401,6 @@ class SvcHub(object):
def check_mp_enable(self): def check_mp_enable(self):
if self.args.j == 1: if self.args.j == 1:
self.log("svchub", "multiprocessing disabled by argument -j 1")
return False return False
if mp.cpu_count() <= 1: if mp.cpu_count() <= 1:

View File

@@ -2,9 +2,10 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import re
import sys
import socket import socket
from .__init__ import MACOS, ANYWIN from .__init__ import MACOS, ANYWIN, unicode
from .util import chkcmd from .util import chkcmd
@@ -54,6 +55,8 @@ class TcpSrv(object):
eps[x] = "external" eps[x] = "external"
msgs = [] msgs = []
title_tab = {}
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)" m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]): for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p): for port in sorted(self.args.p):
@@ -62,11 +65,39 @@ class TcpSrv(object):
msgs.append(m.format(ip, port, desc)) msgs.append(m.format(ip, port, desc))
if not self.args.wintitle:
continue
if port in [80, 443]:
ep = ip
else:
ep = "{}:{}".format(ip, port)
hits = []
if "pub" in title_vars and "external" in unicode(desc):
hits.append(("pub", ep))
if "pub" in title_vars or "all" in title_vars:
hits.append(("all", ep))
for var in title_vars:
if var.startswith("ip-") and ep.startswith(var[3:]):
hits.append((var, ep))
for tk, tv in hits:
try:
title_tab[tk][tv] = 1
except:
title_tab[tk] = {tv: 1}
if msgs: if msgs:
msgs[-1] += "\n" msgs[-1] += "\n"
for m in msgs: for m in msgs:
self.log("tcpsrv", m) self.log("tcpsrv", m)
if self.args.wintitle:
self._set_wintitle(title_tab)
def _listen(self, ip, port): def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -232,3 +263,26 @@ class TcpSrv(object):
eps[default_route] = desc eps[default_route] = desc
return eps return eps
def _set_wintitle(self, vars):
vars["all"] = vars.get("all", {"Local-Only": 1})
vars["pub"] = vars.get("pub", vars["all"])
vars2 = {}
for k, eps in vars.items():
vars2[k] = {
ep: 1
for ep in eps.keys()
if ":" not in ep or ep.split(":")[0] not in eps
}
title = ""
vars = vars2
for p in self.args.wintitle.split(" "):
if p.startswith("$"):
p = " and ".join(sorted(vars.get(p[1:], {"(None)": 1}).keys()))
title += "{} ".format(p)
print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
sys.stderr.flush()

View File

@@ -4,28 +4,44 @@ from __future__ import print_function, unicode_literals
import os import os
from .util import Cooldown from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
from .bos import bos from .bos import bos
class ThumbCli(object): class ThumbCli(object):
def __init__(self, broker): def __init__(self, hsrv):
self.broker = broker self.broker = hsrv.broker
self.args = broker.args self.log_func = hsrv.log
self.asrv = broker.asrv self.args = hsrv.args
self.asrv = hsrv.asrv
# cache on both sides for less broker spam # cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke) self.cooldown = Cooldown(self.args.th_poke)
def log(self, msg, c=0):
self.log_func("thumbcli", msg, c)
def get(self, ptop, rem, mtime, fmt): def get(self, ptop, rem, mtime, fmt):
ext = rem.rsplit(".")[-1].lower() ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE: if ext not in THUMBABLE:
return None return None
is_vid = ext in FMT_FF is_vid = ext in FMT_FFV
if is_vid and self.args.no_vthumb: if is_vid and self.args.no_vthumb:
return None return None
want_opus = fmt in ("opus", "caf")
is_au = ext in FMT_FFA
if is_au:
if want_opus:
if self.args.no_acode:
return None
else:
if self.args.no_athumb:
return None
elif want_opus:
return None
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]: if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem) return os.path.join(ptop, rem)
@@ -33,10 +49,14 @@ class ThumbCli(object):
fmt = "w" fmt = "w"
if fmt == "w": if fmt == "w":
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg): if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
fmt = "j" fmt = "j"
histpath = self.asrv.vfs.histtab[ptop] histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt) tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None ret = None
try: try:
@@ -53,6 +73,11 @@ class ThumbCli(object):
if self.cooldown.poke(tdir): if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir) self.broker.put(False, "thumbsrv.poke", tdir)
if want_opus:
# audio files expire individually
if self.cooldown.poke(tpath):
self.broker.put(False, "thumbsrv.poke", tpath)
return ret return ret
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt) x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)

View File

@@ -10,7 +10,7 @@ import threading
import subprocess as sp import subprocess as sp
from .__init__ import PY2, unicode from .__init__ import PY2, unicode
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
from .bos import bos from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
@@ -50,7 +50,8 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats # ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm" FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
if HAVE_HEIF: if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics" FMT_PIL += " heif heifs heic heics"
@@ -58,7 +59,9 @@ if HAVE_HEIF:
if HAVE_AVIF: if HAVE_AVIF:
FMT_PIL += " avif avifs" FMT_PIL += " avif avifs"
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]] FMT_PIL, FMT_FFV, FMT_FFA = [
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
]
THUMBABLE = {} THUMBABLE = {}
@@ -67,7 +70,8 @@ if HAVE_PIL:
THUMBABLE.update(FMT_PIL) THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE: if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF) THUMBABLE.update(FMT_FFV)
THUMBABLE.update(FMT_FFA)
def thumb_path(histpath, rem, mtime, fmt): def thumb_path(histpath, rem, mtime, fmt):
@@ -86,9 +90,13 @@ def thumb_path(histpath, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest() h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24] fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/th/{}/{}.{:x}.{}".format( if fmt in ("opus", "caf"):
histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg" cat = "ac"
) else:
fmt = "webp" if fmt == "w" else "jpg"
cat = "th"
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
class ThumbSrv(object): class ThumbSrv(object):
@@ -115,7 +123,8 @@ class ThumbSrv(object):
t.daemon = True t.daemon = True
t.start() t.start()
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE): want_ff = not self.args.no_vthumb or not self.args.no_athumb
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = [] missing = []
if not HAVE_FFMPEG: if not HAVE_FFMPEG:
missing.append("FFmpeg") missing.append("FFmpeg")
@@ -123,7 +132,7 @@ class ThumbSrv(object):
if not HAVE_FFPROBE: if not HAVE_FFPROBE:
missing.append("FFprobe") missing.append("FFprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: " msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing) msg += ", ".join(missing)
self.log(msg, c=3) self.log(msg, c=3)
@@ -145,7 +154,11 @@ class ThumbSrv(object):
return not self.nthr return not self.nthr
def get(self, ptop, rem, mtime, fmt): def get(self, ptop, rem, mtime, fmt):
histpath = self.asrv.vfs.histtab[ptop] histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt) tpath = thumb_path(histpath, rem, mtime, fmt)
abspath = os.path.join(ptop, rem) abspath = os.path.join(ptop, rem)
cond = threading.Condition(self.mutex) cond = threading.Condition(self.mutex)
@@ -181,6 +194,7 @@ class ThumbSrv(object):
try: try:
st = bos.stat(tpath) st = bos.stat(tpath)
if st.st_size: if st.st_size:
self.poke(tpath)
return tpath return tpath
except: except:
pass pass
@@ -199,8 +213,13 @@ class ThumbSrv(object):
if not bos.path.exists(tpath): if not bos.path.exists(tpath):
if ext in FMT_PIL: if ext in FMT_PIL:
fun = self.conv_pil fun = self.conv_pil
elif ext in FMT_FF: elif ext in FMT_FFV:
fun = self.conv_ffmpeg fun = self.conv_ffmpeg
elif ext in FMT_FFA:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
else:
fun = self.conv_spec
if fun: if fun:
try: try:
@@ -326,25 +345,116 @@ class ThumbSrv(object):
] ]
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
# self.log((b" ".join(cmd)).decode("utf-8")) self._run_ff(cmd)
ret, sout, serr = runcmd(cmd) def _run_ff(self, cmd):
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt)
if ret != 0: if ret != 0:
m = "FFmpeg failed (probably a corrupt video file):\n" m = "FFmpeg failed (probably a corrupt video file):\n"
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")]) m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30") self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1])) raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath):
ret, _ = ffprobe(abspath)
if "ac" not in ret:
raise Exception("not audio")
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
if self.args.th_ff_swr:
fco = ":filter_size=128:cutoff=0.877"
else:
fco = ":resampler=soxr"
fc = fc.format(fco)
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-filter_complex", fc.encode("utf-8"),
b"-map", b"[o]"
]
# fmt: on
if tpath.endswith(".jpg"):
cmd += [
b"-q:v",
b"6", # default=??
]
else:
cmd += [
b"-q:v",
b"50", # default=75
b"-compression_level:v",
b"6", # default=4, 0=fast, 6=max
]
cmd += [fsenc(tpath)]
self._run_ff(cmd)
def conv_opus(self, abspath, tpath):
if self.args.no_acode:
raise Exception("disabled in server config")
ret, _ = ffprobe(abspath)
if "ac" not in ret:
raise Exception("not audio")
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd)
if want_caf:
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath if src_opus else tmp_opus),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"copy",
b"-f", b"caf",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
def poke(self, tdir): def poke(self, tdir):
if not self.poke_cd.poke(tdir): if not self.poke_cd.poke(tdir):
return return
ts = int(time.time()) ts = int(time.time())
try: try:
p1 = os.path.dirname(tdir) for _ in range(4):
p2 = os.path.dirname(p1) bos.utime(tdir, (ts, ts))
for dp in [tdir, p1, p2]: tdir = os.path.dirname(tdir)
bos.utime(dp, (ts, ts))
except: except:
pass pass
@@ -364,25 +474,36 @@ class ThumbSrv(object):
self.log("\033[Jcln ok; rm {} dirs".format(ndirs)) self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
def clean(self, histpath): def clean(self, histpath):
thumbpath = os.path.join(histpath, "th") ret = 0
for cat in ["th", "ac"]:
ret += self._clean(histpath, cat, None)
return ret
def _clean(self, histpath, cat, thumbpath):
if not thumbpath:
thumbpath = os.path.join(histpath, cat)
# self.log("cln {}".format(thumbpath)) # self.log("cln {}".format(thumbpath))
maxage = self.args.th_maxage exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
maxage = getattr(self.args, cat + "_maxage")
now = time.time() now = time.time()
prev_b64 = None prev_b64 = None
prev_fp = None prev_fp = None
try: try:
ents = bos.listdir(thumbpath) ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
ents = sorted(list(ents))
except: except:
return 0 return 0
ndirs = 0 ndirs = 0
for f in sorted(ents): for f, inf in ents:
fp = os.path.join(thumbpath, f) fp = os.path.join(thumbpath, f)
cmp = fp.lower().replace("\\", "/") cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder) # "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24: if len(f) <= 3 or len(f) == 24:
age = now - bos.path.getmtime(fp) age = now - inf.st_mtime
if age > maxage: if age > maxage:
with self.mutex: with self.mutex:
safe = True safe = True
@@ -396,16 +517,15 @@ class ThumbSrv(object):
self.log("rm -rf [{}]".format(fp)) self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True) shutil.rmtree(fp, ignore_errors=True)
else: else:
ndirs += self.clean(fp) self._clean(histpath, cat, fp)
continue continue
# thumb file # thumb file
try: try:
b64, ts, ext = f.split(".") b64, ts, ext = f.split(".")
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]: if len(b64) != 24 or len(ts) != 8 or ext not in exts:
raise Exception() raise Exception()
ts = int(ts, 16)
except: except:
if f != "dir.txt": if f != "dir.txt":
self.log("foreign file in thumbs dir: [{}]".format(fp), 1) self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
@@ -416,6 +536,10 @@ class ThumbSrv(object):
self.log("rm replaced [{}]".format(fp)) self.log("rm replaced [{}]".format(fp))
bos.unlink(prev_fp) bos.unlink(prev_fp)
if cat != "th" and inf.st_mtime + maxage < now:
self.log("rm expired [{}]".format(fp))
bos.unlink(fp)
prev_b64 = b64 prev_b64 = b64
prev_fp = fp prev_fp = fp

View File

@@ -67,7 +67,11 @@ class U2idx(object):
if cur: if cur:
return cur return cur
histpath = self.asrv.vfs.histtab[ptop] histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
db_path = os.path.join(histpath, "up2k.db") db_path = os.path.join(histpath, "up2k.db")
if not bos.path.exists(db_path): if not bos.path.exists(db_path):
return None return None
@@ -113,7 +117,16 @@ class U2idx(object):
if ok: if ok:
continue continue
if uq.startswith('"'):
v, uq = uq[1:].split('"', 1)
while v.endswith("\\"):
v2, uq = uq.split('"', 1)
v = v[:-1] + '"' + v2
uq = uq.strip()
else:
v, uq = (uq + " ").split(" ", 1) v, uq = (uq + " ").split(" ", 1)
v = v.replace('\\"', '"')
if is_key: if is_key:
is_key = False is_key = False

View File

@@ -21,6 +21,7 @@ from .util import (
Pebkac, Pebkac,
Queue, Queue,
ProgressPrinter, ProgressPrinter,
SYMTIME,
fsdec, fsdec,
fsenc, fsenc,
absreal, absreal,
@@ -68,10 +69,12 @@ class Up2k(object):
self.tagq = Queue() self.tagq = Queue()
self.n_hashq = 0 self.n_hashq = 0
self.n_tagq = 0 self.n_tagq = 0
self.gid = 0
self.volstate = {} self.volstate = {}
self.need_rescan = {} self.need_rescan = {}
self.dupesched = {} self.dupesched = {}
self.registry = {} self.registry = {}
self.droppable = {}
self.entags = {} self.entags = {}
self.flags = {} self.flags = {}
self.cur = {} self.cur = {}
@@ -114,15 +117,21 @@ class Up2k(object):
t.daemon = True t.daemon = True
t.start() t.start()
def reload(self):
self.gid += 1
self.log("reload #{} initiated".format(self.gid))
all_vols = self.asrv.vfs.all_vols
self.rescan(all_vols, list(all_vols.keys()), True)
def deferred_init(self): def deferred_init(self):
all_vols = self.asrv.vfs.all_vols all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols) have_e2d = self.init_indexes(all_vols)
if have_e2d:
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot") thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if have_e2d:
thr = threading.Thread(target=self._hasher, name="up2k-hasher") thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -168,15 +177,15 @@ class Up2k(object):
} }
return json.dumps(ret, indent=4) return json.dumps(ret, indent=4)
def rescan(self, all_vols, scan_vols): def rescan(self, all_vols, scan_vols, wait):
if hasattr(self, "pp"): if not wait and hasattr(self, "pp"):
return "cannot initiate; scan is already in progress" return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols) args = (all_vols, scan_vols)
t = threading.Thread( t = threading.Thread(
target=self.init_indexes, target=self.init_indexes,
args=args, args=args,
name="up2k-rescan-{}".format(scan_vols[0]), name="up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"),
) )
t.daemon = True t.daemon = True
t.start() t.start()
@@ -196,6 +205,10 @@ class Up2k(object):
if now < cooldown: if now < cooldown:
continue continue
if hasattr(self, "pp"):
cooldown = now + 5
continue
timeout = now + 9001 timeout = now + 9001
with self.mutex: with self.mutex:
for vp, vol in sorted(self.asrv.vfs.all_vols.items()): for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
@@ -217,7 +230,7 @@ class Up2k(object):
if vols: if vols:
cooldown = now + 10 cooldown = now + 10
err = self.rescan(self.asrv.vfs.all_vols, vols) err = self.rescan(self.asrv.vfs.all_vols, vols, False)
if err: if err:
for v in vols: for v in vols:
self.need_rescan[v] = True self.need_rescan[v] = True
@@ -284,6 +297,7 @@ class Up2k(object):
def _vis_reg_progress(self, reg): def _vis_reg_progress(self, reg):
ret = [] ret = []
for _, job in reg.items(): for _, job in reg.items():
if job["need"]:
ret.append(self._vis_job_progress(job)) ret.append(self._vis_job_progress(job))
return ret return ret
@@ -299,6 +313,16 @@ class Up2k(object):
return True, ret return True, ret
def init_indexes(self, all_vols, scan_vols=None): def init_indexes(self, all_vols, scan_vols=None):
gid = self.gid
while hasattr(self, "pp") and gid == self.gid:
time.sleep(0.1)
if gid != self.gid:
return
if gid:
self.log("reload #{} running".format(self.gid))
self.pp = ProgressPrinter() self.pp = ProgressPrinter()
vols = all_vols.values() vols = all_vols.values()
t0 = time.time() t0 = time.time()
@@ -429,7 +453,11 @@ class Up2k(object):
return have_e2d return have_e2d
def register_vpath(self, ptop, flags): def register_vpath(self, ptop, flags):
histpath = self.asrv.vfs.histtab[ptop] histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
db_path = os.path.join(histpath, "up2k.db") db_path = os.path.join(histpath, "up2k.db")
if ptop in self.registry: if ptop in self.registry:
try: try:
@@ -458,26 +486,41 @@ class Up2k(object):
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35") self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
reg = {} reg = {}
drp = None
path = os.path.join(histpath, "up2k.snap") path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and bos.path.exists(path): if bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f: with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8") j = f.read().decode("utf-8")
reg2 = json.loads(j) reg2 = json.loads(j)
try:
drp = reg2["droppable"]
reg2 = reg2["registry"]
except:
pass
for k, job in reg2.items(): for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"]) path = os.path.join(job["ptop"], job["prel"], job["name"])
if bos.path.exists(path): if bos.path.exists(path):
reg[k] = job reg[k] = job
job["poke"] = time.time() job["poke"] = time.time()
job["busy"] = {}
else: else:
self.log("ign deleted file in snap: [{}]".format(path)) self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys())) if drp is None:
drp = [k for k, v in reg.items() if not v.get("need", [])]
else:
drp = [x for x in drp if x in reg]
m = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
m = [m] + self._vis_reg_progress(reg) m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m)) self.log("\n".join(m))
self.flags[ptop] = flags self.flags[ptop] = flags
self.registry[ptop] = reg self.registry[ptop] = reg
self.droppable[ptop] = drp or []
self.regdrop(ptop, None)
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags: if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None return None
@@ -797,10 +840,11 @@ class Up2k(object):
return ret return ret
def _run_all_mtp(self): def _run_all_mtp(self):
gid = self.gid
t0 = time.time() t0 = time.time()
for ptop, flags in self.flags.items(): for ptop, flags in self.flags.items():
if "mtp" in flags: if "mtp" in flags:
self._run_one_mtp(ptop) self._run_one_mtp(ptop, gid)
td = time.time() - t0 td = time.time() - t0
msg = "mtp finished in {:.2f} sec ({})" msg = "mtp finished in {:.2f} sec ({})"
@@ -811,7 +855,10 @@ class Up2k(object):
if "OFFLINE" not in self.volstate[k]: if "OFFLINE" not in self.volstate[k]:
self.volstate[k] = "online, idle" self.volstate[k] = "online, idle"
def _run_one_mtp(self, ptop): def _run_one_mtp(self, ptop, gid):
if gid != self.gid:
return
entags = self.entags[ptop] entags = self.entags[ptop]
parsers = {} parsers = {}
@@ -844,6 +891,9 @@ class Up2k(object):
in_progress = {} in_progress = {}
while True: while True:
with self.mutex: with self.mutex:
if gid != self.gid:
break
q = "select w from mt where k = 't:mtp' limit ?" q = "select w from mt where k = 't:mtp' limit ?"
warks = cur.execute(q, (batch_sz,)).fetchall() warks = cur.execute(q, (batch_sz,)).fetchall()
warks = [x[0] for x in warks] warks = [x[0] for x in warks]
@@ -1224,6 +1274,7 @@ class Up2k(object):
"at": at, "at": at,
"hash": [], "hash": [],
"need": [], "need": [],
"busy": {},
} }
if job and wark in reg: if job and wark in reg:
@@ -1257,7 +1308,10 @@ class Up2k(object):
err = "partial upload exists at a different location; please resume uploading here instead:\n" err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + quotep(vsrc) + " " err += "/" + quotep(vsrc) + " "
dupe = [cj["prel"], cj["name"]] # registry is size-constrained + can only contain one unique wark;
# let want_recheck trigger symlink (if still in reg) or reupload
if cur:
dupe = [cj["prel"], cj["name"], cj["lmod"]]
try: try:
self.dupesched[src].append(dupe) self.dupesched[src].append(dupe)
except: except:
@@ -1282,7 +1336,7 @@ class Up2k(object):
dst = os.path.join(job["ptop"], job["prel"], job["name"]) dst = os.path.join(job["ptop"], job["prel"], job["name"])
if not self.args.nw: if not self.args.nw:
bos.unlink(dst) # TODO ed pls bos.unlink(dst) # TODO ed pls
self._symlink(src, dst) self._symlink(src, dst, lmod=cj["lmod"])
if cur: if cur:
a = [cj[x] for x in "prel name lmod size addr".split()] a = [cj[x] for x in "prel name lmod size addr".split()]
@@ -1306,6 +1360,7 @@ class Up2k(object):
"t0": now, "t0": now,
"hash": deepcopy(cj["hash"]), "hash": deepcopy(cj["hash"]),
"need": [], "need": [],
"busy": {},
} }
# client-provided, sanitized by _get_wark: name, size, lmod # client-provided, sanitized by _get_wark: name, size, lmod
for k in [ for k in [
@@ -1353,13 +1408,14 @@ class Up2k(object):
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f: with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1] return f["orz"][1]
def _symlink(self, src, dst, verbose=True): def _symlink(self, src, dst, verbose=True, lmod=None):
if verbose: if verbose:
self.log("linking dupe:\n {0}\n {1}".format(src, dst)) self.log("linking dupe:\n {0}\n {1}".format(src, dst))
if self.args.nw: if self.args.nw:
return return
linked = False
try: try:
if self.args.no_symlink: if self.args.no_symlink:
raise Exception("disabled in config") raise Exception("disabled in config")
@@ -1390,10 +1446,18 @@ class Up2k(object):
hops = len(ndst[nc:]) - 1 hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(lsrc) lsrc = "../" * hops + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst)) os.symlink(fsenc(lsrc), fsenc(ldst))
linked = True
except Exception as ex: except Exception as ex:
self.log("cannot symlink; creating copy: " + repr(ex)) self.log("cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst)) shutil.copy2(fsenc(src), fsenc(dst))
if lmod and (not linked or SYMTIME):
times = (int(time.time()), int(lmod))
if ANYWIN:
self.lastmod_q.put([dst, 0, times])
else:
bos.utime(dst, times, False)
def handle_chunk(self, ptop, wark, chash): def handle_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry[ptop].get(wark) job = self.registry[ptop].get(wark)
@@ -1412,6 +1476,14 @@ class Up2k(object):
if not nchunk: if not nchunk:
raise Pebkac(400, "unknown chunk") raise Pebkac(400, "unknown chunk")
if chash in job["busy"]:
nh = len(job["hash"])
idx = job["hash"].index(chash)
m = "that chunk is already being written to:\n {}\n {} {}/{}\n {}"
raise Pebkac(400, m.format(wark, chash, idx, nh, job["name"]))
job["busy"][chash] = 1
job["poke"] = time.time() job["poke"] = time.time()
chunksize = up2k_chunksize(job["size"]) chunksize = up2k_chunksize(job["size"])
@@ -1421,6 +1493,14 @@ class Up2k(object):
return [chunksize, ofs, path, job["lmod"]] return [chunksize, ofs, path, job["lmod"]]
def release_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop].get(wark)
if job:
job["busy"].pop(chash, None)
return [True]
def confirm_chunk(self, ptop, wark, chash): def confirm_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
try: try:
@@ -1431,6 +1511,8 @@ class Up2k(object):
except Exception as ex: except Exception as ex:
return "confirm_chunk, wark, " + repr(ex) return "confirm_chunk, wark, " + repr(ex)
job["busy"].pop(chash, None)
try: try:
job["need"].remove(chash) job["need"].remove(chash)
except Exception as ex: except Exception as ex:
@@ -1441,7 +1523,7 @@ class Up2k(object):
return ret, src return ret, src
if self.args.nw: if self.args.nw:
# del self.registry[ptop][wark] self.regdrop(ptop, wark)
return ret, dst return ret, dst
# windows cant rename open files # windows cant rename open files
@@ -1473,21 +1555,21 @@ class Up2k(object):
a = [job[x] for x in "ptop wark prel name lmod size addr".split()] a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
a += [job.get("at") or time.time()] a += [job.get("at") or time.time()]
if self.idx_wark(*a): if self.idx_wark(*a):
# self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
del self.registry[ptop][wark] del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads else:
self.regdrop(ptop, wark)
dupes = self.dupesched.pop(dst, []) dupes = self.dupesched.pop(dst, [])
if not dupes: if not dupes:
return return
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
for rd, fn in dupes: for rd, fn, lmod in dupes:
d2 = os.path.join(ptop, rd, fn) d2 = os.path.join(ptop, rd, fn)
if os.path.exists(d2): if os.path.exists(d2):
continue continue
self._symlink(dst, d2) self._symlink(dst, d2, lmod=lmod)
if cur: if cur:
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, *a[-4:]) self.db_add(cur, wark, rd, fn, *a[-4:])
@@ -1495,6 +1577,21 @@ class Up2k(object):
if cur: if cur:
cur.connection.commit() cur.connection.commit()
def regdrop(self, ptop, wark):
t = self.droppable[ptop]
if wark:
t.append(wark)
if len(t) <= self.args.reg_cap:
return
n = len(t) - int(self.args.reg_cap / 2)
m = "up2k-registry [{}] has {} droppables; discarding {}"
self.log(m.format(ptop, len(t), n))
for k in t[:n]:
self.registry[ptop].pop(k, None)
self.droppable[ptop] = t[n:]
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at): def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
if not cur: if not cur:
@@ -1689,8 +1786,9 @@ class Up2k(object):
dlabs = absreal(sabs) dlabs = absreal(sabs)
m = "moving symlink from [{}] to [{}], target [{}]" m = "moving symlink from [{}] to [{}], target [{}]"
self.log(m.format(sabs, dabs, dlabs)) self.log(m.format(sabs, dabs, dlabs))
os.unlink(sabs) mt = bos.path.getmtime(sabs, False)
self._symlink(dlabs, dabs, False) bos.unlink(sabs)
self._symlink(dlabs, dabs, False, lmod=mt)
# folders are too scary, schedule rescan of both vols # folders are too scary, schedule rescan of both vols
self.need_rescan[svn.vpath] = 1 self.need_rescan[svn.vpath] = 1
@@ -1820,25 +1918,30 @@ class Up2k(object):
slabs = list(sorted(links.keys()))[0] slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs) ptop, rem = links.pop(slabs)
self.log("linkswap [{}] and [{}]".format(sabs, slabs)) self.log("linkswap [{}] and [{}]".format(sabs, slabs))
mt = bos.path.getmtime(slabs, False)
bos.unlink(slabs) bos.unlink(slabs)
bos.rename(sabs, slabs) bos.rename(sabs, slabs)
bos.utime(slabs, (int(time.time()), int(mt)), False)
self._symlink(slabs, sabs, False) self._symlink(slabs, sabs, False)
full[slabs] = [ptop, rem] full[slabs] = [ptop, rem]
sabs = slabs
if not dabs: if not dabs:
dabs = list(sorted(full.keys()))[0] dabs = list(sorted(full.keys()))[0]
for alink in links.keys(): for alink in links.keys():
lmod = None
try: try:
if alink != sabs and absreal(alink) != sabs: if alink != sabs and absreal(alink) != sabs:
continue continue
self.log("relinking [{}] to [{}]".format(alink, dabs)) self.log("relinking [{}] to [{}]".format(alink, dabs))
lmod = bos.path.getmtime(alink, False)
bos.unlink(alink) bos.unlink(alink)
except: except:
pass pass
self._symlink(dabs, alink, False) self._symlink(dabs, alink, False, lmod=lmod)
return len(full) + len(links) return len(full) + len(links)
@@ -1944,7 +2047,7 @@ class Up2k(object):
for path, sz, times in ready: for path, sz, times in ready:
self.log("lmod: setting times {} on {}".format(times, path)) self.log("lmod: setting times {} on {}".format(times, path))
try: try:
bos.utime(path, times) bos.utime(path, times, False)
except: except:
self.log("lmod: failed to utime ({}, {})".format(path, times)) self.log("lmod: failed to utime ({}, {})".format(path, times))
@@ -1960,6 +2063,7 @@ class Up2k(object):
self.snap_prev = {} self.snap_prev = {}
while True: while True:
time.sleep(self.snap_persist_interval) time.sleep(self.snap_persist_interval)
if not hasattr(self, "pp"):
self.do_snapshot() self.do_snapshot()
def do_snapshot(self): def do_snapshot(self):
@@ -1969,7 +2073,10 @@ class Up2k(object):
def _snap_reg(self, ptop, reg): def _snap_reg(self, ptop, reg):
now = time.time() now = time.time()
histpath = self.asrv.vfs.histtab[ptop] histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
return
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval] rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
if rm: if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop) m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
@@ -2006,7 +2113,8 @@ class Up2k(object):
bos.makedirs(histpath) bos.makedirs(histpath)
path2 = "{}.{}".format(path, os.getpid()) path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8") body = {"droppable": self.droppable[ptop], "registry": reg}
j = json.dumps(body, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f: with gzip.GzipFile(path2, "wb") as f:
f.write(j) f.write(j)

View File

@@ -67,8 +67,9 @@ if WINDOWS and PY2:
FS_ENCODING = "utf-8" FS_ENCODING = "utf-8"
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT" SYMTIME = sys.version_info >= (3, 6) and os.supports_follow_symlinks
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
HTTPCODE = { HTTPCODE = {
200: "OK", 200: "OK",
@@ -100,10 +101,25 @@ IMPLICATIONS = [
MIMES = { MIMES = {
"md": "text/plain; charset=UTF-8", "md": "text/plain",
"txt": "text/plain",
"js": "text/javascript",
"opus": "audio/ogg; codecs=opus", "opus": "audio/ogg; codecs=opus",
"webp": "image/webp", "caf": "audio/x-caf",
"mp3": "audio/mpeg",
"m4a": "audio/mp4",
"jpg": "image/jpeg",
} }
for ln in """text css html csv
application json wasm xml pdf rtf zip
image webp jpeg png gif bmp
audio aac ogg wav
video webm mp4 mpeg
font woff woff2 otf ttf
""".splitlines():
k, vs = ln.split(" ", 1)
for v in vs.strip().split():
MIMES[v] = "{}/{}".format(k, v)
REKOBO_KEY = { REKOBO_KEY = {
@@ -807,6 +823,17 @@ def gen_filekey(salt, fspath, fsize, inode):
).decode("ascii") ).decode("ascii")
def gencookie(k, v, dur):
v = v.replace(";", "")
if dur:
dt = datetime.utcfromtimestamp(time.time() + dur)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
else:
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
def humansize(sz, terse=False): def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]: for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024: if sz < 1024:
@@ -1070,7 +1097,8 @@ def read_socket(sr, total_size):
buf = sr.recv(bufsz) buf = sr.recv(bufsz)
if not buf: if not buf:
raise Pebkac(400, "client d/c during binary post") m = "client d/c during binary post after {} bytes, {} bytes remaining"
raise Pebkac(400, m.format(total_size - remains, remains))
remains -= len(buf) remains -= len(buf)
yield buf yield buf
@@ -1150,12 +1178,14 @@ def hashcopy(fin, fout):
return tlen, hashobj.hexdigest(), digest_b64 return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s): def sendfile_py(log, lower, upper, f, s, bufsz, slp):
remains = upper - lower remains = upper - lower
f.seek(lower) f.seek(lower)
while remains > 0: while remains > 0:
# time.sleep(0.01) if slp:
buf = f.read(min(1024 * 32, remains)) time.sleep(slp)
buf = f.read(min(bufsz, remains))
if not buf: if not buf:
return remains return remains
@@ -1168,17 +1198,24 @@ def sendfile_py(lower, upper, f, s):
return 0 return 0
def sendfile_kern(lower, upper, f, s): def sendfile_kern(log, lower, upper, f, s, bufsz, slp):
out_fd = s.fileno() out_fd = s.fileno()
in_fd = f.fileno() in_fd = f.fileno()
ofs = lower ofs = lower
stuck = None
while ofs < upper: while ofs < upper:
stuck = stuck or time.time()
try: try:
req = min(2 ** 30, upper - ofs) req = min(2 ** 30, upper - ofs)
select.select([], [out_fd], [], 10) select.select([], [out_fd], [], 10)
n = os.sendfile(out_fd, in_fd, ofs, req) n = os.sendfile(out_fd, in_fd, ofs, req)
stuck = None
except Exception as ex: except Exception as ex:
# print("sendfile: " + repr(ex)) d = time.time() - stuck
log("sendfile stuck for {:.3f} sec: {!r}".format(d, ex))
if d < 3600 and ex.errno == 11: # eagain
continue
n = 0 n = 0
if n <= 0: if n <= 0:
@@ -1280,18 +1317,33 @@ def guess_mime(url, fallback="application/octet-stream"):
except: except:
return fallback return fallback
ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback ret = MIMES.get(ext)
if not ret:
x = mimetypes.guess_type(url)
ret = "application/{}".format(x[1]) if x[1] else x[0]
if not ret:
ret = fallback
if ";" not in ret: if ";" not in ret:
if ret.startswith("text/") or ret.endswith("/javascript"): if ret.startswith("text/") or ret.endswith("/javascript"):
ret += "; charset=UTF-8" ret += "; charset=utf-8"
return ret return ret
def runcmd(argv): def runcmd(argv, timeout=None):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
if not timeout or PY2:
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
else:
try:
stdout, stderr = p.communicate(timeout=timeout)
except sp.TimeoutExpired:
p.kill()
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8", "replace") stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8", "replace") stderr = stderr.decode("utf-8", "replace")
return [p.returncode, stdout, stderr] return [p.returncode, stdout, stderr]

View File

@@ -237,7 +237,7 @@ window.baguetteBox = (function () {
} }
function keyDownHandler(e) { function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing) if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
return; return;
var k = e.code + '', v = vid(); var k = e.code + '', v = vid();
@@ -331,7 +331,7 @@ window.baguetteBox = (function () {
function tglsel() { function tglsel() {
var thumb = currentGallery[currentIndex].imageElement, var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1], name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getall(); files = msel.getall();
for (var a = 0; a < files.length; a++) for (var a = 0; a < files.length; a++)
@@ -345,7 +345,7 @@ window.baguetteBox = (function () {
function selbg() { function selbg() {
var img = vidimg(), var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement, thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1], name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getsel(), files = msel.getsel(),
sel = false; sel = false;
@@ -530,9 +530,7 @@ window.baguetteBox = (function () {
if (options.bodyClass && document.body.classList) if (options.bodyClass && document.body.classList)
document.body.classList.remove(options.bodyClass); document.body.classList.remove(options.bodyClass);
var h = ebi('bbox-halp'); qsr('#bbox-halp');
if (h)
h.parentNode.removeChild(h);
if (options.afterHide) if (options.afterHide)
options.afterHide(); options.afterHide();
@@ -590,8 +588,7 @@ window.baguetteBox = (function () {
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () { image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
// Remove loader element // Remove loader element
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner'); qsr('#baguette-img-' + index + ' .bbox-spinner');
figure.removeChild(spinner);
if (!options.async && callback) if (!options.async && callback)
callback(); callback();
}); });

View File

@@ -23,7 +23,7 @@ html, body {
margin: 0; margin: 0;
padding: 0; padding: 0;
} }
pre, code, tt { pre, code, tt, #doc, #doc>code {
font-family: 'scp', monospace, monospace; font-family: 'scp', monospace, monospace;
} }
#path, #path,
@@ -31,9 +31,8 @@ pre, code, tt {
font-size: 1em; font-size: 1em;
} }
#path { #path {
color: #aca; color: #ccc;
text-shadow: 1px 1px 0 #000; text-shadow: 1px 1px 0 #000;
font-variant: small-caps;
font-weight: normal; font-weight: normal;
display: inline-block; display: inline-block;
padding: .35em .5em .2em .5em; padding: .35em .5em .2em .5em;
@@ -45,8 +44,9 @@ pre, code, tt {
margin-left: -.7em; margin-left: -.7em;
} }
#files { #files {
border-spacing: 0;
z-index: 1; z-index: 1;
top: -.3em;
border-spacing: 0;
position: relative; position: relative;
} }
#files tbody a { #files tbody a {
@@ -73,17 +73,38 @@ a, #files tbody div a:last-child {
} }
#files thead { #files thead {
position: sticky; position: sticky;
top: 0; top: -1px;
} }
#files thead a { #files thead a {
color: #999; color: #999;
font-weight: normal; font-weight: normal;
} }
.s0:after,
.s1:after {
content: '⌄';
margin-left: -.1em;
}
.s0r:after,
.s1r:after {
content: '⌃';
margin-left: -.1em;
}
.s0:after,
.s0r:after {
color: #fb0;
}
.s1:after,
.s1r:after {
color: #d09;
}
#files thead th:after {
margin-right: -.7em;
}
#files tbody tr:hover td { #files tbody tr:hover td {
background: #1c1c1c; background: #1c1c1c;
} }
#files thead th { #files thead th {
padding: 0 .3em .3em .3em; padding: .3em;
border-bottom: 1px solid #444; border-bottom: 1px solid #444;
cursor: pointer; cursor: pointer;
} }
@@ -205,6 +226,12 @@ a, #files tbody div a:last-child {
#repl { #repl {
padding: .33em; padding: .33em;
} }
#files a.doc {
color: #666;
}
#files a.doc.bri {
color: #f5a;
}
#files tbody a.play { #files tbody a.play {
color: #e70; color: #e70;
padding: .2em; padding: .2em;
@@ -234,6 +261,8 @@ html.light #ggrid>a[tt].sel {
#files tbody tr.sel:hover td, #files tbody tr.sel:hover td,
#files tbody tr.sel:focus td, #files tbody tr.sel:focus td,
#ggrid>a.sel:hover, #ggrid>a.sel:hover,
#ggrid>a.sel:focus,
html.light #ggrid>a.sel:focus,
html.light #ggrid>a.sel:hover { html.light #ggrid>a.sel:hover {
color: #fff; color: #fff;
background: #d39; background: #d39;
@@ -289,6 +318,8 @@ html.light #ggrid>a.sel {
width: 100%; width: 100%;
z-index: 3; z-index: 3;
touch-action: none; touch-action: none;
}
#widget.anim {
transition: bottom 0.15s; transition: bottom 0.15s;
} }
#widget.open { #widget.open {
@@ -395,6 +426,9 @@ html.light #ggrid>a.sel {
opacity: .3; opacity: .3;
color: #f6c; color: #f6c;
} }
#wfm a.hide {
display: none;
}
html.light #wfm a:not(.en) { html.light #wfm a:not(.en) {
color: #c4a; color: #c4a;
} }
@@ -456,7 +490,7 @@ html.light #wfm a:not(.en) {
width: calc(100% - 10.5em); width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2); background: rgba(0,0,0,0.2);
} }
@media (min-width: 80em) { @media (min-width: 70em) {
#barpos, #barpos,
#barbuf { #barbuf {
width: calc(100% - 21em); width: calc(100% - 21em);
@@ -554,6 +588,11 @@ input[type="radio"]:checked+label,
input[type="checkbox"]:checked+label { input[type="checkbox"]:checked+label {
color: #fc5; color: #fc5;
} }
.opwide div>span>input+label {
padding: .3em 0 .3em .3em;
margin: 0 0 0 -.3em;
cursor: pointer;
}
.opview input.i { .opview input.i {
width: calc(100% - 16.2em); width: calc(100% - 16.2em);
} }
@@ -643,7 +682,7 @@ input.eq_gain {
#wrap { #wrap {
margin: 1.8em 1.5em 0 1.5em; margin: 1.8em 1.5em 0 1.5em;
min-height: 70vh; min-height: 70vh;
padding-bottom: 5em; padding-bottom: 7em;
} }
#tree { #tree {
display: none; display: none;
@@ -668,6 +707,17 @@ input.eq_gain {
border-bottom: 1px solid #111; border-bottom: 1px solid #111;
overflow: hidden; overflow: hidden;
} }
#treepar {
z-index: 1;
position: fixed;
left: -.75em;
width: calc(var(--nav-sz) - 0.5em);
border-bottom: 1px solid #444;
overflow: hidden;
}
#treepar.off {
display: none;
}
#tree, #treeh { #tree, #treeh {
border-radius: 0 .3em 0 0; border-radius: 0 .3em 0 0;
} }
@@ -732,37 +782,46 @@ input.eq_gain {
#tree li:last-child { #tree li:last-child {
border-bottom: none; border-bottom: none;
} }
#treeul a.hl { #tree ul a.sel {
background: #111;
box-shadow: -.8em 0 0 #c37 inset;
color: #fff;
}
#tree ul a.hl {
color: #400; color: #400;
background: #fc4; background: #fc4;
text-shadow: none; text-shadow: none;
} }
#treeul a { #tree ul a.par {
color: #fff;
}
#tree ul a {
border-radius: .3em; border-radius: .3em;
display: inline-block; display: inline-block;
} }
#treeul a+a { .ntree a+a {
width: calc(100% - 2em); width: calc(100% - 2em);
line-height: 1em; line-height: 1em;
} }
#tree.nowrap #treeul li { #tree.nowrap li {
min-height: 1.4em; min-height: 1.4em;
white-space: nowrap; white-space: nowrap;
} }
#tree.nowrap #treeul a+a:hover { #tree.nowrap .ntree a+a:hover {
background: rgba(16, 16, 16, 0.67); background: rgba(16, 16, 16, 0.67);
min-width: calc(var(--nav-sz) - 2em); min-width: calc(var(--nav-sz) - 2em);
width: auto; width: auto;
} }
html.light #tree.nowrap #treeul a+a:hover { html.light #tree.nowrap .ntree a+a:hover {
background: rgba(255, 255, 255, 0.67); background: rgba(255, 255, 255, 0.67);
color: #000; color: #000;
} }
#treeul a+a:hover { #docul a:hover,
#tree .ntree a+a:hover {
background: #181818; background: #181818;
color: #fff; color: #fff;
} }
#treeul a:first-child { .ntree a:first-child {
font-family: 'scp', monospace, monospace; font-family: 'scp', monospace, monospace;
font-size: 1.2em; font-size: 1.2em;
line-height: 0; line-height: 0;
@@ -845,42 +904,47 @@ html.light #tree.nowrap #treeul a+a:hover {
border-bottom: 1px solid #555; border-bottom: 1px solid #555;
} }
#thumbs, #thumbs,
#au_fullpre,
#au_os_seek,
#au_osd_cv, #au_osd_cv,
#u2tdate { #u2tdate {
opacity: .3; opacity: .3;
} }
#griden.on+#thumbs, #griden.on+#thumbs,
#au_os_ctl.on+#au_osd_cv, #au_preload.on+#au_fullpre,
#au_os_ctl.on+#au_os_seek,
#au_os_ctl.on+#au_os_seek+#au_osd_cv,
#u2turbo.on+#u2tdate { #u2turbo.on+#u2tdate {
opacity: 1; opacity: 1;
} }
#wraptree.on+#hovertree { #wraptree.on+#hovertree {
display: none; display: none;
} }
#ghead { .ghead {
border-radius: .3em; border-radius: .3em;
padding: .2em .5em; padding: .2em .5em;
line-height: 2.3em; line-height: 2.3em;
margin-bottom: 1em; margin-bottom: 1.5em;
}
#ghead {
position: sticky; position: sticky;
top: -.3em; top: -.3em;
z-index: 1; z-index: 1;
} }
html.light #ghead { html.light .ghead {
background: #f7f7f7; background: #f7f7f7;
border-color: #ddd; border-color: #ddd;
} }
#ghead .btn { .ghead .btn {
position: relative; position: relative;
top: 0; top: 0;
} }
#ghead>span { .ghead>span {
white-space: pre; white-space: pre;
padding-left: .3em; padding-left: .3em;
} }
#ggrid { #ggrid {
padding-top: .5em; margin: -.2em -.5em;
margin: 0 -.5em;
} }
#ggrid>a>span { #ggrid>a>span {
overflow: hidden; overflow: hidden;
@@ -912,6 +976,12 @@ html.light #ghead {
#ggrid>a.dir:before { #ggrid>a.dir:before {
content: '📂'; content: '📂';
} }
#ggrid>a.au:before {
content: '💾';
}
html.np_open #ggrid>a.au:before {
content: '▶';
}
#ggrid>a:before { #ggrid>a:before {
display: block; display: block;
position: absolute; position: absolute;
@@ -921,6 +991,12 @@ html.light #ghead {
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2)); background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
border-radius: .3em; border-radius: .3em;
font-size: 2em; font-size: 2em;
transition: font-size .15s, margin .15s;
}
#ggrid>a:focus:before,
#ggrid>a:hover:before {
font-size: 2.5em;
margin: -.2em;
} }
#op_unpost { #op_unpost {
padding: 1em; padding: 1em;
@@ -989,6 +1065,52 @@ html.light #rui {
padding: 0; padding: 0;
font-size: 1.5em; font-size: 1.5em;
} }
#doc {
overflow: visible;
margin: -1em 0 .5em 0;
padding: 1em 0 1em 0;
}
#docul {
position: relative;
}
#docul li.bn {
text-align: center;
padding: .5em;
}
#doc.prism {
padding-left: 3em;
}
#doc>code {
background: none;
box-shadow: none;
z-index: 1;
}
#doc.mdo {
white-space: normal;
font-family: sans-serif;
}
#doc.prism * {
line-height: 1.5em;
}
#doc .line-highlight {
border-radius: .3em;
box-shadow: 0 0 .5em #333;
background: linear-gradient(90deg, #111, #222);
}
html.light #doc .line-highlight {
box-shadow: 0 0 .5em #ccc;
background: linear-gradient(90deg, #fff, #eee);
}
#docul li {
margin: 0;
}
#tree #docul li+li a {
display: block;
}
#seldoc.sel {
color: #fff;
background: #925;
}
#pvol, #pvol,
#barbuf, #barbuf,
#barpos, #barpos,
@@ -1031,6 +1153,7 @@ a.btn,
html, html,
#doc,
#rui, #rui,
#files td, #files td,
#files thead th, #files thead th,
@@ -1043,7 +1166,7 @@ html,
.opbox, .opbox,
#path, #path,
#srch_form, #srch_form,
#ghead { .ghead {
background: #2b2b2b; background: #2b2b2b;
border: 1px solid #333; border: 1px solid #333;
box-shadow: 0 0 .3em #111; box-shadow: 0 0 .3em #111;
@@ -1052,7 +1175,8 @@ html,
background: #282828; background: #282828;
} }
#tree, #tree,
#treeh { #treeh,
#treepar {
background: #2b2b2b; background: #2b2b2b;
} }
#wtoggle, #wtoggle,
@@ -1077,6 +1201,7 @@ html,
#ggrid>a[tt] { #ggrid>a[tt] {
background: linear-gradient(135deg, #2c2c2c 95%, #444 95%); background: linear-gradient(135deg, #2c2c2c 95%, #444 95%);
} }
#ggrid>a:focus,
#ggrid>a:hover { #ggrid>a:hover {
background: #383838; background: #383838;
border-color: #555; border-color: #555;
@@ -1090,6 +1215,7 @@ html.light #ggrid>a {
html.light #ggrid>a[tt] { html.light #ggrid>a[tt] {
background: linear-gradient(135deg, #f7f7f7 95%, #ccc 95%); background: linear-gradient(135deg, #f7f7f7 95%, #ccc 95%);
} }
html.light #ggrid>a:focus,
html.light #ggrid>a:hover { html.light #ggrid>a:hover {
background: #fff; background: #fff;
border-color: #ccc; border-color: #ccc;
@@ -1109,13 +1235,17 @@ html.light {
html.light #ops, html.light #ops,
html.light .opbox, html.light .opbox,
html.light #path, html.light #path,
html.light #doc,
html.light #srch_form, html.light #srch_form,
html.light #ghead, html.light .ghead,
html.light #u2etas { html.light #u2etas {
background: #f7f7f7; background: #f7f7f7;
box-shadow: 0 0 .3em #ccc; box-shadow: 0 0 .3em #ccc;
border-color: #f7f7f7; border-color: #f7f7f7;
} }
html.light #wrap.doc {
background: #f7f7f7;
}
html.light #ops a.act { html.light #ops a.act {
box-shadow: 0 .2em .2em #ccc; box-shadow: 0 .2em .2em #ccc;
background: #fff; background: #fff;
@@ -1154,21 +1284,25 @@ html.light #acc_info {
html.light #srv_info span { html.light #srv_info span {
color: #777; color: #777;
} }
html.light #treeul a+a { html.light #tree .ntree a+a {
background: inherit; background: inherit;
color: #06a; color: #06a;
} }
html.light #treeul a.hl { html.light #tree ul a.hl {
background: #07a; background: #07a;
color: #fff; color: #fff;
} }
html.light #treeul a.hl:hover { html.light #tree ul a.par {
color: #000;
}
html.light #tree ul a.hl:hover {
background: #059; background: #059;
} }
html.light #tree li { html.light #tree li,
html.light #tree #treepar {
border-color: #f7f7f7 #fff #ddd #fff; border-color: #f7f7f7 #fff #ddd #fff;
} }
html.light #treeul a:hover { html.light #tree ul a:hover {
background: #fff; background: #fff;
} }
html.light #tree ul { html.light #tree ul {
@@ -1179,6 +1313,14 @@ html.light #ops a,
html.light #files tbody div a:last-child { html.light #files tbody div a:last-child {
color: #06a; color: #06a;
} }
html.light .s0:after,
html.light .s0r:after {
color: #059;
}
html.light .s1:after,
html.light .s1r:after {
color: #f5d;
}
html.light #files thead th { html.light #files thead th {
background: #eaeaea; background: #eaeaea;
border-color: #ccc; border-color: #ccc;
@@ -1216,6 +1358,12 @@ html.light #files tbody a.play {
html.light #files tbody a.play.act { html.light #files tbody a.play.act {
color: #90c; color: #90c;
} }
html.light #files a.doc {
color: #bbb;
}
html.light #files a.doc.bri {
color: #d38;
}
html.light #files tr.play td { html.light #files tr.play td {
background: #fc5; background: #fc5;
border-color: #eb1; border-color: #eb1;
@@ -1279,10 +1427,12 @@ html.light .opview input[type="text"] {
border-color: #38d; border-color: #38d;
} }
html.light #u2tab a>span, html.light #u2tab a>span,
html.light #docul .bn a>span,
html.light #files td div span { html.light #files td div span {
color: #000; color: #000;
} }
html.light #path { html.light #path {
color: #777;
background: #f7f7f7; background: #f7f7f7;
text-shadow: none; text-shadow: none;
box-shadow: 0 0 .3em #bbb; box-shadow: 0 0 .3em #bbb;
@@ -1300,13 +1450,15 @@ html.light #path a:hover {
html.light #files tbody div a { html.light #files tbody div a {
color: #d38; color: #d38;
} }
html.light #docul a:hover,
html.light #files a:hover, html.light #files a:hover,
html.light #files tr.sel a:hover { html.light #files tr.sel a:hover {
color: #000; color: #000;
background: #fff; background: #fff;
text-decoration: underline; text-decoration: underline;
} }
html.light #treeh { html.light #treeh,
html.light #treepar {
background: #f7f7f7; background: #f7f7f7;
border-color: #ddd; border-color: #ddd;
} }
@@ -1566,8 +1718,6 @@ html.light #bbox-overlay figcaption a {
#op_up2k { #op_up2k {
padding: 0 1em 1em 1em; padding: 0 1em 1em 1em;
min-height: 0;
transition: min-height .2s;
} }
#drops { #drops {
display: none; display: none;
@@ -1732,13 +1882,18 @@ html.light #u2err.err {
#u2notbtn * { #u2notbtn * {
line-height: 1.3em; line-height: 1.3em;
} }
#u2tabw {
min-height: 0;
transition: min-height .2s;
margin: 3em 0;
}
#u2tab { #u2tab {
border-collapse: collapse; border-collapse: collapse;
margin: 3em auto;
width: calc(100% - 2em); width: calc(100% - 2em);
max-width: 100em; max-width: 100em;
margin: 0 auto;
} }
#op_up2k.srch #u2tab { #op_up2k.srch #u2tabf {
max-width: none; max-width: none;
} }
#u2tab td { #u2tab td {
@@ -1998,16 +2153,13 @@ html.light #u2foot .warn span {
border-color: #d06; border-color: #d06;
} }
#u2tab a>span, #u2tab a>span,
#docul .bn a>span,
#unpost a>span { #unpost a>span {
font-weight: bold; font-weight: bold;
font-style: italic; font-style: italic;
color: #fff; color: #fff;
padding-left: .2em; padding-left: .2em;
} }
#u2cleanup {
float: right;
margin-bottom: -.3em;
}
.fsearch_explain { .fsearch_explain {
padding-left: .7em; padding-left: .7em;
font-size: 1.1em; font-size: 1.1em;

View File

@@ -76,6 +76,12 @@
<div id="wrap"> <div id="wrap">
{%- if doc %}
<div id="bdoc"><pre>{{ doc|e }}</pre></div>
{%- else %}
<div id="bdoc"></div>
{%- endif %}
<div id="pro" class="logue">{{ logues[0] }}</div> <div id="pro" class="logue">{{ logues[0] }}</div>
<table id="files"> <table id="files">
@@ -130,11 +136,15 @@
def_hcols = {{ def_hcols|tojson }}, def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }}, have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }}, have_tags_idx = {{ have_tags_idx|tojson }},
have_acode = {{ have_acode|tojson }},
have_mv = {{ have_mv|tojson }}, have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }}, have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }}, have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }}, have_zip = {{ have_zip|tojson }},
readme = {{ readme|tojson }}; txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }},
ls0 = {{ ls0|tojson }};
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark"); document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
</script> </script>

File diff suppressed because it is too large Load Diff

View File

@@ -10,7 +10,7 @@
{%- endif %} {%- endif %}
</head> </head>
<body> <body>
<div id="mn">navbar</div> <div id="mn"></div>
<div id="mh"> <div id="mh">
<a id="lightswitch" href="#">go dark</a> <a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a> <a id="navtoggle" href="#">hide nav</a>

View File

@@ -39,20 +39,14 @@ var md_plug = {};
// add navbar // add navbar
(function () { (function () {
var n = document.location + ''; var parts = get_evpath().split('/'), link = '', o;
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/'); for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
n[0] = 'top'; link += parts[a] + (a < aa ? '/' : '');
var loc = []; o = mknod('a');
var nav = []; o.setAttribute('href', link);
for (var a = 0; a < n.length; a++) { o.textContent = uricom_dec(parts[a])[0] || 'top';
if (a > 0) dom_nav.appendChild(o);
loc.push(n[a]);
var dec = esc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
} }
dom_nav.innerHTML = nav.join('');
})(); })();
@@ -91,13 +85,13 @@ function copydom(src, dst, lv) {
var rpl = []; var rpl = [];
for (var a = sc.length - 1; a >= 0; a--) { for (var a = sc.length - 1; a >= 0; a--) {
var st = sc[a].tagName, var st = sc[a].tagName || sc[a].nodeType,
dt = dc[a].tagName; dt = dc[a].tagName || dc[a].nodeType;
if (st !== dt) { if (st !== dt) {
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt); dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
rpl.push(a); dst.innerHTML = src.innerHTML;
continue; return;
} }
var sa = sc[a].attributes || [], var sa = sc[a].attributes || [],
@@ -146,8 +140,11 @@ function copydom(src, dst, lv) {
// repl is reversed; build top-down // repl is reversed; build top-down
var nbytes = 0; var nbytes = 0;
for (var a = rpl.length - 1; a >= 0; a--) { for (var a = rpl.length - 1; a >= 0; a--) {
var html = sc[rpl[a]].outerHTML; var i = rpl[a],
dc[rpl[a]].outerHTML = html; prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
var html = sc[i][prop];
dc[i][prop] = html;
nbytes += html.length; nbytes += html.length;
} }
if (nbytes > 0) if (nbytes > 0)
@@ -164,10 +161,7 @@ function copydom(src, dst, lv) {
function md_plug_err(ex, js) { function md_plug_err(ex, js) {
var errbox = ebi('md_errbox'); qsr('#md_errbox');
if (errbox)
errbox.parentNode.removeChild(errbox);
if (!ex) if (!ex)
return; return;
@@ -183,7 +177,7 @@ function md_plug_err(ex, js) {
o.textContent = lns[ln - 1]; o.textContent = lns[ln - 1];
} }
} }
errbox = mknod('div'); var errbox = mknod('div');
errbox.setAttribute('id', 'md_errbox'); errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5' errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg; errbox.textContent = msg;
@@ -259,7 +253,7 @@ function convert_markdown(md_text, dest_dom) {
Object.assign(marked_opts, ext[0]); Object.assign(marked_opts, ext[0]);
try { try {
var md_html = marked(md_text, marked_opts); var md_html = marked.parse(md_text, marked_opts);
} }
catch (ex) { catch (ex) {
if (ext) if (ext)
@@ -381,8 +375,7 @@ function convert_markdown(md_text, dest_dom) {
function init_toc() { function init_toc() {
var loader = ebi('ml'); qsr('#ml');
loader.parentNode.removeChild(loader);
var anchors = []; // list of toc entries, complex objects var anchors = []; // list of toc entries, complex objects
var anchor = null; // current toc node var anchor = null; // current toc node

View File

@@ -65,8 +65,7 @@ var mde = (function () {
mde.codemirror.on("change", function () { mde.codemirror.on("change", function () {
md_changed(mde); md_changed(mde);
}); });
var loader = ebi('ml'); qsr('#ml');
loader.parentNode.removeChild(loader);
return mde; return mde;
})(); })();

View File

@@ -3,7 +3,7 @@
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>copyparty</title> <title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">

View File

@@ -29,15 +29,19 @@ a {
border-radius: .2em; border-radius: .2em;
padding: .2em .8em; padding: .2em .8em;
} }
a+a {
margin-left: .5em;
}
.refresh, .refresh,
.logout { .logout {
float: right; float: right;
margin-top: -.2em; margin: -.2em 0 0 .5em;
} }
.logout { .logout,
.btns a,
a.r {
color: #c04; color: #c04;
border-color: #c7a; border-color: #c7a;
margin-right: .5em;
} }
#repl { #repl {
border: none; border: none;
@@ -52,6 +56,7 @@ table {
.vols th { .vols th {
padding: .3em .6em; padding: .3em .6em;
text-align: left; text-align: left;
white-space: nowrap;
} }
.num { .num {
border-right: 1px solid #bbb; border-right: 1px solid #bbb;
@@ -75,6 +80,12 @@ table {
margin-top: .3em; margin-top: .3em;
text-align: right; text-align: right;
} }
blockquote {
margin: 0 0 1.6em .6em;
padding: .7em 1em 0 1em;
border-left: .3em solid rgba(128,128,128,0.5);
border-radius: 0 0 0 .25em;
}
html.dark, html.dark,
@@ -91,7 +102,9 @@ html.dark a {
background: #057; background: #057;
border-color: #37a; border-color: #37a;
} }
html.dark .logout { html.dark .logout,
html.dark .btns a,
html.dark a.r {
background: #804; background: #804;
border-color: #c28; border-color: #c28;
} }

View File

@@ -3,7 +3,7 @@
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>copyparty</title> <title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
@@ -49,7 +49,8 @@
</table> </table>
</td></tr></table> </td></tr></table>
<div class="btns"> <div class="btns">
<a href="/?stack">dump stack</a> <a href="/?stack" tt="shows the state of all active threads">dump stack</a>
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
</div> </div>
{%- endif %} {%- endif %}
@@ -71,6 +72,18 @@
</ul> </ul>
{%- endif %} {%- endif %}
<h1 id="cc">client config:</h1>
<ul>
{% if k304 %}
<li><a href="/?k304=n">disable k304</a> (currently enabled)
{%- else %}
<li><a href="/?k304=y" class="r">enable k304</a> (currently disabled)
{% endif %}
<blockquote>enabling this will disconnect your client on every HTTP 304, which can prevent some buggy browsers/proxies from getting stuck (suddenly not being able to load pages), <em>but</em> it will also make things slower in general</blockquote></li>
<li><a href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
</ul>
<h1>login for more:</h1> <h1>login for more:</h1>
<ul> <ul>
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}"> <form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
@@ -83,10 +96,10 @@
<a href="#" id="repl">π</a> <a href="#" id="repl">π</a>
<script> <script>
if (localStorage.lightmode != 1) document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
document.documentElement.setAttribute("class", "dark");
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script>tt.init();</script>
</body> </body>
</html> </html>

View File

@@ -116,6 +116,20 @@ html {
#toast.err #toastc { #toast.err #toastc {
background: #d06; background: #d06;
} }
#tth {
color: #fff;
background: #111;
font-size: .9em;
padding: 0 .26em;
line-height: .97em;
border-radius: 1em;
position: absolute;
display: none;
}
#tth.act {
display: block;
z-index: 9001;
}
#tt.b { #tt.b {
padding: 0 2em; padding: 0 2em;
border-radius: .5em; border-radius: .5em;
@@ -133,7 +147,8 @@ html {
} }
#modalc code, #modalc code,
#tt code { #tt code {
background: #3c3c3c; color: #eee;
background: #444;
padding: .1em .3em; padding: .1em .3em;
border-top: 1px solid #777; border-top: 1px solid #777;
border-radius: .3em; border-radius: .3em;
@@ -158,6 +173,10 @@ html.light #tt code {
html.light #tt em { html.light #tt em {
color: #d38; color: #d38;
} }
html.light #tth {
color: #000;
background: #fff;
}
#modal { #modal {
position: fixed; position: fixed;
overflow: auto; overflow: auto;
@@ -335,6 +354,13 @@ html.light textarea:focus {
} }
.mdo ul, .mdo ul,
.mdo ol { .mdo ol {
padding-left: 1em;
}
.mdo ul ul,
.mdo ul ol,
.mdo ol ul,
.mdo ol ol {
padding-left: 2em;
border-left: .3em solid #ddd; border-left: .3em solid #ddd;
} }
.mdo ul>li, .mdo ul>li,

View File

@@ -332,8 +332,7 @@ function U2pvis(act, btns) {
r.head++; r.head++;
if (!bz_act) { if (!bz_act) {
var tr = ebi("f" + nfile); qsr("#f" + nfile);
tr.parentNode.removeChild(tr);
} }
} }
else return; else return;
@@ -352,9 +351,7 @@ function U2pvis(act, btns) {
last = parseInt(last.getAttribute('id').slice(1)); last = parseInt(last.getAttribute('id').slice(1));
while (r.head - first > r.wsz) { while (r.head - first > r.wsz) {
var obj = ebi('f' + (first++)); qsr('#f' + (first++));
if (obj)
obj.parentNode.removeChild(obj);
} }
while (last - r.tail < r.wsz && last < r.tab.length - 2) { while (last - r.tail < r.wsz && last < r.tab.length - 2) {
var obj = ebi('f' + (++last)); var obj = ebi('f' + (++last));
@@ -528,13 +525,15 @@ function Donut(uc, st) {
} }
r.on = function (ya) { r.on = function (ya) {
r.fc = 99; r.fc = r.tc = 99;
r.eta = null; r.eta = null;
r.base = pos(); r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico'); optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
el = QS('#ops a .donut'); el = QS('#ops a .donut');
if (!ya) if (!ya) {
favico.upd(); favico.upd();
wintitle();
}
}; };
r.do = function () { r.do = function () {
if (!el) if (!el)
@@ -544,6 +543,11 @@ function Donut(uc, st) {
v = pos() - r.base, v = pos() - r.base,
ofs = el.style.strokeDashoffset = o - o * v / t; ofs = el.style.strokeDashoffset = o - o * v / t;
if (++r.tc >= 10) {
wintitle(f2f(v * 100 / t, 1) + '%, ' + r.eta + 's, ', true);
r.tc = 0;
}
if (favico.txt) { if (favico.txt) {
if (++r.fc < 10 && r.eta && r.eta > 99) if (++r.fc < 10 && r.eta && r.eta > 99)
return; return;
@@ -565,9 +569,9 @@ function fsearch_explain(n) {
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"')); return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));
if (bcfg_get('fsearch', false)) if (bcfg_get('fsearch', false))
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and then refresh\n\nsorry'); return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and try uploading again\n\nsorry');
return toast.inf(60, 'refresh the page and try again, it should work now'); return toast.inf(60, 'try again, it should work now');
} }
@@ -672,6 +676,7 @@ function up2k_init(subtle) {
var st = { var st = {
"files": [], "files": [],
"seen": {},
"todo": { "todo": {
"head": [], "head": [],
"hash": [], "hash": [],
@@ -730,7 +735,6 @@ function up2k_init(subtle) {
if (++nenters <= 0) if (++nenters <= 0)
nenters = 1; nenters = 1;
//console.log(nenters, Date.now(), 'enter', this, e.target);
if (onover.bind(this)(e)) if (onover.bind(this)(e))
return true; return true;
@@ -752,12 +756,19 @@ function up2k_init(subtle) {
ebi('up_dz').setAttribute('err', mup || ''); ebi('up_dz').setAttribute('err', mup || '');
ebi('srch_dz').setAttribute('err', msr || ''); ebi('srch_dz').setAttribute('err', msr || '');
} }
function onoverb(e) {
// zones are alive; disable cuo2duo branch
document.body.ondragover = document.body.ondrop = null;
return onover.bind(this)(e);
}
function onover(e) { function onover(e) {
try { try {
var ok = false, dt = e.dataTransfer.types; var ok = false, dt = e.dataTransfer.types;
for (var a = 0; a < dt.length; a++) for (var a = 0; a < dt.length; a++)
if (dt[a] == 'Files') if (dt[a] == 'Files')
ok = true; ok = true;
else if (dt[a] == 'text/uri-list')
return true;
if (!ok) if (!ok)
return true; return true;
@@ -783,17 +794,20 @@ function up2k_init(subtle) {
clmod(ebi('drops'), 'vis'); clmod(ebi('drops'), 'vis');
clmod(ebi('up_dz'), 'hl'); clmod(ebi('up_dz'), 'hl');
clmod(ebi('srch_dz'), 'hl'); clmod(ebi('srch_dz'), 'hl');
// cuo2duo:
document.body.ondragover = onover;
document.body.ondrop = gotfile;
} }
//console.log(nenters, Date.now(), 'leave', this, e && e.target);
} }
document.body.ondragenter = ondrag; document.body.ondragenter = ondrag;
document.body.ondragleave = offdrag; document.body.ondragleave = offdrag;
document.body.ondragover = onover;
document.body.ondrop = gotfile;
var drops = [ebi('up_dz'), ebi('srch_dz')]; var drops = [ebi('up_dz'), ebi('srch_dz')];
for (var a = 0; a < 2; a++) { for (var a = 0; a < 2; a++) {
drops[a].ondragenter = ondrag; drops[a].ondragenter = ondrag;
drops[a].ondragover = onover; drops[a].ondragover = onoverb;
drops[a].ondragleave = offdrag; drops[a].ondragleave = offdrag;
drops[a].ondrop = gotfile; drops[a].ondrop = gotfile;
} }
@@ -803,7 +817,10 @@ function up2k_init(subtle) {
ev(e); ev(e);
nenters = 0; nenters = 0;
offdrag.bind(this)(); offdrag.bind(this)();
var dz = (this && this.getAttribute('id')); var dz = this && this.getAttribute('id');
if (!dz && e && e.clientY)
// cuo2duo fallback
dz = e.clientY < window.innerHeight / 2 ? 'up_dz' : 'srch_dz';
var err = this.getAttribute('err'); var err = this.getAttribute('err');
if (err) if (err)
@@ -997,13 +1014,9 @@ function up2k_init(subtle) {
} }
function up_them(good_files) { function up_them(good_files) {
var seen = {}, var evpath = get_evpath(),
evpath = get_evpath(),
draw_each = good_files.length < 50; draw_each = good_files.length < 50;
for (var a = 0; a < st.files.length; a++)
seen[st.files[a].name + '\n' + st.files[a].size] = 1;
for (var a = 0; a < good_files.length; a++) { for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0], var fobj = good_files[a][0],
name = good_files[a][1], name = good_files[a][1],
@@ -1029,15 +1042,20 @@ function up2k_init(subtle) {
"bytes_uploaded": 0, "bytes_uploaded": 0,
"hash": [] "hash": []
}, },
key = entry.name + '\n' + entry.size; key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch;
if (uc.fsearch) if (uc.fsearch)
entry.srch = 1; entry.srch = 1;
if (seen[key]) try {
if (st.seen[fdir][key])
continue; continue;
}
catch (ex) {
st.seen[fdir] = {};
}
seen[key] = 1; st.seen[fdir][key] = 1;
pvis.addfile([ pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit( uc.fsearch ? esc(entry.name) : linksplit(
@@ -1070,23 +1088,7 @@ function up2k_init(subtle) {
} }
more_one_file(); more_one_file();
function u2cleanup(e) { var etaref = 0, etaskip = 0, utw_minh = 0;
ev(e);
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.done && t.name) {
var tr = ebi('f' + t.n);
if (!tr)
continue;
tr.parentNode.removeChild(tr);
t.name = undefined;
}
}
}
ebi('u2cleanup').onclick = u2cleanup;
var etaref = 0, etaskip = 0, op_minh = 0;
function etafun() { function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length, var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length, nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1099,13 +1101,10 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1); //ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
var op = ebi('op_up2k'), var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0;
uff = ebi('u2footfoot'), if (utw_minh < minh || !utw_minh) {
minh = QS('#op_up2k.act') ? Math.max(op_minh, uff.offsetTop + uff.offsetHeight - op.offsetTop + 32) : 0; utw_minh = minh;
ebi('u2tabw').style.minHeight = utw_minh + 'px';
if (minh > op_minh || !op_minh) {
op_minh = minh;
op.style.minHeight = op_minh + 'px';
} }
if (!nhash) if (!nhash)
@@ -1228,15 +1227,16 @@ function up2k_init(subtle) {
running = true; running = true;
while (true) { while (true) {
var now = Date.now(), var now = Date.now(),
is_busy = 0 != oldest_active = Math.min( // gzip take the wheel
st.todo.head.length + st.todo.head.length ? st.todo.head[0].n : st.files.length,
st.todo.hash.length + st.todo.hash.length ? st.todo.hash[0].n : st.files.length,
st.todo.handshake.length + st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length,
st.todo.upload.length + st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length,
st.busy.head.length + st.busy.head.length ? st.busy.head[0].n : st.files.length,
st.busy.hash.length + st.busy.hash.length ? st.busy.hash[0].n : st.files.length,
st.busy.handshake.length + st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length,
st.busy.upload.length; st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length),
is_busy = oldest_active < st.files.length;
if (was_busy && !is_busy) { if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) { for (var a = 0; a < st.files.length; a++) {
@@ -1256,7 +1256,7 @@ function up2k_init(subtle) {
} }
if (was_busy != is_busy) { if (was_busy != is_busy) {
was_busy = is_busy; st.is_busy = was_busy = is_busy;
window[(is_busy ? "add" : "remove") + window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy); "EventListener"]("beforeunload", warn_uploader_busy);
@@ -1285,7 +1285,7 @@ function up2k_init(subtle) {
timer.rm(etafun); timer.rm(etafun);
timer.rm(donut.do); timer.rm(donut.do);
op_minh = 0; utw_minh = 0;
} }
else { else {
timer.add(donut.do); timer.add(donut.do);
@@ -1337,7 +1337,8 @@ function up2k_init(subtle) {
} }
if (st.todo.head.length && if (st.todo.head.length &&
st.busy.head.length < parallel_uploads) { st.busy.head.length < parallel_uploads &&
(!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) {
exec_head(); exec_head();
mou_ikkai = true; mou_ikkai = true;
} }
@@ -1484,7 +1485,8 @@ function up2k_init(subtle) {
err.indexOf('NotFoundError') !== -1 // macos-firefox permissions err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
) { ) {
pvis.seth(t.n, 1, 'OS-error'); pvis.seth(t.n, 1, 'OS-error');
pvis.seth(t.n, 2, err); pvis.seth(t.n, 2, err + ' @ ' + car);
console.log('OS-error', reader.error, '@', car);
handled = true; handled = true;
} }
@@ -1860,7 +1862,8 @@ function up2k_init(subtle) {
st.bytes.uploaded += cdr - car; st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car; t.bytes_uploaded += cdr - car;
} }
else if (txt.indexOf('already got that') !== -1) { else if (txt.indexOf('already got that') + 1 ||
txt.indexOf('already being written') + 1) {
console.log("ignoring dupe-segment error", t); console.log("ignoring dupe-segment error", t);
} }
else { else {
@@ -1868,6 +1871,9 @@ function up2k_init(subtle) {
xhr.status, t.name) + (txt || "no further information")); xhr.status, t.name) + (txt || "no further information"));
return; return;
} }
orz2(xhr);
}
function orz2(xhr) {
apop(st.busy.upload, upt); apop(st.busy.upload, upt);
apop(t.postlist, npart); apop(t.postlist, npart);
if (!t.postlist.length) { if (!t.postlist.length) {
@@ -1889,9 +1895,11 @@ function up2k_init(subtle) {
if (crashed) if (crashed)
return; return;
toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name); if (!toast.visible)
toast.warn(9.98, "failed to upload a chunk;\nprobably harmless, continuing\n\n" + t.name);
console.log('chunkpit onerror,', ++tries, t); console.log('chunkpit onerror,', ++tries, t);
setTimeout(do_send, 10 * 1000); orz2(xhr);
}; };
xhr.open('POST', t.purl, true); xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]); xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
@@ -1916,8 +1924,8 @@ function up2k_init(subtle) {
wpx = window.innerWidth, wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']), fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx, wem = wpx * 1.0 / fpx,
wide = wem > 54 ? 'w' : '',
write = has(perms, 'write'), write = has(perms, 'write'),
wide = write && wem > 54 ? 'w' : '',
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'), parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn'); btn = ebi('u2btn');
@@ -1929,7 +1937,7 @@ function up2k_init(subtle) {
ebi('u2etaw').setAttribute('class', wide); ebi('u2etaw').setAttribute('class', wide);
} }
wide = wem > 78 ? 'ww' : wide; wide = write && wem > 78 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t'); parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')]; var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) { if (its[0].parentNode !== parent) {
@@ -2031,7 +2039,7 @@ function up2k_init(subtle) {
new_state = true; new_state = true;
fixed = true; fixed = true;
} }
if (!has(perms, 'read')) { if (!has(perms, 'read') || !have_up2k_idx) {
new_state = false; new_state = false;
fixed = true; fixed = true;
} }
@@ -2106,7 +2114,7 @@ function up2k_init(subtle) {
if (parallel_uploads < 1) if (parallel_uploads < 1)
bumpthread(1); bumpthread(1);
return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis } return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis, "st": st, "uc": uc }
} }

View File

@@ -7,8 +7,7 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window, var is_touch = 'ontouchstart' in window,
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent), IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
ANDROID = /android/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent); WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
@@ -18,6 +17,15 @@ var ebi = document.getElementById.bind(document),
mknod = document.createElement.bind(document); mknod = document.createElement.bind(document);
function qsr(sel) {
var el = QS(sel);
if (el)
el.parentNode.removeChild(el);
return el;
}
// error handler for mobile devices // error handler for mobile devices
function esc(txt) { function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) { return txt.replace(/[&"<>]/g, function (c) {
@@ -78,6 +86,9 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
if ((msg + '').indexOf('ResizeObserver') !== -1) if ((msg + '').indexOf('ResizeObserver') !== -1)
return; // chrome issue 809574 (benign, from <video>) return; // chrome issue 809574 (benign, from <video>)
if ((msg + '').indexOf('l2d.js') !== -1)
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
var ekey = url + '\n' + lineNo + '\n' + msg; var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed) if (ignexd[ekey] || crashed)
return; return;
@@ -163,7 +174,6 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
catch (e) { catch (e) {
document.body.innerHTML = html.join('\n'); document.body.innerHTML = html.join('\n');
} }
throw 'fatal_err';
} }
function ignex(all) { function ignex(all) {
var o = ebi('exbox'); var o = ebi('exbox');
@@ -173,6 +183,7 @@ function ignex(all) {
if (!all) if (!all)
window.onerror = vis_exh; window.onerror = vis_exh;
} }
window.onerror = vis_exh;
function noop() { } function noop() { }
@@ -211,15 +222,15 @@ if (!String.prototype.endsWith)
return this.substring(this_len - search.length, this_len) === search; return this.substring(this_len - search.length, this_len) === search;
}; };
if (!String.startsWith) if (!String.prototype.startsWith)
String.prototype.startsWith = function (s, i) { String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0; i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s; return this.substring(i, i + s.length) === s;
}; };
if (!String.trimEnd) if (!String.prototype.trimEnd)
String.prototype.trimEnd = String.prototype.trimRight = function () { String.prototype.trimEnd = String.prototype.trimRight = function () {
return this.replace(/[ \t\r\n]+$/m, ''); return this.replace(/[ \t\r\n]+$/, '');
}; };
if (!Element.prototype.matches) if (!Element.prototype.matches)
@@ -278,15 +289,19 @@ function crc32(str) {
function clmod(el, cls, add) { function clmod(el, cls, add) {
if (!el)
return false;
if (el.classList) { if (el.classList) {
var have = el.classList.contains(cls); var have = el.classList.contains(cls);
if (add == 't') if (add == 't')
add = !have; add = !have;
if (add != have) if (!add == !have)
el.classList[add ? 'add' : 'remove'](cls); return false;
return; el.classList[add ? 'add' : 'remove'](cls);
return true;
} }
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'), var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
@@ -297,12 +312,18 @@ function clmod(el, cls, add) {
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : ''); var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2) if (!n1 == !n2)
return false;
el.className = n2; el.className = n2;
return true;
} }
function clgot(el, cls) { function clgot(el, cls) {
if (!el)
return;
if (el.classList) if (el.classList)
return el.classList.contains(cls); return el.classList.contains(cls);
@@ -311,14 +332,45 @@ function clgot(el, cls) {
} }
function showsort(tab) {
var v, vn, v1, v2, th = tab.tHead,
sopts = jread('fsort', [["href", 1, ""]]);
th && (th = th.rows[0]) && (th = th.cells);
for (var a = sopts.length - 1; a >= 0; a--) {
if (!sopts[a][0])
continue;
v2 = v1;
v1 = sopts[a];
}
v = [v1, v2];
vn = [v1 ? v1[0] : '', v2 ? v2[0] : ''];
var ga = QSA('#ghead a[s]');
for (var a = 0; a < ga.length; a++)
ga[a].className = '';
for (var a = 0; a < th.length; a++) {
var n = vn.indexOf(th[a].getAttribute('name')),
cl = n < 0 ? ' ' : ' s' + n + (v[n][1] > 0 ? ' ' : 'r ');
th[a].className = th[a].className.replace(/ *s[01]r? */, ' ') + cl;
if (n + 1) {
ga = QS('#ghead a[s="' + vn[n] + '"]');
if (ga)
ga.className = cl;
}
}
}
function sortTable(table, col, cb) { function sortTable(table, col, cb) {
var tb = table.tBodies[0], var tb = table.tBodies[0],
th = table.tHead.rows[0].cells, th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0), tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1; i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort'); var stype = th[col].getAttribute('sort');
try { try {
var nrules = [], rules = jread("fsort", []); var nrules = [], rules = jread("fsort", []);
@@ -336,6 +388,7 @@ function sortTable(table, col, cb) {
break; break;
} }
jwrite("fsort", nrules); jwrite("fsort", nrules);
try { showsort(table); } catch (ex) { }
} }
catch (ex) { catch (ex) {
console.log("failed to persist sort rules, resetting: " + ex); console.log("failed to persist sort rules, resetting: " + ex);
@@ -384,7 +437,7 @@ function makeSortable(table, cb) {
} }
function linksplit(rp) { function linksplit(rp, id) {
var ret = [], var ret = [],
apath = '/', apath = '/',
q = null; q = null;
@@ -414,9 +467,14 @@ function linksplit(rp) {
vlink = vlink.slice(0, -1) + '<span>/</span>'; vlink = vlink.slice(0, -1) + '<span>/</span>';
} }
if (!rp && q) if (!rp) {
if (q)
link += q; link += q;
if (id)
link += '" id="' + id;
}
ret.push('<a href="' + apath + link + '">' + vlink + '</a>'); ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
apath += link; apath += link;
} }
@@ -727,7 +785,7 @@ function hist_replace(url) {
function sethash(hv) { function sethash(hv) {
if (window.history && history.replaceState) { if (window.history && history.replaceState) {
hist_replace(document.location.pathname + '#' + hv); hist_replace(document.location.pathname + document.location.search + '#' + hv);
} }
else { else {
document.location.hash = hv; document.location.hash = hv;
@@ -774,13 +832,18 @@ var timer = (function () {
var tt = (function () { var tt = (function () {
var r = { var r = {
"tt": mknod("div"), "tt": mknod("div"),
"th": mknod("div"),
"en": true, "en": true,
"el": null, "el": null,
"skip": false "skip": false,
"lvis": 0
}; };
r.th.innerHTML = '?';
r.tt.setAttribute('id', 'tt'); r.tt.setAttribute('id', 'tt');
r.th.setAttribute('id', 'tth');
document.body.appendChild(r.tt); document.body.appendChild(r.tt);
document.body.appendChild(r.th);
var prev = null; var prev = null;
r.cshow = function () { r.cshow = function () {
@@ -790,11 +853,25 @@ var tt = (function () {
prev = this; prev = this;
}; };
r.show = function () { var tev;
if (r.skip) { r.dshow = function (e) {
r.skip = false; clearTimeout(tev);
if (!r.getmsg(this))
return; return;
}
if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
tev = setTimeout(r.show.bind(this), 800);
if (is_touch)
return;
this.addEventListener('mousemove', r.move);
clmod(r.th, 'act', 1);
r.move(e);
};
r.getmsg = function (el) {
if (QS('body.bbox-open')) if (QS('body.bbox-open'))
return; return;
@@ -802,7 +879,16 @@ var tt = (function () {
if (cfg !== null && cfg != '1') if (cfg !== null && cfg != '1')
return; return;
var msg = this.getAttribute('tt'); return el.getAttribute('tt');
};
r.show = function () {
clearTimeout(tev);
if (r.skip) {
r.skip = false;
return;
}
var msg = r.getmsg(this);
if (!msg) if (!msg)
return; return;
@@ -816,6 +902,7 @@ var tt = (function () {
if (dir.indexOf('u') + 1) top = false; if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true; if (dir.indexOf('d') + 1) top = true;
clmod(r.th, 'act');
clmod(r.tt, 'b', big); clmod(r.tt, 'b', big);
r.tt.style.left = '0'; r.tt.style.left = '0';
r.tt.style.top = '0'; r.tt.style.top = '0';
@@ -841,14 +928,27 @@ var tt = (function () {
r.hide = function (e) { r.hide = function (e) {
ev(e); ev(e);
clearTimeout(tev);
window.removeEventListener('scroll', r.hide); window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
clmod(r.tt, 'b'); clmod(r.tt, 'b');
clmod(r.th, 'act');
if (clmod(r.tt, 'show'))
r.lvis = Date.now();
if (r.el) if (r.el)
r.el.removeEventListener('mouseleave', r.hide); r.el.removeEventListener('mouseleave', r.hide);
if (e && e.target)
e.target.removeEventListener('mousemove', r.move);
}; };
if (is_touch && IPHONE) { r.move = function (e) {
r.th.style.left = (e.pageX + 12) + 'px';
r.th.style.top = (e.pageY + 12) + 'px';
};
if (IPHONE) {
var f1 = r.show, var f1 = r.show,
f2 = r.hide, f2 = r.hide,
q = []; q = [];
@@ -874,14 +974,14 @@ var tt = (function () {
r.att = function (ctr) { r.att = function (ctr) {
var _cshow = r.en ? r.cshow : null, var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null, _dshow = r.en ? r.dshow : null,
_hide = r.en ? r.hide : null, _hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]'); o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) { for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _cshow; o[a].onfocus = _cshow;
o[a].onblur = _hide; o[a].onblur = _hide;
o[a].onmouseenter = _show; o[a].onmouseenter = _dshow;
o[a].onmouseleave = _hide; o[a].onmouseleave = _hide;
} }
r.hide(); r.hide();

View File

@@ -2,24 +2,12 @@
# example resource files # utilities
can be provided to copyparty to tweak things ## [`multisearch.html`](multisearch.html)
* takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those
* use it by putting it somewhere on the server and opening it as an html page
* also serves as an extendable template for other specific search behaviors
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
@@ -29,7 +17,7 @@ point `--css-browser` to one of these by URL:
* notes on using rclone as a fuse client/server * notes on using rclone as a fuse client/server
## [`example.conf`](example.conf) ## [`example.conf`](example.conf)
* example config file for `-c` (supports accounts, volumes, and volume-flags) * example config file for `-c`

View File

@@ -1,3 +1,10 @@
# append some arguments to the commandline;
# the first space in a line counts as a separator,
# any additional spaces are part of the value
-e2dsa
-e2ts
-i 127.0.0.1
# create users: # create users:
# u username:password # u username:password
u ed:123 u ed:123
@@ -24,7 +31,8 @@ rw ed
r k r k
rw ed rw ed
# this does the same thing: # this does the same thing,
# and will cause an error on startup since /priv is already taken:
./priv ./priv
/priv /priv
r ed k r ed k

124
docs/multisearch.html Normal file
View File

@@ -0,0 +1,124 @@
<!DOCTYPE html><html lang="en"><head>
<meta charset="utf-8">
<title>multisearch</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
html, body {
margin: 0;
padding: 0;
color: #ddd;
background: #222;
font-family: sans-serif;
}
body {
padding: 1em;
}
a {
color: #fc5;
}
ul {
line-height: 1.5em;
}
code {
color: #fc5;
border: 1px solid #444;
padding: .1em .2em;
font-family: sans-serif, sans-serif;
}
#src {
display: block;
width: calc(100% - 1em);
padding: .5em;
margin: 0;
}
td {
padding-left: 1em;
}
.hit,
.miss {
font-weight: bold;
padding-left: 0;
padding-top: 1em;
}
.hit {color: #af0;}
.miss {color: #f0c;}
.hit:before {content: '✅';}
.miss:before {content: '❌';}
</style></head><body>
<ul>
<li>paste a list of filenames (youtube rips) below and hit search</li>
<li>it will grab the youtube-id from the filenames and search for each id</li>
<li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li>
</ul>
<textarea id="src"></textarea>
<button id="go">search</button>
<div id="res"></div>
<script>
var ebi = document.getElementById.bind(document);
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
ebi('go').onclick = async function() {
var queries = [];
for (var ln of ebi('src').value.split(/\n/g)) {
// filter the list of input files,
// only keeping youtube videos,
// meaning the filename ends with either
// [YOUTUBEID].EXTENSION or
// -YOUTUBEID.EXTENSION
var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln);
if (!m || !(m = m[1]))
continue;
// create a search query for each line: name like *youtubeid*
queries.push([ln, `name like *${m}*`]);
}
var a = 0, html = ['<table>'], hits = [], misses = [];
for (var [fn, q] of queries) {
var r = await fetch('/?srch', {
method: 'POST',
body: JSON.stringify({'q': q})
});
r = await r.json();
var cl, tab2;
if (r.hits.length) {
tab2 = hits;
cl = 'hit';
}
else {
tab2 = misses;
cl = 'miss';
}
var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`;
tab2.push(h);
html.push(h);
for (var h of r.hits) {
var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`;
html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`);
}
ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`;
}
html.push('<tr><td><h1>hits:</h1></td></tr>');
html = html.concat(hits);
html.push('<tr><td><h1>miss:</h1></td></tr>');
html = html.concat(misses);
html.push('</table>');
ebi('res').innerHTML = html.join('\n');
};
</script></body></html>

View File

@@ -38,6 +38,13 @@ para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; } avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
##
## time between first and last upload
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
## ##
## bad filenames ## bad filenames
@@ -73,6 +80,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
##
## track an up2k upload and print all chunks in file-order
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
## ##
## js oneliners ## js oneliners
@@ -162,7 +175,7 @@ brew install python@2
pip install virtualenv pip install virtualenv
# readme toc # readme toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
# fix firefox phantom breakpoints, # fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable) # suggestions from bugtracker, doesnt work (debugger is not attachable)
@@ -178,8 +191,13 @@ about:config >> devtools.debugger.prefs-schema-version = -1
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions # download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
# push to multiple git remotes
git config -l | grep '^remote'
git remote add all git@github.com:9001/copyparty.git
git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
git remote set-url --add --push all git@github.com:9001/copyparty.git
## ##
## http 206 ## http 206

View File

@@ -10,14 +10,41 @@ set -e
# (and those are usually linux so bash is good inaff) # (and those are usually linux so bash is good inaff)
# (but that said this even has macos support) # (but that said this even has macos support)
# #
# bundle will look like: # output summary (filesizes and contents):
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty #
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz # 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py # 550760 copyparty-extras/sfx-full/copyparty-sfx.py
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh # `- original unmodified sfx from github
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py #
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh # 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py # `- unmodified but recompressed from bzip2 to gzip
#
# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
# `- removed iOS ogg/opus/vorbis audio decoder,
# removed the audio tray mouse cursor,
# "enterprise edition"
#
# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
# `- also removed the codemirror markdown editor
# and the text-viewer syntax hilighting,
# only essential features remaining
#
# 646297 copyparty-extras/copyparty-1.0.14.tar.gz
# 4823 copyparty-extras/copyparty-repack.sh
# `- source files from github
#
# 23663 copyparty-extras/up2k.py
# `- standalone utility to upload or search for files
#
# 32280 copyparty-extras/copyparty-fuse.py
# `- standalone to mount a URL as a local read-only filesystem
#
# 270004 copyparty
# `- minimal binary, same as sfx-lite/copyparty-sfx.py
command -v gnutar && tar() { gnutar "$@"; } command -v gnutar && tar() { gnutar "$@"; }
@@ -54,6 +81,7 @@ cache="$od/.copyparty-repack.cache"
# fallback to awk (sorry) # fallback to awk (sorry)
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}' awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) | ) |
grep -E '(sfx\.(sh|py)|tar\.gz)$' |
tee /dev/stderr | tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' | tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _ xargs -0 bash -c 'dl_files "$@"' _
@@ -64,7 +92,7 @@ cache="$od/.copyparty-repack.cache"
# move src into copyparty-extras/, # move src into copyparty-extras/,
# move sfx into copyparty-extras/sfx-full/ # move sfx into copyparty-extras/sfx-full/
mkdir -p copyparty-extras/sfx-{full,lite} mkdir -p copyparty-extras/sfx-{full,ent,lite}
mv copyparty-sfx.* copyparty-extras/sfx-full/ mv copyparty-sfx.* copyparty-extras/sfx-full/
mv copyparty-*.tar.gz copyparty-extras/ mv copyparty-*.tar.gz copyparty-extras/
@@ -112,14 +140,17 @@ repack() {
} }
repack sfx-full "re gz no-sh" repack sfx-full "re gz no-sh"
repack sfx-lite "re no-ogv no-cm" repack sfx-ent "re no-dd"
repack sfx-lite "re no-ogv no-cm gz no-sh" repack sfx-ent "re no-dd gz no-sh"
repack sfx-lite "re no-dd no-cm no-hl"
repack sfx-lite "re no-dd no-cm no-hl gz no-sh"
# move fuse client into copyparty-extras/, # move fuse and up2k clients into copyparty-extras/,
# copy lite-sfx.py to ./copyparty, # copy lite-sfx.py to ./copyparty,
# delete extracted source code # delete extracted source code
( cd copyparty-extras/ ( cd copyparty-extras/
mv copyparty-*/bin/up2k.py .
mv copyparty-*/bin/copyparty-fuse.py . mv copyparty-*/bin/copyparty-fuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9} rm -rf copyparty-{0..9}*.*.*{0..9}

View File

@@ -1,11 +1,10 @@
FROM alpine:3.14 FROM alpine:3.15
WORKDIR /z WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \ ver_hashwasm=4.9.0 \
ver_marked=3.0.4 \ ver_marked=4.0.10 \
ver_ogvjs=1.8.4 \
ver_mde=2.15.0 \ ver_mde=2.15.0 \
ver_codemirror=5.62.3 \ ver_codemirror=5.64.0 \
ver_fontawesome=5.13.0 \ ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3 ver_zopfli=1.0.3
@@ -15,7 +14,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
RUN mkdir -p /z/dist/no-pk \ RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \ && wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \ && apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \ && wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \ && wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \ && wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \ && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \ && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \ && wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
&& unzip ogvjs.zip \
&& (mkdir hash-wasm \ && (mkdir hash-wasm \
&& cd hash-wasm \ && cd hash-wasm \
&& unzip ../hash-wasm.zip) \ && unzip ../hash-wasm.zip) \
@@ -45,6 +42,12 @@ RUN mkdir -p /z/dist/no-pk \
&& tar -xf zopfli.tgz && tar -xf zopfli.tgz
# todo
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
# build fonttools (which needs zopfli) # build fonttools (which needs zopfli)
RUN tar -xf zopfli.tgz \ RUN tar -xf zopfli.tgz \
&& cd zopfli* \ && cd zopfli* \
@@ -71,21 +74,6 @@ RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js && mv sha512.umd.min.js /z/dist/sha512.hw.js
# build ogvjs
RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist \
&& cp -pv \
ogv-es2017.js /z/dist/ogv.js
# build marked # build marked
COPY marked.patch /z/ COPY marked.patch /z/
COPY marked-ln.patch /z/ COPY marked-ln.patch /z/
@@ -94,7 +82,6 @@ RUN cd marked-$ver_marked \
&& patch -p1 < /z/marked.patch \ && patch -p1 < /z/marked.patch \
&& npm run build \ && npm run build \
&& cp -pv marked.min.js /z/dist/marked.js \ && cp -pv marked.min.js /z/dist/marked.js \
&& cp -pv lib/marked.js /z/dist/marked.full.js \
&& mkdir -p /z/nodepkgs \ && mkdir -p /z/nodepkgs \
&& ln -s $(pwd) /z/nodepkgs/marked && ln -s $(pwd) /z/nodepkgs/marked
# && npm run test \ # && npm run test \
@@ -110,8 +97,10 @@ RUN cd CodeMirror-$ver_codemirror \
# build easymde # build easymde
COPY easymde-marked6.patch /z/
COPY easymde.patch /z/ COPY easymde.patch /z/
RUN cd easy-markdown-editor-$ver_mde \ RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde-marked6.patch \
&& patch -p1 < /z/easymde.patch \ && patch -p1 < /z/easymde.patch \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \ && sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \ && sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \

View File

@@ -0,0 +1,12 @@
diff --git a/src/js/easymde.js b/src/js/easymde.js
--- a/src/js/easymde.js
+++ b/src/js/easymde.js
@@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) {
marked.setOptions(markedOptions);
// Convert the markdown to HTML
- var htmlText = marked(text);
+ var htmlText = marked.parse(text);
// Sanitize HTML
if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') {

View File

@@ -1,15 +1,15 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v3.0.4; adds linetracking to marked.js v4.0.6;
add data-ln="%d" to most tags, %d is the source markdown line add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -50,4 +50,5 @@ function mangle(text) { @@ -50,4 +50,5 @@ function mangle(text) {
module.exports = class Lexer { export class Lexer {
constructor(options) { constructor(options) {
+ this.ln = 1; // like most editors, start couting from 1 + this.ln = 1; // like most editors, start couting from 1
this.tokens = []; this.tokens = [];
this.tokens.links = Object.create(null); this.tokens.links = Object.create(null);
@@ -127,4 +128,15 @@ module.exports = class Lexer { @@ -127,4 +128,15 @@ export class Lexer {
} }
+ set_ln(token, ln = this.ln) { + set_ln(token, ln = this.ln) {
@@ -25,7 +25,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ +
/** /**
* Lexing * Lexing
@@ -134,7 +146,11 @@ module.exports = class Lexer { @@ -134,7 +146,11 @@ export class Lexer {
src = src.replace(/^ +$/gm, ''); src = src.replace(/^ +$/gm, '');
} }
- let token, lastToken, cutSrc, lastParagraphClipped; - let token, lastToken, cutSrc, lastParagraphClipped;
@@ -38,105 +38,105 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ +
if (this.options.extensions if (this.options.extensions
&& this.options.extensions.block && this.options.extensions.block
@@ -142,4 +158,5 @@ module.exports = class Lexer { @@ -142,4 +158,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) { if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
return true; return true;
@@ -153,4 +170,5 @@ module.exports = class Lexer { @@ -153,4 +170,5 @@ export class Lexer {
if (token = this.tokenizer.space(src)) { if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); // is \n if not type + this.set_ln(token, ln); // is \n if not type
if (token.type) { if (token.type) {
tokens.push(token); tokens.push(token);
@@ -162,4 +180,5 @@ module.exports = class Lexer { @@ -162,4 +180,5 @@ export class Lexer {
if (token = this.tokenizer.code(src)) { if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
// An indented code block cannot interrupt a paragraph. // An indented code block cannot interrupt a paragraph.
@@ -177,4 +196,5 @@ module.exports = class Lexer { @@ -177,4 +196,5 @@ export class Lexer {
if (token = this.tokenizer.fences(src)) { if (token = this.tokenizer.fences(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -184,4 +204,5 @@ module.exports = class Lexer { @@ -184,4 +204,5 @@ export class Lexer {
if (token = this.tokenizer.heading(src)) { if (token = this.tokenizer.heading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -191,4 +212,5 @@ module.exports = class Lexer { @@ -191,4 +212,5 @@ export class Lexer {
if (token = this.tokenizer.hr(src)) { if (token = this.tokenizer.hr(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -198,4 +220,5 @@ module.exports = class Lexer { @@ -198,4 +220,5 @@ export class Lexer {
if (token = this.tokenizer.blockquote(src)) { if (token = this.tokenizer.blockquote(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -205,4 +228,5 @@ module.exports = class Lexer { @@ -205,4 +228,5 @@ export class Lexer {
if (token = this.tokenizer.list(src)) { if (token = this.tokenizer.list(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -212,4 +236,5 @@ module.exports = class Lexer { @@ -212,4 +236,5 @@ export class Lexer {
if (token = this.tokenizer.html(src)) { if (token = this.tokenizer.html(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -219,4 +244,5 @@ module.exports = class Lexer { @@ -219,4 +244,5 @@ export class Lexer {
if (token = this.tokenizer.def(src)) { if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) { if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
@@ -236,4 +262,5 @@ module.exports = class Lexer { @@ -236,4 +262,5 @@ export class Lexer {
if (token = this.tokenizer.table(src)) { if (token = this.tokenizer.table(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -243,4 +270,5 @@ module.exports = class Lexer { @@ -243,4 +270,5 @@ export class Lexer {
if (token = this.tokenizer.lheading(src)) { if (token = this.tokenizer.lheading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -263,4 +291,5 @@ module.exports = class Lexer { @@ -263,4 +291,5 @@ export class Lexer {
} }
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) { if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastParagraphClipped && lastToken.type === 'paragraph') { if (lastParagraphClipped && lastToken.type === 'paragraph') {
@@ -280,4 +309,6 @@ module.exports = class Lexer { @@ -280,4 +309,6 @@ export class Lexer {
if (token = this.tokenizer.text(src)) { if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
+ this.ln++; + this.ln++;
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastToken && lastToken.type === 'text') { if (lastToken && lastToken.type === 'text') {
@@ -355,4 +386,5 @@ module.exports = class Lexer { @@ -355,4 +386,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) { if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
tokens.push(token); tokens.push(token);
return true; return true;
@@ -420,4 +452,6 @@ module.exports = class Lexer { @@ -420,4 +452,6 @@ export class Lexer {
if (token = this.tokenizer.br(src)) { if (token = this.tokenizer.br(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ // no need to reset (no more blockTokens anyways) + // no need to reset (no more blockTokens anyways)
+ token.ln = this.ln++; + token.ln = this.ln++;
tokens.push(token); tokens.push(token);
continue; continue;
@@ -462,4 +496,5 @@ module.exports = class Lexer { @@ -462,4 +496,5 @@ export class Lexer {
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
@@ -145,13 +145,13 @@ add data-ln="%d" to most tags, %d is the source markdown line
diff --git a/src/Parser.js b/src/Parser.js diff --git a/src/Parser.js b/src/Parser.js
--- a/src/Parser.js --- a/src/Parser.js
+++ b/src/Parser.js +++ b/src/Parser.js
@@ -18,4 +18,5 @@ module.exports = class Parser { @@ -18,4 +18,5 @@ export class Parser {
this.textRenderer = new TextRenderer(); this.textRenderer = new TextRenderer();
this.slugger = new Slugger(); this.slugger = new Slugger();
+ this.ln = 0; // error indicator; should always be set >=1 from tokens + this.ln = 0; // error indicator; should always be set >=1 from tokens
} }
@@ -64,4 +65,8 @@ module.exports = class Parser { @@ -64,4 +65,8 @@ export class Parser {
for (i = 0; i < l; i++) { for (i = 0; i < l; i++) {
token = tokens[i]; token = tokens[i];
+ // take line-numbers from tokens whenever possible + // take line-numbers from tokens whenever possible
@@ -160,7 +160,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ this.renderer.tag_ln(this.ln); + this.renderer.tag_ln(this.ln);
// Run any renderer extensions // Run any renderer extensions
@@ -124,7 +129,10 @@ module.exports = class Parser { @@ -124,7 +129,10 @@ export class Parser {
} }
- body += this.renderer.tablerow(cell); - body += this.renderer.tablerow(cell);
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).table(header, body); + out += this.renderer.tag_ln(token.ln).table(header, body);
continue; continue;
} }
@@ -167,8 +175,12 @@ module.exports = class Parser { @@ -167,8 +175,12 @@ export class Parser {
itemBody += this.parse(item.tokens, loose); itemBody += this.parse(item.tokens, loose);
- body += this.renderer.listitem(itemBody, task, checked); - body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start); + out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
continue; continue;
} }
@@ -179,5 +191,6 @@ module.exports = class Parser { @@ -179,5 +191,6 @@ export class Parser {
} }
case 'paragraph': { case 'paragraph': {
- out += this.renderer.paragraph(this.parseInline(token.tokens)); - out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,7 +196,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).paragraph(t); + out += this.renderer.tag_ln(token.ln).paragraph(t);
continue; continue;
} }
@@ -221,4 +234,7 @@ module.exports = class Parser { @@ -221,4 +234,7 @@ export class Parser {
token = tokens[i]; token = tokens[i];
+ // another thing that only affects <br/> and other inlines + // another thing that only affects <br/> and other inlines
@@ -207,7 +207,7 @@ diff --git a/src/Parser.js b/src/Parser.js
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -11,6 +11,12 @@ module.exports = class Renderer { @@ -11,6 +11,12 @@ export class Renderer {
constructor(options) { constructor(options) {
this.options = options || defaults; this.options = options || defaults;
+ this.ln = ""; + this.ln = "";
@@ -220,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ +
code(code, infostring, escaped) { code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0]; const lang = (infostring || '').match(/\S*/)[0];
@@ -26,10 +32,10 @@ module.exports = class Renderer { @@ -26,10 +32,10 @@ export class Renderer {
if (!lang) { if (!lang) {
- return '<pre><code>' - return '<pre><code>'
@@ -233,55 +233,55 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ return '<pre' + this.ln + '><code class="' + return '<pre' + this.ln + '><code class="'
+ this.options.langPrefix + this.options.langPrefix
+ escape(lang, true) + escape(lang, true)
@@ -40,5 +46,5 @@ module.exports = class Renderer { @@ -40,5 +46,5 @@ export class Renderer {
blockquote(quote) { blockquote(quote) {
- return '<blockquote>\n' + quote + '</blockquote>\n'; - return '<blockquote>\n' + quote + '</blockquote>\n';
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n'; + return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
} }
@@ -51,4 +57,5 @@ module.exports = class Renderer { @@ -51,4 +57,5 @@ export class Renderer {
return '<h' return '<h'
+ level + level
+ + this.ln + + this.ln
+ ' id="' + ' id="'
+ this.options.headerPrefix + this.options.headerPrefix
@@ -61,5 +68,5 @@ module.exports = class Renderer { @@ -61,5 +68,5 @@ export class Renderer {
} }
// ignore IDs // ignore IDs
- return '<h' + level + '>' + text + '</h' + level + '>\n'; - return '<h' + level + '>' + text + '</h' + level + '>\n';
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n'; + return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
} }
@@ -75,5 +82,5 @@ module.exports = class Renderer { @@ -75,5 +82,5 @@ export class Renderer {
listitem(text) { listitem(text) {
- return '<li>' + text + '</li>\n'; - return '<li>' + text + '</li>\n';
+ return '<li' + this.ln + '>' + text + '</li>\n'; + return '<li' + this.ln + '>' + text + '</li>\n';
} }
@@ -87,5 +94,5 @@ module.exports = class Renderer { @@ -87,5 +94,5 @@ export class Renderer {
paragraph(text) { paragraph(text) {
- return '<p>' + text + '</p>\n'; - return '<p>' + text + '</p>\n';
+ return '<p' + this.ln + '>' + text + '</p>\n'; + return '<p' + this.ln + '>' + text + '</p>\n';
} }
@@ -102,5 +109,5 @@ module.exports = class Renderer { @@ -102,5 +109,5 @@ export class Renderer {
tablerow(content) { tablerow(content) {
- return '<tr>\n' + content + '</tr>\n'; - return '<tr>\n' + content + '</tr>\n';
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n'; + return '<tr' + this.ln + '>\n' + content + '</tr>\n';
} }
@@ -127,5 +134,5 @@ module.exports = class Renderer { @@ -127,5 +134,5 @@ export class Renderer {
br() { br() {
- return this.options.xhtml ? '<br/>' : '<br>'; - return this.options.xhtml ? '<br/>' : '<br>';
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>'; + return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
} }
@@ -153,5 +160,5 @@ module.exports = class Renderer { @@ -153,5 +160,5 @@ export class Renderer {
} }
- let out = '<img src="' + href + '" alt="' + text + '"'; - let out = '<img src="' + href + '" alt="' + text + '"';
@@ -291,7 +291,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js --- a/src/Tokenizer.js
+++ b/src/Tokenizer.js +++ b/src/Tokenizer.js
@@ -301,4 +301,7 @@ module.exports = class Tokenizer { @@ -297,4 +297,7 @@ export class Tokenizer {
const l = list.items.length; const l = list.items.length;
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad + // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad

View File

@@ -1,7 +1,7 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js'); @@ -6,5 +6,5 @@ import { repeatString } from './helpers.js';
/** /**
* smartypants text replacement * smartypants text replacement
- */ - */
@@ -15,21 +15,21 @@ diff --git a/src/Lexer.js b/src/Lexer.js
+ * + *
function mangle(text) { function mangle(text) {
let out = '', let out = '',
@@ -465,5 +465,5 @@ module.exports = class Lexer { @@ -466,5 +466,5 @@ export class Lexer {
// autolink // autolink
- if (token = this.tokenizer.autolink(src, mangle)) { - if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) { + if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -472,5 +472,5 @@ module.exports = class Lexer { @@ -473,5 +473,5 @@ export class Lexer {
// url (gfm) // url (gfm)
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) { - if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) { + if (!this.state.inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -493,5 +493,5 @@ module.exports = class Lexer { @@ -494,5 +494,5 @@ export class Lexer {
} }
} }
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { - if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
@@ -39,14 +39,14 @@ diff --git a/src/Lexer.js b/src/Lexer.js
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -142,5 +142,5 @@ module.exports = class Renderer { @@ -142,5 +142,5 @@ export class Renderer {
link(href, title, text) { link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href); + href = cleanUrl(this.options.baseUrl, href);
if (href === null) { if (href === null) {
return text; return text;
@@ -155,5 +155,5 @@ module.exports = class Renderer { @@ -155,5 +155,5 @@ export class Renderer {
image(href, title, text) { image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,7 +56,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js --- a/src/Tokenizer.js
+++ b/src/Tokenizer.js +++ b/src/Tokenizer.js
@@ -321,14 +321,7 @@ module.exports = class Tokenizer { @@ -320,14 +320,7 @@ export class Tokenizer {
type: 'html', type: 'html',
raw: cap[0], raw: cap[0],
- pre: !this.options.sanitizer - pre: !this.options.sanitizer
@@ -72,7 +72,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- } - }
return token; return token;
} }
@@ -477,15 +470,9 @@ module.exports = class Tokenizer { @@ -476,15 +469,9 @@ export class Tokenizer {
return { return {
- type: this.options.sanitize - type: this.options.sanitize
@@ -90,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0] + text: cap[0]
}; };
} }
@@ -672,10 +659,10 @@ module.exports = class Tokenizer { @@ -671,10 +658,10 @@ export class Tokenizer {
} }
- autolink(src, mangle) { - autolink(src, mangle) {
@@ -103,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]); + text = escape(cap[1]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -700,10 +687,10 @@ module.exports = class Tokenizer { @@ -699,10 +686,10 @@ export class Tokenizer {
} }
- url(src, mangle) { - url(src, mangle) {
@@ -116,7 +116,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]); + text = escape(cap[0]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -737,12 +724,12 @@ module.exports = class Tokenizer { @@ -736,12 +723,12 @@ export class Tokenizer {
} }
- inlineText(src, smartypants) { - inlineText(src, smartypants) {
@@ -135,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js --- a/src/defaults.js
+++ b/src/defaults.js +++ b/src/defaults.js
@@ -9,12 +9,8 @@ function getDefaults() { @@ -9,12 +9,8 @@ export function getDefaults() {
highlight: null, highlight: null,
langPrefix: 'language-', langPrefix: 'language-',
- mangle: true, - mangle: true,
@@ -151,10 +151,10 @@ diff --git a/src/defaults.js b/src/defaults.js
diff --git a/src/helpers.js b/src/helpers.js diff --git a/src/helpers.js b/src/helpers.js
--- a/src/helpers.js --- a/src/helpers.js
+++ b/src/helpers.js +++ b/src/helpers.js
@@ -64,18 +64,5 @@ function edit(regex, opt) { @@ -64,18 +64,5 @@ export function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g; const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
-function cleanUrl(sanitize, base, href) { -export function cleanUrl(sanitize, base, href) {
- if (sanitize) { - if (sanitize) {
- let prot; - let prot;
- try { - try {
@@ -168,36 +168,30 @@ diff --git a/src/helpers.js b/src/helpers.js
- return null; - return null;
- } - }
- } - }
+function cleanUrl(base, href) { +export function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) { if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href); href = resolveUrl(base, href);
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) { @@ -227,10 +214,4 @@ export function findClosingBracket(str, b) {
} }
-function checkSanitizeDeprecation(opt) { -export function checkSanitizeDeprecation(opt) {
- if (opt && opt.sanitize && !opt.silent) { - if (opt && opt.sanitize && !opt.silent) {
- console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options'); - console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
- } - }
-} -}
- -
// copied from https://stackoverflow.com/a/5450113/806777 // copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) { export function repeatString(pattern, count) {
@@ -260,5 +241,4 @@ module.exports = {
rtrim,
findClosingBracket,
- checkSanitizeDeprecation,
repeatString
};
diff --git a/src/marked.js b/src/marked.js diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js --- a/src/marked.js
+++ b/src/marked.js +++ b/src/marked.js
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js'); @@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
const { import {
merge, merge,
- checkSanitizeDeprecation, - checkSanitizeDeprecation,
escape escape
} = require('./helpers.js'); } from './helpers.js';
@@ -35,5 +34,4 @@ function marked(src, opt, callback) { @@ -35,5 +34,4 @@ export function marked(src, opt, callback) {
opt = merge({}, marked.defaults, opt || {}); opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt); - checkSanitizeDeprecation(opt);
@@ -219,37 +213,37 @@ diff --git a/src/marked.js b/src/marked.js
diff --git a/test/bench.js b/test/bench.js diff --git a/test/bench.js b/test/bench.js
--- a/test/bench.js --- a/test/bench.js
+++ b/test/bench.js +++ b/test/bench.js
@@ -33,5 +33,4 @@ async function runBench(options) { @@ -37,5 +37,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -45,5 +44,4 @@ async function runBench(options) { @@ -49,5 +48,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -58,5 +56,4 @@ async function runBench(options) { @@ -62,5 +60,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -70,5 +67,4 @@ async function runBench(options) { @@ -74,5 +71,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -83,5 +79,4 @@ async function runBench(options) { @@ -87,5 +83,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -95,5 +90,4 @@ async function runBench(options) { @@ -99,5 +94,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
@@ -258,7 +252,7 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js --- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js +++ b/test/specs/run-spec.js
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) { @@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
} }
- if (spec.options.sanitizer) { - if (spec.options.sanitizer) {
@@ -268,77 +262,77 @@ diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
- -
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => { (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
const before = process.hrtime(); const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true }); @@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new'); runSpecs('New', './new');
runSpecs('ReDOS', './redos'); runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js --- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js +++ b/test/unit/Lexer-spec.js
@@ -589,5 +589,5 @@ paragraph @@ -635,5 +635,5 @@ paragraph
}); });
- it('sanitize', () => { - it('sanitize', () => {
+ /*it('sanitize', () => { + /*it('sanitize', () => {
expectTokens({ expectTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -607,5 +607,5 @@ paragraph @@ -653,5 +653,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -652,5 +652,5 @@ paragraph @@ -698,5 +698,5 @@ paragraph
}); });
- it('html sanitize', () => { - it('html sanitize', () => {
+ /*it('html sanitize', () => { + /*it('html sanitize', () => {
expectInlineTokens({ expectInlineTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -660,5 +660,5 @@ paragraph @@ -706,5 +706,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('link', () => { it('link', () => {
@@ -971,5 +971,5 @@ paragraph @@ -1017,5 +1017,5 @@ paragraph
}); });
- it('autolink mangle email', () => { - it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => { + /*it('autolink mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: '<test@example.com>', md: '<test@example.com>',
@@ -991,5 +991,5 @@ paragraph @@ -1037,5 +1037,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('url', () => { it('url', () => {
@@ -1028,5 +1028,5 @@ paragraph @@ -1074,5 +1074,5 @@ paragraph
}); });
- it('url mangle email', () => { - it('url mangle email', () => {
+ /*it('url mangle email', () => { + /*it('url mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: 'test@example.com', md: 'test@example.com',
@@ -1048,5 +1048,5 @@ paragraph @@ -1094,5 +1094,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -1064,5 +1064,5 @@ paragraph @@ -1110,5 +1110,5 @@ paragraph
}); });
- describe('smartypants', () => { - describe('smartypants', () => {
+ /*describe('smartypants', () => { + /*describe('smartypants', () => {
it('single quotes', () => { it('single quotes', () => {
expectInlineTokens({ expectInlineTokens({
@@ -1134,5 +1134,5 @@ paragraph @@ -1180,5 +1180,5 @@ paragraph
}); });
}); });
- }); - });

View File

@@ -86,8 +86,6 @@ function have() {
python -c "import $1; $1; $1.__version__" python -c "import $1; $1; $1.__version__"
} }
mv copyparty/web/deps/marked.full.js.gz srv/ || true
. buildenv/bin/activate . buildenv/bin/activate
have setuptools have setuptools
have wheel have wheel

View File

@@ -16,12 +16,11 @@ help() { exec cat <<'EOF'
# #
# `no-sh` makes just the python sfx, skips the sh/unix sfx # `no-sh` makes just the python sfx, skips the sh/unix sfx
# #
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs # `no-cm` saves ~82k by removing easymde/codemirror
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~92k by removing easymde/codemirror
# (the fancy markdown editor) # (the fancy markdown editor)
# #
# `no-hl` saves ~41k by removing syntax hilighting in the text viewer
#
# `no-fnt` saves ~9k by removing the source-code-pro font # `no-fnt` saves ~9k by removing the source-code-pro font
# (browsers will try to use 'Consolas' instead) # (browsers will try to use 'Consolas' instead)
# #
@@ -73,8 +72,8 @@ while [ ! -z "$1" ]; do
clean) clean=1 ; ;; clean) clean=1 ; ;;
re) repack=1 ; ;; re) repack=1 ; ;;
gz) use_gz=1 ; ;; gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;; no-fnt) no_fnt=1 ; ;;
no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;; no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;; no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;; no-sh) do_sh= ; ;;
@@ -215,9 +214,6 @@ cat have | while IFS= read -r x; do
done done
rm have rm have
[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
[ $no_cm ] && { [ $no_cm ] && {
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde* rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html echo h > copyparty/web/mde.html
@@ -226,6 +222,9 @@ rm have
tmv "$f" tmv "$f"
} }
[ $no_hl ] &&
rm -rf copyparty/web/deps/prism*
[ $no_fnt ] && { [ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2 rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/ui.css f=copyparty/web/ui.css

View File

@@ -35,8 +35,6 @@ ver="$1"
exit 1 exit 1
} }
mv copyparty/web/deps/marked.full.js.gz srv/ || true
mkdir -p dist mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip" zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz" tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"

View File

@@ -7,8 +7,9 @@ v=$1
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1 printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1 grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
git push all
git tag v$v git tag v$v
git push origin --tags git push all --tags
rm -rf ../dist rm -rf ../dist

View File

@@ -49,14 +49,9 @@ copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js, copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css, copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff, copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js, copyparty/web/deps/prism.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm, copyparty/web/deps/prism.css,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js, copyparty/web/deps/prismd.css,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/scp.woff2, copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js, copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js, copyparty/web/deps/sha512.hw.js,

View File

@@ -1,10 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# coding: latin-1 # coding: latin-1
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
import subprocess as sp import subprocess as sp
""" """
to edit this file, use HxD or "vim -b" to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end) (there is compressed stuff at the end)
@@ -20,6 +20,7 @@ the archive data is attached after the b"\n# eof\n" archive marker,
b"\n# " decodes to b"" b"\n# " decodes to b""
""" """
# set by make-sfx.sh # set by make-sfx.sh
VER = None VER = None
SIZE = None SIZE = None

View File

@@ -29,6 +29,9 @@ class Cfg(Namespace):
v=v or [], v=v or [],
c=c, c=c,
rproxy=0, rproxy=0,
rsp_slp=0,
s_wr_slp=0,
s_wr_sz=512 * 1024,
ed=False, ed=False,
nw=False, nw=False,
unpost=600, unpost=600,
@@ -47,12 +50,14 @@ class Cfg(Namespace):
mtp=[], mtp=[],
mte="a", mte="a",
mth="", mth="",
textfiles="",
doctitle="",
hist=None, hist=None,
no_idx=None, no_idx=None,
no_hash=None, no_hash=None,
js_browser=None, js_browser=None,
css_browser=None, css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}
) )

View File

@@ -17,21 +17,24 @@ from copyparty import util
class Cfg(Namespace): class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None): def __init__(self, a=None, v=None, c=None):
ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()} ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode"
ex = {k: False for k in ex.split()}
ex2 = { ex2 = {
"mtp": [], "mtp": [],
"mte": "a", "mte": "a",
"mth": "", "mth": "",
"doctitle": "",
"hist": None, "hist": None,
"no_idx": None, "no_idx": None,
"no_hash": None, "no_hash": None,
"js_browser": None, "js_browser": None,
"css_browser": None, "css_browser": None,
"no_voldump": True, "no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0, "re_maxage": 0,
"rproxy": 0, "rproxy": 0,
"rsp_slp": 0,
"s_wr_slp": 0,
"s_wr_sz": 512 * 1024,
} }
ex.update(ex2) ex.update(ex2)
super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex) super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)

View File

@@ -113,6 +113,7 @@ class VSock(object):
class VHttpSrv(object): class VHttpSrv(object):
def __init__(self): def __init__(self):
self.broker = NullBroker() self.broker = NullBroker()
self.prism = None
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"] aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases} self.j2 = {x: J2_FILES for x in aliases}