Compare commits

...

298 Commits

Author SHA1 Message Date
ed
cea5aecbf2 v1.3.2 2022-06-20 01:31:29 +02:00
ed
0e61e70670 audioplayer continues to next folder by default 2022-06-20 00:20:13 +02:00
ed
1e333c0939 fix doc traversal 2022-06-19 23:32:36 +02:00
ed
917b6ec03c naming 2022-06-19 22:58:20 +02:00
ed
fe67c52ead configurable list of sparse-supporting filesystems +
close nonsparse files after each write to force flush
2022-06-19 22:38:52 +02:00
ed
909c7bee3e ignore md plugin errors 2022-06-19 20:28:45 +02:00
ed
27ca54d138 md: ol appeared as ul 2022-06-19 19:05:41 +02:00
ed
2147c3a646 run markdown plugins in directory listings 2022-06-19 18:17:22 +02:00
ed
a99120116f ux: breadcrumb ctrl-click 2022-06-19 17:51:03 +02:00
ed
802efeaff2 dont let tags imply subdirectories when renaming 2022-06-19 16:06:39 +02:00
ed
9ad3af1ef6 misc tweaks 2022-06-19 16:05:48 +02:00
ed
715727b811 add changelog 2022-06-17 15:33:57 +02:00
ed
c6eaa7b836 aight good to know 2022-06-17 00:37:56 +02:00
ed
c2fceea2a5 v1.3.1 2022-06-16 21:56:12 +02:00
ed
190e11f7ea update deps + misc 2022-06-16 21:43:40 +02:00
ed
ad7413a5ff add .PARTIAL suffix to bup uploads too +
aggressive limits checking
2022-06-16 21:00:41 +02:00
ed
903b9e627a ux snappiness + keepalive on http-1.0 2022-06-16 20:33:09 +02:00
ed
c5c1e96cf8 ux: button to reset hidden columns 2022-06-16 19:06:28 +02:00
ed
62fbb04c9d allow moving files between filesystems 2022-06-16 18:46:50 +02:00
ed
728dc62d0b optimize nonsparse uploads (fat32, exfat, hpfs) 2022-06-16 17:51:42 +02:00
ed
2dfe1b1c6b add themes: hacker, hi-con 2022-06-16 12:21:21 +02:00
ed
35d4a1a6af ux: delay loading animation + focus outlines + explain ng 2022-06-16 11:02:05 +02:00
ed
eb3fa5aa6b add safety profiles + improve helptext + speed 2022-06-16 10:21:44 +02:00
ed
438384425a add types, isort, errorhandling 2022-06-16 01:07:15 +02:00
ed
0b6f102436 fix multiprocessing ftpd 2022-06-12 16:37:56 +02:00
ed
c9b7ec72d8 add hotkey Y to download current song / vid / pic 2022-06-09 17:23:11 +02:00
ed
256c7f1789 add option to see errors from mtp parsers 2022-06-09 14:46:35 +02:00
ed
4e5a323c62 more cleanup 2022-06-08 01:05:35 +02:00
ed
f4a3bbd237 fix ansify prepending bracket to all logfiles 2022-06-07 23:45:54 +02:00
ed
fe73f2d579 cleanup 2022-06-07 23:08:43 +02:00
ed
f79fcc7073 discover local ip under termux 2022-06-07 23:03:16 +02:00
ed
4c4b3790c7 fix read-spin on d/c during json post + errorhandling 2022-06-07 19:02:52 +02:00
ed
bd60b464bb fix misleading log-msg 2022-06-07 14:12:55 +02:00
ed
6bce852765 ux: treepar positioning 2022-06-06 22:05:13 +02:00
ed
3b19a5a59d improve a11y jumpers 2022-05-25 20:31:12 +02:00
ed
f024583011 add a11y jumpers 2022-05-24 09:09:54 +02:00
ed
1111baacb2 v1.3.0 2022-05-22 17:02:38 +02:00
ed
1b9c913efb update deps (marked, codemirror, prism) 2022-05-22 16:49:18 +02:00
ed
3524c36e1b tl 2022-05-22 16:04:10 +02:00
ed
cf87cea9f8 ux, tl 2022-05-21 11:32:25 +02:00
ed
bfa34404b8 ux tweaks 2022-05-19 18:00:33 +02:00
ed
0aba5f35bf add confirms on colhide, bigtxt 2022-05-19 17:59:33 +02:00
ed
663bc0842a ux 2022-05-18 19:51:25 +02:00
ed
7d10c96e73 grammar 2022-05-18 19:33:20 +02:00
ed
6b2720fab0 dont switch to treeview on play into next folder 2022-05-18 19:24:47 +02:00
ed
e74ad5132a persist videoplayer prefs 2022-05-18 19:17:21 +02:00
ed
1f6f89c1fd apply default-language to splashpage 2022-05-18 19:02:36 +02:00
ed
4d55e60980 update flat-light ss 2022-05-16 19:01:32 +02:00
ed
ddaaccd5af ux tweaks 2022-05-16 18:56:53 +02:00
ed
c20b7dac3d ah whatever, still 16 years left 2022-05-15 17:23:52 +02:00
ed
1f779d5094 zip: add ntfs and unix extensions for utc time 2022-05-15 16:13:49 +02:00
ed
715401ca8e fix timezone in search, zipfiles, fuse 2022-05-15 13:51:44 +02:00
ed
e7cd922d8b translate splashpage and search too 2022-05-15 13:20:52 +02:00
ed
187feee0c1 add norwegian translation 2022-05-14 23:25:40 +02:00
ed
49e962a7dc dbtool: faster, add examples,
match on hashes rather than paths by default,
add no-clobber option to keep existing tags
2022-05-14 12:44:05 +02:00
ed
633ff601e5 perf + ux 2022-05-14 00:13:06 +02:00
ed
331cf37054 show loading progress for huge documents 2022-05-13 23:02:20 +02:00
ed
23e4b9002f support ?doc=mojibake 2022-05-13 18:10:55 +02:00
ed
c0de3c8053 v1.2.11 2022-05-13 17:24:50 +02:00
ed
a82a3b084a make search results unselectable 2022-05-13 17:18:19 +02:00
ed
67c298e66b don't embed huge docs (defer to ajax), closes #9 2022-05-13 17:08:17 +02:00
ed
c110ccb9ae v1.2.10 2022-05-13 01:44:00 +02:00
ed
0143380306 help the query planner 2022-05-13 01:41:39 +02:00
ed
af9000d3c8 v1.2.9 2022-05-12 23:10:54 +02:00
ed
097d798e5e steal colors from monokai 2022-05-12 23:06:37 +02:00
ed
1d9f9f221a louder 2022-05-12 20:55:37 +02:00
ed
214a367f48 be loud about segfaults and such 2022-05-12 20:26:48 +02:00
ed
2fb46551a2 avoid pointless recursion + show scan summary 2022-05-09 23:43:59 +02:00
ed
6bcf330ae0 symlink-checker: print base vpath in nonverbose mode 2022-05-09 20:17:03 +00:00
ed
2075a8b18c skip nonregular files when indexing filesystem 2022-05-09 19:56:17 +00:00
ed
1275ac6c42 start up2k indexing even if no interfaces could bind 2022-05-09 20:38:06 +02:00
ed
708f20b7af remove option to disable spa 2022-05-08 14:29:05 +02:00
ed
a2c0c708e8 focus password field if not logged in 2022-05-07 22:16:12 +02:00
ed
2f2c65d91e improve up2k error messages 2022-05-07 22:15:09 +02:00
ed
cd5fcc7ca7 fix file sel/play background on focus 2022-05-06 21:15:18 +02:00
ed
aa29e7be48 minimal support for browsers without css-variables 2022-05-03 00:52:26 +02:00
ed
93febe34b0 truncate huge ffmpeg errors 2022-05-03 00:32:00 +02:00
ed
f086e6d3c1 best-effort recovery when chrome desyncs the mediaSession 2022-05-02 19:08:37 +02:00
ed
22e51e1c96 compensate for play/pause fades by rewinding a bit 2022-05-02 19:07:16 +02:00
ed
63a5336f31 change modal ok/cancel focus with left/right keys 2022-05-02 19:06:51 +02:00
ed
bfc6c53cc5 ux 2022-05-02 19:06:08 +02:00
ed
236017f310 better dropzones on small screens 2022-05-02 01:08:31 +02:00
ed
0a1d9b4dfd nevermind, not reliable when rproxied 2022-05-01 22:35:34 +02:00
ed
b50d090946 add logout on inactivity + related errorhandling 2022-05-01 22:12:25 +02:00
ed
00b5db52cf notes 2022-05-01 12:02:27 +02:00
ed
24cb30e2c5 support login from ie4 / win3.11 2022-05-01 11:42:19 +02:00
ed
4549145ab5 fix filekeys in basic-html browser 2022-05-01 11:29:51 +02:00
ed
67b0217754 cleanup + readme 2022-04-30 23:37:27 +02:00
ed
ccae9efdf0 safer systemd example (unprivileged user + NAT for port 80 / 443) 2022-04-30 23:28:51 +02:00
ed
59d596b222 add service to autogenerate TLS certificates 2022-04-30 22:54:35 +02:00
ed
4878eb2c45 support symlinks as volume root 2022-04-30 20:26:26 +02:00
ed
7755392f57 redirect to webroot after login 2022-04-30 18:15:09 +02:00
ed
dc2ea20959 v1.2.8 2022-04-30 02:16:34 +02:00
ed
8eaea2bd17 ux 2022-04-30 00:37:31 +02:00
ed
58e559918f fix dynamic tree sizing 2022-04-30 00:04:06 +02:00
ed
f38a3fca5b case-insensitive cover check 2022-04-29 23:39:16 +02:00
ed
1ea145b384 wow when did that break 2022-04-29 23:37:38 +02:00
ed
0d9567575a avoid hashing busy uploads during rescan 2022-04-29 23:16:23 +02:00
ed
e82f176289 fix deadlock on rescan during upload 2022-04-29 23:14:51 +02:00
ed
d4b51c040e doc + ux 2022-04-29 23:13:37 +02:00
ed
125d0efbd8 good stuff 2022-04-29 02:06:56 +02:00
ed
3215afc504 immediately search on enter key 2022-04-28 22:53:37 +02:00
ed
c73ff3ce1b avoid sqlite deadlock on windows 2022-04-28 22:46:53 +02:00
ed
f9c159a051 add option to force up2k turbo + hide warning 2022-04-28 21:57:37 +02:00
ed
2ab1325c90 add option to load more search results 2022-04-28 21:55:01 +02:00
ed
5b0f7ff506 perfect 2022-04-28 10:36:56 +02:00
ed
9269bc84f2 skip more stuff windows doesn't like 2022-04-28 10:31:10 +02:00
ed
4e8b651e18 too much effort into this joke 2022-04-28 10:29:54 +02:00
ed
65b4f79534 add themes "vice" and "hot dog stand" 2022-04-27 22:33:01 +02:00
ed
5dd43dbc45 ignore bugs in chrome v102 2022-04-27 22:32:11 +02:00
ed
5f73074c7e fix audio playback on first visit 2022-04-27 22:31:33 +02:00
ed
f5d6ba27b2 handle invalid headers better 2022-04-27 22:30:19 +02:00
ed
73fa70b41f fix mostly-harmless xss 2022-04-27 22:29:16 +02:00
ed
2a1cda42e7 avoid deadlocks on windows 2022-04-27 22:27:49 +02:00
ed
1bd7e31466 more theme porting 2022-04-26 00:42:00 +02:00
ed
eb49e1fb4a conditional up2k column sizes depending on card 2022-04-24 23:48:23 +02:00
ed
9838c2f0ce golf 2022-04-24 23:47:15 +02:00
ed
6041df8370 start replacing class-scopes with css variables 2022-04-24 23:46:38 +02:00
ed
2933dce3ef mtime blank uploads + helptext 2022-04-24 22:58:11 +02:00
ed
dab377d37b v1.2.7 2022-04-16 23:44:28 +02:00
ed
f35e41baf1 allow unposting with write-only access 2022-04-16 23:35:04 +02:00
ed
c4083a2942 v1.2.6 2022-04-15 20:09:50 +02:00
ed
36c20bbe53 fix setting mtime on windows 2022-04-15 20:08:55 +02:00
ed
e34634f5af v1.2.5 2022-04-15 19:42:40 +02:00
ed
cba9e5b669 add hardlinks (symlink alternative) for up2k dedup 2022-04-15 19:13:53 +02:00
ed
1f3c46a6b0 forgot some css files 2022-04-15 17:11:46 +02:00
ed
799a5ffa47 v1.2.4 2022-04-14 21:45:22 +02:00
ed
b000707c10 detect poor ffmpeg builds 2022-04-14 18:20:48 +02:00
ed
feba4de1d6 make gallery linkable 2022-04-14 17:12:56 +02:00
ed
951fdb27ca dont scan orphaned volumes 2022-04-14 17:11:51 +02:00
ed
9697fb3d84 option to disable thumbnails per volume 2022-04-14 17:11:26 +02:00
ed
2dbed4500a add flat theme 2022-04-14 16:57:51 +02:00
ed
fd9d0e433d thumbnails: try FFmpeg for images too 2022-04-11 10:38:57 +02:00
ed
f096f3ef81 thumbnails: disable pdf because too scary 2022-04-10 23:02:09 +02:00
ed
cc4a063695 thumbnails: per-decoder filetype config 2022-04-10 22:59:45 +02:00
ed
b64cabc3c9 thumbnails: add pyvips as alt/supp. to pillow 2022-04-10 14:16:09 +02:00
ed
3dd460717c add flat theme 2022-04-09 23:05:54 +02:00
ed
bf658a522b naming 2022-04-09 20:41:08 +02:00
ed
e9be7e712d futureproof clipboard function 2022-04-09 19:38:05 +02:00
ed
e40cd2a809 optimize window resizing 2022-04-09 19:20:09 +02:00
ed
dbabeb9692 gallery: add animation preferences 2022-04-09 17:23:54 +02:00
ed
8dd37d76b0 fix drifting resize 2022-04-09 14:37:25 +02:00
ed
fd475aa358 textviewer: translate basic ansi/sgr colors 2022-04-09 00:50:54 +02:00
ed
f0988c0e32 filter some volflags from up2k dump 2022-04-08 21:56:24 +02:00
ed
0632f09bff rhel8 ignores flock and kills us anyways 2022-04-08 21:29:31 +02:00
ed
ba599aaca0 explain systemd jank 2022-04-08 20:39:22 +02:00
ed
ff05919e89 support mpc/musepack audio (streaming + thumbnailing) 2022-04-02 22:17:16 +02:00
ed
52e63fa101 dont crash when mediaplayer config is changed while music isnt playing 2022-03-28 23:17:02 +02:00
ed
96ceccd12a v1.2.3 2022-03-24 02:35:53 +01:00
ed
87994fe006 retry failed uploads with backoff 2022-03-24 02:29:59 +01:00
ed
fa12c81a03 zip-download files older than 1980-01-01 2022-03-24 01:31:50 +01:00
ed
344ce63455 basic-browser is implicitly not js 2022-03-21 01:20:47 +01:00
ed
ec4daacf9e v1.2.2 2022-03-20 06:15:57 +01:00
ed
f3e8308718 eh, better as volflags 2022-03-20 05:45:07 +01:00
ed
515ac5d941 show textfile name in document title 2022-03-20 03:40:21 +01:00
ed
954c7e7e50 add option to request noindex from crawlers 2022-03-20 03:23:42 +01:00
ed
67ff57f3a3 add option to disable html folder listings 2022-03-20 02:45:53 +01:00
ed
c10c70c1e5 misc 2022-03-04 21:30:31 +01:00
ed
04592a98d2 include all IPs + link status in server url listing 2022-03-04 21:29:28 +01:00
ed
c9c4aac6cf v1.2.1 2022-03-03 01:26:29 +01:00
ed
8b2c7586ce minimal py2 support for ftpd 2022-03-03 01:18:01 +01:00
ed
32e22dfe84 vendor asynchat for pyftpdlib 2022-03-03 01:16:52 +01:00
ed
d70b885722 failed attempt at upgrading scp 2022-03-03 00:17:03 +01:00
ed
ac6c4b13f5 add plaintext volume listing 2022-03-02 21:20:19 +01:00
ed
ececdad22d and increase debounce a bit 2022-03-02 01:56:05 +01:00
ed
bf659781b0 try some more spacing 2022-03-02 01:49:15 +01:00
ed
2c6bb195a4 search: get rid of inner-joins to fix -tags 2022-03-02 00:35:04 +01:00
ed
c032cd08b3 prisonparty: clean exit on sigterm/int 2022-02-27 20:07:28 +01:00
ed
39e7a7a231 sfx: prefer system pyftpdlib if available 2022-02-13 21:00:13 +01:00
ed
6e14cd2c39 graduate copyparty-sfx.sh 2022-02-13 20:44:03 +01:00
ed
aab3baaea7 v1.2.0 2022-02-13 16:58:54 +01:00
ed
b8453c3b4f ftpd: support rootless filesystems 2022-02-13 16:38:24 +01:00
ed
6ce0e2cd5b ftpd: add ftps 2022-02-13 15:46:33 +01:00
ed
76beaae7f2 ftpd: add move/rename 2022-02-13 14:26:16 +01:00
ed
c1a7f9edbe ftpd: add indexing, delete, windows support 2022-02-13 13:58:16 +01:00
ed
b5f2fe2f0a add ftpd 2022-02-13 03:10:53 +01:00
ed
98a90d49cb ctrl-click document links to open in new tab 2022-02-12 20:26:44 +01:00
ed
f55e982cb5 configurable max-hits 2022-02-12 16:22:35 +01:00
ed
686c7defeb fix path-search in nontop volumes 2022-02-12 16:00:14 +01:00
ed
0b1e483c53 bump webdeps 2022-02-09 23:45:09 +01:00
ed
457d7df129 fix ie11 hotkey crash 2022-02-06 02:08:18 +01:00
ed
ce776a547c add rate throttling to uploads too 2022-02-06 02:06:59 +01:00
ed
ded0567cbf v1.1.12 2022-01-18 22:28:33 +01:00
ed
c9cac83d09 fix PUT response in write-only folders 2022-01-18 21:37:11 +01:00
ed
4fbe6b01a8 clarify what the app does 2022-01-17 00:31:23 +00:00
ed
ee9585264e deal with github api change + build vamp if necessary 2022-01-17 00:27:23 +00:00
ed
c9ffead7bf prisonparty: support running from src 2022-01-17 00:24:40 +00:00
ed
ed69d42005 v1.1.11 2022-01-14 22:25:06 +01:00
ed
0b47ee306b bump marked.js to 4.0.10 2022-01-14 20:42:23 +01:00
ed
e4e63619d4 linkable maintabs 2022-01-14 19:26:07 +01:00
ed
f32cca292a propagate sort-order to thegrid 2022-01-14 18:28:49 +01:00
ed
e87ea19ff1 return file URL in PUT response 2022-01-11 22:59:19 +01:00
ed
0214793740 fix garbage in markdown output 2022-01-05 18:57:05 +01:00
ed
fc9dd5d743 meadup changes 2022-01-03 01:16:27 +01:00
ed
9e6d5dd2b9 vbi: add onscreen qrcode 2021-12-28 20:57:11 +01:00
ed
bdad197e2c make it even worse 2021-12-27 00:04:38 +01:00
ed
7e139288a6 add very bad idea 2021-12-26 23:32:46 +01:00
ed
6e7935abaf repaint cut/paste buttons when permissions change 2021-12-24 00:50:52 +01:00
ed
3ba0cc20f1 v1.1.10 2021-12-17 00:05:17 +01:00
ed
dd28de1796 sendfile: handle eagain 2021-12-17 00:04:19 +01:00
ed
9eecc9e19a v1.1.9 2021-12-16 22:54:44 +01:00
ed
6530cb6b05 shut socket on tx error 2021-12-16 22:51:24 +01:00
ed
41ce613379 add multisearch 2021-12-12 20:11:07 +01:00
ed
5e2785caba more aggressively try ffmpeg when mutagen fails 2021-12-11 20:31:04 +01:00
ed
d7cc000976 v1.1.8 2021-12-10 02:44:48 +01:00
ed
50d8ff95ae good stuff 2021-12-10 02:21:56 +01:00
ed
b2de1459b6 quick backports to the alternative fuse client 2021-12-10 01:59:45 +01:00
ed
f0ffbea0b2 add breadcrumbs to the textfile tree 2021-12-10 00:44:47 +01:00
ed
199ccca0fe v1.1.7 2021-12-07 19:19:35 +01:00
ed
1d9b355743 fix search ui after b265e59 broke it 2021-12-07 19:12:36 +01:00
ed
f0437fbb07 cleanup the windowtitle a bit 2021-12-07 19:09:24 +01:00
ed
abc404a5b7 v1.1.6 2021-12-07 01:17:56 +01:00
ed
04b9e21330 update web-deps 2021-12-07 01:12:32 +01:00
ed
1044aa071b deal with consecutive dupes even without sqlite 2021-12-06 23:51:44 +01:00
ed
4c3192c8cc set window-title to listening ip 2021-12-06 23:08:04 +01:00
ed
689e77a025 option to set a custom servicename 2021-12-06 22:24:25 +01:00
ed
3bd89403d2 apply per-volume index config to ui 2021-12-06 22:04:24 +01:00
ed
b4800d9bcb option to disable onboot-scans per-volume 2021-12-06 20:54:13 +01:00
ed
05485e8539 md: smaller indent on outermost list 2021-12-06 20:17:12 +01:00
ed
0e03dc0868 and fix the markdown breadcrumbs too 2021-12-06 19:51:47 +01:00
ed
352b1ed10a generate correct links when trailing slash missing 2021-12-06 19:49:14 +01:00
ed
0db1244d04 also consider TMPDIR and friends 2021-12-06 09:47:39 +01:00
ed
ece08b8179 create ~/.config if /tmp is readonly 2021-12-06 02:02:44 +01:00
ed
b8945ae233 fix tests and readme 2021-12-04 18:52:14 +01:00
ed
dcaf7b0a20 v1.1.5 2021-12-04 03:33:57 +01:00
ed
f982cdc178 spa gridview 2021-12-04 03:31:12 +01:00
ed
b265e59834 spa filetab 2021-12-04 03:25:28 +01:00
ed
4a843a6624 unflicker navpane + add client state escape hatch 2021-12-04 02:46:00 +01:00
ed
241ef5b99d preserve mtimes when juggling symlinks 2021-12-04 01:58:04 +01:00
ed
f39f575a9c sort-order indicators 2021-12-03 23:53:41 +01:00
ed
1521307f1e use preferred sort on initial render, fixes #8 2021-12-03 02:07:08 +01:00
ed
dd122111e6 v1.1.4 2021-11-28 04:22:05 +01:00
ed
00c177fa74 show upload eta in window title 2021-11-28 04:05:16 +01:00
ed
f6c7e49eb8 u2cli: better error messages 2021-11-28 03:38:57 +01:00
ed
1a8dc3d18a add workaround for #7 after all since it was trivial 2021-11-28 00:12:19 +01:00
ed
38a163a09a better dropzone for extremely slow browsers 2021-11-28 00:11:21 +01:00
ed
8f031246d2 disable windows quickedit to avoid accidental lockups 2021-11-27 21:43:19 +01:00
ed
8f3d97dde7 indicate onclick action for audio files in grid view 2021-11-24 22:10:59 +01:00
ed
4acaf24d65 remember if media controls were open or not 2021-11-24 21:49:41 +01:00
ed
9a8dbbbcf8 another accesskey fix 2021-11-22 21:57:29 +01:00
ed
a3efc4c726 encode quoted queries into raw 2021-11-22 21:53:23 +01:00
ed
0278bf328f support raw-queries with quotes 2021-11-22 20:59:07 +01:00
ed
17ddd96cc6 up2k list wasnt centered anymore 2021-11-21 22:44:11 +01:00
ed
0e82e79aea mention the eq fixing gapless albums 2021-11-20 19:33:56 +01:00
ed
30f124c061 fix forcing compression levels 2021-11-20 18:51:15 +01:00
ed
e19d90fcfc add missing examples 2021-11-20 18:50:55 +01:00
ed
184bbdd23d legalese rephrasing 2021-11-20 17:58:37 +01:00
ed
30b50aec95 mention mtp readme 2021-11-20 17:51:49 +01:00
ed
c3c3d81db1 add mtp plugin for exif stripping 2021-11-20 17:45:56 +01:00
ed
49b7231283 fix mojibake support in misc mtp plugins 2021-11-20 17:33:24 +01:00
ed
edbedcdad3 v1.1.3 2021-11-20 02:27:09 +01:00
ed
e4ae5f74e6 add tooltip indicator 2021-11-20 01:47:16 +01:00
ed
2c7ffe08d7 include sha512 as both hex and b64 in responses 2021-11-20 01:03:32 +01:00
ed
3ca46bae46 good oneliner 2021-11-20 00:20:34 +01:00
ed
7e82aaf843 simplify/improve up2k ui debounce 2021-11-20 00:03:15 +01:00
ed
315bd71adf limit turbo runahead 2021-11-20 00:01:14 +01:00
ed
2c612c9aeb ux 2021-11-19 21:31:05 +01:00
ed
36aee085f7 add timeouts to FFmpeg things 2021-11-16 22:22:09 +01:00
ed
d01bb69a9c u2cli: option to ignore inaccessible files 2021-11-16 21:53:00 +01:00
ed
c9b1c48c72 sizelimit registry + persist without e2d 2021-11-16 21:31:24 +01:00
ed
aea3843cf2 this is just noise 2021-11-16 21:28:50 +01:00
ed
131b6f4b9a workaround chrome rendering bug 2021-11-16 21:28:36 +01:00
ed
6efb8b735a better handling of python builds without sqlite3 2021-11-16 01:13:04 +01:00
ed
223b7af2ce more iOS jank 2021-11-16 00:05:35 +01:00
ed
e72c2a6982 add fastpath for using the eq as a pure gain control 2021-11-15 23:19:43 +01:00
ed
dd9b93970e autoenable aac transcoding when codec missing 2021-11-15 23:18:52 +01:00
ed
e4c7cd81a9 update readme 2021-11-15 20:28:53 +01:00
ed
12b3a62586 fix dumb mistakes 2021-11-15 20:13:16 +01:00
ed
2da3bdcd47 delay tooltips, fix #6 2021-11-15 03:56:17 +01:00
ed
c1dccbe0ba trick iphones into preloading natively 2021-11-15 03:01:11 +01:00
ed
9629fcde68 optionally enable seeking through os controls 2021-11-15 02:47:42 +01:00
ed
cae436b566 add client-option to disconnect on HTTP 304 2021-11-15 02:45:18 +01:00
ed
01714700ae more gapless fixes 2021-11-14 20:25:28 +01:00
ed
51e6c4852b retire ogvjs 2021-11-14 19:28:44 +01:00
ed
b206c5d64e handle multiple simultaneous uploads of the same file 2021-11-14 15:03:11 +01:00
ed
62c3272351 add option to simulate latency 2021-11-14 15:01:20 +01:00
ed
c5d822c70a v1.1.2 2021-11-12 23:08:24 +01:00
ed
9c09b4061a prefer fpool on linux as well 2021-11-12 22:57:36 +01:00
ed
c26fb43ced more cleanup 2021-11-12 22:30:23 +01:00
ed
deb8f20db6 misc cleanup/unjank 2021-11-12 20:48:26 +01:00
ed
50e18ed8ff fix up2k layout in readonly folders 2021-11-12 19:18:52 +01:00
ed
31f3895f40 close misc views on escape 2021-11-12 19:18:29 +01:00
ed
615929268a cache monet 2021-11-12 02:00:44 +01:00
ed
b8b15814cf add traffic shaping, bump speeds on https/windows 2021-11-12 01:34:56 +01:00
ed
7766fffe83 mostly fix ogvjs preloading 2021-11-12 01:09:01 +01:00
ed
2a16c150d1 general preload improvements 2021-11-12 01:04:31 +01:00
ed
418c2166cc add cursed doubleclick-handler in gridsel mode 2021-11-11 01:03:14 +01:00
ed
a4dd44f648 textviewer initiable through hotkeys 2021-11-11 00:18:34 +01:00
ed
5352f7cda7 fix ctrl-a fencing in codeblocks 2021-11-11 00:11:29 +01:00
ed
5533b47099 handle crc collisions 2021-11-10 23:59:07 +01:00
ed
e9b14464ee terminate preloader if it can't finish in time 2021-11-10 22:53:02 +01:00
ed
4e986e5cd1 xhr preload is not gapless 2021-11-10 22:00:24 +01:00
ed
8a59b40c53 better clientside upload dedup 2021-11-10 20:57:45 +01:00
ed
391caca043 v1.1.1 2021-11-08 22:39:00 +01:00
ed
171ce348d6 improve swr 2021-11-08 22:25:35 +01:00
ed
c2cc729135 update sfx sizes 2021-11-08 21:11:10 +01:00
ed
e7e71b76f0 add alternative preloader for spotty connections 2021-11-08 20:46:40 +01:00
ed
a2af61cf6f fix clipboard sharing on recent firefox versions 2021-11-08 20:43:26 +01:00
107 changed files with 13672 additions and 4752 deletions

.vscode/settings.json (vendored) · 25 changed lines

@@ -23,7 +23,6 @@
"terminal.ansiBrightWhite": "#ffffff",
},
"python.testing.pytestEnabled": false,
-"python.testing.nosetestsEnabled": false,
"python.testing.unittestEnabled": true,
"python.testing.unittestArgs": [
"-v",
@@ -35,18 +34,40 @@
"python.linting.pylintEnabled": true,
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
+"python.linting.mypyEnabled": true,
+"python.linting.mypyArgs": [
+"--ignore-missing-imports",
+"--follow-imports=silent",
+"--show-column-numbers",
+"--strict"
+],
"python.linting.flake8Args": [
"--max-line-length=120",
-"--ignore=E722,F405,E203,W503,W293,E402",
+"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
],
"python.linting.banditArgs": [
"--ignore=B104"
],
+"python.linting.pylintArgs": [
+"--disable=missing-module-docstring",
+"--disable=missing-class-docstring",
+"--disable=missing-function-docstring",
+"--disable=wrong-import-position",
+"--disable=raise-missing-from",
+"--disable=bare-except",
+"--disable=invalid-name",
+"--disable=line-too-long",
+"--disable=consider-using-f-string"
+],
+// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "black",
"editor.formatOnSave": true,
"[html]": {
"editor.formatOnSave": false,
},
+"[css]": {
+"editor.formatOnSave": false,
+},
"files.associations": {
"*.makefile": "makefile"
},

README.md · 220 changed lines

@@ -9,11 +9,20 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
* server only needs `py2.7` or `py3.3+`, all dependencies optional
-* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
+* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`

-📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
+try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
+📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
+## get the app
+<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
+(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)

## readme toc
@@ -47,13 +56,16 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [other tricks](#other-tricks) * [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ... * [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config) - using arguments or config files, or a mix of both * [server config](#server-config) - using arguments or config files, or a mix of both
* [ftp-server](#ftp-server) - an FTP server can be started using `--ftp 3921`
* [file indexing](#file-indexing) * [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags * [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload * [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
* [upload events](#upload-events) - trigger a script/program on each upload * [upload events](#upload-events) - trigger a script/program on each upload
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [themes](#themes)
* [complete examples](#complete-examples) * [complete examples](#complete-examples)
* [browser support](#browser-support) - TLDR: yes * [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients * [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -75,9 +87,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [optional dependencies](#optional-dependencies) - install these to enable bonus features * [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps) * [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff) * [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - there are two self-contained "binaries" * [sfx](#sfx) - the self-contained "binary"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features * [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android) * [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [building](#building) * [building](#building)
* [dev env setup](#dev-env-setup) * [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx) * [just the sfx](#just-the-sfx)
@@ -146,6 +159,7 @@ feature summary
* ☑ multiprocessing (actual multithreading) * ☑ multiprocessing (actual multithreading)
* ☑ volumes (mountpoints) * ☑ volumes (mountpoints)
* ☑ [accounts](#accounts-and-volumes) * ☑ [accounts](#accounts-and-volumes)
* ☑ [ftp-server](#ftp-server)
* upload * upload
* ☑ basic: plain multipart, ie6 support * ☑ basic: plain multipart, ie6 support
* ☑ [up2k](#uploading): js, resumable, multithreaded * ☑ [up2k](#uploading): js, resumable, multithreaded
@@ -161,12 +175,13 @@ feature summary
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename)) * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls and opus transcoding) * ☑ audio player (with OS media controls and opus transcoding)
* ☑ image gallery with webm player * ☑ image gallery with webm player
* ☑ textfile browser with syntax hilighting
* ☑ [thumbnails](#thumbnails) * ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow * ☑ ...of images using Pillow, pyvips, or FFmpeg
* ☑ ...of videos using FFmpeg * ☑ ...of videos using FFmpeg
* ☑ ...of audio (spectrograms) using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually) * ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading) * ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
* server indexing * server indexing
* ☑ [locate files by contents](#file-search) * ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size * ☑ search by name/path/date/size
@@ -228,11 +243,19 @@ some improvement ideas
## general bugs
* Windows: if the up2k db is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* probably more, pls let me know * probably more, pls let me know
## not my bugs
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
* Windows: folders cannot be accessed if the name ends with `.` * Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug * python or windows bug
@@ -249,6 +272,7 @@ some improvement ideas
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside? * is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there * yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
* can I make copyparty download a file to my server if I give it a URL? * can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible * not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
@@ -256,7 +280,10 @@ some improvement ideas
# accounts and volumes

-per-folder, per-user permissions
+per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
+* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)

+configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
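for example, a minimal sketch of the argument form above (account name, password, and the inbox path are hypothetical):

```bash
# one account "usr", a public read-only webroot, and a write-only upload inbox
python3 copyparty-sfx.py \
  -a usr:pwd \
  -v .::r \
  -v /srv/inc:inc:w
```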
@@ -314,6 +341,7 @@ the browser has the following hotkeys (always qwerty)
* `V` toggle folders / textfiles in the navpane * `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails) * `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons * `T` toggle thumbnails / icons
* `ESC` close various things
* `ctrl-X` cut selected files/folders * `ctrl-X` cut selected files/folders
* `ctrl-V` paste * `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder * `F2` [rename](#batch-rename) selected file/folder
@@ -332,11 +360,13 @@ the browser has the following hotkeys (always qwerty)
* `U/O` skip 10sec back/forward * `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90% * `0..9` jump to 0%..90%
* `P` play/pause (also starts playing the folder) * `P` play/pause (also starts playing the folder)
* `Y` download file
* when viewing images / playing videos: * when viewing images / playing videos:
* `J/L, Left/Right` prev/next file * `J/L, Left/Right` prev/next file
* `Home/End` first/last file * `Home/End` first/last file
* `S` toggle selection * `S` toggle selection
* `R` rotate clockwise (shift=ccw) * `R` rotate clockwise (shift=ccw)
* `Y` download file
* `Esc` close viewer * `Esc` close viewer
* videos: * videos:
* `U/O` skip 10sec back/forward * `U/O` skip 10sec back/forward
@@ -365,9 +395,13 @@ switching between breadcrumbs or navpane
click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing) click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing)
* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size * `[+]` and `[-]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder * `[🎯]` jumps to the currently open folder
* `[📃]` toggles between showing folders and textfiles
* `[📌]` shows the name of all parent folders in a docked panel
* `[a]` toggles automatic widening as you go deeper * `[a]` toggles automatic widening as you go deeper
* `[↵]` toggles wordwrap
* `[👀]` show full name on hover (if wordwrap is off)
## thumbnails

@@ -376,13 +410,16 @@ press `g` to toggle grid-view instead of the file listing, and `t` toggles icon

![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)

-it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
+it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
+* pyvips is 3x faster than Pillow, Pillow is 3x faster than FFmpeg
+* disable thumbnails for specific volumes with volflag `dthumb` for all, or `dvthumb` / `dathumb` / `dithumb` for video/audio/images only

audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)

images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`

in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
+* indicated by the audio files having the ▶ icon instead of 💾
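a possible combination of the switches above (volume path and name are hypothetical):

```bash
# spectrograms off globally, and one volume additionally drops video thumbnails
python3 copyparty-sfx.py --no-athumb -v /srv/pub:pub:r:c,dvthumb
```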
## zip downloads
@@ -433,7 +470,7 @@ see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png) ![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png) **protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress **protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
@@ -473,8 +510,6 @@ the files will be hashed on the client-side, and each hash is sent to the server
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]` files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much * the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
### unpost
@@ -562,6 +597,8 @@ and there are *two* editors
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab` * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* enabling the audio equalizer can help make gapless albums fully gapless in some browsers (chrome), so consider leaving it on with all the values at zero
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals) * get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1 * if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
@@ -594,6 +631,20 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings
using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
* or click the `[reload cfg]` button in the control-panel when logged in as admin
## ftp-server
an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit TLS (ftpes)
* based on [pyftpdlib](https://github.com/giampaolo/pyftpdlib)
* needs a dedicated port (cannot share with the HTTP/HTTPS API)
* uploads are not resumable -- delete and restart if necessary
* runs in active mode by default, you probably want `--ftp-pr 12000-13000`
* if you enable both `ftp` and `ftps`, the port-range will be divided in half
* some older software (filezilla on debian-stable) cannot passive-mode with TLS
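a possible invocation using the flags above (port numbers are just examples):

```bash
# plaintext FTP on 3921 plus a passive-mode port range for clients behind NAT;
# add --ftps as well if you want explicit TLS
python3 copyparty-sfx.py --ftp 3921 --ftp-pr 12000-13000
```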
## file indexing

@@ -608,10 +659,12 @@ through arguments:
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex

-the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
+the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
+* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
+* `-v ~/music::r:c,d2ts` same except only affecting tags

note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
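for instance, a sketch combining the global flags with a per-volume override (the music path is hypothetical):

```bash
# index files and tags on upload everywhere, but skip the on-boot rescan of one big volume
python3 copyparty-sfx.py -e2dsa -e2ts -v ~/music::r:c,d2ds
```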
@@ -632,7 +685,7 @@ if you set `--no-hash [...]` globally, you can enable hashing for specific volum
set upload rules using volume flags, some examples:

-* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
+* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -666,6 +719,12 @@ things to note,
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected

some examples,
* `-v inc:inc:w:c,pk=xz,0`
folder named inc, shared at inc, write-only for everyone, forces xz compression at level 0
* `-v inc:inc:w:c,pk`
same write-only inc, but forces gz compression (default) instead of xz
* `-v inc:inc:w:c,gz`
allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4`
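a hypothetical client-side counterpart, reusing the `bput` curl pattern from [client examples](#client-examples) -- assuming the `?gz` level can be combined with a multipart upload like this:

```bash
# upload into the write-only inc volume, asking the server to gzip it at level 4
curl -b cppwd=wark -F act=bput -F f=@notes.txt "http://127.0.0.1:3923/inc?gz=4"
```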
## database location

@@ -710,7 +769,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy

## file parser plugins

-provide custom parsers to index additional tags
+provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)

copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
@@ -743,6 +802,40 @@ and it will occupy the parsing threads, so fork anything expensive, or if you wa
if this becomes popular maybe there should be a less janky way to do it actually
## hiding from google
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
* `--no-robots` adds HTTP (`X-Robots-Tag`) and HTML (`<meta>`) headers with `noindex, nofollow` globally
* volume-flag `[...]:c,norobots` does the same thing for that single volume
* volume-flag `[...]:c,robots` ALLOWS search-engine crawling for that volume, even if `--no-robots` is set globally
also, `--force-js` disables the plain HTML folder listing, making things harder to parse for search engines
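a sketch of how these could be combined (volume names and paths are hypothetical):

```bash
# ask crawlers to stay away from everything except the one public volume
python3 copyparty-sfx.py --no-robots -v /srv/priv:priv:r -v /srv/pub:pub:r:c,robots
```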
## themes
you can change the default theme with `--theme 2`, and add your own themes by modifying `browser.css` or providing your own css to `--css-browser`, then telling copyparty they exist by increasing `--themes`
<table><tr><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/165864907-17e2ac7d-319d-4f25-8718-2f376f614b51.png"><img src="https://user-images.githubusercontent.com/241032/165867551-fceb35dd-38f0-42bb-bef3-25ba651ca69b.png"></a>
0. classic dark</td><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/168644399-68938de5-da9b-445f-8d92-b51c74b5f345.png"><img src="https://user-images.githubusercontent.com/241032/168644404-8e1a2fdc-6e59-4c41-905e-ba5399ed686f.png"></a>
2. flat pm-monokai</td><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/165864901-db13a429-a5da-496d-8bc6-ce838547f69d.png"><img src="https://user-images.githubusercontent.com/241032/165867560-aa834aef-58dc-4abe-baef-7e562b647945.png"></a>
4. vice</td></tr><tr><td align="center"><a href="https://user-images.githubusercontent.com/241032/165864905-692682eb-6fb4-4d40-b6fe-27d2c7d3e2a7.png"><img src="https://user-images.githubusercontent.com/241032/165867555-080b73b6-6d85-41bb-a7c6-ad277c608365.png"></a>
1. classic light</td><td align="center"><a href="https://user-images.githubusercontent.com/241032/168645276-fb02fd19-190a-407a-b8d3-d58fee277e02.png"><img src="https://user-images.githubusercontent.com/241032/168645280-f0662b3c-9764-4875-a2e2-d91cc8199b23.png"></a>
3. flat light
</td><td align="center"><a href="https://user-images.githubusercontent.com/241032/165864898-10ce7052-a117-4fcf-845b-b56c91687908.png"><img src="https://user-images.githubusercontent.com/241032/165867562-f3003d45-dd2a-4564-8aae-fed44c1ae064.png"></a>
5. <a href="https://blog.codinghorror.com/a-tribute-to-the-windows-31-hot-dog-stand-color-scheme/">hotdog stand</a></td></tr></table>
the classname of the HTML tag is set according to the selected theme, which is used to set colors as css variables ++
* each theme *generally* has a dark theme (even numbers) and a light theme (odd numbers), showing in pairs
* the first theme (theme 0 and 1) is `html.a`, second theme (2 and 3) is `html.b`
* if a light theme is selected, `html.y` is set, otherwise `html.z` is
* so if the dark edition of the 2nd theme is selected, you use any of `html.b`, `html.z`, `html.bz` to specify rules
see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where the color variables are set, and there's layout-specific stuff near the bottom
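for example, picking one of the numbered themes above as the server default:

```bash
# start with theme 2 (flat pm-monokai) as the default
python3 copyparty-sfx.py --theme 2
```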
## complete examples

* read-only music server with bpm and key scanning
@@ -781,7 +874,7 @@ TLDR: yes
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
-* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
-* `*3` using a wasm decoder which consumes a bit more power
+* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
+* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)

quick summary of more eccentric web-browsers trying to view a directory index:
@@ -792,7 +885,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
-| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u` |
+| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
+| **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
@@ -801,8 +895,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
interact with copyparty using non-browser clients

* javascript: dump some state into a file (two separate examples)
-* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
+* `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
-* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
+* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`

@@ -831,7 +925,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv

-you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
+you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
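the same thing as hypothetical curl one-liners (the `k` filler in the basic-auth example is arbitrary):

```bash
curl -b cppwd=hunter2 http://127.0.0.1:3923/    # cookie
curl "http://127.0.0.1:3923/?pw=hunter2"        # url query
curl -u hunter2:k http://127.0.0.1:3923/        # basic-auth, password as the username
```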
# up2k

@@ -899,13 +993,25 @@ when uploading files,

some notes on hardening

-on public copyparty instances with anonymous upload enabled:
-* users can upload html/css/js which will evaluate for other visitors in a few ways,
-  * unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
-  * if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
-other misc:
+* option `-s` is a shortcut to set the following options:
+  * `--no-thumb` disables thumbnails and audio transcoding to stop copyparty from running `FFmpeg`/`Pillow`/`VIPS` on uploaded files, which is a [good idea](https://www.cvedetails.com/vulnerability-list.php?vendor_id=3611) if anonymous upload is enabled
+  * `--no-mtag-ff` uses `mutagen` to grab music tags instead of `FFmpeg`, which is safer and faster but less accurate
+  * `--dotpart` hides uploads from directory listings while they're still incoming
+  * `--no-robots` and `--force-js` makes life harder for crawlers, see [hiding from google](#hiding-from-google)
+* option `-ss` is a shortcut for the above plus:
+  * `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary `<script>` tags
+  * `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
+  * `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
+    * however note if you edit one file it will also affect the other copies
+  * `--vague-403` returns a "404 not found" instead of "403 forbidden" which is a common enterprise meme
+  * `--nih` removes the server hostname from directory listings
+* option `-sss` is a shortcut for the above plus:
+  * `-lo cpp-%Y-%m%d-%H%M%S.txt.xz` enables logging to disk
+  * `-ls **,*,ln,p,r` does a scan on startup for any dangerous symlinks
+
+other misc notes:
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
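putting a few of these together, a hypothetical hardened public-upload setup (paths and volume layout are only examples):

```bash
# -ss safety shortcut, blunt 403s, a write-only inbox,
# and a get-only share where files need per-file keys in the URL
python3 copyparty-sfx.py -ss --vague-403 \
  -v /srv/inc:inc:w \
  -v /srv/pub:pub:g:c,fk
```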
@@ -970,6 +1076,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?txt=iso-8859-1` | ...with specific charset |
| GET | `?th` | get image/video at URL as thumbnail |
| GET | `?th=opus` | convert audio file to 128kbps opus |
| GET | `?th=caf` | ...in the iOS-proprietary container |
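hypothetical client calls against the rows above (host, paths and filenames are examples, and the thumbnail format depends on server config):

```bash
curl -o thumb.bin "http://127.0.0.1:3923/pics/cat.jpg?th"           # thumbnail of an image
curl -o song.opus "http://127.0.0.1:3923/music/song.flac?th=opus"   # 128kbps opus transcode
```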
| method | body | result |
|--|--|--|
@@ -1025,15 +1132,22 @@ mandatory deps:
install these to enable bonus features
enable ftp-server:
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
* with TLS encryption, `pyftpdlib pyopenssl`
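i.e. something along the lines of (assuming a pip-based install like the pyvips example below):

```bash
python3 -m pip install --user -U pyftpdlib pyopenssl
```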
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)

enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` and/or `pyvips` and/or `ffmpeg` (requires py2.7 or py3.5+)
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyvips` or `ffmpeg` or `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
## install recommended deps
@@ -1051,13 +1165,7 @@ these are standalone programs and will never be imported / evaluated by copypart
# sfx

the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
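for example (the volume spec is only an illustration; `-v .::r` shares the current folder read-only with everyone):

```sh
wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
python3 copyparty-sfx.py -v .::r
```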
## sfx repack
@@ -1065,46 +1173,62 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
reduce the size of an sfx by removing features

if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)

* `393k` size of original sfx.py as of v1.1.3
* `310k` after `./scripts/make-sfx.sh re no-cm`
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`

the features you can opt to drop are
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `hl`, prism, the syntax highlighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k

for the `re`pack to work, first run one of the sfx'es once to unpack it

**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL)
# install on android

install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:

```sh
apt update && apt -y full-upgrade && apt update && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install --user -U copyparty
echo $?
```

after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
if you want thumbnails, `apt -y install ffmpeg`
* or if you want to use vips instead, `apt -y install libvips && python -m pip install --user -U wheel && python -m pip install --user -U pyvips && (cd /data/data/com.termux/files/usr/lib/; ln -s libgobject-2.0.so{,.0}; ln -s libvips.so{,.42})`
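for example, to share the phone's internal storage (assuming `termux-setup-storage` has created the usual `~/storage/shared` link; adjust the path and permissions to taste):

```sh
copyparty -v ~/storage/shared::rw
```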
# reporting bugs
ideas for context to include in bug reports
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
```
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
```
# building

## dev env setup

you need python 3.9 or newer due to type hints

the rest is mostly optional; if you need a working env for vscode or similar
```sh
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 strip_hints  # MANDATORY
pip install mutagen             # audio metadata
pip install pyftpdlib           # ftp server
pip install Pillow pyheif-pillow-opener pillow-avif-plugin  # thumbnails
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy  # vscode tooling
```
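with the venv active you can then run copyparty straight from the source tree; a minimal sketch (the flags are illustrative, `-p 3923` just makes the default port explicit):

```sh
python3 -m copyparty -v .::rw -p 3923
```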
@@ -1123,8 +1247,8 @@ mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
then build the sfx using any of the following examples:

```sh
./scripts/make-sfx.sh           # regular edition
./scripts/make-sfx.sh gz no-cm  # gzip-compressed + no fancy markdown editor
```

View File

@@ -2,9 +2,14 @@
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers
* if something breaks just restart it
# [`partyjournal.py`](partyjournal.py)
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
* outputs a standalone html file
* optional mapping from IP-addresses to nicknames
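example invocation, taken from the script's docstring (the subnet and nickname mappings are placeholders):

```sh
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 ed=101
```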
# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)

View File

@@ -42,6 +42,7 @@ import threading
import traceback import traceback
import http.client # py2: httplib import http.client # py2: httplib
import urllib.parse import urllib.parse
import calendar
from datetime import datetime from datetime import datetime
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote from urllib.parse import unquote_to_bytes as unquote
@@ -495,7 +496,7 @@ class Gateway(object):
ts = 60 * 60 * 24 * 2 ts = 60 * 60 * 24 * 2
try: try:
sz = int(fsize) sz = int(fsize)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp() ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
except: except:
info("bad HTML or OS [{}] [{}]".format(fdate, fsize)) info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
# python cannot strptime(1959-01-01) on windows # python cannot strptime(1959-01-01) on windows
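the switch from `datetime.strptime(...).timestamp()` to `calendar.timegm(time.strptime(...))` changes how the listing's date column is interpreted: the former assumes local time, the latter treats it as UTC. a quick standalone illustration (not part of the client itself):

```python
import calendar
import time
from datetime import datetime

fdate = "2022-06-16 21:00:41"

# local-time interpretation (old behavior)
local_ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()

# UTC interpretation (new behavior)
utc_ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))

# the two differ by the machine's UTC offset; identical only on a UTC box
print(local_ts, utc_ts, local_ts - utc_ts)
```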

View File

@@ -45,6 +45,7 @@ import threading
import traceback import traceback
import http.client # py2: httplib import http.client # py2: httplib
import urllib.parse import urllib.parse
import calendar
from datetime import datetime from datetime import datetime
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote from urllib.parse import unquote_to_bytes as unquote
@@ -443,7 +444,7 @@ class Gateway(object):
ts = 60 * 60 * 24 * 2 ts = 60 * 60 * 24 * 2
try: try:
sz = int(fsize) sz = int(fsize)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp() ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
except: except:
info("bad HTML or OS [{}] [{}]".format(fdate, fsize)) info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
# python cannot strptime(1959-01-01) on windows # python cannot strptime(1959-01-01) on windows

View File

@@ -11,14 +11,18 @@ import re
import os import os
import sys import sys
import time import time
import json
import stat import stat
import errno import errno
import struct import struct
import codecs
import platform
import threading import threading
import http.client # py2: httplib import http.client # py2: httplib
import urllib.parse import urllib.parse
from datetime import datetime from datetime import datetime
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
try: try:
import fuse import fuse
@@ -38,7 +42,7 @@ except:
mount a copyparty server (local or remote) as a filesystem mount a copyparty server (local or remote) as a filesystem
usage: usage:
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
dependencies: dependencies:
sudo apk add fuse-dev python3-dev sudo apk add fuse-dev python3-dev
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
""" """
WINDOWS = sys.platform == "win32"
MACOS = platform.system() == "Darwin"
def threadless_log(msg): def threadless_log(msg):
print(msg + "\n", end="") print(msg + "\n", end="")
@@ -93,6 +101,41 @@ def html_dec(txt):
) )
def register_wtf8():
def wtf8_enc(text):
return str(text).encode("utf-8", "surrogateescape"), len(text)
def wtf8_dec(binary):
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
def wtf8_search(encoding_name):
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
codecs.register(wtf8_search)
bad_good = {}
good_bad = {}
def enwin(txt):
return "".join([bad_good.get(x, x) for x in txt])
for bad, good in bad_good.items():
txt = txt.replace(bad, good)
return txt
def dewin(txt):
return "".join([good_bad.get(x, x) for x in txt])
for bad, good in bad_good.items():
txt = txt.replace(good, bad)
return txt
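together, the `wtf-8` codec and the `enwin`/`dewin` maps let the fuse client round-trip filenames that aren't valid unicode (surrogateescape'd bytes) and characters that windows forbids; roughly like this (illustrative snippet, outside the actual client):

```python
# a filename containing a raw 0xFF byte survives the bytes -> str -> bytes round-trip
raw = b"bad\xffname.txt"
s = raw.decode("utf-8", "surrogateescape")   # same trick the "wtf-8" codec uses
assert s.encode("utf-8", "surrogateescape") == raw
```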
class CacheNode(object): class CacheNode(object):
def __init__(self, tag, data): def __init__(self, tag, data):
self.tag = tag self.tag = tag
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
class Gateway(object): class Gateway(object):
def __init__(self, base_url): def __init__(self, base_url, pw):
self.base_url = base_url self.base_url = base_url
self.pw = pw
ui = urllib.parse.urlparse(base_url) ui = urllib.parse.urlparse(base_url)
self.web_root = ui.path.strip("/") self.web_root = ui.path.strip("/")
@@ -135,8 +179,7 @@ class Gateway(object):
self.conns = {} self.conns = {}
def quotep(self, path): def quotep(self, path):
# TODO: mojibake support path = path.encode("wtf-8")
path = path.encode("utf-8", "ignore")
return quote(path, safe="/") return quote(path, safe="/")
def getconn(self, tid=None): def getconn(self, tid=None):
@@ -159,20 +202,29 @@ class Gateway(object):
except: except:
pass pass
def sendreq(self, *args, **kwargs): def sendreq(self, *args, **ka):
tid = get_tid() tid = get_tid()
if self.pw:
ck = "cppwd=" + self.pw
try:
ka["headers"]["Cookie"] = ck
except:
ka["headers"] = {"Cookie": ck}
try: try:
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), **kwargs) c.request(*list(args), **ka)
return c.getresponse() return c.getresponse()
except: except:
self.closeconn(tid) self.closeconn(tid)
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), **kwargs) c.request(*list(args), **ka)
return c.getresponse() return c.getresponse()
def listdir(self, path): def listdir(self, path):
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots" if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path) r = self.sendreq("GET", web_path)
if r.status != 200: if r.status != 200:
self.closeconn() self.closeconn()
@@ -182,9 +234,12 @@ class Gateway(object):
) )
) )
return self.parse_html(r) return self.parse_jls(r)
def download_file_range(self, path, ofs1, ofs2): def download_file_range(self, path, ofs1, ofs2):
if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw" web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1) hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
log("downloading {}".format(hdr_range)) log("downloading {}".format(hdr_range))
@@ -200,40 +255,27 @@ class Gateway(object):
return r.read() return r.read()
def parse_html(self, datasrc): def parse_jls(self, datasrc):
ret = [] rsp = b""
remainder = b""
ptn = re.compile(
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
)
while True: while True:
buf = remainder + datasrc.read(4096) buf = datasrc.read(1024 * 32)
# print('[{}]'.format(buf.decode('utf-8')))
if not buf: if not buf:
break break
remainder = b"" rsp += buf
endpos = buf.rfind(b"\n")
if endpos >= 0:
remainder = buf[endpos + 1 :]
buf = buf[:endpos]
lines = buf.decode("utf-8").split("\n") rsp = json.loads(rsp.decode("utf-8"))
for line in lines: ret = []
m = ptn.match(line) for statfun, nodes in [
if not m: [self.stat_dir, rsp["dirs"]],
# print(line) [self.stat_file, rsp["files"]],
continue ]:
for n in nodes:
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
if bad_good:
fname = enwin(fname)
ftype, fname, fsize, fdate = m.groups() ret.append([fname, statfun(n["ts"], n["sz"]), 0])
fname = html_dec(fname)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
sz = int(fsize)
if ftype == "-":
ret.append([fname, self.stat_file(ts, sz), 0])
else:
ret.append([fname, self.stat_dir(ts, sz), 0])
return ret return ret
@@ -262,6 +304,7 @@ class CPPF(Fuse):
Fuse.__init__(self, *args, **kwargs) Fuse.__init__(self, *args, **kwargs)
self.url = None self.url = None
self.pw = None
self.dircache = [] self.dircache = []
self.dircache_mtx = threading.Lock() self.dircache_mtx = threading.Lock()
@@ -271,7 +314,7 @@ class CPPF(Fuse):
def init2(self): def init2(self):
# TODO figure out how python-fuse wanted this to go # TODO figure out how python-fuse wanted this to go
self.gw = Gateway(self.url) # .decode('utf-8')) self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
info("up") info("up")
def clean_dircache(self): def clean_dircache(self):
@@ -536,6 +579,8 @@ class CPPF(Fuse):
def getattr(self, path): def getattr(self, path):
log("getattr [{}]".format(path)) log("getattr [{}]".format(path))
if WINDOWS:
path = enwin(path) # windows occasionally decodes f0xx to xx
path = path.strip("/") path = path.strip("/")
try: try:
@@ -568,9 +613,25 @@ class CPPF(Fuse):
def main(): def main():
time.strptime("19970815", "%Y%m%d") # python#7980 time.strptime("19970815", "%Y%m%d") # python#7980
register_wtf8()
if WINDOWS:
os.system("rem")
for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx
# (e000 to f8ff is basic-plane private-use)
bad_good[ch] = chr(ord(ch) + 0xF000)
for n in range(0, 0x100):
# map surrogateescape to another private-use area
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
for k, v in bad_good.items():
good_bad[v] = k
server = CPPF() server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None) server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
server.parse(values=server, errex=1) server.parse(values=server, errex=1)
if not server.url or not str(server.url).startswith("http"): if not server.url or not str(server.url).startswith("http"):
print("\nerror:") print("\nerror:")
@@ -578,7 +639,7 @@ def main():
print(" need argument: mount-path") print(" need argument: mount-path")
print("example:") print("example:")
print( print(
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas" " ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
) )
sys.exit(1) sys.exit(1)

View File

@@ -8,7 +8,10 @@ import sqlite3
import argparse import argparse
DB_VER1 = 3 DB_VER1 = 3
DB_VER2 = 4 DB_VER2 = 5
BY_PATH = None
NC = None
def die(msg): def die(msg):
@@ -57,8 +60,13 @@ def compare(n1, d1, n2, d2, verbose):
if rd.split("/", 1)[0] == ".hist": if rd.split("/", 1)[0] == ".hist":
continue continue
q = "select w from up where rd = ? and fn = ?" if BY_PATH:
hit = d2.execute(q, (rd, fn)).fetchone() q = "select w from up where rd = ? and fn = ?"
hit = d2.execute(q, (rd, fn)).fetchone()
else:
q = "select w from up where substr(w,1,16) = ? and +w = ?"
hit = d2.execute(q, (w1[:16], w1)).fetchone()
if not hit: if not hit:
miss += 1 miss += 1
if verbose: if verbose:
@@ -70,27 +78,32 @@ def compare(n1, d1, n2, d2, verbose):
n = 0 n = 0
miss = {} miss = {}
nmiss = 0 nmiss = 0
for w1, k, v in d1.execute("select * from mt"): for w1s, k, v in d1.execute("select * from mt"):
n += 1 n += 1
if n % 100_000 == 0: if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m" m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m) print(m)
q = "select rd, fn from up where substr(w,1,16) = ?" q = "select w, rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone() w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
if rd.split("/", 1)[0] == ".hist": if rd.split("/", 1)[0] == ".hist":
continue continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?" if BY_PATH:
w2 = d2.execute(q, (rd, fn)).fetchone() q = "select w from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
else:
q = "select w from up where substr(w,1,16) = ? and +w = ?"
w2 = d2.execute(q, (w1s, w1)).fetchone()
if w2: if w2:
w2 = w2[0] w2 = w2[0]
v2 = None v2 = None
if w2: if w2:
v2 = d2.execute( v2 = d2.execute(
"select v from mt where w = ? and +k = ?", (w2, k) "select v from mt where w = ? and +k = ?", (w2[:16], k)
).fetchone() ).fetchone()
if v2: if v2:
v2 = v2[0] v2 = v2[0]
@@ -124,7 +137,7 @@ def compare(n1, d1, n2, d2, verbose):
for k, v in sorted(miss.items()): for k, v in sorted(miss.items()):
if v: if v:
print(f"{n1} has {v:6} more {k:<6} tags than {n2}") print(f"{n1} has {v:7} more {k:<7} tags than {n2}")
print(f"in total, {nmiss} missing tags in {n2}\n") print(f"in total, {nmiss} missing tags in {n2}\n")
@@ -132,47 +145,75 @@ def compare(n1, d1, n2, d2, verbose):
def copy_mtp(d1, d2, tag, rm): def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0] nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0 n = 0
ndone = 0 ncopy = 0
for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)): nskip = 0
for w1s, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1 n += 1
if n % 25_000 == 0: if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m" m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ncopy} copied, {nskip} skipped\033[0m"
print(m) print(m)
q = "select rd, fn from up where substr(w,1,16) = ?" q = "select w, rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone() w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
if rd.split("/", 1)[0] == ".hist": if rd.split("/", 1)[0] == ".hist":
continue continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?" if BY_PATH:
w2 = d2.execute(q, (rd, fn)).fetchone() q = "select w from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
else:
q = "select w from up where substr(w,1,16) = ? and +w = ?"
w2 = d2.execute(q, (w1s, w1)).fetchone()
if not w2: if not w2:
continue continue
w2 = w2[0] w2s = w2[0][:16]
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone() hit = d2.execute("select v from mt where w = ? and +k = ?", (w2s, k)).fetchone()
if hit: if hit:
hit = hit[0] hit = hit[0]
if hit != v: if hit != v:
ndone += 1 if NC and hit is not None:
if hit is not None: nskip += 1
d2.execute("delete from mt where w = ? and +k = ?", (w2, k)) continue
d2.execute("insert into mt values (?,?,?)", (w2, k, v)) ncopy += 1
if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w2s, k))
d2.execute("insert into mt values (?,?,?)", (w2s, k, v))
if rm: if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,)) d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2s,))
d2.commit() d2.commit()
print(f"copied {ndone} {tag} tags over") print(f"copied {ncopy} {tag} tags over, skipped {nskip}")
def examples():
print(
"""
# clearing the journal
./dbtool.py up2k.db
# copy tags ".bpm" and "key" from old.db to up2k.db, and remove the mtp flag from matching files (so copyparty won't run any mtps on it)
./dbtool.py -ls up2k.db
./dbtool.py -src old.db up2k.db -cmp
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy key
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
"""
)
def main(): def main():
global NC, BY_PATH
os.system("") os.system("")
print() print()
ap = argparse.ArgumentParser() ap = argparse.ArgumentParser()
ap.add_argument("db", help="database to work on") ap.add_argument("db", help="database to work on")
ap.add_argument("-h2", action="store_true", help="show examples")
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from") ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
ap2 = ap.add_argument_group("informational / read-only stuff") ap2 = ap.add_argument_group("informational / read-only stuff")
@@ -185,11 +226,29 @@ def main():
ap2.add_argument( ap2.add_argument(
"-rm-mtp-flag", "-rm-mtp-flag",
action="store_true", action="store_true",
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it", help="when an mtp tag is copied over, also mark that file as done, so copyparty won't run any mtps on those files",
) )
ap2.add_argument("-vac", action="store_true", help="optimize DB") ap2.add_argument("-vac", action="store_true", help="optimize DB")
ap2 = ap.add_argument_group("behavior modifiers")
ap2.add_argument(
"-nc",
action="store_true",
help="no-clobber; don't replace/overwrite existing tags",
)
ap2.add_argument(
"-by-path",
action="store_true",
help="match files based on location rather than warks (content-hash), use this if the databases have different wark salts",
)
ar = ap.parse_args() ar = ap.parse_args()
if ar.h2:
examples()
return
NC = ar.nc
BY_PATH = ar.by_path
for v in [ar.db, ar.src]: for v in [ar.db, ar.src]:
if v and not os.path.exists(v): if v and not os.path.exists(v):

View File

@@ -6,9 +6,13 @@ some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3

these invoke standalone programs which are GPL or similar, so they are legally fine for most purposes:
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)

these do not have any problematic dependencies at all:
* [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)

View File

@@ -19,18 +19,18 @@ dep: ffmpeg
def det(tf): def det(tf):
# fmt: off # fmt: off
sp.check_call([ sp.check_call([
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-ss", "13", b"-ss", b"13",
"-y", "-i", fsenc(sys.argv[1]), b"-y", b"-i", fsenc(sys.argv[1]),
"-map", "0:a:0", b"-map", b"0:a:0",
"-ac", "1", b"-ac", b"1",
"-ar", "22050", b"-ar", b"22050",
"-t", "300", b"-t", b"300",
"-f", "f32le", b"-f", b"f32le",
tf fsenc(tf)
]) ])
# fmt: on # fmt: on

View File

@@ -23,15 +23,15 @@ dep: ffmpeg
def det(tf): def det(tf):
# fmt: off # fmt: off
sp.check_call([ sp.check_call([
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-y", "-i", fsenc(sys.argv[1]), b"-y", b"-i", fsenc(sys.argv[1]),
"-map", "0:a:0", b"-map", b"0:a:0",
"-t", "300", b"-t", b"300",
"-sample_fmt", "s16", b"-sample_fmt", b"s16",
tf fsenc(tf)
]) ])
# fmt: on # fmt: on

bin/mtag/image-noexif.py Normal file
View File

@@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""
remove exif tags from uploaded images
dependencies:
exiftool
about:
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
if the original image is modified in-place, then copyparty will keep the original
hash in up2k.db for a while (until the next volume rescan), so if the image is
reuploaded after a rescan then the upload will be renamed and kept as a dupe
alternatively you could switch the logic around, making a copy of the original
image into a subfolder named "exif" and modify the original in-place, but then
up2k.db will be out of sync until the next rescan, so any additional uploads
of the same image will get symlinked (deduplicated) to the modified copy
instead of the original in "exif"
or maybe delete the original image after processing, that would kinda work too
example copyparty config to use this:
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
explained:
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
append "noexif" to the list of known tags (mtp),
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
do this on all uploads with the file extension "jpg" or "jpeg",
ad = parse file regardless if FFmpeg thinks it is audio or not
PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
and your python must have sqlite3 support compiled in
"""
import os
import sys
import filecmp
import subprocess as sp
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p.encode("utf-8")
def main():
cwd, fn = os.path.split(sys.argv[1])
if os.path.basename(cwd) == "noexif":
return
os.chdir(cwd)
f1 = fsenc(fn)
f2 = os.path.join(b"noexif", f1)
cmd = [
b"exiftool",
b"-exif:all=",
b"-iptc:all=",
b"-xmp:all=",
b"-P",
b"-o",
b"noexif/",
b"--",
f1,
]
sp.check_output(cmd)
if not os.path.exists(f2):
print("failed")
return
if filecmp.cmp(f1, f2, shallow=False):
print("clean")
else:
print("exif")
# lastmod = os.path.getmtime(f1)
# times = (int(time.time()), int(lastmod))
# os.utime(f2, times)
if __name__ == "__main__":
main()

View File

@@ -4,8 +4,8 @@ set -e
# install dependencies for audio-*.py # install dependencies for audio-*.py
# #
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake # linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake # linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# win64: requires msys2-mingw64 environment # win64: requires msys2-mingw64 environment
# macos: requires macports # macos: requires macports
# #
@@ -101,8 +101,11 @@ export -f dl_files
github_tarball() { github_tarball() {
rm -rf g
mkdir g
cd g
dl_text "$1" | dl_text "$1" |
tee json | tee ../json |
( (
# prefer jq if available # prefer jq if available
jq -r '.tarball_url' || jq -r '.tarball_url' ||
@@ -111,8 +114,11 @@ github_tarball() {
awk -F\" '/"tarball_url": "/ {print$4}' awk -F\" '/"tarball_url": "/ {print$4}'
) | ) |
tee /dev/stderr | tee /dev/stderr |
head -n 1 |
tr -d '\r' | tr '\n' '\0' | tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _ xargs -0 bash -c 'dl_files "$@"' _
mv * ../tgz
cd ..
} }
@@ -127,6 +133,7 @@ gitlab_tarball() {
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1 tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
) | ) |
tee /dev/stderr | tee /dev/stderr |
head -n 1 |
tr -d '\r' | tr '\n' '\0' | tr -d '\r' | tr '\n' '\0' |
tee links | tee links |
xargs -0 bash -c 'dl_files "$@"' _ xargs -0 bash -c 'dl_files "$@"' _
@@ -138,10 +145,17 @@ install_keyfinder() {
# use msys2 in mingw-w64 mode # use msys2 in mingw-w64 mode
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python} # pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest [ -e $HOME/pe/keyfinder ] && {
echo found a keyfinder build in ~/pe, skipping
return
}
tar -xf mixxxdj-libkeyfinder-* cd "$td"
rm -- *.tar.gz github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
ls -al
tar -xf tgz
rm tgz
cd mixxxdj-libkeyfinder* cd mixxxdj-libkeyfinder*
h="$HOME" h="$HOME"
@@ -208,6 +222,22 @@ install_vamp() {
$pybin -m pip install --user vamp $pybin -m pip install --user vamp
cd "$td"
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
sha512sum -c <(
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
) <vamp-plugin-sdk-2.9.0.tar.gz
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
rm -- *.tar.gz
ls -al
cd vamp-plugin-sdk-*
./configure --prefix=$HOME/pe/vamp-sdk
make -j1 install
}
cd "$td"
have_beatroot || { have_beatroot || {
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n' printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz) (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
@@ -215,8 +245,11 @@ install_vamp() {
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -" echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
) <beatroot-vamp-v1.0.tar.gz ) <beatroot-vamp-v1.0.tar.gz
tar -xf beatroot-vamp-v1.0.tar.gz tar -xf beatroot-vamp-v1.0.tar.gz
rm -- *.tar.gz
cd beatroot-vamp-v1.0 cd beatroot-vamp-v1.0
make -f Makefile.linux -j4 [ -e ~/pe/vamp-sdk ] &&
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp # /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
mkdir ~/vamp mkdir ~/vamp
cp -pv beatroot-vamp.* ~/vamp/ cp -pv beatroot-vamp.* ~/vamp/
@@ -230,6 +263,7 @@ install_vamp() {
# not in use because it kinda segfaults, also no windows support # not in use because it kinda segfaults, also no windows support
install_soundtouch() { install_soundtouch() {
cd "$td"
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
tar -xvf soundtouch-* tar -xvf soundtouch-*

View File

@@ -13,7 +13,7 @@ try:
except: except:
def fsenc(p): def fsenc(p):
return p return p.encode("utf-8")
""" """
@@ -24,13 +24,13 @@ dep: ffmpeg
def det(): def det():
# fmt: off # fmt: off
cmd = [ cmd = [
"ffmpeg", b"ffmpeg",
"-nostdin", b"-nostdin",
"-hide_banner", b"-hide_banner",
"-v", "fatal", b"-v", b"fatal",
"-i", fsenc(sys.argv[1]), b"-i", fsenc(sys.argv[1]),
"-f", "framemd5", b"-f", b"framemd5",
"-" b"-"
] ]
# fmt: on # fmt: on

View File

@@ -0,0 +1,21 @@
// ==UserScript==
// @name twitter-unmute
// @namespace http://ocv.me/
// @version 0.1
// @description memes
// @author ed <irc.rizon.net>
// @match https://twitter.com/*
// @icon https://www.google.com/s2/favicons?domain=twitter.com
// @grant GM_addStyle
// ==/UserScript==
function grunnur() {
setInterval(function () {
//document.querySelector('div[aria-label="Unmute"]').click();
document.querySelector('video').muted = false;
}, 200);
}
var scr = document.createElement('script');
scr.textContent = '(' + grunnur.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);

bin/mtag/very-bad-idea.py Executable file
View File

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
"""
use copyparty as a chromecast replacement:
* post a URL and it will open in the default browser
* upload a file and it will open in the default application
* the `key` command simulates keyboard input
* the `x` command executes other xdotool commands
* the `c` command executes arbitrary unix commands
the android app makes it a breeze to post pics and links:
https://github.com/9001/party-up/releases
(iOS devices have to rely on the web-UI)
goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
and you probably want `twitter-unmute.user.js` from the res folder
-----------------------------------------------------------------------
-- startup script:
-----------------------------------------------------------------------
#!/bin/bash
set -e
# create qr code
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
/usr/bin/feh -x /dev/shm/cpp-qr.png &
# reposition and make topmost (with janky raspbian support)
( sleep 0.5
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
wmctrl -r :ACTIVE: -b toggle,above || true
ps aux | grep -E 'sleep[ ]7\.27' ||
while true; do
w=$(xdotool getactivewindow)
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
xdotool windowactivate $w
xdotool windowfocus $w
sleep 7.27 || break
done &
xeyes # distraction window to prevent ^w from closing the qr-code
) &
# bail if copyparty is already running
ps aux | grep -E '[3] copy[p]arty' && exit 0
# dumb chrome wrapper to allow autoplay
cat >/usr/local/bin/chromium-browser <<'EOF'
#!/bin/bash
set -e
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
EOF
chmod 755 /usr/local/bin/chromium-browser
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py
"""
import os
import sys
import time
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
def main():
fp = os.path.abspath(sys.argv[1])
with open(fp, "rb") as f:
txt = f.read(4096)
if txt.startswith(b"msg="):
open_post(txt)
else:
open_url(fp)
def open_post(txt):
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
try:
k, v = txt.split(" ", 1)
except:
open_url(txt)
if k == "key":
sp.call(["xdotool", "key"] + v.split(" "))
elif k == "x":
sp.call(["xdotool"] + v.split(" "))
elif k == "c":
env = os.environ.copy()
while " " in v:
v1, v2 = v.split(" ", 1)
if "=" not in v1:
break
ek, ev = v1.split("=", 1)
env[ek] = ev
v = v2
sp.call(v.split(" "), env=env)
else:
open_url(txt)
def open_url(txt):
ext = txt.rsplit(".")[-1].lower()
sp.call(["notify-send", "--", txt])
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
sp.call(["killall", "vlc"])
sp.call(["killall", "mpv"])
sp.call(["killall", "feh"])
time.sleep(0.5)
for _ in range(20):
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
# else:
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
# close any error messages:
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
sp.call(["xdg-open", txt])
main()

bin/partyjournal.py Executable file
View File

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
"""
partyjournal.py: chronological history of uploads
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
produces a chronological list of all uploads,
by collecting info from up2k databases and the filesystem
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
affecting all successive mappings
usage:
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
"""
import sys
import base64
import sqlite3
import argparse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
FS_ENCODING = sys.getfilesystemencoding()
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
pass
##
## snibbed from copyparty
def s3dec(v):
if not v.startswith("//"):
return v
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
return v.decode(FS_ENCODING, "replace")
def quotep(txt):
btxt = txt.encode("utf-8", "replace")
quot1 = quote(btxt, safe=b"/")
quot1 = quot1.encode("ascii")
quot2 = quot1.replace(b" ", b"+")
return quot2.decode("utf-8", "replace")
def html_escape(s, quote=False, crlf=False):
"""html.escape but also newlines"""
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")
if crlf:
s = s.replace("\r", "&#13;").replace("\n", "&#10;")
return s
## end snibs
##
def main():
ap = argparse.ArgumentParser(formatter_class=APF)
ap.add_argument("who", nargs="*")
ar = ap.parse_args()
imap = {}
subnet = ""
for v in ar.who:
if "=" not in v:
raise Exception("bad who: " + v)
k, v = v.split("=")
if k == ".":
subnet = v
continue
imap["{}{}".format(subnet, v)] = k
print(repr(imap), file=sys.stderr)
print(
"""\
<!DOCTYPE html>
<html lang="en">
<head><meta charset="utf-8"><style>
html, body {
color: #ccc;
background: #222;
font-family: sans-serif;
}
a {
color: #fc5;
}
td, th {
padding: .2em .5em;
border: 1px solid #999;
border-width: 0 1px 1px 0;
white-space: nowrap;
}
td:nth-child(1),
td:nth-child(2),
td:nth-child(3) {
font-family: monospace, monospace;
text-align: right;
}
tr:first-child {
position: sticky;
top: -1px;
}
th {
background: #222;
text-align: left;
}
</style></head><body><table><tr>
<th>wark</th>
<th>time</th>
<th>size</th>
<th>who</th>
<th>link</th>
</tr>"""
)
db_path = ".hist/up2k.db"
conn = sqlite3.connect(db_path)
q = r"pragma table_info(up)"
inf = conn.execute(q).fetchall()
cols = [x[1] for x in inf]
print("<!-- " + str(cols) + " -->")
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
q = r"select * from up order by case when at > 0 then at else mt end"
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
link = "/".join([s3dec(x) for x in [rd, fn] if x])
if fn.startswith("put-") and sz < 4096:
try:
with open(link, "rb") as f:
txt = f.read().decode("utf-8", "replace")
except:
continue
if txt.startswith("msg="):
txt = txt.encode("utf-8", "replace")
txt = unquote(txt.replace(b"+", b" "))
link = txt.decode("utf-8")[4:]
sz = "{:,}".format(sz)
v = [
w[:16],
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
"%Y-%m-%d %H:%M:%S"
),
sz,
imap.get(ip, ip),
]
row = "<tr>\n "
row += "\n ".join(["<td>{}</th>".format(x) for x in v])
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
row += "\n</tr>"
print(row)
print("</table></body></html>")
if __name__ == "__main__":
main()

bin/prisonparty.sh Normal file → Executable file
View File

@@ -11,10 +11,16 @@ sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
help() { cat <<'EOF' help() { cat <<'EOF'
usage: usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]" ./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]"
example: example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd" ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd"
example for running straight from source (instead of using an sfx):
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd"
note that if you have python modules installed as --user (such as bpm/key detectors),
you should add /home/foo/.local as a VOLDIR
EOF EOF
exit 1 exit 1
@@ -35,10 +41,20 @@ while true; do
vols+=( "$(realpath "$v")" ) vols+=( "$(realpath "$v")" )
done done
pybin="$1"; shift pybin="$1"; shift
pybin="$(realpath "$pybin")" pybin="$(command -v "$pybin")"
pyarg=
while true; do
v="$1"
[ "${v:0:1}" = - ] || break
pyarg="$pyarg $v"
shift
done
cpp="$1"; shift cpp="$1"; shift
cpp="$(realpath "$cpp")" [ -d "$cpp" ] && cppdir="$PWD" || {
cppdir="$(dirname "$cpp")" # sfx, not module
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
}
trap - EXIT trap - EXIT
@@ -60,11 +76,10 @@ echo
# remove any trailing slashes # remove any trailing slashes
jail="${jail%/}" jail="${jail%/}"
cppdir="${cppdir%/}"
# bind-mount system directories and volumes # bind-mount system directories and volumes
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort | printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
while IFS= read -r v; do while IFS= read -r v; do
[ -e "$v" ] || { [ -e "$v" ] || {
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v" # printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
@@ -72,6 +87,7 @@ while IFS= read -r v; do
} }
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a) i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b) i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
# echo "v [$v] i1 [$i1] i2 [$i2]"
[ $i1 = $i2 ] && continue [ $i1 = $i2 ] && continue
mkdir -p "$jail$v" mkdir -p "$jail$v"
@@ -79,21 +95,34 @@ while IFS= read -r v; do
done done
cln() {
rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
{
mount | grep -F " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
}
exit $rv
}
trap cln EXIT
# create a tmp # create a tmp
mkdir -p "$jail/tmp" mkdir -p "$jail/tmp"
chmod 777 "$jail/tmp" chmod 777 "$jail/tmp"
# run copyparty # run copyparty
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$? export HOME=$(getent passwd $uid | cut -d: -f6)
export USER=$(getent passwd $uid | cut -d: -f1)
export LOGNAME="$USER"
# cleanup if not in use #echo "pybin [$pybin]"
lsof "$jail" | grep -qF "$jail" && #echo "pyarg [$pyarg]"
echo "chroot is in use, will not cleanup" || #echo "cpp [$cpp]"
{ chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
mount | grep -qF " on $jail" | p=$!
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' | trap 'kill $p' INT TERM
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount wait
}
exit $rv

View File

@@ -3,11 +3,11 @@ from __future__ import print_function, unicode_literals
""" """
up2k.py: upload to copyparty up2k.py: upload to copyparty
2021-10-31, v0.11, ed <irc.rizon.net>, MIT-Licensed 2022-06-16, v0.15, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests - dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10 - supports python 2.6, 2.7, and 3.3 through 3.11
- almost zero error-handling - almost zero error-handling
- but if something breaks just try again and it'll autoresume - but if something breaks just try again and it'll autoresume
@@ -25,9 +25,10 @@ import hashlib
import argparse import argparse
import platform import platform
import threading import threading
import requests
import datetime import datetime
import requests
# from copyparty/__init__.py # from copyparty/__init__.py
PY2 = sys.version_info[0] == 2 PY2 = sys.version_info[0] == 2
@@ -76,15 +77,15 @@ class File(object):
self.up_b = 0 # type: int self.up_b = 0 # type: int
self.up_c = 0 # type: int self.up_c = 0 # type: int
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n" # t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name)) # eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
class FileSlice(object): class FileSlice(object):
"""file-like object providing a fixed window into a file""" """file-like object providing a fixed window into a file"""
def __init__(self, file, cid): def __init__(self, file, cid):
# type: (File, str) -> FileSlice # type: (File, str) -> None
self.car, self.len = file.kchunks[cid] self.car, self.len = file.kchunks[cid]
self.cdr = self.car + self.len self.cdr = self.car + self.len
@@ -150,13 +151,11 @@ if not VT100:
def termsize(): def termsize():
import os
env = os.environ env = os.environ
def ioctl_GWINSZ(fd): def ioctl_GWINSZ(fd):
try: try:
import fcntl, termios, struct, os import fcntl, termios, struct
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234")) cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
except: except:
@@ -217,36 +216,54 @@ class CTermsize(object):
eprint("\033[s\033[r\033[u") eprint("\033[s\033[r\033[u")
else: else:
self.g = 1 + self.h - margin self.g = 1 + self.h - margin
m = "{0}\033[{1}A".format("\n" * margin, margin) t = "{0}\033[{1}A".format("\n" * margin, margin)
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1)) eprint("{0}\033[s\033[1;{1}r\033[u".format(t, self.g - 1))
ss = CTermsize() ss = CTermsize()
def statdir(top): def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info""" """non-recursive listing of directory contents, along with stat() info"""
if hasattr(os, "scandir"): with os.scandir(top) as dh:
with os.scandir(top) as dh: for fh in dh:
for fh in dh: abspath = os.path.join(top, fh.name)
yield [os.path.join(top, fh.name), fh.stat()] try:
else: yield [abspath, fh.stat()]
for name in os.listdir(top): except:
abspath = os.path.join(top, name) err.append(abspath)
def _lsd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
for name in os.listdir(top):
abspath = os.path.join(top, name)
try:
yield [abspath, os.stat(abspath)] yield [abspath, os.stat(abspath)]
except:
err.append(abspath)
def walkdir(top): if hasattr(os, "scandir"):
statdir = _scd
else:
statdir = _lsd
def walkdir(err, top):
"""recursive statdir""" """recursive statdir"""
for ap, inf in sorted(statdir(top)): for ap, inf in sorted(statdir(err, top)):
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
for x in walkdir(ap): try:
yield x for x in walkdir(err, ap):
yield x
except:
err.append(ap)
else: else:
yield ap, inf yield ap, inf
def walkdirs(tops): def walkdirs(err, tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]""" """recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii") sep = "{0}".format(os.sep).encode("ascii")
for top in tops: for top in tops:
@@ -256,7 +273,7 @@ def walkdirs(tops):
stop = os.path.dirname(top) stop = os.path.dirname(top)
if os.path.isdir(top): if os.path.isdir(top):
for ap, inf in walkdir(top): for ap, inf in walkdir(err, top):
yield stop, ap[len(stop) :].lstrip(sep), inf yield stop, ap[len(stop) :].lstrip(sep), inf
else: else:
d, n = top.rsplit(sep, 1) d, n = top.rsplit(sep, 1)
@@ -342,7 +359,7 @@ def get_hashlist(file, pcb):
def handshake(req_ses, url, file, pw, search): def handshake(req_ses, url, file, pw, search):
# type: (requests.Session, str, File, any, bool) -> List[str] # type: (requests.Session, str, File, any, bool) -> list[str]
""" """
performs a handshake with the server; reply is: performs a handshake with the server; reply is:
if search, a list of search results if search, a list of search results
@@ -372,7 +389,7 @@ def handshake(req_ses, url, file, pw, search):
r = req_ses.post(url, headers=headers, json=req) r = req_ses.post(url, headers=headers, json=req)
break break
except: except:
eprint("handshake failed, retry...\n") eprint("handshake failed, retrying: {0}\n".format(file.name))
time.sleep(1) time.sleep(1)
try: try:
@@ -393,7 +410,7 @@ def handshake(req_ses, url, file, pw, search):
file.name = r["name"] file.name = r["name"]
file.wark = r["wark"] file.wark = r["wark"]
return r["hash"] return r["hash"], r["sprs"]
def upload(req_ses, file, cid, pw): def upload(req_ses, file, cid, pw):
@@ -446,10 +463,21 @@ class Ctl(object):
nfiles = 0 nfiles = 0
nbytes = 0 nbytes = 0
for _, _, inf in walkdirs(ar.files): err = []
for _, _, inf in walkdirs(err, ar.files):
nfiles += 1 nfiles += 1
nbytes += inf.st_size nbytes += inf.st_size
if err:
eprint("\n# failed to access {0} paths:\n".format(len(err)))
for x in err:
eprint(x.decode("utf-8", "replace") + "\n")
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
if not ar.ok:
eprint("aborting because --ok is not set\n")
return
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes))) eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles self.nfiles = nfiles
self.nbytes = nbytes self.nbytes = nbytes
@@ -460,13 +488,37 @@ class Ctl(object):
if ar.te: if ar.te:
req_ses.verify = ar.te req_ses.verify = ar.te
self.filegen = walkdirs(ar.files) self.filegen = walkdirs([], ar.files)
if ar.safe: if ar.safe:
self.safe() self._safe()
else: else:
self.fancy() self.hash_f = 0
self.hash_c = 0
self.hash_b = 0
self.up_f = 0
self.up_c = 0
self.up_b = 0
self.up_br = 0
self.hasher_busy = 1
self.handshaker_busy = 0
self.uploader_busy = 0
self.serialized = False
def safe(self): self.t0 = time.time()
self.t0_up = None
self.spd = None
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
self._fancy()
def _safe(self):
"""minimal basic slow boring fallback codepath""" """minimal basic slow boring fallback codepath"""
search = self.ar.s search = self.ar.s
for nf, (top, rel, inf) in enumerate(self.filegen): for nf, (top, rel, inf) in enumerate(self.filegen):
@@ -476,10 +528,10 @@ class Ctl(object):
print("{0} {1}\n hash...".format(self.nfiles - nf, upath)) print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
get_hashlist(file, None) get_hashlist(file, None)
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/" burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
while True: while True:
print(" hs...") print(" hs...")
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search) hs, _ = handshake(req_ses, self.ar.url, file, self.ar.a, search)
if search: if search:
if hs: if hs:
for hit in hs: for hit in hs:
@@ -500,29 +552,7 @@ class Ctl(object):
print(" ok!") print(" ok!")
def fancy(self): def _fancy(self):
self.hash_f = 0
self.hash_c = 0
self.hash_b = 0
self.up_f = 0
self.up_c = 0
self.up_b = 0
self.up_br = 0
self.hasher_busy = 1
self.handshaker_busy = 0
self.uploader_busy = 0
self.t0 = time.time()
self.t0_up = None
self.spd = None
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
if VT100: if VT100:
atexit.register(self.cleanup_vt100) atexit.register(self.cleanup_vt100)
ss.scroll_region(3) ss.scroll_region(3)
@@ -568,8 +598,8 @@ class Ctl(object):
if "/" in name: if "/" in name:
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1)) name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
m = "{0:6.1f}% {1} {2}\033[K" t = "{0:6.1f}% {1} {2}\033[K"
txt += m.format(p, self.nfiles - f, name) txt += t.format(p, self.nfiles - f, name)
txt += "\033[{0}H ".format(ss.g + 2) txt += "\033[{0}H ".format(ss.g + 2)
else: else:
@@ -585,11 +615,12 @@ class Ctl(object):
spd = humansize(spd) spd = humansize(spd)
eta = str(datetime.timedelta(seconds=int(eta))) eta = str(datetime.timedelta(seconds=int(eta)))
left = humansize(self.nbytes - self.up_b) sleft = humansize(self.nbytes - self.up_b)
nleft = self.nfiles - self.up_f
tail = "\033[K\033[u" if VT100 else "\r" tail = "\033[K\033[u" if VT100 else "\r"
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left) t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail)) eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
def cleanup_vt100(self): def cleanup_vt100(self):
ss.scroll_region(None) ss.scroll_region(None)
@@ -680,7 +711,7 @@ class Ctl(object):
upath = file.abs.decode("utf-8", "replace") upath = file.abs.decode("utf-8", "replace")
try: try:
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search) hs, sprs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
except Exception as ex: except Exception as ex:
if q == self.q_handshake and "<pre>partial upload exists" in str(ex): if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
self.q_recheck.put(file) self.q_recheck.put(file)
@@ -691,8 +722,8 @@ class Ctl(object):
if search: if search:
if hs: if hs:
for hit in hs: for hit in hs:
m = "found: {0}\n {1}{2}\n" t = "found: {0}\n {1}{2}\n"
print(m.format(upath, burl, hit["rp"]), end="") print(t.format(upath, burl, hit["rp"]), end="")
else: else:
print("NOT found: {0}\n".format(upath), end="") print("NOT found: {0}\n".format(upath), end="")
@@ -705,6 +736,12 @@ class Ctl(object):
continue continue
with self.mutex: with self.mutex:
if not sprs and not self.serialized:
t = "server filesystem does not support sparse files; serializing uploads\n"
eprint(t)
self.serialized = True
for _ in range(self.ar.j - 1):
self.q_upload.put(None)
if not hs: if not hs:
# all chunks done # all chunks done
self.up_f += 1 self.up_f += 1
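`sprs` is the server telling the client whether the target filesystem supports sparse files (it arrives in the handshake reply); when it doesn't, the client serializes uploads, presumably because out-of-order chunk writes are expensive without holes. what "sparse" means, as a standalone unix-only probe (illustrative only, not part of up2k.py; `st_blocks` does not exist on windows):

```python
import os

def filesystem_is_sparse(probe="sparse-probe.bin", hole=1024 * 1024):
    # write one byte a megabyte into an otherwise empty file;
    # sparse-capable filesystems allocate (almost) nothing for the hole
    with open(probe, "wb") as f:
        f.seek(hole)
        f.write(b"x")
    allocated = os.stat(probe).st_blocks * 512  # st_blocks is in 512-byte units
    os.unlink(probe)
    return allocated < hole

print(filesystem_is_sparse())
```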
@@ -744,7 +781,7 @@ class Ctl(object):
try: try:
upload(req_ses, file, cid, self.ar.a) upload(req_ses, file, cid, self.ar.a)
except: except:
eprint("upload failed, retry...\n") eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
pass # handshake will fix it pass # handshake will fix it
with self.mutex: with self.mutex:
@@ -783,6 +820,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process") ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-a", metavar="PASSWORD", help="password") ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)") ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap = app.add_argument_group("performance tweaks") ap = app.add_argument_group("performance tweaks")
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections") ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading") ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")


@@ -1,3 +1,6 @@
### [`plugins/`](plugins/)
* example extensions
### [`copyparty.bat`](copyparty.bat) ### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder) * launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH * intended for windows machines with no python.exe in PATH
@@ -26,6 +29,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
### [`cfssl.sh`](cfssl.sh) ### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl * creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config * give a 3rd argument to install it to your copyparty config
* systemd service at [`systemd/cfssl.service`](systemd/cfssl.service)
# OS integration # OS integration
init-scripts to start copyparty as a service init-scripts to start copyparty as a service


@@ -7,7 +7,7 @@ srv_fqdn="$2"
[ -z "$srv_fqdn" ] && { [ -z "$srv_fqdn" ] && {
echo "need arg 1: ca name" echo "need arg 1: ca name"
echo "need arg 2: server fqdn" echo "need arg 2: server fqdn and/or IPs, comma-separated"
echo "optional arg 3: if set, write cert into copyparty cfg" echo "optional arg 3: if set, write cert into copyparty cfg"
exit 1 exit 1
} }
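Putting the updated usage text into practice: the invocation below reuses the placeholder values from the new systemd unit further down (CA name, IPs and fqdn are examples, not defaults), with the third argument set so the cert gets written into the copyparty config:

    ./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y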


@@ -13,7 +13,7 @@
upstream cpp { upstream cpp {
server 127.0.0.1:3923; server 127.0.0.1:3923;
keepalive 120; keepalive 1;
} }
server { server {
listen 443 ssl; listen 443 ssl;

contrib/plugins/README.md (new file, 24 lines)

@@ -0,0 +1,24 @@
# example resource files
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser-icons.css`](browser-icons.css) adds filetype icons
## meadup.js
* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`
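A rough sketch of how these pieces could be wired together, assuming a ./srv webroot with the css/js copied into srv/memes/ (all paths here are placeholders; -v srv::rw just follows the volume syntax from the main help text, anon read+write):

    cp minimal-up2k.html srv/.epilogue.html
    cp browser-icons.css meadup.js srv/memes/
    python3 -m copyparty -v srv::rw --css-browser /memes/browser-icons.css --js-browser /memes/meadup.js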

contrib/plugins/meadup.js (new file, 506 lines)

@@ -0,0 +1,506 @@
// USAGE:
// place this file somewhere in the webroot and then
// python3 -m copyparty --js-browser /memes/meadup.js
//
// FEATURES:
// * adds an onscreen keyboard for operating a media center remotely,
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
// * adds an interactive anime girl (if you can find the dependencies)
var hambagas = [
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
];
// keybaord,
// onscreen keyboard by @steinuil
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
document.querySelector('.keybaord-container').innerHTML = `
<div class="keybaord-body">
<div class="keybaord-row keybaord-row-1">
<div class="keybaord-key" data-keybaord-key="Escape">
esc
</div>
<div class="keybaord-key" data-keybaord-key="F1">
F1
</div>
<div class="keybaord-key" data-keybaord-key="F2">
F2
</div>
<div class="keybaord-key" data-keybaord-key="F3">
F3
</div>
<div class="keybaord-key" data-keybaord-key="F4">
F4
</div>
<div class="keybaord-key" data-keybaord-key="F5">
F5
</div>
<div class="keybaord-key" data-keybaord-key="F6">
F6
</div>
<div class="keybaord-key" data-keybaord-key="F7">
F7
</div>
<div class="keybaord-key" data-keybaord-key="F8">
F8
</div>
<div class="keybaord-key" data-keybaord-key="F9">
F9
</div>
<div class="keybaord-key" data-keybaord-key="F10">
F10
</div>
<div class="keybaord-key" data-keybaord-key="F11">
F11
</div>
<div class="keybaord-key" data-keybaord-key="F12">
F12
</div>
<div class="keybaord-key" data-keybaord-key="Insert">
ins
</div>
<div class="keybaord-key" data-keybaord-key="Delete">
del
</div>
</div>
<div class="keybaord-row keybaord-row-2">
<div class="keybaord-key" data-keybaord-key="\`">
\`
</div>
<div class="keybaord-key" data-keybaord-key="1">
1
</div>
<div class="keybaord-key" data-keybaord-key="2">
2
</div>
<div class="keybaord-key" data-keybaord-key="3">
3
</div>
<div class="keybaord-key" data-keybaord-key="4">
4
</div>
<div class="keybaord-key" data-keybaord-key="5">
5
</div>
<div class="keybaord-key" data-keybaord-key="6">
6
</div>
<div class="keybaord-key" data-keybaord-key="7">
7
</div>
<div class="keybaord-key" data-keybaord-key="8">
8
</div>
<div class="keybaord-key" data-keybaord-key="9">
9
</div>
<div class="keybaord-key" data-keybaord-key="0">
0
</div>
<div class="keybaord-key" data-keybaord-key="-">
-
</div>
<div class="keybaord-key" data-keybaord-key="=">
=
</div>
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
backspace
</div>
</div>
<div class="keybaord-row keybaord-row-3">
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
tab
</div>
<div class="keybaord-key" data-keybaord-key="q">
q
</div>
<div class="keybaord-key" data-keybaord-key="w">
w
</div>
<div class="keybaord-key" data-keybaord-key="e">
e
</div>
<div class="keybaord-key" data-keybaord-key="r">
r
</div>
<div class="keybaord-key" data-keybaord-key="t">
t
</div>
<div class="keybaord-key" data-keybaord-key="y">
y
</div>
<div class="keybaord-key" data-keybaord-key="u">
u
</div>
<div class="keybaord-key" data-keybaord-key="i">
i
</div>
<div class="keybaord-key" data-keybaord-key="o">
o
</div>
<div class="keybaord-key" data-keybaord-key="p">
p
</div>
<div class="keybaord-key" data-keybaord-key="[">
[
</div>
<div class="keybaord-key" data-keybaord-key="]">
]
</div>
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
enter
</div>
</div>
<div class="keybaord-row keybaord-row-4">
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
🍔
</div>
<div class="keybaord-key" data-keybaord-key="a">
a
</div>
<div class="keybaord-key" data-keybaord-key="s">
s
</div>
<div class="keybaord-key" data-keybaord-key="d">
d
</div>
<div class="keybaord-key" data-keybaord-key="f">
f
</div>
<div class="keybaord-key" data-keybaord-key="g">
g
</div>
<div class="keybaord-key" data-keybaord-key="h">
h
</div>
<div class="keybaord-key" data-keybaord-key="j">
j
</div>
<div class="keybaord-key" data-keybaord-key="k">
k
</div>
<div class="keybaord-key" data-keybaord-key="l">
l
</div>
<div class="keybaord-key" data-keybaord-key=";">
;
</div>
<div class="keybaord-key" data-keybaord-key="'">
'
</div>
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
\\
</div>
</div>
<div class="keybaord-row keybaord-row-5">
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
shift
</div>
<div class="keybaord-key" data-keybaord-key="\\">
\\
</div>
<div class="keybaord-key" data-keybaord-key="z">
z
</div>
<div class="keybaord-key" data-keybaord-key="x">
x
</div>
<div class="keybaord-key" data-keybaord-key="c">
c
</div>
<div class="keybaord-key" data-keybaord-key="v">
v
</div>
<div class="keybaord-key" data-keybaord-key="b">
b
</div>
<div class="keybaord-key" data-keybaord-key="n">
n
</div>
<div class="keybaord-key" data-keybaord-key="m">
m
</div>
<div class="keybaord-key" data-keybaord-key=",">
,
</div>
<div class="keybaord-key" data-keybaord-key=".">
.
</div>
<div class="keybaord-key" data-keybaord-key="/">
/
</div>
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
shift
</div>
</div>
<div class="keybaord-row keybaord-row-6">
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
ctrl
</div>
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
win
</div>
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
alt
</div>
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
space
</div>
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
altgr
</div>
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
menu
</div>
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
ctrl
</div>
</div>
<div class="keybaord-row">
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
🔉
</div>
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
🔊
</div>
<div class="keybaord-key" data-keybaord-key="Left">
⬅️
</div>
<div class="keybaord-key" data-keybaord-key="Down">
⬇️
</div>
<div class="keybaord-key" data-keybaord-key="Up">
⬆️
</div>
<div class="keybaord-key" data-keybaord-key="Right">
➡️
</div>
<div class="keybaord-key" data-keybaord-key="Page_Up">
PgUp
</div>
<div class="keybaord-key" data-keybaord-key="Page_Down">
PgDn
</div>
<div class="keybaord-key" data-keybaord-key="Home">
🏠
</div>
<div class="keybaord-key" data-keybaord-key="End">
End
</div>
</div>
<div>
`;
function arraySample(array) {
return array[Math.floor(Math.random() * array.length)];
}
function sendMessage(msg) {
return fetch(BASE_URL, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
},
body: "msg=" + encodeURIComponent(msg),
}).then(
(r) => r.text(), // so the response body shows up in network tab
(err) => consoleError(err)
);
}
const MODIFIER_ON_CLASS = "keybaord-modifier-on";
const KEY_DATASET = "data-keybaord-key";
const KEY_CLASS = "keybaord-key";
const modifiers = new Set()
function toggleModifier(button, key) {
button.classList.toggle(MODIFIER_ON_CLASS);
if (modifiers.has(key)) {
modifiers.delete(key);
} else {
modifiers.add(key);
}
}
function popModifiers() {
let modifierString = "";
modifiers.forEach((mod) => {
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
.classList.remove(MODIFIER_ON_CLASS);
modifierString += mod + "+";
});
modifiers.clear();
return modifierString;
}
Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
const key = button.dataset.keybaordKey;
button.addEventListener("click", (ev) => {
switch (key) {
case "HAMBAGA":
sendMessage(arraySample(HAMBAGA));
break;
case "Shift_L":
case "Shift_R":
case "Control_L":
case "Control_R":
case "Meta_L":
case "Alt_L":
case "Alt_R":
toggleModifier(button, key);
break;
default: {
const keyWithModifiers = popModifiers() + key;
consoleLog(keyWithModifiers);
sendMessage("key " + keyWithModifiers)
.then(() => consoleLog(keyWithModifiers + " OK"));
}
}
});
});
}
// keybaord integration
(function () {
var o = mknod('div');
clmod(o, 'keybaord-container', 1);
ebi('op_msg').appendChild(o);
o = mknod('style');
o.innerHTML = `
.keybaord-body {
display: flex;
flex-flow: column nowrap;
margin: .6em 0;
}
.keybaord-row {
display: flex;
}
.keybaord-key {
border: 1px solid rgba(128,128,128,0.2);
width: 41px;
height: 40px;
display: flex;
justify-content: center;
align-items: center;
}
.keybaord-key:active {
background-color: lightgrey;
}
.keybaord-key.keybaord-modifier-on {
background-color: lightblue;
}
.keybaord-key.keybaord-backspace {
width: 82px;
}
.keybaord-key.keybaord-tab {
width: 55px;
}
.keybaord-key.keybaord-enter {
width: 69px;
}
.keybaord-key.keybaord-capslock {
width: 80px;
}
.keybaord-key.keybaord-backslash {
width: 88px;
}
.keybaord-key.keybaord-lshift {
width: 65px;
}
.keybaord-key.keybaord-rshift {
width: 103px;
}
.keybaord-key.keybaord-lctrl {
width: 55px;
}
.keybaord-key.keybaord-super {
width: 55px;
}
.keybaord-key.keybaord-alt {
width: 55px;
}
.keybaord-key.keybaord-altgr {
width: 55px;
}
.keybaord-key.keybaord-what {
width: 55px;
}
.keybaord-key.keybaord-rctrl {
width: 55px;
}
.keybaord-key.keybaord-spacebar {
width: 302px;
}
`;
document.head.appendChild(o);
initKeybaord('/', hambagas,
(msg) => { toast.inf(2, msg.toString()) },
(msg) => { toast.err(30, msg.toString()) });
})();
// live2d (dumb pointless meme)
// dependencies for this part are not tracked in git
// so delete this section if you wanna use this file
// (or supply your own l2d model and js)
(function () {
var o = mknod('link');
o.setAttribute('rel', 'stylesheet');
o.setAttribute('href', "/bad-memes/pio.css");
document.head.appendChild(o);
o = mknod('style');
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
document.head.appendChild(o);
o = mknod('div');
clmod(o, 'pio-container', 1);
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
document.body.appendChild(o);
var remaining = 3;
for (var a of ['pio', 'l2d', 'fireworks']) {
import_js(`/bad-memes/${a}.js`, function () {
if (remaining --> 1)
return;
o = mknod('script');
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
document.body.appendChild(o);
});
}
})();
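For reference, the onscreen keyboard above only POSTs url-encoded msg= values back to copyparty (which very-bad-idea.py then turns into keypresses), so a button press can be simulated from a shell; the host and port below are just the copyparty defaults:

    curl -d 'msg=key a' http://127.0.0.1:3923/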


@@ -9,7 +9,7 @@
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */ #ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */ #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#srch_dz, #srch_zd, /* the filesearch dropzone */ #srch_dz, #srch_zd, /* the filesearch dropzone */


@@ -0,0 +1,23 @@
# systemd service which generates a new TLS certificate on each boot,
# that way the one-year expiry time won't cause any issues --
# just have everyone trust the ca.pem once every 10 years
#
# assumptions/placeholder values:
# * this script and copyparty runs as user "cpp"
# * copyparty repo is at ~cpp/dev/copyparty
# * CA is named partylan
# * server IPs = 10.1.2.3 and 192.168.123.1
# * server hostname = party.lan
[Unit]
Description=copyparty certificate generator
Before=copyparty.service
[Service]
User=cpp
Type=oneshot
SyslogIdentifier=cpp-cert
ExecStart=/bin/bash -c 'cd ~/dev/copyparty/contrib && ./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y'
[Install]
WantedBy=multi-user.target
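Installing the generator presumably mirrors the copyparty.service instructions below; the destination path and unit name are assumed:

    cp contrib/systemd/cfssl.service /etc/systemd/system/
    systemctl enable --now cfssl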


@@ -2,17 +2,22 @@
# and share '/mnt' with anonymous read+write # and share '/mnt' with anonymous read+write
# #
# installation: # installation:
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty # cp -pv copyparty.service /etc/systemd/system
# restorecon -vr /etc/systemd/system/copyparty.service # restorecon -vr /etc/systemd/system/copyparty.service
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# firewall-cmd --reload # firewall-cmd --reload
# systemctl daemon-reload && systemctl enable --now copyparty
# #
# you may want to: # you may want to:
# change "User=cpp" and "/home/cpp/" to another user
# remove the nft lines to only listen on port 3923
# and in the ExecStart= line:
# change '/usr/bin/python3' to another interpreter # change '/usr/bin/python3' to another interpreter
# change '/mnt::rw' to another location or permission-set # change '/mnt::rw' to another location or permission-set
# remove '-p 80,443,3923' to only listen on port 3923 # add '-q' to disable logging on busy servers
# add '-i 127.0.0.1' to only allow local connections # add '-i 127.0.0.1' to only allow local connections
# add '--use-fpool' if uploading into nfs locations # add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing
# #
# with `Type=notify`, copyparty will signal systemd when it is ready to # with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty. # accept connections; correctly delaying units depending on copyparty.
@@ -20,9 +25,11 @@
# python disabling line-buffering, so messages are out-of-order: # python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png # https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
# #
# if you remove -q to enable logging, you may also want to remove the # unless you add -q to disable logging, you may want to remove the
# following line to enable buffering (slightly better performance): # following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x # Environment=PYTHONUNBUFFERED=x
#
# keep ExecStartPre before ExecStart, at least on rhel8
[Unit] [Unit]
Description=copyparty file server Description=copyparty file server
@@ -32,8 +39,23 @@ Type=notify
SyslogIdentifier=copyparty SyslogIdentifier=copyparty
Environment=PYTHONUNBUFFERED=x Environment=PYTHONUNBUFFERED=x
ExecReload=/bin/kill -s USR1 $MAINPID ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw # user to run as + where the TLS certificate is (if any)
User=cpp
Environment=XDG_CONFIG_HOME=/home/cpp/.config
# setup forwarding from ports 80 and 443 to port 3923
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
ExecStartPre=+nft add table ip nat
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# copyparty settings
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target
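A quick sanity check after starting the unit, to confirm that the port-forward rules from the ExecStartPre lines actually landed (needs root, assumes nft is installed):

    systemctl start copyparty
    nft list table ip nat    # should list the two 'redirect to :3923' rules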


@@ -1,54 +1,71 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import platform
import time
import sys
import os import os
import platform
import sys
import time
try:
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
except:
TYPE_CHECKING = False
PY2 = sys.version_info[0] == 2 PY2 = sys.version_info[0] == 2
if PY2: if PY2:
sys.dont_write_bytecode = True sys.dont_write_bytecode = True
unicode = unicode unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
else: else:
unicode = str unicode = str
WINDOWS = False WINDOWS: Any = (
if platform.system() == "Windows": [int(x) for x in platform.version().split(".")]
WINDOWS = [int(x) for x in platform.version().split(".")] if platform.system() == "Windows"
else False
)
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393] VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update # introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys"] ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
MACOS = platform.system() == "Darwin" MACOS = platform.system() == "Darwin"
def get_unix_home(): def get_unixdir() -> str:
try: paths: list[tuple[Callable[..., str], str]] = [
v = os.environ["XDG_CONFIG_HOME"] (os.environ.get, "XDG_CONFIG_HOME"),
if not v: (os.path.expanduser, "~/.config"),
raise Exception() (os.environ.get, "TMPDIR"),
ret = os.path.normpath(v) (os.environ.get, "TEMP"),
os.listdir(ret) (os.environ.get, "TMP"),
return ret (unicode, "/tmp"),
except: ]
pass for chk in [os.listdir, os.mkdir]:
for pf, pa in paths:
try:
p = pf(pa)
# print(chk.__name__, p, pa)
if not p or p.startswith("~"):
continue
try: p = os.path.normpath(p)
v = os.path.expanduser("~/.config") chk(p) # type: ignore
if v.startswith("~"): p = os.path.join(p, "copyparty")
raise Exception() if not os.path.isdir(p):
ret = os.path.normpath(v) os.mkdir(p)
os.listdir(ret)
return ret return p
except: except:
return "/tmp" pass
raise Exception("could not find a writable path for config")
class EnvParams(object): class EnvParams(object):
def __init__(self): def __init__(self) -> None:
self.t0 = time.time() self.t0 = time.time()
self.mod = os.path.dirname(os.path.realpath(__file__)) self.mod = os.path.dirname(os.path.realpath(__file__))
if self.mod.endswith("__init__"): if self.mod.endswith("__init__"):
@@ -59,7 +76,7 @@ class EnvParams(object):
elif sys.platform == "darwin": elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty") self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else: else:
self.cfg = get_unix_home() + "/copyparty" self.cfg = get_unixdir()
self.cfg = self.cfg.replace("\\", "/") self.cfg = self.cfg.replace("\\", "/")
try: try:


@@ -8,35 +8,48 @@ __copyright__ = 2019
__license__ = "MIT" __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/" __url__ = "https://github.com/9001/copyparty/"
import re import argparse
import os
import sys
import time
import shutil
import filecmp import filecmp
import locale import locale
import argparse import os
import re
import shutil
import sys
import threading import threading
import time
import traceback import traceback
from textwrap import dedent from textwrap import dedent
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode from .__init__ import ANYWIN, PY2, VT100, WINDOWS, E, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
from .authsrv import re_vol from .authsrv import re_vol
from .svchub import SvcHub
from .util import IMPLICATIONS, align_tab, ansi_re, min_ex, py_desc, termsize, wrap
HAVE_SSL = True
try: try:
from types import FrameType
from typing import Any, Optional
except:
pass
try:
HAVE_SSL = True
import ssl import ssl
except: except:
HAVE_SSL = False HAVE_SSL = False
printed = "" printed: list[str] = []
class RiceFormatter(argparse.HelpFormatter): class RiceFormatter(argparse.HelpFormatter):
def _get_help_string(self, action): def __init__(self, *args: Any, **kwargs: Any) -> None:
if PY2:
kwargs["width"] = termsize()[0]
super(RiceFormatter, self).__init__(*args, **kwargs)
def _get_help_string(self, action: argparse.Action) -> str:
""" """
same as ArgumentDefaultsHelpFormatter(HelpFormatter) same as ArgumentDefaultsHelpFormatter(HelpFormatter)
except the help += [...] line now has colors except the help += [...] line now has colors
@@ -45,41 +58,68 @@ class RiceFormatter(argparse.HelpFormatter):
if not VT100: if not VT100:
fmt = " (default: %(default)s)" fmt = " (default: %(default)s)"
help = action.help ret = unicode(action.help)
if "%(default)" not in action.help: if "%(default)" not in ret:
if action.default is not argparse.SUPPRESS: if action.default is not argparse.SUPPRESS:
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE] defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs: if action.option_strings or action.nargs in defaulting_nargs:
help += fmt ret += fmt
return help return ret
def _fill_text(self, text, width, indent): def _fill_text(self, text: str, width: int, indent: str) -> str:
"""same as RawDescriptionHelpFormatter(HelpFormatter)""" """same as RawDescriptionHelpFormatter(HelpFormatter)"""
return "".join(indent + line + "\n" for line in text.splitlines()) return "".join(indent + line + "\n" for line in text.splitlines())
def __add_whitespace(self, idx: int, iWSpace: int, text: str) -> str:
return (" " * iWSpace) + text if idx else text
def _split_lines(self, text: str, width: int) -> list[str]:
# https://stackoverflow.com/a/35925919
textRows = text.splitlines()
ptn = re.compile(r"\s*[0-9\-]{0,}\.?\s*")
for idx, line in enumerate(textRows):
search = ptn.search(line)
if not line.strip():
textRows[idx] = " "
elif search:
lWSpace = search.end()
lines = [
self.__add_whitespace(i, lWSpace, x)
for i, x in enumerate(wrap(line, width, width - 1))
]
textRows[idx] = lines
return [item for sublist in textRows for item in sublist]
class Dodge11874(RiceFormatter): class Dodge11874(RiceFormatter):
def __init__(self, *args, **kwargs): def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs["width"] = 9003 kwargs["width"] = 9003
super(Dodge11874, self).__init__(*args, **kwargs) super(Dodge11874, self).__init__(*args, **kwargs)
def lprint(*a, **ka): class BasicDodge11874(
global printed argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs["width"] = 9003
super(BasicDodge11874, self).__init__(*args, **kwargs)
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed += txt def lprint(*a: Any, **ka: Any) -> None:
txt: str = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed.append(txt)
if not VT100: if not VT100:
txt = ansi_re.sub("", txt) txt = ansi_re.sub("", txt)
print(txt, **ka) print(txt, **ka)
def warn(msg): def warn(msg: str) -> None:
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg)) lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def ensure_locale(): def ensure_locale() -> None:
for x in [ for x in [
"en_US.UTF-8", "en_US.UTF-8",
"English_United States.UTF8", "English_United States.UTF8",
@@ -93,7 +133,7 @@ def ensure_locale():
continue continue
def ensure_cert(): def ensure_cert() -> None:
""" """
the default cert (and the entire TLS support) is only here to enable the the default cert (and the entire TLS support) is only here to enable the
crypto.subtle javascript API, which is necessary due to the webkit guys crypto.subtle javascript API, which is necessary due to the webkit guys
@@ -119,8 +159,8 @@ def ensure_cert():
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout # printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def configure_ssl_ver(al): def configure_ssl_ver(al: argparse.Namespace) -> None:
def terse_sslver(txt): def terse_sslver(txt: str) -> str:
txt = txt.lower() txt = txt.lower()
for c in ["_", "v", "."]: for c in ["_", "v", "."]:
txt = txt.replace(c, "") txt = txt.replace(c, "")
@@ -135,8 +175,8 @@ def configure_ssl_ver(al):
flags = [k for k in ssl.__dict__ if ptn.match(k)] flags = [k for k in ssl.__dict__ if ptn.match(k)]
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3 # SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
if "help" in sslver: if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags] avail1 = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"]) avail = " ".join(sorted(avail1) + ["all"])
lprint("\navailable ssl/tls versions:\n " + avail) lprint("\navailable ssl/tls versions:\n " + avail)
sys.exit(0) sys.exit(0)
@@ -157,12 +197,12 @@ def configure_ssl_ver(al):
for k in ["ssl_flags_en", "ssl_flags_de"]: for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k) num = getattr(al, k)
lprint("{}: {:8x} ({})".format(k, num, num)) lprint("{0}: {1:8x} ({1})".format(k, num))
# think i need that beer now # think i need that beer now
def configure_ssl_ciphers(al): def configure_ssl_ciphers(al: argparse.Namespace) -> None:
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver: if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en ctx.options &= ~al.ssl_flags_en
@@ -186,8 +226,8 @@ def configure_ssl_ciphers(al):
sys.exit(0) sys.exit(0)
def args_from_cfg(cfg_path): def args_from_cfg(cfg_path: str) -> list[str]:
ret = [] ret: list[str] = []
skip = False skip = False
with open(cfg_path, "rb") as f: with open(cfg_path, "rb") as f:
for ln in [x.decode("utf-8").strip() for x in f]: for ln in [x.decode("utf-8").strip() for x in f]:
@@ -212,17 +252,66 @@ def args_from_cfg(cfg_path):
return ret return ret
def sighandler(sig=None, frame=None): def sighandler(sig: Optional[int] = None, frame: Optional[FrameType] = None) -> None:
msg = [""] * 5 msg = [""] * 5
for th in threading.enumerate(): for th in threading.enumerate():
stk = sys._current_frames()[th.ident] # type: ignore
msg.append(str(th)) msg.append(str(th))
msg.extend(traceback.format_stack(sys._current_frames()[th.ident])) msg.extend(traceback.format_stack(stk))
msg.append("\n") msg.append("\n")
print("\n".join(msg)) print("\n".join(msg))
def run_argparse(argv, formatter): def disable_quickedit() -> None:
import atexit
import ctypes
from ctypes import wintypes
def ecb(ok: bool, fun: Any, args: list[Any]) -> list[Any]:
if not ok:
err: int = ctypes.get_last_error() # type: ignore
if err:
raise ctypes.WinError(err) # type: ignore
return args
k32 = ctypes.WinDLL("kernel32", use_last_error=True) # type: ignore
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
k32.GetStdHandle.errcheck = ecb
k32.GetConsoleMode.errcheck = ecb
k32.SetConsoleMode.errcheck = ecb
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def cmode(out: bool, mode: Optional[int] = None) -> int:
h = k32.GetStdHandle(-11 if out else -10)
if mode:
return k32.SetConsoleMode(h, mode) # type: ignore
cmode = wintypes.DWORD()
k32.GetConsoleMode(h, ctypes.byref(cmode))
return cmode.value
# disable quickedit
mode = orig_in = cmode(False)
quickedit = 0x40
extended = 0x80
mask = quickedit + extended
if mode & mask != extended:
atexit.register(cmode, False, orig_in)
cmode(False, mode & ~mask | extended)
# enable colors in case the os.system("rem") trick ever stops working
if VT100:
mode = orig_out = cmode(True)
if mode & 4 != 4:
atexit.register(cmode, True, orig_out)
cmode(True, mode | 4)
def run_argparse(argv: list[str], formatter: Any) -> argparse.Namespace:
ap = argparse.ArgumentParser( ap = argparse.ArgumentParser(
formatter_class=formatter, formatter_class=formatter,
prog="copyparty", prog="copyparty",
@@ -243,10 +332,10 @@ def run_argparse(argv, formatter):
dedent( dedent(
""" """
-a takes username:password, -a takes username:password,
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:... -v takes src:dst:\033[33mperm\033[0m1:\033[33mperm\033[0m2:\033[33mperm\033[0mN:\033[32mvolflag\033[0m1:\033[32mvolflag\033[0m2:\033[32mvolflag\033[0mN:...
where "perm" is "permissions,username1,username2,..." * "\033[33mperm\033[0m" is "permissions,username1,username2,..."
and "volflag" is config flags to set on this volume * "\033[32mvolflag\033[0m" is config flags to set on this volume
list of permissions: list of permissions:
"r" (read): list folder contents, download files "r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads "w" (write): upload files; need "r" to see the uploads
@@ -265,7 +354,7 @@ def run_argparse(argv, formatter):
* w (write-only) for everyone * w (write-only) for everyone
* rw (read+write) for ed * rw (read+write) for ed
* reject duplicate files \033[0m * reject duplicate files \033[0m
if no accounts or volumes are configured, if no accounts or volumes are configured,
current folder will be read/write for everyone current folder will be read/write for everyone
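Combining the examples given in the -a and -v option descriptions further down ([ed:wark], [.::r], [/mnt/nas/music:/music:r:aed]), a full invocation might read:

    python3 -m copyparty -a ed:wark -v .::r -v /mnt/nas/music:/music:r:aed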
@@ -288,33 +377,46 @@ def run_argparse(argv, formatter):
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs \033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz) \033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9 \033[36mpk\033[35m forces server-side compression, optional arg: xz,9
\033[0mupload rules: \033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min \033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g) \033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB \033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[0mupload rotation: \033[0mupload rotation:
(moves all uploads into the specified folder structure) (moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each \033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing \033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour \033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
\033[0mdatabase, general: \033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags) \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t* \033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2* \033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso \033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso \033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location \033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage \033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[0mdatabase, audio tags: \033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ... "mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to \033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags) generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once \033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0mthumbnails:
\033[36mdthumb\033[35m disables all thumbnails
\033[36mdvthumb\033[35m disables video thumbnails
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
\033[36mdithumb\033[35m disables image thumbnails
\033[0mclient and ux:
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
\033[36mrobots\033[35m allows indexing by search engines (default)
\033[36mnorobots\033[35m kindly asks search engines to leave
\033[0mothers: \033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys, \033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission which will then be required at the "g" permission
@@ -323,7 +425,7 @@ def run_argparse(argv, formatter):
], ],
[ [
"urlform", "urlform",
"", "how to handle url-form POSTs",
dedent( dedent(
""" """
values for --urlform: values for --urlform:
@@ -362,52 +464,75 @@ def run_argparse(argv, formatter):
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file") ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients") ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all") ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark") ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark]")
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed") ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; examples [.::r], [/mnt/nas/music:/music:r:aed]")
ap2.add_argument("-ed", action="store_true", help="enable ?dots") ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins") ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]") ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
ap2 = ap.add_argument_group('upload options') ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled") ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload") ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without") ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without -- probably useful on nfs and cow filesystems (zfs, btrfs)")
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead") ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--thickfs", metavar="REGEX", type=u, default="fat|vfat|ex.?fat|hpfs|fuse", help="filesystems which dont support sparse files")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
ap2 = ap.add_argument_group('network options') ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)") ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)") ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy") ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext") ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe") ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers") ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info") ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets") ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
ap2 = ap.add_argument_group('FTP options')
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example 3921")
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example 3990")
ap2.add_argument("--ftp-dbg", action="store_true", help="enable debug logging")
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example 12000-13000")
ap2 = ap.add_argument_group('opt-outs') ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations") ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations") ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname") ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage") ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar") ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)") ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
ap2 = ap.add_argument_group('safety options') ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]") ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt") ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt") ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles") ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile") ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings") ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings") ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)") ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity (0.0028=10sec, 0.1=6min, 24=day, 168=week, 720=month, 8760=year)")
ap2 = ap.add_argument_group('yolo options') ap2 = ap.add_argument_group('yolo options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints") ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -417,8 +542,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("-q", action="store_true", help="quiet") ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz") ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup") ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs") ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling") ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header") ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching") ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
@@ -433,53 +558,71 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails") ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res") ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails") ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs") ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs") ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
ap2 = ap.add_argument_group('transcoding options') ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding") ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds") ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
ap2 = ap.add_argument_group('general db options') ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database") ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds") ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)") ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans") ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans") ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag") ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline") ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
ap2 = ap.add_argument_group('metadata db options') ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing") ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t") ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts") ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead") ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader") ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning") ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping") ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)", ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash") default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)", ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
default=".vq,.aq,vc,ac,res,.fps") default=".vq,.aq,vc,ac,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin") ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
ap2 = ap.add_argument_group('ui options') ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext") ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2 = ap.add_argument_group('debug options') ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile") ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir") ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing") ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead") ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second") ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC") ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
@@ -500,7 +643,7 @@ def run_argparse(argv, formatter):
return ret return ret
def main(argv=None): def main(argv: Optional[list[str]] = None) -> None:
time.strptime("19970815", "%Y%m%d") # python#7980 time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS: if WINDOWS:
os.system("rem") # enables colors os.system("rem") # enables colors
@@ -517,12 +660,12 @@ def main(argv=None):
if HAVE_SSL: if HAVE_SSL:
ensure_cert() ensure_cert()
for k, v in zip(argv, argv[1:]): for k, v in zip(argv[1:], argv[2:]):
if k == "-c": if k == "-c":
supp = args_from_cfg(v) supp = args_from_cfg(v)
argv.extend(supp) argv.extend(supp)
deprecated = [] deprecated: list[tuple[str, str]] = []
for dk, nk in deprecated: for dk, nk in deprecated:
try: try:
idx = argv.index(dk) idx = argv.index(dk)
@@ -540,12 +683,26 @@ def main(argv=None):
except: except:
pass pass
try: for fmtr in [RiceFormatter, Dodge11874, BasicDodge11874]:
al = run_argparse(argv, RiceFormatter) try:
except AssertionError: al = run_argparse(argv, fmtr)
al = run_argparse(argv, Dodge11874) except SystemExit:
raise
except:
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
nstrs = [] assert al
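The loop above tries each help-formatter in turn and keeps a parse that succeeds, while --help and genuine argument errors still exit through SystemExit. A condensed, self-contained restatement of the same pattern (whether the real loop stops at the first success is not visible in this hunk):

def parse_with_fallback(argv, formatters, run_argparse, log):
    al = None
    for fmtr in formatters:
        try:
            al = run_argparse(argv, fmtr)
            break
        except SystemExit:
            raise                        # --help / bad arguments should still exit
        except Exception as ex:
            log("formatter {} failed: {}".format(fmtr, ex))
    assert al is not None
    return al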
if WINDOWS and not al.keep_qem:
try:
disable_quickedit()
except:
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100:
al.wintitle = ""
nstrs: list[str] = []
anymod = False anymod = False
for ostr in al.v or []: for ostr in al.v or []:
m = re_vol.match(ostr) m = re_vol.match(ostr)
@@ -616,7 +773,7 @@ def main(argv=None):
# signal.signal(signal.SIGINT, sighandler) # signal.signal(signal.SIGINT, sighandler)
SvcHub(al, argv, printed).run() SvcHub(al, argv, "".join(printed)).run()
if __name__ == "__main__": if __name__ == "__main__":


@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (1, 1, 0) VERSION = (1, 3, 2)
CODENAME = "opus" CODENAME = "god dag"
BUILD_DT = (2021, 11, 6) BUILD_DT = (2022, 6, 20)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

File diff suppressed because it is too large


@@ -2,27 +2,30 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
from ..util import fsenc, fsdec
from ..util import SYMTIME, fsdec, fsenc
from . import path from . import path
try:
from typing import Optional
except:
pass
_ = (path,)
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c # grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')" # printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
def chmod(p, mode): def chmod(p: str, mode: int) -> None:
return os.chmod(fsenc(p), mode) return os.chmod(fsenc(p), mode)
def listdir(p="."): def listdir(p: str = ".") -> list[str]:
return [fsdec(x) for x in os.listdir(fsenc(p))] return [fsdec(x) for x in os.listdir(fsenc(p))]
def lstat(p): def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
return os.lstat(fsenc(p))
def makedirs(name, mode=0o755, exist_ok=True):
bname = fsenc(name) bname = fsenc(name)
try: try:
os.makedirs(bname, mode) os.makedirs(bname, mode)
@@ -31,29 +34,43 @@ def makedirs(name, mode=0o755, exist_ok=True):
raise raise
def mkdir(p, mode=0o755): def mkdir(p: str, mode: int = 0o755) -> None:
return os.mkdir(fsenc(p), mode) return os.mkdir(fsenc(p), mode)
def rename(src, dst): def rename(src: str, dst: str) -> None:
return os.rename(fsenc(src), fsenc(dst)) return os.rename(fsenc(src), fsenc(dst))
def replace(src, dst): def replace(src: str, dst: str) -> None:
return os.replace(fsenc(src), fsenc(dst)) return os.replace(fsenc(src), fsenc(dst))
def rmdir(p): def rmdir(p: str) -> None:
return os.rmdir(fsenc(p)) return os.rmdir(fsenc(p))
def stat(p): def stat(p: str) -> os.stat_result:
return os.stat(fsenc(p)) return os.stat(fsenc(p))
def unlink(p): def unlink(p: str) -> None:
return os.unlink(fsenc(p)) return os.unlink(fsenc(p))
def utime(p, times=None): def utime(
return os.utime(fsenc(p), times) p: str, times: Optional[tuple[float, float]] = None, follow_symlinks: bool = True
) -> None:
if SYMTIME:
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
else:
return os.utime(fsenc(p), times)
if hasattr(os, "lstat"):
def lstat(p: str) -> os.stat_result:
return os.lstat(fsenc(p))
else:
lstat = stat
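Taken together, the bos module mirrors the os API but round-trips every path through fsenc/fsdec, so callers work purely with str paths. A hypothetical caller, using the same import spelling that ftpd.py uses ("from .bos import bos"):

import tempfile
from copyparty.bos import bos     # assumed absolute form of "from .bos import bos"

d = tempfile.mkdtemp()
st = bos.stat(d)                                   # path is fsenc()'d before os.stat
bos.utime(d, (st.st_mtime, st.st_mtime))           # uses follow_symlinks when SYMTIME
print(bos.path.isdir(d))                           # wrappers from bos/path.py
bos.rmdir(d)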


@@ -2,36 +2,44 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
from ..util import fsenc, fsdec
from ..util import SYMTIME, fsdec, fsenc
def abspath(p): def abspath(p: str) -> str:
return fsdec(os.path.abspath(fsenc(p))) return fsdec(os.path.abspath(fsenc(p)))
def exists(p): def exists(p: str) -> bool:
return os.path.exists(fsenc(p)) return os.path.exists(fsenc(p))
def getmtime(p): def getmtime(p: str, follow_symlinks: bool = True) -> float:
return os.path.getmtime(fsenc(p)) if not follow_symlinks and SYMTIME:
return os.lstat(fsenc(p)).st_mtime
else:
return os.path.getmtime(fsenc(p))
def getsize(p): def getsize(p: str) -> int:
return os.path.getsize(fsenc(p)) return os.path.getsize(fsenc(p))
def isfile(p): def isfile(p: str) -> bool:
return os.path.isfile(fsenc(p)) return os.path.isfile(fsenc(p))
def isdir(p): def isdir(p: str) -> bool:
return os.path.isdir(fsenc(p)) return os.path.isdir(fsenc(p))
def islink(p): def islink(p: str) -> bool:
return os.path.islink(fsenc(p)) return os.path.islink(fsenc(p))
def realpath(p): def lexists(p: str) -> bool:
return os.path.lexists(fsenc(p))
def realpath(p: str) -> str:
return fsdec(os.path.realpath(fsenc(p))) return fsdec(os.path.realpath(fsenc(p)))


@@ -1,37 +1,56 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import time
import threading import threading
import time
from .broker_util import try_exec import queue
from .__init__ import TYPE_CHECKING
from .broker_mpw import MpWorker from .broker_mpw import MpWorker
from .broker_util import try_exec
from .util import mp from .util import mp
if TYPE_CHECKING:
from .svchub import SvcHub
try:
from typing import Any
except:
pass
class MProcess(mp.Process):
def __init__(
self,
q_pend: queue.Queue[tuple[int, str, list[Any]]],
q_yield: queue.Queue[tuple[int, str, list[Any]]],
target: Any,
args: Any,
) -> None:
super(MProcess, self).__init__(target=target, args=args)
self.q_pend = q_pend
self.q_yield = q_yield
class BrokerMp(object): class BrokerMp(object):
"""external api; manages MpWorkers""" """external api; manages MpWorkers"""
def __init__(self, hub): def __init__(self, hub: "SvcHub") -> None:
self.hub = hub self.hub = hub
self.log = hub.log self.log = hub.log
self.args = hub.args self.args = hub.args
self.procs = [] self.procs = []
self.retpend = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.num_workers = self.args.j or mp.cpu_count() self.num_workers = self.args.j or mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(self.num_workers)) self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1): for n in range(1, self.num_workers + 1):
q_pend = mp.Queue(1) q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
q_yield = mp.Queue(64) q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n)) proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
proc.q_pend = q_pend
proc.q_yield = q_yield
proc.clients = {}
thr = threading.Thread( thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-sink-{}".format(n) target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
@@ -42,11 +61,11 @@ class BrokerMp(object):
self.procs.append(proc) self.procs.append(proc)
proc.start() proc.start()
def shutdown(self): def shutdown(self) -> None:
self.log("broker", "shutting down") self.log("broker", "shutting down")
for n, proc in enumerate(self.procs): for n, proc in enumerate(self.procs):
thr = threading.Thread( thr = threading.Thread(
target=proc.q_pend.put([0, "shutdown", []]), target=proc.q_pend.put((0, "shutdown", [])),
name="mp-shutdown-{}-{}".format(n, len(self.procs)), name="mp-shutdown-{}-{}".format(n, len(self.procs)),
) )
thr.start() thr.start()
@@ -62,12 +81,12 @@ class BrokerMp(object):
procs.pop() procs.pop()
def reload(self): def reload(self) -> None:
self.log("broker", "reloading") self.log("broker", "reloading")
for _, proc in enumerate(self.procs): for _, proc in enumerate(self.procs):
proc.q_pend.put([0, "reload", []]) proc.q_pend.put((0, "reload", []))
def collector(self, proc): def collector(self, proc: MProcess) -> None:
"""receive message from hub in other process""" """receive message from hub in other process"""
while True: while True:
msg = proc.q_yield.get() msg = proc.q_yield.get()
@@ -78,10 +97,7 @@ class BrokerMp(object):
elif dest == "retq": elif dest == "retq":
# response from previous ipc call # response from previous ipc call
with self.retpend_mutex: raise Exception("invalid broker_mp usage")
retq = self.retpend.pop(retq_id)
retq.put(args)
else: else:
# new ipc invoking managed service in hub # new ipc invoking managed service in hub
@@ -93,9 +109,9 @@ class BrokerMp(object):
rv = try_exec(retq_id, obj, *args) rv = try_exec(retq_id, obj, *args)
if retq_id: if retq_id:
proc.q_pend.put([retq_id, "retq", rv]) proc.q_pend.put((retq_id, "retq", rv))
def put(self, want_retval, dest, *args): def say(self, dest: str, *args: Any) -> None:
""" """
send message to non-hub component in other process, send message to non-hub component in other process,
returns a Queue object which eventually contains the response if want_retval fire-and-forget; use ask() when a response is needed
@@ -103,7 +119,7 @@ class BrokerMp(object):
""" """
if dest == "listen": if dest == "listen":
for p in self.procs: for p in self.procs:
p.q_pend.put([0, dest, [args[0], len(self.procs)]]) p.q_pend.put((0, dest, [args[0], len(self.procs)]))
elif dest == "cb_httpsrv_up": elif dest == "cb_httpsrv_up":
self.hub.cb_httpsrv_up() self.hub.cb_httpsrv_up()
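Both ends of the broker speak the same tuple protocol over the queues: (retq_id, dest, args), where a retq_id of 0 means no reply is expected and dest is either a hub attribute path or a special command such as "listen" or "retq". A small standalone sketch of that framing -- the destination and payload below are made up for illustration:

import queue

q_yield = queue.Queue(64)          # worker -> hub
q_pend = queue.Queue(1)            # hub -> worker

# fire-and-forget, as in say():
q_yield.put((0, "log", ["mp1", "hello from worker", 0]))

# request/response, as in ask(): the reply queue is registered under id(retq)
retq = queue.Queue(1)
q_yield.put((id(retq), "some.hub.method", [{"payload": 1}]))   # made-up dest/args
# ...and the hub answers on the other queue with:
q_pend.put((id(retq), "retq", {"result": "ok"}))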


@@ -1,20 +1,38 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import sys import argparse
import signal import signal
import sys
import threading import threading
from .broker_util import ExceptionalQueue import queue
from .authsrv import AuthSrv
from .broker_util import BrokerCli, ExceptionalQueue
from .httpsrv import HttpSrv from .httpsrv import HttpSrv
from .util import FAKE_MP from .util import FAKE_MP
from copyparty.authsrv import AuthSrv
try:
from types import FrameType
from typing import Any, Optional, Union
except:
pass
class MpWorker(object): class MpWorker(BrokerCli):
"""one single mp instance""" """one single mp instance"""
def __init__(self, q_pend, q_yield, args, n): def __init__(
self,
q_pend: queue.Queue[tuple[int, str, list[Any]]],
q_yield: queue.Queue[tuple[int, str, list[Any]]],
args: argparse.Namespace,
n: int,
) -> None:
super(MpWorker, self).__init__()
self.q_pend = q_pend self.q_pend = q_pend
self.q_yield = q_yield self.q_yield = q_yield
self.args = args self.args = args
@@ -22,7 +40,7 @@ class MpWorker(object):
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
self.retpend = {} self.retpend: dict[int, Any] = {}
self.retpend_mutex = threading.Lock() self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock() self.mutex = threading.Lock()
@@ -45,20 +63,20 @@ class MpWorker(object):
thr.start() thr.start()
thr.join() thr.join()
def signal_handler(self, sig, frame): def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None:
# print('k') # print('k')
pass pass
def _log_enabled(self, src, msg, c=0): def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
self.q_yield.put([0, "log", [src, msg, c]]) self.q_yield.put((0, "log", [src, msg, c]))
def _log_disabled(self, src, msg, c=0): def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
pass pass
def logw(self, msg, c=0): def logw(self, msg: str, c: Union[int, str] = 0) -> None:
self.log("mp{}".format(self.n), msg, c) self.log("mp{}".format(self.n), msg, c)
def main(self): def main(self) -> None:
while True: while True:
retq_id, dest, args = self.q_pend.get() retq_id, dest, args = self.q_pend.get()
@@ -87,15 +105,14 @@ class MpWorker(object):
else: else:
raise Exception("what is " + str(dest)) raise Exception("what is " + str(dest))
def put(self, want_retval, dest, *args): def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
if want_retval: retq = ExceptionalQueue(1)
retq = ExceptionalQueue(1) retq_id = id(retq)
retq_id = id(retq) with self.retpend_mutex:
with self.retpend_mutex: self.retpend[retq_id] = retq
self.retpend[retq_id] = retq
else:
retq = None
retq_id = 0
self.q_yield.put([retq_id, dest, args]) self.q_yield.put((retq_id, dest, list(args)))
return retq return retq
def say(self, dest: str, *args: Any) -> None:
self.q_yield.put((0, dest, list(args)))


@@ -3,14 +3,25 @@ from __future__ import print_function, unicode_literals
import threading import threading
from .__init__ import TYPE_CHECKING
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
from .httpsrv import HttpSrv from .httpsrv import HttpSrv
from .broker_util import ExceptionalQueue, try_exec
if TYPE_CHECKING:
from .svchub import SvcHub
try:
from typing import Any
except:
pass
class BrokerThr(object): class BrokerThr(BrokerCli):
"""external api; behaves like BrokerMP but using plain threads""" """external api; behaves like BrokerMP but using plain threads"""
def __init__(self, hub): def __init__(self, hub: "SvcHub") -> None:
super(BrokerThr, self).__init__()
self.hub = hub self.hub = hub
self.log = hub.log self.log = hub.log
self.args = hub.args self.args = hub.args
@@ -23,29 +34,35 @@ class BrokerThr(object):
self.httpsrv = HttpSrv(self, None) self.httpsrv = HttpSrv(self, None)
self.reload = self.noop self.reload = self.noop
def shutdown(self): def shutdown(self) -> None:
# self.log("broker", "shutting down") # self.log("broker", "shutting down")
self.httpsrv.shutdown() self.httpsrv.shutdown()
def noop(self): def noop(self) -> None:
pass pass
def put(self, want_retval, dest, *args): def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
# new ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)
rv = try_exec(True, obj, *args)
# pretend we're broker_mp
retq = ExceptionalQueue(1)
retq.put(rv)
return retq
def say(self, dest: str, *args: Any) -> None:
if dest == "listen": if dest == "listen":
self.httpsrv.listen(args[0], 1) self.httpsrv.listen(args[0], 1)
return
else: # new ipc invoking managed service in hub
# new ipc invoking managed service in hub obj = self.hub
obj = self.hub for node in dest.split("."):
for node in dest.split("."): obj = getattr(obj, node)
obj = getattr(obj, node)
# TODO will deadlock if dest performs another ipc try_exec(False, obj, *args)
rv = try_exec(want_retval, obj, *args)
if not want_retval:
return
# pretend we're broker_mp
retq = ExceptionalQueue(1)
retq.put(rv)
return retq


@@ -1,17 +1,30 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import argparse
import traceback import traceback
from .util import Pebkac, Queue from queue import Queue
from .__init__ import TYPE_CHECKING
from .authsrv import AuthSrv
from .util import Pebkac
try:
from typing import Any, Optional, Union
from .util import RootLogger
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class ExceptionalQueue(Queue, object): class ExceptionalQueue(Queue, object):
def get(self, block=True, timeout=None): def get(self, block: bool = True, timeout: Optional[float] = None) -> Any:
rv = super(ExceptionalQueue, self).get(block, timeout) rv = super(ExceptionalQueue, self).get(block, timeout)
# TODO: how expensive is this?
if isinstance(rv, list): if isinstance(rv, list):
if rv[0] == "exception": if rv[0] == "exception":
if rv[1] == "pebkac": if rv[1] == "pebkac":
@@ -22,7 +35,26 @@ class ExceptionalQueue(Queue, object):
return rv return rv
def try_exec(want_retval, func, *args): class BrokerCli(object):
"""
helps mypy understand httpsrv.broker but still fails a few levels deeper,
for example resolving httpconn.* in httpcli -- see lines tagged #mypy404
"""
def __init__(self) -> None:
self.log: RootLogger = None
self.args: argparse.Namespace = None
self.asrv: AuthSrv = None
self.httpsrv: "HttpSrv" = None
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
return ExceptionalQueue(1)
def say(self, dest: str, *args: Any) -> None:
pass
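The point of the stub is that call-sites can be annotated against BrokerCli and accept either backend; a tiny assumed example (the function name is illustrative):

import socket
from copyparty.broker_util import BrokerCli

def announce_listener(broker: BrokerCli, sck: socket.socket) -> None:
    # works identically whether broker is a BrokerMp or a BrokerThr
    broker.say("listen", sck)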
def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:
try: try:
return func(*args) return func(*args)

copyparty/fsutil.py (new file, 138 lines)

@@ -0,0 +1,138 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import ctypes
import re
import time
from .__init__ import ANYWIN, MACOS
from .authsrv import AXS, VFS
from .util import chkcmd, min_ex
try:
from typing import Optional, Union
from .util import RootLogger
except:
pass
class Fstab(object):
def __init__(self, log: RootLogger):
self.log_func = log
self.tab: Optional[VFS] = None
self.cache: dict[str, str] = {}
self.age = 0.0
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("fstab", msg + "\033[K", c)
def get(self, path: str):
if time.time() - self.age > 600 or len(self.cache) > 9000:
self.age = time.time()
self.tab = None
self.cache = {}
fs = "ext4"
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
if ANYWIN:
fs = "vfat" # can smb do sparse files? gonna guess no
try:
# good enough
disk = path.split(":", 1)[0]
disk = "{}:\\".format(disk).lower()
assert len(disk) == 3
path = disk
except:
self.log(msg.format(path, fs, min_ex()), 3)
return fs
try:
return self.cache[path]
except:
pass
try:
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
except:
self.log(msg.format(path, fs, min_ex()), 3)
fs = fs.lower()
self.cache[path] = fs
self.log("found {} at {}".format(fs, path))
return fs
def build_tab(self):
self.log("building tab")
sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
if MACOS:
sptn = r"^.*? on (.*) \(([^ ]+), .*"
ptn = re.compile(sptn)
so, _ = chkcmd(["mount"])
tab1: list[tuple[str, str]] = []
for ln in so.split("\n"):
m = ptn.match(ln)
if not m:
continue
tab1.append(m.groups())
tab1.sort(key=lambda x: (len(x[0]), x[0]))
path1, fs1 = tab1[0]
tab = VFS(self.log_func, fs1, path1, AXS(), {})
for path, fs in tab1[1:]:
tab.add(fs, path.lstrip("/"))
self.tab = tab
def get_unix(self, path: str):
if not self.tab:
self.build_tab()
return self.tab._find(path)[0].realpath.split("/")[0]
def get_w32(self, path: str):
# list mountpoints: fsutil fsinfo drives
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPDWORD, LPWSTR, MAX_PATH
def echk(rc, fun, args):
if not rc:
raise ctypes.WinError(ctypes.get_last_error())
return None
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
k32.GetVolumeInformationW.errcheck = echk
k32.GetVolumeInformationW.restype = BOOL
k32.GetVolumeInformationW.argtypes = (
LPCWSTR,
LPWSTR,
DWORD,
LPDWORD,
LPDWORD,
LPDWORD,
LPWSTR,
DWORD,
)
bvolname = ctypes.create_unicode_buffer(MAX_PATH + 1)
bfstype = ctypes.create_unicode_buffer(MAX_PATH + 1)
serial = DWORD()
max_name_len = DWORD()
fs_flags = DWORD()
k32.GetVolumeInformationW(
path,
bvolname,
ctypes.sizeof(bvolname),
ctypes.byref(serial),
ctypes.byref(max_name_len),
ctypes.byref(fs_flags),
bfstype,
ctypes.sizeof(bfstype),
)
return bfstype.value
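Usage of Fstab is construct-and-query; the answer feeds the sparse/nonsparse upload decision. A hedged sketch -- the policy list is an assumption based on the changelog ("optimize nonsparse uploads (fat32, exfat, hpfs)"), not code from this diff:

from copyparty.fsutil import Fstab

def log(src, msg, c=0):                    # minimal RootLogger-style stand-in
    print(src, msg)

fstab = Fstab(log)
fs = fstab.get("/mnt/usbstick/music")      # e.g. "vfat", "ext4", "ntfs"
sparse_ok = fs not in ("vfat", "exfat", "fat", "fat32", "hpfs")   # assumed policy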

copyparty/ftpd.py (new file, 401 lines)

@@ -0,0 +1,401 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import logging
import os
import stat
import sys
import threading
import time
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.log import config_logging
from pyftpdlib.servers import FTPServer
from .__init__ import PY2, TYPE_CHECKING, E
from .bos import bos
from .util import Pebkac, exclude_dotfiles, fsenc
try:
from pyftpdlib.ioloop import IOLoop
except ImportError:
p = os.path.join(E.mod, "vend")
print("loading asynchat from " + p)
sys.path.append(p)
from pyftpdlib.ioloop import IOLoop
if TYPE_CHECKING:
from .svchub import SvcHub
try:
import typing
from typing import Any, Optional
except:
pass
class FtpAuth(DummyAuthorizer):
def __init__(self, hub: "SvcHub") -> None:
super(FtpAuth, self).__init__()
self.hub = hub
def validate_authentication(
self, username: str, password: str, handler: Any
) -> None:
asrv = self.hub.asrv
if username == "anonymous":
password = ""
uname = "*"
if password:
uname = asrv.iacct.get(password, "")
handler.username = uname
if password and not uname:
raise AuthenticationFailed("Authentication failed.")
def get_home_dir(self, username: str) -> str:
return "/"
def has_user(self, username: str) -> bool:
asrv = self.hub.asrv
return username in asrv.acct
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
return True # handled at filesystem layer
def get_perms(self, username: str) -> str:
return "elradfmwMT"
def get_msg_login(self, username: str) -> str:
return "sup {}".format(username)
def get_msg_quit(self, username: str) -> str:
return "cya"
class FtpFs(AbstractedFS):
def __init__(
self, root: str, cmd_channel: Any
) -> None: # pylint: disable=super-init-not-called
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
self.hub: "SvcHub" = cmd_channel.hub
self.args = cmd_channel.args
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
self.cwd = "/" # pyftpdlib convention of leading slash
self.root = "/var/lib/empty"
self.listdirinfo = self.listdir
self.chdir(".")
def v2a(
self,
vpath: str,
r: bool = False,
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
try:
vpath = vpath.replace("\\", "/").lstrip("/")
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
if not vfs.realpath:
raise FilesystemError("no filesystem mounted at this path")
return os.path.join(vfs.realpath, rem)
except Pebkac as ex:
raise FilesystemError(str(ex))
def rv2a(
self,
vpath: str,
r: bool = False,
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
def ftp2fs(self, ftppath: str) -> str:
# return self.v2a(ftppath)
return ftppath # self.cwd must be vpath
def fs2ftp(self, fspath: str) -> str:
# raise NotImplementedError()
return fspath
def validpath(self, path: str) -> bool:
if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"):
raise FilesystemError("access to this file is forbidden")
return True
def open(self, filename: str, mode: str) -> typing.IO[Any]:
r = "r" in mode
w = "w" in mode or "a" in mode or "+" in mode
ap = self.rv2a(filename, r, w)
if w and bos.path.exists(ap):
raise FilesystemError("cannot open existing file for writing")
self.validpath(ap)
return open(fsenc(ap), mode)
def chdir(self, path: str) -> None:
self.cwd = join(self.cwd, path)
x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
def mkdir(self, path: str) -> None:
ap = self.rv2a(path, w=True)
bos.mkdir(ap)
def listdir(self, path: str) -> list[str]:
vpath = join(self.cwd, path).lstrip("/")
try:
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
fsroot, vfs_ls1, vfs_virt = vfs.ls(
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
vfs_ls = [x[0] for x in vfs_ls1]
vfs_ls.extend(vfs_virt.keys())
if not self.args.ed:
vfs_ls = exclude_dotfiles(vfs_ls)
vfs_ls.sort()
return vfs_ls
except:
if vpath:
# display write-only folders as empty
return []
# return list of volumes
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
return list(sorted(list(r.keys())))
def rmdir(self, path: str) -> None:
ap = self.rv2a(path, d=True)
bos.rmdir(ap)
def remove(self, path: str) -> None:
if self.args.no_del:
raise FilesystemError("the delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/")
try:
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp])
except Exception as ex:
raise FilesystemError(str(ex))
def rename(self, src: str, dst: str) -> None:
if not self.can_move:
raise FilesystemError("not allowed for user " + self.h.username)
if self.args.no_mv:
t = "the rename/move feature is disabled in server config"
raise FilesystemError(t)
svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/")
try:
self.hub.up2k.handle_mv(self.uname, svp, dvp)
except Exception as ex:
raise FilesystemError(str(ex))
def chmod(self, path: str, mode: str) -> None:
pass
def stat(self, path: str) -> os.stat_result:
try:
ap = self.rv2a(path, r=True)
return bos.stat(ap)
except:
ap = self.rv2a(path)
st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode):
raise
return st
def utime(self, path: str, timeval: float) -> None:
ap = self.rv2a(path, w=True)
return bos.utime(ap, (timeval, timeval))
def lstat(self, path: str) -> os.stat_result:
ap = self.rv2a(path)
return bos.lstat(ap)
def isfile(self, path: str) -> bool:
st = self.stat(path)
return stat.S_ISREG(st.st_mode)
def islink(self, path: str) -> bool:
ap = self.rv2a(path)
return bos.path.islink(ap)
def isdir(self, path: str) -> bool:
try:
st = self.stat(path)
return stat.S_ISDIR(st.st_mode)
except:
return True
def getsize(self, path: str) -> int:
ap = self.rv2a(path)
return bos.path.getsize(ap)
def getmtime(self, path: str) -> float:
ap = self.rv2a(path)
return bos.path.getmtime(ap)
def realpath(self, path: str) -> str:
return path
def lexists(self, path: str) -> bool:
ap = self.rv2a(path)
return bos.path.lexists(ap)
def get_user_by_uid(self, uid: int) -> str:
return "root"
def get_group_by_uid(self, gid: int) -> str:
return "root"
class FtpHandler(FTPHandler):
abstracted_fs = FtpFs
hub: "SvcHub" = None
args: argparse.Namespace = None
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
self.hub: "SvcHub" = FtpHandler.hub
self.args: argparse.Namespace = FtpHandler.args
if PY2:
FTPHandler.__init__(self, conn, server, ioloop)
else:
super(FtpHandler, self).__init__(conn, server, ioloop)
# abspath->vpath mapping to resolve log_transfer paths
self.vfs_map: dict[str, str] = {}
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
# Optional[str]
vp = join(self.fs.cwd, file).lstrip("/")
ap = self.fs.v2a(vp)
self.vfs_map[ap] = vp
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
ret = FTPHandler.ftp_STOR(self, file, mode)
# print("ftp_STOR: {} {} OK".format(vp, mode))
return ret
def log_transfer(
self,
cmd: str,
filename: bytes,
receive: bool,
completed: bool,
elapsed: float,
bytes: int,
) -> Any:
# None
ap = filename.decode("utf-8", "replace")
vp = self.vfs_map.pop(ap, None)
# print("xfer_end: {} => {}".format(ap, vp))
if vp:
vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file(
vfs.realpath,
vfs.flags,
rem,
fn,
self.remote_ip,
time.time(),
)
return FTPHandler.log_transfer(
self, cmd, filename, receive, completed, elapsed, bytes
)
try:
from pyftpdlib.handlers import TLS_FTPHandler
class SftpHandler(FtpHandler, TLS_FTPHandler):
pass
except:
pass
class Ftpd(object):
def __init__(self, hub: "SvcHub") -> None:
self.hub = hub
self.args = hub.args
hs = []
if self.args.ftp:
hs.append([FtpHandler, self.args.ftp])
if self.args.ftps:
try:
h1 = SftpHandler
except:
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
print(t.format(sys.executable))
sys.exit(1)
h1.certfile = os.path.join(E.cfg, "cert.pem")
h1.tls_control_required = True
h1.tls_data_required = True
hs.append([h1, self.args.ftps])
for h_lp in hs:
h2, lp = h_lp
h2.hub = hub
h2.args = hub.args
h2.authorizer = FtpAuth(hub)
if self.args.ftp_pr:
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
if self.args.ftp and self.args.ftps:
# divide port range in half
d = int((p2 - p1) / 2)
if lp == self.args.ftp:
p2 = p1 + d
else:
p1 += d + 1
h2.passive_ports = list(range(p1, p2 + 1))
if self.args.ftp_nat:
h2.masquerade_address = self.args.ftp_nat
if self.args.ftp_dbg:
config_logging(level=logging.DEBUG)
ioloop = IOLoop()
for ip in self.args.i:
for h, lp in hs:
FTPServer((ip, int(lp)), h, ioloop)
thr = threading.Thread(target=ioloop.loop)
thr.daemon = True
thr.start()
def join(p1: str, p2: str) -> str:
w = os.path.join(p1, p2.replace("\\", "/"))
return os.path.normpath(w).replace("\\", "/")
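One detail worth spelling out is the passive-port split in Ftpd.__init__: when both --ftp and --ftps are active, the configured port range (args.ftp_pr) is divided so the two handlers never hand out the same port. A worked example with an assumed range of 12000-12099:

p1, p2 = 12000, 12099
d = (p2 - p1) // 2                               # 49
ftp_ports = list(range(p1, p1 + d + 1))          # 12000..12049 for plain FTP
ftps_ports = list(range(p1 + d + 1, p2 + 1))     # 12050..12099 for FTPS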

File diff suppressed because it is too large


@@ -1,24 +1,36 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import argparse # typechk
import os import os
import time import re
import socket import socket
import threading # typechk
import time
HAVE_SSL = True
try: try:
HAVE_SSL = True
import ssl import ssl
except: except:
HAVE_SSL = False HAVE_SSL = False
from .__init__ import E from . import util as Util
from .util import Unrecv from .__init__ import TYPE_CHECKING, E
from .authsrv import AuthSrv # typechk
from .httpcli import HttpCli from .httpcli import HttpCli
from .u2idx import U2idx
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL
from .ico import Ico from .ico import Ico
from .mtag import HAVE_FFMPEG
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL, HAVE_VIPS
from .u2idx import U2idx
try:
from typing import Optional, Pattern, Union
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class HttpConn(object): class HttpConn(object):
@@ -27,31 +39,37 @@ class HttpConn(object):
creates an HttpCli for each request (Connection: Keep-Alive) creates an HttpCli for each request (Connection: Keep-Alive)
""" """
def __init__(self, sck, addr, hsrv): def __init__(
self, sck: socket.socket, addr: tuple[str, int], hsrv: "HttpSrv"
) -> None:
self.s = sck self.s = sck
self.sr: Optional[Util._Unrecv] = None
self.addr = addr self.addr = addr
self.hsrv = hsrv self.hsrv = hsrv
self.mutex = hsrv.mutex self.mutex: threading.Lock = hsrv.mutex # mypy404
self.args = hsrv.args self.args: argparse.Namespace = hsrv.args # mypy404
self.asrv = hsrv.asrv self.asrv: AuthSrv = hsrv.asrv # mypy404
self.cert_path = hsrv.cert_path self.cert_path = hsrv.cert_path
self.u2fh = hsrv.u2fh self.u2fh: Util.FHC = hsrv.u2fh # mypy404
enth = HAVE_PIL and not self.args.no_thumb enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv) if enth else None self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None # mypy404
self.ico = Ico(self.args) self.ico: Ico = Ico(self.args) # mypy404
self.t0 = time.time() self.t0: float = time.time() # mypy404
self.stopping = False self.stopping = False
self.nreq = 0 self.nreq: int = 0 # mypy404
self.nbyte = 0 self.nbyte: int = 0 # mypy404
self.u2idx = None self.u2idx: Optional[U2idx] = None
self.log_func = hsrv.log self.log_func: Util.RootLogger = hsrv.log # mypy404
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None self.log_src: str = "httpconn" # mypy404
self.lf_url: Optional[Pattern[str]] = (
re.compile(self.args.lf_url) if self.args.lf_url else None
) # mypy404
self.set_rproxy() self.set_rproxy()
def shutdown(self): def shutdown(self) -> None:
self.stopping = True self.stopping = True
try: try:
self.s.shutdown(socket.SHUT_RDWR) self.s.shutdown(socket.SHUT_RDWR)
@@ -59,7 +77,7 @@ class HttpConn(object):
except: except:
pass pass
def set_rproxy(self, ip=None): def set_rproxy(self, ip: Optional[str] = None) -> str:
if ip is None: if ip is None:
color = 36 color = 36
ip = self.addr[0] ip = self.addr[0]
@@ -72,35 +90,37 @@ class HttpConn(object):
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26) self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src return self.log_src
def respath(self, res_name): def respath(self, res_name: str) -> str:
return os.path.join(E.mod, "web", res_name) return os.path.join(E.mod, "web", res_name)
def log(self, msg, c=0): def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func(self.log_src, msg, c) self.log_func(self.log_src, msg, c)
def get_u2idx(self): def get_u2idx(self) -> U2idx:
# one u2idx per tcp connection;
# sqlite3 fully parallelizes under python threads
if not self.u2idx: if not self.u2idx:
self.u2idx = U2idx(self) self.u2idx = U2idx(self)
return self.u2idx return self.u2idx
def _detect_https(self): def _detect_https(self) -> bool:
method = None method = None
if self.cert_path: if self.cert_path:
try: try:
method = self.s.recv(4, socket.MSG_PEEK) method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout: except socket.timeout:
return return False
except AttributeError: except AttributeError:
# jython does not support msg_peek; forget about https # jython does not support msg_peek; forget about https
method = self.s.recv(4) method = self.s.recv(4)
self.sr = Unrecv(self.s) self.sr = Util.Unrecv(self.s, self.log)
self.sr.buf = method self.sr.buf = method
# jython used to do this, they stopped since it's broken # jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now # but reimplementing sendall is out of scope for now
if not getattr(self.s, "sendall", None): if not getattr(self.s, "sendall", None):
self.s.sendall = self.s.send self.s.sendall = self.s.send # type: ignore
if len(method) != 4: if len(method) != 4:
err = "need at least 4 bytes in the first packet; got {}".format( err = "need at least 4 bytes in the first packet; got {}".format(
@@ -110,17 +130,18 @@ class HttpConn(object):
self.log(err) self.log(err)
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8")) self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return return False
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"] return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
def run(self): def run(self) -> None:
self.sr = None self.sr = None
if self.args.https_only: if self.args.https_only:
is_https = True is_https = True
elif self.args.http_only or not HAVE_SSL: elif self.args.http_only or not HAVE_SSL:
is_https = False is_https = False
else: else:
# raise Exception("asdf")
is_https = self._detect_https() is_https = self._detect_https()
if is_https: if is_https:
@@ -149,14 +170,15 @@ class HttpConn(object):
self.s = ctx.wrap_socket(self.s, server_side=True) self.s = ctx.wrap_socket(self.s, server_side=True)
msg = [ msg = [
"\033[1;3{:d}m{}".format(c, s) "\033[1;3{:d}m{}".format(c, s)
for c, s in zip([0, 5, 0], self.s.cipher()) for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
] ]
self.log(" ".join(msg) + "\033[0m") self.log(" ".join(msg) + "\033[0m")
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"): if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
overlap = [y[::-1] for y in self.s.shared_ciphers()] ciphers = self.s.shared_ciphers()
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)] assert ciphers
self.log("\n".join(lines)) overlap = [str(y[::-1]) for y in ciphers]
self.log("TLS cipher overlap:" + "\n".join(overlap))
for k, v in [ for k, v in [
["compression", self.s.compression()], ["compression", self.s.compression()],
["ALPN proto", self.s.selected_alpn_protocol()], ["ALPN proto", self.s.selected_alpn_protocol()],
@@ -181,7 +203,7 @@ class HttpConn(object):
return return
if not self.sr: if not self.sr:
self.sr = Unrecv(self.s) self.sr = Util.Unrecv(self.s, self.log)
while not self.stopping: while not self.stopping:
self.nreq += 1 self.nreq += 1
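The HTTPS autodetection above relies on MSG_PEEK: the first four bytes of the connection are inspected without being consumed, and anything that is not a known HTTP method prefix is treated as a TLS ClientHello. A condensed standalone sketch of the same check (the real method also handles timeouts and jython's missing MSG_PEEK):

import socket

HTTP_PREFIXES = (b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI")

def looks_like_tls(sck: socket.socket) -> bool:
    head = sck.recv(4, socket.MSG_PEEK)    # peek only; the bytes stay readable
    return len(head) == 4 and head not in HTTP_PREFIXES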


@@ -1,13 +1,15 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os
import sys
import time
import math
import base64 import base64
import math
import os
import socket import socket
import sys
import threading import threading
import time
import queue
try: try:
import jinja2 import jinja2
@@ -26,15 +28,18 @@ except ImportError:
) )
sys.exit(1) sys.exit(1)
from .__init__ import E, PY2, MACOS from .__init__ import MACOS, TYPE_CHECKING, E
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
from .bos import bos from .bos import bos
from .httpconn import HttpConn from .httpconn import HttpConn
from .util import FHC, min_ex, spack, start_log_thrs, start_stackmon
if PY2: if TYPE_CHECKING:
import Queue as queue from .broker_util import BrokerCli
else:
import queue try:
from typing import Any, Optional
except:
pass
class HttpSrv(object): class HttpSrv(object):
@@ -43,7 +48,7 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads) relying on MpSrv for performance (HttpSrv is just plain threads)
""" """
def __init__(self, broker, nid): def __init__(self, broker: "BrokerCli", nid: Optional[int]) -> None:
self.broker = broker self.broker = broker
self.nid = nid self.nid = nid
self.args = broker.args self.args = broker.args
@@ -58,17 +63,19 @@ class HttpSrv(object):
self.tp_nthr = 0 # actual self.tp_nthr = 0 # actual
self.tp_ncli = 0 # fading self.tp_ncli = 0 # fading
self.tp_time = None # latest worker collect self.tp_time = 0.0 # latest worker collect
self.tp_q = None if self.args.no_htp else queue.LifoQueue() self.tp_q: Optional[queue.LifoQueue[Any]] = (
self.t_periodic = None None if self.args.no_htp else queue.LifoQueue()
)
self.t_periodic: Optional[threading.Thread] = None
self.u2fh = FHC() self.u2fh = FHC()
self.srvs = [] self.srvs: list[socket.socket] = []
self.ncli = 0 # exact self.ncli = 0 # exact
self.clients = {} # laggy self.clients: set[HttpConn] = set() # laggy
self.nclimax = 0 self.nclimax = 0
self.cb_ts = 0 self.cb_ts = 0.0
self.cb_v = 0 self.cb_v = ""
env = jinja2.Environment() env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -82,7 +89,7 @@ class HttpSrv(object):
if bos.path.exists(cert_path): if bos.path.exists(cert_path):
self.cert_path = cert_path self.cert_path = cert_path
else: else:
self.cert_path = None self.cert_path = ""
if self.tp_q: if self.tp_q:
self.start_threads(4) self.start_threads(4)
@@ -94,7 +101,19 @@ class HttpSrv(object):
if self.args.log_thrs: if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid) start_log_thrs(self.log, self.args.log_thrs, nid)
def start_threads(self, n): self.th_cfg: dict[str, Any] = {}
t = threading.Thread(target=self.post_init)
t.daemon = True
t.start()
def post_init(self) -> None:
try:
x = self.broker.ask("thumbsrv.getcfg")
self.th_cfg = x.get()
except:
pass
def start_threads(self, n: int) -> None:
self.tp_nthr += n self.tp_nthr += n
if self.args.log_htp: if self.args.log_htp:
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6) self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
@@ -107,15 +126,16 @@ class HttpSrv(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def stop_threads(self, n): def stop_threads(self, n: int) -> None:
self.tp_nthr -= n self.tp_nthr -= n
if self.args.log_htp: if self.args.log_htp:
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6) self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
assert self.tp_q
for _ in range(n): for _ in range(n):
self.tp_q.put(None) self.tp_q.put(None)
def periodic(self): def periodic(self) -> None:
while True: while True:
time.sleep(2 if self.tp_ncli or self.ncli else 10) time.sleep(2 if self.tp_ncli or self.ncli else 10)
with self.mutex: with self.mutex:
@@ -129,7 +149,7 @@ class HttpSrv(object):
self.t_periodic = None self.t_periodic = None
return return
def listen(self, sck, nlisteners): def listen(self, sck: socket.socket, nlisteners: int) -> None:
ip, port = sck.getsockname() ip, port = sck.getsockname()
self.srvs.append(sck) self.srvs.append(sck)
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners) self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
@@ -141,15 +161,15 @@ class HttpSrv(object):
t.daemon = True t.daemon = True
t.start() t.start()
def thr_listen(self, srv_sck): def thr_listen(self, srv_sck: socket.socket) -> None:
"""listens on a shared tcp server""" """listens on a shared tcp server"""
ip, port = srv_sck.getsockname() ip, port = srv_sck.getsockname()
fno = srv_sck.fileno() fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno) msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg) self.log(self.name, msg)
def fun(): def fun() -> None:
self.broker.put(False, "cb_httpsrv_up") self.broker.say("cb_httpsrv_up")
threading.Thread(target=fun).start() threading.Thread(target=fun).start()
@@ -173,21 +193,21 @@ class HttpSrv(object):
continue continue
if self.args.log_conn: if self.args.log_conn:
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format( t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, ip, port % 8, port "-" * 3, ip, port % 8, port
) )
self.log("%s %s" % addr, m, c="1;30") self.log("%s %s" % addr, t, c="1;30")
self.accept(sck, addr) self.accept(sck, addr)
def accept(self, sck, addr): def accept(self, sck: socket.socket, addr: tuple[str, int]) -> None:
"""takes an incoming tcp connection and creates a thread to handle it""" """takes an incoming tcp connection and creates a thread to handle it"""
now = time.time() now = time.time()
if now - (self.tp_time or now) > 300: if now - (self.tp_time or now) > 300:
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}" t = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1) self.log(self.name, t.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
self.tp_time = None self.tp_time = 0
self.tp_q = None self.tp_q = None
with self.mutex: with self.mutex:
@@ -197,10 +217,10 @@ class HttpSrv(object):
if self.nid: if self.nid:
name += "-{}".format(self.nid) name += "-{}".format(self.nid)
t = threading.Thread(target=self.periodic, name=name) thr = threading.Thread(target=self.periodic, name=name)
self.t_periodic = t self.t_periodic = thr
t.daemon = True thr.daemon = True
t.start() thr.start()
if self.tp_q: if self.tp_q:
self.tp_time = self.tp_time or now self.tp_time = self.tp_time or now
@@ -212,8 +232,8 @@ class HttpSrv(object):
return return
if not self.args.no_htp: if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n" t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1) self.log(self.name, t, 1)
thr = threading.Thread( thr = threading.Thread(
target=self.thr_client, target=self.thr_client,
@@ -223,14 +243,15 @@ class HttpSrv(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def thr_poolw(self): def thr_poolw(self) -> None:
assert self.tp_q
while True: while True:
task = self.tp_q.get() task = self.tp_q.get()
if not task: if not task:
break break
with self.mutex: with self.mutex:
self.tp_time = None self.tp_time = 0
try: try:
sck, addr = task sck, addr = task
@@ -243,7 +264,7 @@ class HttpSrv(object):
except: except:
self.log(self.name, "thr_client: " + min_ex(), 3) self.log(self.name, "thr_client: " + min_ex(), 3)
def shutdown(self): def shutdown(self) -> None:
self.stopping = True self.stopping = True
for srv in self.srvs: for srv in self.srvs:
try: try:
@@ -251,7 +272,7 @@ class HttpSrv(object):
except: except:
pass pass
clients = list(self.clients.keys()) clients = list(self.clients)
for cli in clients: for cli in clients:
try: try:
cli.shutdown() cli.shutdown()
@@ -267,13 +288,13 @@ class HttpSrv(object):
self.log(self.name, "ok bye") self.log(self.name, "ok bye")
def thr_client(self, sck, addr): def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None:
"""thread managing one tcp client""" """thread managing one tcp client"""
sck.settimeout(120) sck.settimeout(120)
cli = HttpConn(sck, addr, self) cli = HttpConn(sck, addr, self)
with self.mutex: with self.mutex:
self.clients[cli] = 0 self.clients.add(cli)
fno = sck.fileno() fno = sck.fileno()
try: try:
@@ -316,10 +337,10 @@ class HttpSrv(object):
raise raise
finally: finally:
with self.mutex: with self.mutex:
del self.clients[cli] self.clients.remove(cli)
self.ncli -= 1 self.ncli -= 1
def cachebuster(self): def cachebuster(self) -> str:
if time.time() - self.cb_ts < 1: if time.time() - self.cb_ts < 1:
return self.cb_v return self.cb_v
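The thread pool underneath HttpSrv is a LifoQueue of (socket, addr) tasks: workers block on get(), stop_threads() retires one worker per None it pushes, and LIFO order keeps recently-active threads warm. A minimal sketch of that shape -- the handler body is a placeholder standing in for HttpConn(...).run():

import queue
import threading

tp_q = queue.LifoQueue()

def handle(sck, addr):
    pass                              # placeholder for the per-connection handler

def thr_poolw():
    while True:
        task = tp_q.get()
        if not task:                  # a None task retires this worker
            return
        handle(*task)

for _ in range(4):
    threading.Thread(target=thr_poolw, daemon=True).start()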


@@ -1,28 +1,28 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import hashlib import argparse # typechk
import colorsys import colorsys
import hashlib
from .__init__ import PY2 from .__init__ import PY2
class Ico(object): class Ico(object):
def __init__(self, args): def __init__(self, args: argparse.Namespace) -> None:
self.args = args self.args = args
def get(self, ext, as_thumb): def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break""" """placeholder to make thumbnails not break"""
h = hashlib.md5(ext.encode("utf-8")).digest()[:2] zb = hashlib.md5(ext.encode("utf-8")).digest()[:2]
if PY2: if PY2:
h = [ord(x) for x in h] zb = [ord(x) for x in zb]
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3) c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1) c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
c = list(c1) + list(c2) ci = [int(x * 255) for x in list(c1) + list(c2)]
c = [int(x * 255) for x in c] c = "".join(["{:02x}".format(x) for x in ci])
c = "".join(["{:02x}".format(x) for x in c])
h = 30 h = 30
if not self.args.th_no_crop and as_thumb: if not self.args.th_no_crop and as_thumb:
@@ -37,6 +37,6 @@ class Ico(object):
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text> fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg> </g></svg>
""" """
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8") svg = svg.format(h, c[:6], c[6:], ext)
return ["image/svg+xml", svg] return "image/svg+xml", svg.encode("utf-8")


@@ -1,18 +1,26 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import argparse
import sys
import json import json
import os
import shutil import shutil
import subprocess as sp import subprocess as sp
import sys
from .__init__ import PY2, WINDOWS, unicode from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
from .bos import bos from .bos import bos
from .util import REKOBO_LKEY, fsenc, retchk, runcmd, uncyg
try:
from typing import Any, Union
from .util import RootLogger
except:
pass
def have_ff(cmd): def have_ff(cmd: str) -> bool:
if PY2: if PY2:
print("# checking {}".format(cmd)) print("# checking {}".format(cmd))
cmd = (cmd + " -version").encode("ascii").split(b" ") cmd = (cmd + " -version").encode("ascii").split(b" ")
@@ -30,7 +38,7 @@ HAVE_FFPROBE = have_ff("ffprobe")
class MParser(object): class MParser(object):
def __init__(self, cmdline): def __init__(self, cmdline: str) -> None:
self.tag, args = cmdline.split("=", 1) self.tag, args = cmdline.split("=", 1)
self.tags = self.tag.split(",") self.tags = self.tag.split(",")
@@ -73,7 +81,9 @@ class MParser(object):
raise Exception() raise Exception()
def ffprobe(abspath): def ffprobe(
abspath: str, timeout: int = 10
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
cmd = [ cmd = [
b"ffprobe", b"ffprobe",
b"-hide_banner", b"-hide_banner",
@@ -82,21 +92,20 @@ def ffprobe(abspath):
b"--", b"--",
fsenc(abspath), fsenc(abspath),
] ]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) rc, so, se = runcmd(cmd, timeout=timeout)
r = p.communicate() retchk(rc, cmd, se)
txt = r[0].decode("utf-8", "replace") return parse_ffprobe(so)
return parse_ffprobe(txt)
def parse_ffprobe(txt): def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
"""ffprobe -show_format -show_streams""" """ffprobe -show_format -show_streams"""
streams = [] streams = []
fmt = {} fmt = {}
g = None g = {}
for ln in [x.rstrip("\r") for x in txt.split("\n")]: for ln in [x.rstrip("\r") for x in txt.split("\n")]:
try: try:
k, v = ln.split("=", 1) sk, sv = ln.split("=", 1)
g[k] = v g[sk] = sv
continue continue
except: except:
pass pass
@@ -110,8 +119,8 @@ def parse_ffprobe(txt):
fmt = g fmt = g
streams = [fmt] + streams streams = [fmt] + streams
ret = {} # processed ret: dict[str, Any] = {} # processed
md = {} # raw tags md: dict[str, list[Any]] = {} # raw tags
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"] is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]: if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
@@ -162,43 +171,43 @@ def parse_ffprobe(txt):
kvm = [["duration", ".dur"], ["bit_rate", ".q"]] kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
for sk, rk in kvm: for sk, rk in kvm:
v = strm.get(sk) v1 = strm.get(sk)
if v is None: if v1 is None:
continue continue
if rk.startswith("."): if rk.startswith("."):
try: try:
v = float(v) zf = float(v1)
v2 = ret.get(rk) v2 = ret.get(rk)
if v2 is None or v > v2: if v2 is None or zf > v2:
ret[rk] = v ret[rk] = zf
except: except:
# sqlite doesnt care but the code below does # sqlite doesnt care but the code below does
if v not in ["N/A"]: if v1 not in ["N/A"]:
ret[rk] = v ret[rk] = v1
else: else:
ret[rk] = v ret[rk] = v1
if ret.get("vc") == "ansi": # shellscript if ret.get("vc") == "ansi": # shellscript
return {}, {} return {}, {}
for strm in streams: for strm in streams:
for k, v in strm.items(): for sk, sv in strm.items():
if not k.startswith("TAG:"): if not sk.startswith("TAG:"):
continue continue
k = k[4:].strip() sk = sk[4:].strip()
v = v.strip() sv = sv.strip()
if k and v and k not in md: if sk and sv and sk not in md:
md[k] = [v] md[sk] = [sv]
for k in [".q", ".vq", ".aq"]: for sk in [".q", ".vq", ".aq"]:
if k in ret: if sk in ret:
ret[k] /= 1000 # bit_rate=320000 ret[sk] /= 1000 # bit_rate=320000
for k in [".q", ".vq", ".aq", ".resw", ".resh"]: for sk in [".q", ".vq", ".aq", ".resw", ".resh"]:
if k in ret: if sk in ret:
ret[k] = int(ret[k]) ret[sk] = int(ret[sk])
if ".fps" in ret: if ".fps" in ret:
fps = ret[".fps"] fps = ret[".fps"]
@@ -220,13 +229,13 @@ def parse_ffprobe(txt):
if ".resw" in ret and ".resh" in ret: if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"]) ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
ret = {k: [0, v] for k, v in ret.items()} zd = {k: (0, v) for k, v in ret.items()}
return ret, md return zd, md
class MTag(object): class MTag(object):
def __init__(self, log_func, args): def __init__(self, log_func: RootLogger, args: argparse.Namespace) -> None:
self.log_func = log_func self.log_func = log_func
self.args = args self.args = args
self.usable = True self.usable = True
@@ -243,7 +252,7 @@ class MTag(object):
if self.backend == "mutagen": if self.backend == "mutagen":
self.get = self.get_mutagen self.get = self.get_mutagen
try: try:
import mutagen import mutagen # noqa: F401 # pylint: disable=unused-import,import-outside-toplevel
except: except:
self.log("could not load Mutagen, trying FFprobe instead", c=3) self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe" self.backend = "ffprobe"
@@ -340,31 +349,33 @@ class MTag(object):
} }
# self.get = self.compare # self.get = self.compare
def log(self, msg, c=0): def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("mtag", msg, c) self.log_func("mtag", msg, c)
def normalize_tags(self, ret, md): def normalize_tags(
for k, v in dict(md).items(): self, parser_output: dict[str, tuple[int, Any]], md: dict[str, list[Any]]
if not v: ) -> dict[str, Union[str, float]]:
for sk, tv in dict(md).items():
if not tv:
continue continue
k = k.lower().split("::")[0].strip() sk = sk.lower().split("::")[0].strip()
mk = self.rmap.get(k) key_mapping = self.rmap.get(sk)
if not mk: if not key_mapping:
continue continue
pref, mk = mk priority, alias = key_mapping
if mk not in ret or ret[mk][0] > pref: if alias not in parser_output or parser_output[alias][0] > priority:
ret[mk] = [pref, v[0]] parser_output[alias] = (priority, tv[0])
# take first value # take first value (lowest priority / most preferred)
ret = {k: unicode(v[1]).strip() for k, v in ret.items()} ret = {sk: unicode(tv[1]).strip() for sk, tv in parser_output.items()}
# track 3/7 => track 3 # track 3/7 => track 3
for k, v in ret.items(): for sk, tv in ret.items():
if k[0] == ".": if sk[0] == ".":
v = v.split("/")[0].strip().lstrip("0") sv = str(tv).split("/")[0].strip().lstrip("0")
ret[k] = v or 0 ret[sk] = sv or 0
# normalize key notation to rekobo # normalize key notation to rekobo
okey = ret.get("key") okey = ret.get("key")
@@ -374,7 +385,7 @@ class MTag(object):
return ret return ret
def compare(self, abspath): def compare(self, abspath: str) -> dict[str, Union[str, float]]:
if abspath.endswith(".au"): if abspath.endswith(".au"):
return {} return {}
@@ -412,7 +423,7 @@ class MTag(object):
return r1 return r1
def get_mutagen(self, abspath): def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
if not bos.path.isfile(abspath): if not bos.path.isfile(abspath):
return {} return {}
@@ -420,12 +431,13 @@ class MTag(object):
try: try:
md = mutagen.File(fsenc(abspath), easy=True) md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length if not md.info.length and not md.info.codec:
except Exception as ex: raise Exception()
except:
return self.get_ffprobe(abspath) if self.can_ffprobe else {} return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = bos.path.getsize(abspath) sz = bos.path.getsize(abspath)
ret = {".q": [0, int((sz / md.info.length) / 128)]} ret = {".q": (0, int((sz / md.info.length) / 128))}
for attr, k, norm in [ for attr, k, norm in [
["codec", "ac", unicode], ["codec", "ac", unicode],
@@ -456,53 +468,55 @@ class MTag(object):
if k == "ac" and v.startswith("mp4a.40."): if k == "ac" and v.startswith("mp4a.40."):
v = "aac" v = "aac"
ret[k] = [0, norm(v)] ret[k] = (0, norm(v))
return self.normalize_tags(ret, md) return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath): def get_ffprobe(self, abspath: str) -> dict[str, Union[str, float]]:
if not bos.path.isfile(abspath): if not bos.path.isfile(abspath):
return {} return {}
ret, md = ffprobe(abspath) ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md) return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath): def get_bin(self, parsers: dict[str, MParser], abspath: str) -> dict[str, Any]:
if not bos.path.isfile(abspath): if not bos.path.isfile(abspath):
return {} return {}
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
pypath = [str(pypath)] + [str(x) for x in sys.path if x] zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(pypath)) pypath = str(os.pathsep.join(zsl))
env = os.environ.copy() env = os.environ.copy()
env["PYTHONPATH"] = pypath env["PYTHONPATH"] = pypath
ret = {} ret = {}
for tagname, mp in parsers.items(): for tagname, parser in parsers.items():
try: try:
cmd = [mp.bin, abspath] cmd = [parser.bin, abspath]
if mp.bin.endswith(".py"): if parser.bin.endswith(".py"):
cmd = [sys.executable] + cmd cmd = [sys.executable] + cmd
args = {"env": env, "timeout": mp.timeout} args = {"env": env, "timeout": parser.timeout}
if WINDOWS: if WINDOWS:
args["creationflags"] = 0x4000 args["creationflags"] = 0x4000
else: else:
cmd = ["nice"] + cmd cmd = ["nice"] + cmd
cmd = [fsenc(x) for x in cmd] bcmd = [fsenc(x) for x in cmd]
v = sp.check_output(cmd, **args).strip() rc, v, err = runcmd(bcmd, **args) # type: ignore
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
v = v.strip()
if not v: if not v:
continue continue
if "," not in tagname: if "," not in tagname:
ret[tagname] = v.decode("utf-8") ret[tagname] = v
else: else:
v = json.loads(v) zj = json.loads(v)
for tag in tagname.split(","): for tag in tagname.split(","):
if tag and tag in v: if tag and tag in zj:
ret[tag] = v[tag] ret[tag] = zj[tag]
except: except:
pass pass
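
ffprobe() and get_bin() above now go through runcmd()/retchk() with a timeout instead of raw sp.Popen / sp.check_output. A rough sketch of what such a helper amounts to (assumed shape; the real util.runcmd and retchk may differ):

    import subprocess as sp

    def runcmd(cmd, timeout=None, **kw):
        # capture stdout/stderr, enforce the timeout, return (rc, stdout, stderr)
        p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, **kw)
        so, se = p.communicate(timeout=timeout)
        return p.returncode, so.decode("utf-8", "replace"), se.decode("utf-8", "replace")

    def retchk(rc, cmd, se):
        # presumably raises (or logs) when the subprocess exited nonzero
        if rc:
            raise Exception("{} exited {}: {}".format(cmd[0], rc, se))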

View File

@@ -4,20 +4,29 @@ from __future__ import print_function, unicode_literals
import tarfile import tarfile
import threading import threading
from .sutil import errdesc from queue import Queue
from .util import Queue, fsenc
from .bos import bos from .bos import bos
from .sutil import StreamArc, errdesc
from .util import fsenc, min_ex
try:
from typing import Any, Generator, Optional
from .util import NamedLogger
except:
pass
class QFile(object): class QFile(object): # inherit io.StringIO for painful typing
"""file-like object which buffers writes into a queue""" """file-like object which buffers writes into a queue"""
def __init__(self): def __init__(self) -> None:
self.q = Queue(64) self.q: Queue[Optional[bytes]] = Queue(64)
self.bq = [] self.bq: list[bytes] = []
self.nq = 0 self.nq = 0
def write(self, buf): def write(self, buf: Optional[bytes]) -> None:
if buf is None or self.nq >= 240 * 1024: if buf is None or self.nq >= 240 * 1024:
self.q.put(b"".join(self.bq)) self.q.put(b"".join(self.bq))
self.bq = [] self.bq = []
@@ -30,27 +39,32 @@ class QFile(object):
self.nq += len(buf) self.nq += len(buf)
class StreamTar(object): class StreamTar(StreamArc):
"""construct in-memory tar file from the given path""" """construct in-memory tar file from the given path"""
def __init__(self, log, fgen, **kwargs): def __init__(
self,
log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):
super(StreamTar, self).__init__(log, fgen)
self.ci = 0 self.ci = 0
self.co = 0 self.co = 0
self.qfile = QFile() self.qfile = QFile()
self.log = log self.errf: dict[str, Any] = {}
self.fgen = fgen
self.errf = None
# python 3.8 changed to PAX_FORMAT as default, # python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features # waste of space and don't care about the new features
fmt = tarfile.GNU_FORMAT fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
w = threading.Thread(target=self._gen, name="star-gen") w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True w.daemon = True
w.start() w.start()
def gen(self): def gen(self) -> Generator[Optional[bytes], None, None]:
while True: while True:
buf = self.qfile.q.get() buf = self.qfile.q.get()
if not buf: if not buf:
@@ -63,7 +77,7 @@ class StreamTar(object):
if self.errf: if self.errf:
bos.unlink(self.errf["ap"]) bos.unlink(self.errf["ap"])
def ser(self, f): def ser(self, f: dict[str, Any]) -> None:
name = f["vp"] name = f["vp"]
src = f["ap"] src = f["ap"]
fsi = f["st"] fsi = f["st"]
@@ -76,20 +90,21 @@ class StreamTar(object):
inf.gid = 0 inf.gid = 0
self.ci += inf.size self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f: with open(fsenc(src), "rb", 512 * 1024) as fo:
self.tar.addfile(inf, f) self.tar.addfile(inf, fo)
def _gen(self): def _gen(self) -> None:
errors = [] errors = []
for f in self.fgen: for f in self.fgen:
if "err" in f: if "err" in f:
errors.append([f["vp"], f["err"]]) errors.append((f["vp"], f["err"]))
continue continue
try: try:
self.ser(f) self.ser(f)
except Exception as ex: except:
errors.append([f["vp"], repr(ex)]) ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append((f["vp"], ex))
if errors: if errors:
self.errf, txt = errdesc(errors) self.errf, txt = errdesc(errors)
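
StreamTar keeps its streaming approach: tarfile writes into a queue-backed file object on a worker thread while gen() drains the queue. A self-contained sketch of that pattern (QFile's write-buffering and the error-report file are left out):

    import io
    import tarfile
    import threading
    from queue import Queue

    q = Queue(64)  # chunks of the tar stream; None marks the end

    class QFile(object):
        def write(self, buf):          # tarfile's stream writer only needs write()
            q.put(bytes(buf))

    def produce():
        tar = tarfile.open(fileobj=QFile(), mode="w|", format=tarfile.GNU_FORMAT)
        data = b"hello\n"
        inf = tarfile.TarInfo("hello.txt")
        inf.size = len(data)
        tar.addfile(inf, io.BytesIO(data))
        tar.close()
        q.put(None)

    threading.Thread(target=produce, daemon=True).start()
    archive = b"".join(iter(q.get, None))  # copyparty yields these chunks to the client instead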

View File

@@ -12,23 +12,28 @@ Original source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/m
# This code is released under the Python license and the BSD 2-clause license # This code is released under the Python license and the BSD 2-clause license
import platform
import codecs import codecs
import platform
import sys import sys
PY3 = sys.version_info[0] > 2 PY3 = sys.version_info[0] > 2
WINDOWS = platform.system() == "Windows" WINDOWS = platform.system() == "Windows"
FS_ERRORS = "surrogateescape" FS_ERRORS = "surrogateescape"
try:
from typing import Any
except:
pass
def u(text):
def u(text: Any) -> str:
if PY3: if PY3:
return text return text
else: else:
return text.decode("unicode_escape") return text.decode("unicode_escape")
def b(data): def b(data: Any) -> bytes:
if PY3: if PY3:
return data.encode("latin1") return data.encode("latin1")
else: else:
@@ -43,7 +48,7 @@ else:
bytes_chr = chr bytes_chr = chr
def surrogateescape_handler(exc): def surrogateescape_handler(exc: Any) -> tuple[str, int]:
""" """
Pure Python implementation of the PEP 383: the "surrogateescape" error Pure Python implementation of the PEP 383: the "surrogateescape" error
handler of Python 3. Undecodable bytes will be replaced by a Unicode handler of Python 3. Undecodable bytes will be replaced by a Unicode
@@ -74,7 +79,7 @@ class NotASurrogateError(Exception):
pass pass
def replace_surrogate_encode(mystring): def replace_surrogate_encode(mystring: str) -> str:
""" """
Returns a (unicode) string, not the more logical bytes, because the codecs Returns a (unicode) string, not the more logical bytes, because the codecs
register_error functionality expects this. register_error functionality expects this.
@@ -100,7 +105,7 @@ def replace_surrogate_encode(mystring):
return str().join(decoded) return str().join(decoded)
def replace_surrogate_decode(mybytes): def replace_surrogate_decode(mybytes: bytes) -> str:
""" """
Returns a (unicode) string Returns a (unicode) string
""" """
@@ -121,7 +126,7 @@ def replace_surrogate_decode(mybytes):
return str().join(decoded) return str().join(decoded)
def encodefilename(fn): def encodefilename(fn: str) -> bytes:
if FS_ENCODING == "ascii": if FS_ENCODING == "ascii":
# ASCII encoder of Python 2 expects that the error handler returns a # ASCII encoder of Python 2 expects that the error handler returns a
# Unicode string encodable to ASCII, whereas our surrogateescape error # Unicode string encodable to ASCII, whereas our surrogateescape error
@@ -161,7 +166,7 @@ def encodefilename(fn):
return fn.encode(FS_ENCODING, FS_ERRORS) return fn.encode(FS_ENCODING, FS_ERRORS)
def decodefilename(fn): def decodefilename(fn: bytes) -> str:
return fn.decode(FS_ENCODING, FS_ERRORS) return fn.decode(FS_ENCODING, FS_ERRORS)
@@ -181,7 +186,7 @@ if WINDOWS and not PY3:
FS_ENCODING = codecs.lookup(FS_ENCODING).name FS_ENCODING = codecs.lookup(FS_ENCODING).name
def register_surrogateescape(): def register_surrogateescape() -> None:
""" """
Registers the surrogateescape error handler on Python 2 (only) Registers the surrogateescape error handler on Python 2 (only)
""" """

View File

@@ -1,14 +1,34 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import time
import tempfile import tempfile
from datetime import datetime from datetime import datetime
from .bos import bos from .bos import bos
try:
from typing import Any, Generator, Optional
def errdesc(errors): from .util import NamedLogger
except:
pass
class StreamArc(object):
def __init__(
self,
log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):
self.log = log
self.fgen = fgen
def gen(self) -> Generator[Optional[bytes], None, None]:
pass
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""] report = ["copyparty failed to add the following files to the archive:", ""]
for fn, err in errors: for fn, err in errors:
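
The new StreamArc base class formalizes what the tar and zip streamers already shared: a log function plus a generator of per-file dicts. Judging from ser()/_gen() in the neighbouring diffs, the items look roughly like this (field names from the diff, values illustrative):

    import os

    ok_item = {          # a file to pack: virtual path, absolute path, stat result
        "vp": "music/song.flac",
        "ap": "/srv/music/song.flac",
        "st": os.stat(__file__),  # an os.stat_result; st_size and st_mtime are read
    }
    err_item = {         # a file that could not be read; errdesc() reports these
        "vp": "music/missing.flac",
        "err": "not found",
    }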

View File

@@ -1,41 +1,51 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import argparse
import calendar
import os import os
import sys
import time
import shlex import shlex
import string
import signal import signal
import socket import socket
import string
import sys
import threading import threading
import time
from datetime import datetime, timedelta from datetime import datetime, timedelta
import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode try:
from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re from types import FrameType
import typing
from typing import Optional, Union
except:
pass
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, E, unicode
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k
from .util import ansi_re, min_ex, mp, start_log_thrs, start_stackmon
class SvcHub(object): class SvcHub(object):
""" """
Hosts all services which cannot be parallelized due to reliance on monolithic resources. Hosts all services which cannot be parallelized due to reliance on monolithic resources.
Creates a Broker which does most of the heavy stuff; hosted services can use this to perform work: Creates a Broker which does most of the heavy stuff; hosted services can use this to perform work:
hub.broker.put(want_reply, destination, args_list). hub.broker.<say|ask>(destination, args_list).
Either BrokerThr (plain threads) or BrokerMP (multiprocessing) is used depending on configuration. Either BrokerThr (plain threads) or BrokerMP (multiprocessing) is used depending on configuration.
Nothing is returned synchronously; if you want any value returned from the call, Nothing is returned synchronously; if you want any value returned from the call,
put() can return a queue (if want_reply=True) which has a blocking get() with the response. put() can return a queue (if want_reply=True) which has a blocking get() with the response.
""" """
def __init__(self, args, argv, printed): def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
self.args = args self.args = args
self.argv = argv self.argv = argv
self.logf = None self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = ""
self.stop_req = False self.stop_req = False
self.reload_req = False self.reload_req = False
self.stopping = False self.stopping = False
@@ -47,6 +57,29 @@ class SvcHub(object):
self.log_mutex = threading.Lock() self.log_mutex = threading.Lock()
self.next_day = 0 self.next_day = 0
if args.sss or args.s >= 3:
args.ss = True
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
args.ls = args.ls or "**,*,ln,p,r"
if args.ss or args.s >= 2:
args.s = True
args.no_logues = True
args.no_readme = True
args.unpost = 0
args.no_del = True
args.no_mv = True
args.hardlink = True
args.vague_403 = True
args.nih = True
if args.s:
args.dotpart = True
args.no_thumb = True
args.no_mtag_ff = True
args.no_robots = True
args.force_js = True
self.log = self._log_disabled if args.q else self._log_enabled self.log = self._log_disabled if args.q else self._log_enabled
if args.lo: if args.lo:
self._setup_logfile(printed) self._setup_logfile(printed)
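
A note on the added block above: it is the new safety-profile cascade, where -sss implies -ss implies -s and each level flips progressively stricter defaults -- -s sets dotpart, no_thumb, no_mtag_ff, no_robots and force_js; -ss additionally sets no_logues, no_readme, unpost=0, no_del, no_mv, hardlink, vague_403 and nih; -sss additionally supplies a default --lo logfile pattern and --ls listing spec. (The corresponding command-line switches are inferred from these attribute names.)
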
@@ -57,16 +90,25 @@ class SvcHub(object):
if args.log_thrs: if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0) start_log_thrs(self.log, args.log_thrs, 0)
if not ANYWIN and not args.use_fpool: if not args.use_fpool and args.j != 1:
args.no_fpool = True args.no_fpool = True
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", t.format(args.j))
if not args.no_fpool and args.j != 1: if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior" t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
if ANYWIN: if ANYWIN:
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead' t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
args.no_fpool = True args.no_fpool = True
self.log("root", m, c=3) self.log("root", t, c=3)
bri = "zy"[args.theme % 2 :][:1]
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
if not args.hardlink and args.never_symlink:
args.no_dedup = True
# initiate all services to manage # initiate all services to manage
self.asrv = AuthSrv(self.args, self.log) self.asrv = AuthSrv(self.args, self.log)
@@ -76,20 +118,30 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self) self.up2k = Up2k(self)
decs = {k: 1 for k in self.args.th_dec.split(",")}
if not HAVE_VIPS:
decs.pop("vips", None)
if not HAVE_PIL:
decs.pop("pil", None)
if not HAVE_FFMPEG or not HAVE_FFPROBE:
decs.pop("ff", None)
self.args.th_dec = list(decs.keys())
self.thumbsrv = None self.thumbsrv = None
if not args.no_thumb: if not args.no_thumb:
if HAVE_PIL: t = "decoder preference: {}".format(", ".join(self.args.th_dec))
if not HAVE_WEBP: self.log("thumb", t)
args.th_no_webp = True
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
if "pil" in self.args.th_dec and not HAVE_WEBP:
msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
if self.args.th_dec:
self.thumbsrv = ThumbSrv(self) self.thumbsrv = ThumbSrv(self)
else: else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n" msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
self.log( msg = msg.format(" " * 37, os.path.basename(sys.executable))
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3 self.log("thumb", msg, c=3)
)
if not args.no_acode and args.no_thumb: if not args.no_acode and args.no_thumb:
msg = "setting --no-acode because --no-thumb (sorry)" msg = "setting --no-acode because --no-thumb (sorry)"
@@ -103,35 +155,43 @@ class SvcHub(object):
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage) args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
if args.ftp or args.ftps:
from .ftpd import Ftpd
self.ftpd = Ftpd(self)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
else: else:
from .broker_thr import BrokerThr as Broker from .broker_thr import BrokerThr as Broker # type: ignore
self.broker = Broker(self) self.broker = Broker(self)
def thr_httpsrv_up(self): def thr_httpsrv_up(self) -> None:
time.sleep(5) time.sleep(1 if self.args.ign_ebind_all else 5)
expected = self.broker.num_workers * self.tcpsrv.nsrv expected = self.broker.num_workers * self.tcpsrv.nsrv
failed = expected - self.httpsrv_up failed = expected - self.httpsrv_up
if not failed: if not failed:
return return
if self.args.ign_ebind_all: if self.args.ign_ebind_all:
if not self.tcpsrv.srv:
for _ in range(self.broker.num_workers):
self.broker.say("cb_httpsrv_up")
return return
if self.args.ign_ebind and self.tcpsrv.srv: if self.args.ign_ebind and self.tcpsrv.srv:
return return
m = "{}/{} workers failed to start" t = "{}/{} workers failed to start"
m = m.format(failed, expected) t = t.format(failed, expected)
self.log("root", m, 1) self.log("root", t, 1)
self.retcode = 1 self.retcode = 1
os.kill(os.getpid(), signal.SIGTERM) os.kill(os.getpid(), signal.SIGTERM)
def cb_httpsrv_up(self): def cb_httpsrv_up(self) -> None:
self.httpsrv_up += 1 self.httpsrv_up += 1
if self.httpsrv_up != self.broker.num_workers: if self.httpsrv_up != self.broker.num_workers:
return return
@@ -144,9 +204,9 @@ class SvcHub(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def _logname(self): def _logname(self) -> str:
dt = datetime.utcnow() dt = datetime.utcnow()
fn = self.args.lo fn = str(self.args.lo)
for fs in "YmdHMS": for fs in "YmdHMS":
fs = "%" + fs fs = "%" + fs
if fs in fn: if fs in fn:
@@ -154,7 +214,7 @@ class SvcHub(object):
return fn return fn
def _setup_logfile(self, printed): def _setup_logfile(self, printed: str) -> None:
base_fn = fn = sel_fn = self._logname() base_fn = fn = sel_fn = self._logname()
if fn != self.args.lo: if fn != self.args.lo:
ctr = 0 ctr = 0
@@ -176,8 +236,6 @@ class SvcHub(object):
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace") lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
lh.base_fn = base_fn
argv = [sys.executable] + self.argv argv = [sys.executable] + self.argv
if hasattr(shlex, "quote"): if hasattr(shlex, "quote"):
argv = [shlex.quote(x) for x in argv] argv = [shlex.quote(x) for x in argv]
@@ -188,9 +246,10 @@ class SvcHub(object):
printed += msg printed += msg
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed)) lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
self.logf = lh self.logf = lh
self.logf_base_fn = base_fn
print(msg, end="") print(msg, end="")
def run(self): def run(self) -> None:
self.tcpsrv.run() self.tcpsrv.run()
thr = threading.Thread(target=self.thr_httpsrv_up) thr = threading.Thread(target=self.thr_httpsrv_up)
@@ -225,7 +284,7 @@ class SvcHub(object):
else: else:
self.stop_thr() self.stop_thr()
def reload(self): def reload(self) -> str:
if self.reloading: if self.reloading:
return "cannot reload; already in progress" return "cannot reload; already in progress"
@@ -235,7 +294,7 @@ class SvcHub(object):
t.start() t.start()
return "reload initiated" return "reload initiated"
def _reload(self): def _reload(self) -> None:
self.log("root", "reload scheduled") self.log("root", "reload scheduled")
with self.up2k.mutex: with self.up2k.mutex:
self.asrv.reload() self.asrv.reload()
@@ -244,7 +303,7 @@ class SvcHub(object):
self.reloading = False self.reloading = False
def stop_thr(self): def stop_thr(self) -> None:
while not self.stop_req: while not self.stop_req:
with self.stop_cond: with self.stop_cond:
self.stop_cond.wait(9001) self.stop_cond.wait(9001)
@@ -255,7 +314,7 @@ class SvcHub(object):
self.shutdown() self.shutdown()
def signal_handler(self, sig, frame): def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
if self.stopping: if self.stopping:
return return
@@ -267,7 +326,7 @@ class SvcHub(object):
with self.stop_cond: with self.stop_cond:
self.stop_cond.notify_all() self.stop_cond.notify_all()
def shutdown(self): def shutdown(self) -> None:
if self.stopping: if self.stopping:
return return
@@ -300,13 +359,17 @@ class SvcHub(object):
print("nailed it", end="") print("nailed it", end="")
ret = self.retcode ret = self.retcode
finally: finally:
if self.args.wintitle:
print("\033]0;\033\\", file=sys.stderr, end="")
sys.stderr.flush()
print("\033[0m") print("\033[0m")
if self.logf: if self.logf:
self.logf.close() self.logf.close()
sys.exit(ret) sys.exit(ret)
def _log_disabled(self, src, msg, c=0): def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
if not self.logf: if not self.logf:
return return
@@ -318,8 +381,8 @@ class SvcHub(object):
if now >= self.next_day: if now >= self.next_day:
self._set_next_day() self._set_next_day()
def _set_next_day(self): def _set_next_day(self) -> None:
if self.next_day and self.logf and self.logf.base_fn != self._logname(): if self.next_day and self.logf and self.logf_base_fn != self._logname():
self.logf.close() self.logf.close()
self._setup_logfile("") self._setup_logfile("")
@@ -333,7 +396,7 @@ class SvcHub(object):
dt = dt.replace(hour=0, minute=0, second=0) dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple()) self.next_day = calendar.timegm(dt.utctimetuple())
def _log_enabled(self, src, msg, c=0): def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
"""handles logging from all components""" """handles logging from all components"""
with self.log_mutex: with self.log_mutex:
now = time.time() now = time.time()
@@ -351,7 +414,7 @@ class SvcHub(object):
src = ansi_re.sub("", src) src = ansi_re.sub("", src)
elif c: elif c:
if isinstance(c, int): if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg) msg = "\033[3{}m{}\033[0m".format(c, msg)
elif "\033" not in c: elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg) msg = "\033[{}m{}\033[0m".format(c, msg)
else: else:
@@ -370,7 +433,7 @@ class SvcHub(object):
if self.logf: if self.logf:
self.logf.write(msg) self.logf.write(msg)
def check_mp_support(self): def check_mp_support(self) -> str:
vmin = sys.version_info[1] vmin = sys.version_info[1]
if WINDOWS: if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;" msg = "need python 3.3 or newer for multiprocessing;"
@@ -384,18 +447,17 @@ class SvcHub(object):
return msg return msg
try: try:
x = mp.Queue(1) x: mp.Queue[tuple[str, str]] = mp.Queue(1)
x.put(["foo", "bar"]) x.put(("foo", "bar"))
if x.get()[0] != "foo": if x.get()[0] != "foo":
raise Exception() raise Exception()
except: except:
return "multiprocessing is not supported on your platform;" return "multiprocessing is not supported on your platform;"
return None return ""
def check_mp_enable(self): def check_mp_enable(self) -> bool:
if self.args.j == 1: if self.args.j == 1:
self.log("svchub", "multiprocessing disabled by argument -j 1")
return False return False
if mp.cpu_count() <= 1: if mp.cpu_count() <= 1:
@@ -417,18 +479,18 @@ class SvcHub(object):
self.log("svchub", "cannot efficiently use multiple CPU cores") self.log("svchub", "cannot efficiently use multiple CPU cores")
return False return False
def sd_notify(self): def sd_notify(self) -> None:
try: try:
addr = os.getenv("NOTIFY_SOCKET") zb = os.getenv("NOTIFY_SOCKET")
if not addr: if not zb:
return return
addr = unicode(addr) addr = unicode(zb)
if addr.startswith("@"): if addr.startswith("@"):
addr = "\0" + addr[1:] addr = "\0" + addr[1:]
m = "".join(x for x in addr if x in string.printable) t = "".join(x for x in addr if x in string.printable)
self.log("sd_notify", m) self.log("sd_notify", t)
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sck.connect(addr) sck.connect(addr)
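
The docstring change near the top of this file reflects the broker API rename that runs through the whole changeset: the old put(want_reply, dest, ...) is split into say() for fire-and-forget calls and ask() for calls that return a queue-like handle with a blocking get(). A toy model of the two call styles (the real Broker is backed by threads or multiprocessing):

    from queue import Queue

    class Broker(object):
        def say(self, dest, *args):        # fire-and-forget, e.g. broker.say("listen", srv)
            self._dispatch(dest, *args)

        def ask(self, dest, *args):        # e.g. broker.ask("thumbsrv.get", ...).get()
            q = Queue(1)
            q.put(self._dispatch(dest, *args))
            return q

        def _dispatch(self, dest, *args):
            return (dest, args)            # stand-in for routing the call to a worker

    b = Broker()
    b.say("listen", "some-socket")
    reply = b.ask("thumbsrv.get", "/top", "rel/path", 0.0, "w").get()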

View File

@@ -1,17 +1,23 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import calendar
import time import time
import zlib import zlib
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn, spack, sunpack
from .bos import bos from .bos import bos
from .sutil import StreamArc, errdesc
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
try:
from typing import Any, Generator, Optional
from .util import NamedLogger
except:
pass
def dostime2unix(buf): def dostime2unix(buf: bytes) -> int:
t, d = sunpack(b"<HH", buf) t, d = sunpack(b"<HH", buf)
ts = (t & 0x1F) * 2 ts = (t & 0x1F) * 2
@@ -26,27 +32,38 @@ def dostime2unix(buf):
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}" tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
iso = tf.format(*tt) iso = tf.format(*tt)
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S") dt = time.strptime(iso, "%Y-%m-%d %H:%M:%S")
return int(dt.timestamp()) return int(calendar.timegm(dt))
def unixtime2dos(ts): def unixtime2dos(ts: int) -> bytes:
tt = time.gmtime(ts) tt = time.gmtime(ts + 1)
dy, dm, dd, th, tm, ts = list(tt)[:6] dy, dm, dd, th, tm, ts = list(tt)[:6]
bd = ((dy - 1980) << 9) + (dm << 5) + dd bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2 bt = (th << 11) + (tm << 5) + ts // 2
return spack(b"<HH", bt, bd) try:
return spack(b"<HH", bt, bd)
except:
return b"\x00\x00\x21\x00"
def gen_fdesc(sz, crc32, z64): def gen_fdesc(sz: int, crc32: int, z64: bool) -> bytes:
ret = b"\x50\x4b\x07\x08" ret = b"\x50\x4b\x07\x08"
fmt = b"<LQQ" if z64 else b"<LLL" fmt = b"<LQQ" if z64 else b"<LLL"
ret += spack(fmt, crc32, sz, sz) ret += spack(fmt, crc32, sz, sz)
return ret return ret
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc): def gen_hdr(
h_pos: Optional[int],
fn: str,
sz: int,
lastmod: int,
utf8: bool,
icrc32: int,
pre_crc: bool,
) -> bytes:
""" """
does regular file headers does regular file headers
and the central directory meme if h_pos is set and the central directory meme if h_pos is set
@@ -65,8 +82,8 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# confusingly this doesn't bump if h_pos # confusingly this doesn't bump if h_pos
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00" req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32: if icrc32:
crc32 = spack(b"<L", crc32) crc32 = spack(b"<L", icrc32)
else: else:
crc32 = b"\x00" * 4 crc32 = b"\x00" * 4
@@ -74,7 +91,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# 4b magic, 2b min-ver # 4b magic, 2b min-ver
ret = b"\x50\x4b\x03\x04" + req_ver ret = b"\x50\x4b\x03\x04" + req_ver
else: else:
# 4b magic, 2b spec-ver, 2b min-ver # 4b magic, 2b spec-ver (1b compat, 1b os (00 dos, 03 unix)), 2b min-ver
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
ret += b"\x00" if pre_crc else b"\x08" # streaming ret += b"\x00" if pre_crc else b"\x08" # streaming
@@ -93,30 +110,43 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
fn = sanitize_fn(fn, "/", []) fn = sanitize_fn(fn, "/", [])
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_") bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
z64_len = len(z64v) * 8 + 4 if z64v else 0 z64_len = len(z64v) * 8 + 4 if z64v else 0
ret += spack(b"<HH", len(bfn), z64_len) ret += spack(b"<HH", len(bfn), 0x10 + z64_len)
if h_pos is not None: if h_pos is not None:
# 2b comment, 2b diskno # 2b comment, 2b diskno
ret += b"\x00" * 4 ret += b"\x00" * 4
# 2b internal.attr, 4b external.attr # 2b internal.attr, 4b external.attr
# infozip-macos: 0100 0000 a481 file:644 # infozip-macos: 0100 0000 a481 (spec-ver 1e03) file:644
# infozip-macos: 0100 0100 0080 file:000 # infozip-macos: 0100 0100 0080 (spec-ver 1e03) file:000
ret += b"\x01\x00\x00\x00\xa4\x81" # win10-zip: 0000 2000 0000 (spec-ver xx00) FILE_ATTRIBUTE_ARCHIVE
ret += b"\x00\x00\x00\x00\xa4\x81" # unx
# ret += b"\x00\x00\x20\x00\x00\x00" # fat
# 4b local-header-ofs # 4b local-header-ofs
ret += spack(b"<L", min(h_pos, 0xFFFFFFFF)) ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))
ret += bfn ret += bfn
# ntfs: type 0a, size 20, rsvd, attr1, len 18, mtime, atime, ctime
# b"\xa3\x2f\x82\x41\x55\x68\xd8\x01" 1652616838.798941100 ~5.861518 132970904387989411 ~58615181
# nt = int((lastmod + 11644473600) * 10000000)
# ret += spack(b"<HHLHHQQQ", 0xA, 0x20, 0, 1, 0x18, nt, nt, nt)
# unix: type 0d, size 0c, atime, mtime, uid, gid
ret += spack(b"<HHLLHH", 0xD, 0xC, int(lastmod), int(lastmod), 1000, 1000)
if z64v: if z64v:
ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v) ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)
return ret return ret
def gen_ecdr(items, cdir_pos, cdir_end): def gen_ecdr(
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
) -> tuple[bytes, bool]:
""" """
summary of all file headers, summary of all file headers,
usually the zipfile footer unless something clamps usually the zipfile footer unless something clamps
@@ -141,10 +171,12 @@ def gen_ecdr(items, cdir_pos, cdir_end):
# 2b comment length # 2b comment length
ret += b"\x00\x00" ret += b"\x00\x00"
return [ret, need_64] return ret, need_64
def gen_ecdr64(items, cdir_pos, cdir_end): def gen_ecdr64(
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
) -> bytes:
""" """
z64 end of central directory z64 end of central directory
added when numfiles or a headerptr clamps added when numfiles or a headerptr clamps
@@ -168,7 +200,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
return ret return ret
def gen_ecdr64_loc(ecdr64_pos): def gen_ecdr64_loc(ecdr64_pos: int) -> bytes:
""" """
z64 end of central directory locator z64 end of central directory locator
points to ecdr64 points to ecdr64
@@ -183,31 +215,36 @@ def gen_ecdr64_loc(ecdr64_pos):
return ret return ret
class StreamZip(object): class StreamZip(StreamArc):
def __init__(self, log, fgen, utf8=False, pre_crc=False): def __init__(
self.log = log self,
self.fgen = fgen log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
utf8: bool = False,
pre_crc: bool = False,
) -> None:
super(StreamZip, self).__init__(log, fgen)
self.utf8 = utf8 self.utf8 = utf8
self.pre_crc = pre_crc self.pre_crc = pre_crc
self.pos = 0 self.pos = 0
self.items = [] self.items: list[tuple[str, int, int, int, int]] = []
def _ct(self, buf): def _ct(self, buf: bytes) -> bytes:
self.pos += len(buf) self.pos += len(buf)
return buf return buf
def ser(self, f): def ser(self, f: dict[str, Any]) -> Generator[bytes, None, None]:
name = f["vp"] name = f["vp"]
src = f["ap"] src = f["ap"]
st = f["st"] st = f["st"]
sz = st.st_size sz = st.st_size
ts = st.st_mtime + 1 ts = st.st_mtime
crc = None crc = 0
if self.pre_crc: if self.pre_crc:
crc = 0
for buf in yieldfile(src): for buf in yieldfile(src):
crc = zlib.crc32(buf, crc) crc = zlib.crc32(buf, crc)
@@ -217,7 +254,6 @@ class StreamZip(object):
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc) buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf) yield self._ct(buf)
crc = crc or 0
for buf in yieldfile(src): for buf in yieldfile(src):
if not self.pre_crc: if not self.pre_crc:
crc = zlib.crc32(buf, crc) crc = zlib.crc32(buf, crc)
@@ -226,7 +262,7 @@ class StreamZip(object):
crc &= 0xFFFFFFFF crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos]) self.items.append((name, sz, ts, crc, h_pos))
z64 = sz >= 4 * 1024 * 1024 * 1024 z64 = sz >= 4 * 1024 * 1024 * 1024
@@ -234,18 +270,19 @@ class StreamZip(object):
buf = gen_fdesc(sz, crc, z64) buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf) yield self._ct(buf)
def gen(self): def gen(self) -> Generator[bytes, None, None]:
errors = [] errors = []
for f in self.fgen: for f in self.fgen:
if "err" in f: if "err" in f:
errors.append([f["vp"], f["err"]]) errors.append((f["vp"], f["err"]))
continue continue
try: try:
for x in self.ser(f): for x in self.ser(f):
yield x yield x
except Exception as ex: except:
errors.append([f["vp"], repr(ex)]) ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append((f["vp"], ex))
if errors: if errors:
errf, txt = errdesc(errors) errf, txt = errdesc(errors)
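
Beyond the typing, szip.py now keeps its timestamp math in UTC (calendar.timegm / time.gmtime) and gen_hdr writes a unix extra-field carrying the real mtime. For reference, the 4-byte DOS timestamp that unixtime2dos packs into each header:

    import struct
    import time

    ts = 1655676000                            # some unix timestamp (UTC)
    tt = time.gmtime(ts + 1)                   # +1 matches the rounding in the diff
    dy, dm, dd, th, tm, tsec = tt[:6]
    bd = ((dy - 1980) << 9) + (dm << 5) + dd   # 7-bit year since 1980, 4-bit month, 5-bit day
    bt = (th << 11) + (tm << 5) + tsec // 2    # 5-bit hour, 6-bit minute, 5-bit two-second units
    dos = struct.pack("<HH", bt, bd)           # the 4 bytes stored in the local/central headers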

View File

@@ -3,10 +3,14 @@ from __future__ import print_function, unicode_literals
import re import re
import socket import socket
import sys
from .__init__ import MACOS, ANYWIN from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, unicode
from .util import chkcmd from .util import chkcmd
if TYPE_CHECKING:
from .svchub import SvcHub
class TcpSrv(object): class TcpSrv(object):
""" """
@@ -14,16 +18,16 @@ class TcpSrv(object):
which then uses the least busy HttpSrv to handle it which then uses the least busy HttpSrv to handle it
""" """
def __init__(self, hub): def __init__(self, hub: "SvcHub"):
self.hub = hub self.hub = hub
self.args = hub.args self.args = hub.args
self.log = hub.log self.log = hub.log
self.stopping = False self.stopping = False
self.srv = [] self.srv: list[socket.socket] = []
self.nsrv = 0 self.nsrv = 0
ok = {} ok: dict[str, list[int]] = {}
for ip in self.args.i: for ip in self.args.i:
ok[ip] = [] ok[ip] = []
for port in self.args.p: for port in self.args.p:
@@ -33,8 +37,8 @@ class TcpSrv(object):
ok[ip].append(port) ok[ip].append(port)
except Exception as ex: except Exception as ex:
if self.args.ign_ebind or self.args.ign_ebind_all: if self.args.ign_ebind or self.args.ign_ebind_all:
m = "could not listen on {}:{}: {}" t = "could not listen on {}:{}: {}"
self.log("tcpsrv", m.format(ip, port, ex), c=3) self.log("tcpsrv", t.format(ip, port, ex), c=3)
else: else:
raise raise
@@ -54,20 +58,56 @@ class TcpSrv(object):
eps[x] = "external" eps[x] = "external"
msgs = [] msgs = []
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)" title_tab: dict[str, dict[str, int]] = {}
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]): for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p): for port in sorted(self.args.p):
if port not in ok.get(ip, ok.get("0.0.0.0", [])): if port not in ok.get(ip, ok.get("0.0.0.0", [])):
continue continue
msgs.append(m.format(ip, port, desc)) proto = " http"
if self.args.http_only:
pass
elif self.args.https_only or port == 443:
proto = "https"
msgs.append(t.format(proto, ip, port, desc))
if not self.args.wintitle:
continue
if port in [80, 443]:
ep = ip
else:
ep = "{}:{}".format(ip, port)
hits = []
if "pub" in title_vars and "external" in unicode(desc):
hits.append(("pub", ep))
if "pub" in title_vars or "all" in title_vars:
hits.append(("all", ep))
for var in title_vars:
if var.startswith("ip-") and ep.startswith(var[3:]):
hits.append((var, ep))
for tk, tv in hits:
try:
title_tab[tk][tv] = 1
except:
title_tab[tk] = {tv: 1}
if msgs: if msgs:
msgs[-1] += "\n" msgs[-1] += "\n"
for m in msgs: for t in msgs:
self.log("tcpsrv", m) self.log("tcpsrv", t)
def _listen(self, ip, port): if self.args.wintitle:
self._set_wintitle(title_tab)
def _listen(self, ip: str, port: int) -> None:
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -83,7 +123,7 @@ class TcpSrv(object):
raise raise
raise Exception(e) raise Exception(e)
def run(self): def run(self) -> None:
for srv in self.srv: for srv in self.srv:
srv.listen(self.args.nc) srv.listen(self.args.nc)
ip, port = srv.getsockname() ip, port = srv.getsockname()
@@ -93,9 +133,9 @@ class TcpSrv(object):
if self.args.q: if self.args.q:
print(msg) print(msg)
self.hub.broker.put(False, "listen", srv) self.hub.broker.say("listen", srv)
def shutdown(self): def shutdown(self) -> None:
self.stopping = True self.stopping = True
try: try:
for srv in self.srv: for srv in self.srv:
@@ -105,25 +145,59 @@ class TcpSrv(object):
self.log("tcpsrv", "ok bye") self.log("tcpsrv", "ok bye")
def ips_linux(self): def ips_linux_ifconfig(self) -> dict[str, str]:
eps = {} # for termux
try:
txt, _ = chkcmd(["ifconfig"])
except:
return {}
eps: dict[str, str] = {}
dev = None
ip = None
up = None
for ln in (txt + "\n").split("\n"):
if not ln.strip() and dev and ip:
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
dev = ip = up = None
continue
if ln == ln.lstrip():
dev = re.split(r"[: ]", ln)[0]
if "UP" in re.split(r"[<>, \t]", ln):
up = True
m = re.match(r"^\s+inet\s+([^ ]+)", ln)
if m:
ip = m.group(1)
return eps
def ips_linux(self) -> dict[str, str]:
try: try:
txt, _ = chkcmd(["ip", "addr"]) txt, _ = chkcmd(["ip", "addr"])
except: except:
return eps return self.ips_linux_ifconfig()
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)") r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
ri = re.compile(r"^\s*[0-9]+\s*:.*")
up = False
eps: dict[str, str] = {}
for ln in txt.split("\n"): for ln in txt.split("\n"):
if ri.match(ln):
up = "UP" in re.split("[>,< ]", ln)
try: try:
ip, dev = r.match(ln.rstrip()).groups() ip, dev = r.match(ln.rstrip()).groups() # type: ignore
eps[ip] = dev eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
except: except:
pass pass
return eps return eps
def ips_macos(self): def ips_macos(self) -> dict[str, str]:
eps = {} eps: dict[str, str] = {}
try: try:
txt, _ = chkcmd(["ifconfig"]) txt, _ = chkcmd(["ifconfig"])
except: except:
@@ -131,7 +205,7 @@ class TcpSrv(object):
rdev = re.compile(r"^([^ ]+):") rdev = re.compile(r"^([^ ]+):")
rip = re.compile(r"^\tinet ([0-9\.]+) ") rip = re.compile(r"^\tinet ([0-9\.]+) ")
dev = None dev = "UNKNOWN"
for ln in txt.split("\n"): for ln in txt.split("\n"):
m = rdev.match(ln) m = rdev.match(ln)
if m: if m:
@@ -140,34 +214,46 @@ class TcpSrv(object):
m = rip.match(ln) m = rip.match(ln)
if m: if m:
eps[m.group(1)] = dev eps[m.group(1)] = dev
dev = None dev = "UNKNOWN"
return eps return eps
def ips_windows_ipconfig(self): def ips_windows_ipconfig(self) -> tuple[dict[str, str], set[str]]:
eps = {} eps: dict[str, str] = {}
offs: set[str] = set()
try: try:
txt, _ = chkcmd(["ipconfig"]) txt, _ = chkcmd(["ipconfig"])
except: except:
return eps return eps, offs
rdev = re.compile(r"(^[^ ].*):$") rdev = re.compile(r"(^[^ ].*):$")
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$") rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
roff = re.compile(r".*: Media disconnected$")
dev = None dev = None
for ln in txt.replace("\r", "").split("\n"): for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln) m = rdev.match(ln)
if m: if m:
if dev and dev not in eps.values():
offs.add(dev)
dev = m.group(1).split(" adapter ", 1)[-1] dev = m.group(1).split(" adapter ", 1)[-1]
if dev and roff.match(ln):
offs.add(dev)
dev = None
m = rip.match(ln) m = rip.match(ln)
if m and dev: if m and dev:
eps[m.group(1)] = dev eps[m.group(1)] = dev
dev = None dev = None
return eps if dev and dev not in eps.values():
offs.add(dev)
def ips_windows_netsh(self): return eps, offs
eps = {}
def ips_windows_netsh(self) -> dict[str, str]:
eps: dict[str, str] = {}
try: try:
txt, _ = chkcmd("netsh interface ip show address".split()) txt, _ = chkcmd("netsh interface ip show address".split())
except: except:
@@ -184,16 +270,18 @@ class TcpSrv(object):
m = rip.match(ln) m = rip.match(ln)
if m and dev: if m and dev:
eps[m.group(1)] = dev eps[m.group(1)] = dev
dev = None
return eps return eps
def detect_interfaces(self, listen_ips): def detect_interfaces(self, listen_ips: list[str]) -> dict[str, str]:
if MACOS: if MACOS:
eps = self.ips_macos() eps = self.ips_macos()
elif ANYWIN: elif ANYWIN:
eps = self.ips_windows_ipconfig() # sees more interfaces eps, off = self.ips_windows_ipconfig() # sees more interfaces + link state
eps.update(self.ips_windows_netsh()) # has better names eps.update(self.ips_windows_netsh()) # has better names
for k, v in eps.items():
if v in off:
eps[k] += ", \033[31mLINK-DOWN"
else: else:
eps = self.ips_linux() eps = self.ips_linux()
@@ -212,7 +300,6 @@ class TcpSrv(object):
]: ]:
try: try:
s.connect((ip, 1)) s.connect((ip, 1))
# raise OSError(13, "a")
default_route = s.getsockname()[0] default_route = s.getsockname()[0]
break break
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
@@ -232,3 +319,26 @@ class TcpSrv(object):
eps[default_route] = desc eps[default_route] = desc
return eps return eps
def _set_wintitle(self, vs: dict[str, dict[str, int]]) -> None:
vs["all"] = vs.get("all", {"Local-Only": 1})
vs["pub"] = vs.get("pub", vs["all"])
vs2 = {}
for k, eps in vs.items():
vs2[k] = {
ep: 1
for ep in eps.keys()
if ":" not in ep or ep.split(":")[0] not in eps
}
title = ""
vs = vs2
for p in self.args.wintitle.split(" "):
if p.startswith("$"):
p = " and ".join(sorted(vs.get(p[1:], {"(None)": 1}).keys()))
title += "{} ".format(p)
print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
sys.stderr.flush()
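
The new wintitle support collects endpoints per $-variable ($all, $pub, $ip-<prefix>) and then retitles the terminal with an OSC 0 escape, matching the reset that SvcHub.shutdown() now prints. The final step, standalone:

    import sys

    title = "copyparty @ 192.168.1.5"   # whatever the $-variables expanded to
    print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
    sys.stderr.flush()

    # and clearing it again on exit, as shutdown() does:
    print("\033]0;\033\\", file=sys.stderr, end="")
    sys.stderr.flush()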

View File

@@ -3,13 +3,23 @@ from __future__ import print_function, unicode_literals
import os import os
from .util import Cooldown from .__init__ import TYPE_CHECKING
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA from .authsrv import VFS
from .bos import bos from .bos import bos
from .th_srv import HAVE_WEBP, thumb_path
from .util import Cooldown
try:
from typing import Optional, Union
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class ThumbCli(object): class ThumbCli(object):
def __init__(self, hsrv): def __init__(self, hsrv: "HttpSrv") -> None:
self.broker = hsrv.broker self.broker = hsrv.broker
self.log_func = hsrv.log self.log_func = hsrv.log
self.args = hsrv.args self.args = hsrv.args
@@ -18,30 +28,53 @@ class ThumbCli(object):
# cache on both sides for less broker spam # cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke) self.cooldown = Cooldown(self.args.th_poke)
def log(self, msg, c=0): try:
c = hsrv.th_cfg
except:
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
self.thumbable = c["thumbable"]
self.fmt_pil = c["pil"]
self.fmt_vips = c["vips"]
self.fmt_ffi = c["ffi"]
self.fmt_ffv = c["ffv"]
self.fmt_ffa = c["ffa"]
# defer args.th_ff_jpg, can change at runtime
d = next((x for x in self.args.th_dec if x in ("vips", "pil")), None)
self.can_webp = HAVE_WEBP or d == "vips"
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("thumbcli", msg, c) self.log_func("thumbcli", msg, c)
def get(self, ptop, rem, mtime, fmt): def get(self, dbv: VFS, rem: str, mtime: float, fmt: str) -> Optional[str]:
ptop = dbv.realpath
ext = rem.rsplit(".")[-1].lower() ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE: if ext not in self.thumbable or "dthumb" in dbv.flags:
return None return None
is_vid = ext in FMT_FFV is_vid = ext in self.fmt_ffv
if is_vid and self.args.no_vthumb: if is_vid and "dvthumb" in dbv.flags:
return None return None
want_opus = fmt == "opus" want_opus = fmt in ("opus", "caf")
is_au = ext in FMT_FFA is_au = ext in self.fmt_ffa
if is_au: if is_au:
if want_opus: if want_opus:
if self.args.no_acode: if self.args.no_acode:
return None return None
else: else:
if self.args.no_athumb: if "dathumb" in dbv.flags:
return None return None
elif want_opus: elif want_opus:
return None return None
is_img = not is_vid and not is_au
if is_img and "dithumb" in dbv.flags:
return None
preferred = self.args.th_dec[0] if self.args.th_dec else ""
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]: if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem) return os.path.join(ptop, rem)
@@ -49,7 +82,11 @@ class ThumbCli(object):
fmt = "w" fmt = "w"
if fmt == "w": if fmt == "w":
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg): if (
self.args.th_no_webp
or (is_img and not self.can_webp)
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
):
fmt = "j" fmt = "j"
histpath = self.asrv.vfs.histtab.get(ptop) histpath = self.asrv.vfs.histtab.get(ptop)
@@ -58,27 +95,38 @@ class ThumbCli(object):
return None return None
tpath = thumb_path(histpath, rem, mtime, fmt) tpath = thumb_path(histpath, rem, mtime, fmt)
tpaths = [tpath]
if fmt == "w":
# also check for jpg (maybe webp is unavailable)
tpaths.append(tpath.rsplit(".", 1)[0] + ".jpg")
ret = None ret = None
try: abort = False
st = bos.stat(tpath) for tp in tpaths:
if st.st_size: try:
ret = tpath st = bos.stat(tp)
else: if st.st_size:
return None ret = tpath = tp
except: fmt = ret.rsplit(".")[1]
pass else:
abort = True
except:
pass
if ret: if ret:
tdir = os.path.dirname(tpath) tdir = os.path.dirname(tpath)
if self.cooldown.poke(tdir): if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir) self.broker.say("thumbsrv.poke", tdir)
if want_opus: if want_opus:
# audio files expire individually # audio files expire individually
if self.cooldown.poke(tpath): if self.cooldown.poke(tpath):
self.broker.put(False, "thumbsrv.poke", tpath) self.broker.say("thumbsrv.poke", tpath)
return ret return ret
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt) if abort:
return x.get() return None
x = self.broker.ask("thumbsrv.get", ptop, rem, mtime, fmt)
return x.get() # type: ignore
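
ThumbCli no longer imports THUMBABLE / FMT_* from th_srv; it reads a capability dict off HttpSrv (hsrv.th_cfg), with empty fallbacks. The keys match what ThumbSrv.getcfg() returns further down; a sketch of the shape, with illustrative formats rather than the real defaults:

    th_cfg = {
        "thumbable": {"jpg", "png", "gif", "mp4", "mkv", "flac", "mp3"},  # union of the sets below
        "pil":  {"jpg", "png", "gif"},   # Pillow-decodable images
        "vips": {"jpg", "png"},          # pyvips-decodable images
        "ffi":  {"jpg", "png"},          # images handled by ffmpeg
        "ffv":  {"mp4", "mkv"},          # videos
        "ffa":  {"flac", "mp3"},         # audio
    }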

View File

@@ -1,19 +1,28 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os
import time
import shutil
import base64 import base64
import hashlib import hashlib
import threading import os
import shutil
import subprocess as sp import subprocess as sp
import threading
import time
from .__init__ import PY2, unicode from queue import Queue
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
from .__init__ import TYPE_CHECKING
from .bos import bos from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
from .util import BytesIO, Cooldown, fsenc, min_ex, runcmd, statdir, vsplit
try:
from typing import Optional, Union
except:
pass
if TYPE_CHECKING:
from .svchub import SvcHub
HAVE_PIL = False HAVE_PIL = False
HAVE_HEIF = False HAVE_HEIF = False
@@ -21,7 +30,7 @@ HAVE_AVIF = False
HAVE_WEBP = False HAVE_WEBP = False
try: try:
from PIL import Image, ImageOps, ExifTags from PIL import ExifTags, Image, ImageOps
HAVE_PIL = True HAVE_PIL = True
try: try:
@@ -39,7 +48,7 @@ try:
pass pass
try: try:
import pillow_avif import pillow_avif # noqa: F401 # pylint: disable=unused-import
HAVE_AVIF = True HAVE_AVIF = True
except: except:
@@ -47,34 +56,14 @@ try:
except: except:
pass pass
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html try:
# ffmpeg -formats HAVE_VIPS = True
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm" import pyvips
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" except:
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv" HAVE_VIPS = False
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
if HAVE_AVIF:
FMT_PIL += " avif avifs"
FMT_PIL, FMT_FFV, FMT_FFA = [
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
]
THUMBABLE = {} def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
if HAVE_PIL:
THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FFV)
THUMBABLE.update(FMT_FFA)
def thumb_path(histpath, rem, mtime, fmt):
# base16 = 16 = 256 # base16 = 16 = 256
# b64-lc = 38 = 1444 # b64-lc = 38 = 1444
# base64 = 64 = 4096 # base64 = 64 = 4096
@@ -90,7 +79,7 @@ def thumb_path(histpath, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest() h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24] fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
if fmt == "opus": if fmt in ("opus", "caf"):
cat = "ac" cat = "ac"
else: else:
fmt = "webp" if fmt == "w" else "jpg" fmt = "webp" if fmt == "w" else "jpg"
@@ -100,7 +89,7 @@ def thumb_path(histpath, rem, mtime, fmt):
class ThumbSrv(object): class ThumbSrv(object):
def __init__(self, hub): def __init__(self, hub: "SvcHub") -> None:
self.hub = hub self.hub = hub
self.asrv = hub.asrv self.asrv = hub.asrv
self.args = hub.args self.args = hub.args
@@ -111,17 +100,17 @@ class ThumbSrv(object):
self.poke_cd = Cooldown(self.args.th_poke) self.poke_cd = Cooldown(self.args.th_poke)
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.busy = {} self.busy: dict[str, list[threading.Condition]] = {}
self.stopping = False self.stopping = False
self.nthr = max(1, self.args.th_mt) self.nthr = max(1, self.args.th_mt)
self.q = Queue(self.nthr * 4) self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
for n in range(self.nthr): for n in range(self.nthr):
t = threading.Thread( thr = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr) target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
) )
t.daemon = True thr.daemon = True
t.start() thr.start()
want_ff = not self.args.no_vthumb or not self.args.no_athumb want_ff = not self.args.no_vthumb or not self.args.no_athumb
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE): if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
@@ -141,19 +130,50 @@ class ThumbSrv(object):
t.daemon = True t.daemon = True
t.start() t.start()
def log(self, msg, c=0): self.fmt_pil, self.fmt_vips, self.fmt_ffi, self.fmt_ffv, self.fmt_ffa = [
set(y.split(","))
for y in [
self.args.th_r_pil,
self.args.th_r_vips,
self.args.th_r_ffi,
self.args.th_r_ffv,
self.args.th_r_ffa,
]
]
if not HAVE_HEIF:
for f in "heif heifs heic heics".split(" "):
self.fmt_pil.discard(f)
if not HAVE_AVIF:
for f in "avif avifs".split(" "):
self.fmt_pil.discard(f)
self.thumbable: set[str] = set()
if "pil" in self.args.th_dec:
self.thumbable |= self.fmt_pil
if "vips" in self.args.th_dec:
self.thumbable |= self.fmt_vips
if "ff" in self.args.th_dec:
for zss in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
self.thumbable |= zss
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("thumb", msg, c) self.log_func("thumb", msg, c)
def shutdown(self): def shutdown(self) -> None:
self.stopping = True self.stopping = True
for _ in range(self.nthr): for _ in range(self.nthr):
self.q.put(None) self.q.put(None)
def stopped(self): def stopped(self) -> bool:
with self.mutex: with self.mutex:
return not self.nthr return not self.nthr
def get(self, ptop, rem, mtime, fmt): def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
histpath = self.asrv.vfs.histtab.get(ptop) histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath: if not histpath:
self.log("no histpath for [{}]".format(ptop)) self.log("no histpath for [{}]".format(ptop))
@@ -180,7 +200,7 @@ class ThumbSrv(object):
do_conv = True do_conv = True
if do_conv: if do_conv:
self.q.put([abspath, tpath]) self.q.put((abspath, tpath))
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6) self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
while not self.stopping: while not self.stopping:
@@ -201,7 +221,17 @@ class ThumbSrv(object):
return None return None
def getcfg(self) -> dict[str, set[str]]:
return {
"thumbable": self.thumbable,
"pil": self.fmt_pil,
"vips": self.fmt_vips,
"ffi": self.fmt_ffi,
"ffv": self.fmt_ffv,
"ffa": self.fmt_ffa,
}
def worker(self): def worker(self) -> None:
while not self.stopping: while not self.stopping:
task = self.q.get() task = self.q.get()
if not task: if not task:
@@ -211,22 +241,29 @@ class ThumbSrv(object):
ext = abspath.split(".")[-1].lower() ext = abspath.split(".")[-1].lower()
fun = None fun = None
if not bos.path.exists(tpath): if not bos.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FFV:
fun = self.conv_ffmpeg
elif ext in FMT_FFA:
if tpath.endswith(".opus"):
fun = self.conv_opus
else:
fun = self.conv_spec
for lib in self.args.th_dec:
if fun:
break
elif lib == "pil" and ext in self.fmt_pil:
fun = self.conv_pil
elif lib == "vips" and ext in self.fmt_vips:
fun = self.conv_vips
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
fun = self.conv_ffmpeg
elif lib == "ff" and ext in self.fmt_ffa:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
else:
fun = self.conv_spec
if fun: if fun:
try: try:
fun(abspath, tpath) fun(abspath, tpath)
except: except:
msg = "{} could not create thumbnail of {}\n{}" msg = "{} could not create thumbnail of {}\n{}"
self.log(msg.format(fun.__name__, abspath, min_ex()), "1;30") msg = msg.format(fun.__name__, abspath, min_ex())
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "1;30"
self.log(msg, c)
with open(tpath, "wb") as _: with open(tpath, "wb") as _:
pass pass
@@ -241,7 +278,7 @@ class ThumbSrv(object):
with self.mutex: with self.mutex:
self.nthr -= 1 self.nthr -= 1
def fancy_pillow(self, im): def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
# exif_transpose is expensive (loads full image + unconditional copy) # exif_transpose is expensive (loads full image + unconditional copy)
r = max(*self.res) * 2 r = max(*self.res) * 2
im.thumbnail((r, r), resample=Image.LANCZOS) im.thumbnail((r, r), resample=Image.LANCZOS)
@@ -267,7 +304,7 @@ class ThumbSrv(object):
return im return im
def conv_pil(self, abspath, tpath): def conv_pil(self, abspath: str, tpath: str) -> None:
with Image.open(fsenc(abspath)) as im: with Image.open(fsenc(abspath)) as im:
try: try:
im = self.fancy_pillow(im) im = self.fancy_pillow(im)
@@ -296,16 +333,35 @@ class ThumbSrv(object):
im.save(tpath, **args) im.save(tpath, **args)
def conv_vips(self, abspath: str, tpath: str) -> None:
crops = ["centre", "none"]
if self.args.th_no_crop:
crops = ["none"]
w, h = self.res
kw = {"height": h, "size": "down", "intent": "relative"}
for c in crops:
try:
kw["crop"] = c
img = pyvips.Image.thumbnail(abspath, w, **kw)
break
except:
pass
img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath, tpath): def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath) ret, _ = ffprobe(abspath)
if not ret:
return
ext = abspath.rsplit(".")[-1] ext = abspath.rsplit(".")[-1].lower()
if ext in ["h264", "h265"]: if ext in ["h264", "h265"] or ext in self.fmt_ffi:
seek = [] seek: list[bytes] = []
else: else:
dur = ret[".dur"][1] if ".dur" in ret else 4 dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3) seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
seek = [b"-ss", seek.encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio=" scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop: if self.args.th_no_crop:
@@ -313,7 +369,7 @@ class ThumbSrv(object):
else: else:
scale += "increase,crop={0}:{1},setsar=1:1" scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8") bscale = scale.format(*list(self.res)).encode("utf-8")
# fmt: off # fmt: off
cmd = [ cmd = [
b"ffmpeg", b"ffmpeg",
@@ -325,7 +381,7 @@ class ThumbSrv(object):
cmd += [ cmd += [
b"-i", fsenc(abspath), b"-i", fsenc(abspath),
b"-map", b"0:v:0", b"-map", b"0:v:0",
b"-vf", scale, b"-vf", bscale,
b"-frames:v", b"1", b"-frames:v", b"1",
b"-metadata:s:v:0", b"rotate=0", b"-metadata:s:v:0", b"rotate=0",
] ]
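For context, the scale value built above is an ffmpeg -vf filter string; with cropping enabled it fills the target box and crops the overflow. A small sketch of assembling the crop variant (the no-crop branch is elided in this hunk, so it is left out here):

    def make_crop_vf(w: int, h: int) -> bytes:
        # illustration only; follows the visible crop branch above
        vf = "scale={0}:{1}:force_original_aspect_ratio=increase,crop={0}:{1},setsar=1:1"
        return vf.format(w, h).encode("utf-8")

    # make_crop_vf(256, 256) ->
    # b"scale=256:256:force_original_aspect_ratio=increase,crop=256:256,setsar=1:1"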
@@ -347,22 +403,55 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
self._run_ff(cmd) self._run_ff(cmd)
def _run_ff(self, cmd): def _run_ff(self, cmd: list[bytes]) -> None:
# self.log((b" ".join(cmd)).decode("utf-8")) # self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd) ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
if ret != 0: if not ret:
m = "FFmpeg failed (probably a corrupt video file):\n" return
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath): c: Union[str, int] = "1;30"
t = "FFmpeg failed (probably a corrupt video file):\n"
if cmd[-1].lower().endswith(b".webp") and (
"Error selecting an encoder" in serr
or "Automatic encoder selection failed" in serr
or "Default encoder for format webp" in serr
or "Please choose an encoder manually" in serr
):
self.args.th_ff_jpg = True
t = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
c = 1
if (
"Requested resampling engine is unavailable" in serr
or "output pad on Parsed_aresample_" in serr
):
t = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
c = 1
lines = serr.strip("\n").split("\n")
if len(lines) > 50:
lines = lines[:25] + ["[...]"] + lines[-25:]
txt = "\n".join(["ff: " + str(x) for x in lines])
if len(txt) > 5000:
txt = txt[:2500] + "...\nff: [...]\nff: ..." + txt[-2500:]
self.log(t + txt, c=c)
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
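The new failure path above also caps how much of FFmpeg's stderr reaches the log; as a standalone sketch, the truncation amounts to:

    def trim_ff_log(serr: str) -> str:
        # keep the first and last 25 lines, then cap the total size at ~5000 chars
        lines = serr.strip("\n").split("\n")
        if len(lines) > 50:
            lines = lines[:25] + ["[...]"] + lines[-25:]
        txt = "\n".join("ff: " + x for x in lines)
        if len(txt) > 5000:
            txt = txt[:2500] + "...\nff: [...]\nff: ..." + txt[-2500:]
        return txt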
def conv_spec(self, abspath, tpath): def conv_spec(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath) ret, _ = ffprobe(abspath)
if "ac" not in ret: if "ac" not in ret:
raise Exception("not audio") raise Exception("not audio")
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]" fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
fc = fc.format("" if self.args.th_ff_swr else ":resampler=soxr")
if self.args.th_ff_swr:
fco = ":filter_size=128:cutoff=0.877"
else:
fco = ":resampler=soxr"
fc = fc.format(fco)
# fmt: off # fmt: off
cmd = [ cmd = [
@@ -392,7 +481,7 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
self._run_ff(cmd) self._run_ff(cmd)
def conv_opus(self, abspath, tpath): def conv_opus(self, abspath: str, tpath: str) -> None:
if self.args.no_acode: if self.args.no_acode:
raise Exception("disabled in server config") raise Exception("disabled in server config")
@@ -400,23 +489,47 @@ class ThumbSrv(object):
if "ac" not in ret: if "ac" not in ret:
raise Exception("not audio") raise Exception("not audio")
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd)
if want_caf:
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath if src_opus else tmp_opus),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"copy",
b"-f", b"caf",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
def poke(self, tdir): def poke(self, tdir: str) -> None:
if not self.poke_cd.poke(tdir): if not self.poke_cd.poke(tdir):
return return
@@ -428,7 +541,7 @@ class ThumbSrv(object):
except: except:
pass pass
def cleaner(self): def cleaner(self) -> None:
interval = self.args.th_clean interval = self.args.th_clean
while True: while True:
time.sleep(interval) time.sleep(interval)
@@ -443,26 +556,26 @@ class ThumbSrv(object):
self.log("\033[Jcln ok; rm {} dirs".format(ndirs)) self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
def clean(self, histpath): def clean(self, histpath: str) -> int:
ret = 0 ret = 0
for cat in ["th", "ac"]: for cat in ["th", "ac"]:
ret += self._clean(histpath, cat, None) ret += self._clean(histpath, cat, "")
return ret return ret
def _clean(self, histpath, cat, thumbpath): def _clean(self, histpath: str, cat: str, thumbpath: str) -> int:
if not thumbpath: if not thumbpath:
thumbpath = os.path.join(histpath, cat) thumbpath = os.path.join(histpath, cat)
# self.log("cln {}".format(thumbpath)) # self.log("cln {}".format(thumbpath))
exts = ["jpg", "webp"] if cat == "th" else ["opus"] exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
maxage = getattr(self.args, cat + "_maxage") maxage = getattr(self.args, cat + "_maxage")
now = time.time() now = time.time()
prev_b64 = None prev_b64 = None
prev_fp = None prev_fp = ""
try: try:
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath) t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
ents = sorted(list(ents)) ents = sorted(list(t1))
except: except:
return 0 return 0
@@ -477,7 +590,7 @@ class ThumbSrv(object):
if age > maxage: if age > maxage:
with self.mutex: with self.mutex:
safe = True safe = True
for k in self.busy.keys(): for k in self.busy:
if k.lower().replace("\\", "/").startswith(cmp): if k.lower().replace("\\", "/").startswith(cmp):
safe = False safe = False
break break


@@ -1,28 +1,37 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import calendar
import os import os
import time import re
import threading import threading
from datetime import datetime import time
from operator import itemgetter from operator import itemgetter
from .__init__ import ANYWIN, unicode from .__init__ import ANYWIN, TYPE_CHECKING, unicode
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
from .bos import bos from .bos import bos
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
from .util import HAVE_SQLITE3, Pebkac, absreal, gen_filekey, min_ex, quotep, s3dec
if HAVE_SQLITE3:
import sqlite3
try: try:
HAVE_SQLITE3 = True from pathlib import Path
import sqlite3
except: except:
HAVE_SQLITE3 = False pass
try:
from typing import Any, Optional, Union
except:
pass
if TYPE_CHECKING:
from .httpconn import HttpConn
class U2idx(object): class U2idx(object):
def __init__(self, conn): def __init__(self, conn: "HttpConn") -> None:
self.log_func = conn.log_func self.log_func = conn.log_func
self.asrv = conn.asrv self.asrv = conn.asrv
self.args = conn.args self.args = conn.args
@@ -32,17 +41,21 @@ class U2idx(object):
self.log("your python does not have sqlite3; searching will be disabled") self.log("your python does not have sqlite3; searching will be disabled")
return return
self.cur = {} self.active_id = ""
self.mem_cur = sqlite3.connect(":memory:") self.active_cur: Optional["sqlite3.Cursor"] = None
self.cur: dict[str, "sqlite3.Cursor"] = {}
self.mem_cur = sqlite3.connect(":memory:").cursor()
self.mem_cur.execute(r"create table a (b text)") self.mem_cur.execute(r"create table a (b text)")
self.p_end = None self.p_end = 0.0
self.p_dur = 0 self.p_dur = 0.0
def log(self, msg, c=0): def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("u2idx", msg, c) self.log_func("u2idx", msg, c)
def fsearch(self, vols, body): def fsearch(
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
) -> list[dict[str, Any]]:
"""search by up2k hashlist""" """search by up2k hashlist"""
if not HAVE_SQLITE3: if not HAVE_SQLITE3:
return [] return []
@@ -51,15 +64,15 @@ class U2idx(object):
fhash = body["hash"] fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash) wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "where substr(w,1,16) = ? and w = ?" uq = "substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark] uv: list[Union[str, int]] = [wark[:16], wark]
try: try:
return self.run_query(vols, uq, uv)[0] return self.run_query(vols, uq, uv, True, False, 99999)[0]
except: except:
raise Pebkac(500, min_ex()) raise Pebkac(500, min_ex())
def get_cur(self, ptop): def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
if not HAVE_SQLITE3: if not HAVE_SQLITE3:
return None return None
@@ -76,28 +89,45 @@ class U2idx(object):
if not bos.path.exists(db_path): if not bos.path.exists(db_path):
return None return None
cur = sqlite3.connect(db_path, 2).cursor() cur = None
if ANYWIN:
uri = ""
try:
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
cur = sqlite3.connect(uri, 2, uri=True).cursor()
self.log("ro: {}".format(db_path))
except:
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
if not cur:
# on windows, this steals the write-lock from up2k.deferred_init --
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
cur = sqlite3.connect(db_path, 2).cursor()
self.log("opened {}".format(db_path))
self.cur[ptop] = cur self.cur[ptop] = cur
return cur return cur
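The fallback above first tries a read-only URI connection (mode=ro&nolock=1, mainly so the search path does not steal the writer's lock on Windows) before opening the database normally; a self-contained sketch of the same strategy:

    import sqlite3
    from pathlib import Path

    def open_cursor(db_path: str) -> "sqlite3.Cursor":
        # sketch of the read-only-first approach shown above
        try:
            uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
            return sqlite3.connect(uri, 2, uri=True).cursor()
        except Exception:
            # may take the write-lock, but always works on a local file
            return sqlite3.connect(db_path, 2).cursor()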
def search(self, vols, uq): def search(
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
) -> tuple[list[dict[str, Any]], list[str]]:
"""search by query params""" """search by query params"""
if not HAVE_SQLITE3: if not HAVE_SQLITE3:
return [] return [], []
q = "" q = ""
va = [] v: Union[str, int] = ""
joins = "" va: list[Union[str, int]] = []
have_up = False # query has up.* operands
have_mt = False
is_key = True is_key = True
is_size = False is_size = False
is_date = False is_date = False
field_end = "" # closing parenthesis or whatever
kw_key = ["(", ")", "and ", "or ", "not "] kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "] kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z_-]+$") ptn_mt = re.compile(r"^\.?[a-z_-]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $") ptn_lc = re.compile(r" (mt\.v) ([=<!>]+) \? \) $")
ptn_lcv = re.compile(r"[a-zA-Z]") ptn_lcv = re.compile(r"[a-zA-Z]")
while True: while True:
@@ -117,35 +147,47 @@ class U2idx(object):
if ok: if ok:
continue continue
v, uq = (uq + " ").split(" ", 1) if uq.startswith('"'):
v, uq = uq[1:].split('"', 1)
while v.endswith("\\"):
v2, uq = uq.split('"', 1)
v = v[:-1] + '"' + v2
uq = uq.strip()
else:
v, uq = (uq + " ").split(" ", 1)
v = v.replace('\\"', '"')
if is_key: if is_key:
is_key = False is_key = False
if v == "size": if v == "size":
v = "up.sz" v = "up.sz"
is_size = True is_size = True
have_up = True
elif v == "date": elif v == "date":
v = "up.mt" v = "up.mt"
is_date = True is_date = True
have_up = True
elif v == "path": elif v == "path":
v = "up.rd" v = "trim(?||up.rd,'/')"
va.append("\nrd")
have_up = True
elif v == "name": elif v == "name":
v = "up.fn" v = "up.fn"
have_up = True
elif v == "tags" or ptn_mt.match(v): elif v == "tags" or ptn_mt.match(v):
mt_ctr += 1 have_mt = True
mt_keycmp2 = "mt{}.w".format(mt_ctr) field_end = ") "
joins += "inner join mt mt{} on {} = {} ".format(
mt_ctr, mt_keycmp, mt_keycmp2
)
mt_keycmp = mt_keycmp2
if v == "tags": if v == "tags":
v = "mt{0}.v".format(mt_ctr) vq = "mt.v"
else: else:
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v) vq = "+mt.k = '{}' and mt.v".format(v)
v = "exists(select 1 from mt where mt.w = mtw and " + vq
else: else:
raise Pebkac(400, "invalid key [" + v + "]") raise Pebkac(400, "invalid key [" + v + "]")
@@ -158,18 +200,17 @@ class U2idx(object):
if is_date: if is_date:
is_date = False is_date = False
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ") v = re.sub(r"[tzTZ, ]+", " ", v).strip()
while " " in v:
v = v.replace(" ", " ")
for fmt in [ for fmt in [
"%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M", "%Y-%m-%d %H:%M",
"%Y-%m-%d %H", "%Y-%m-%d %H",
"%Y-%m-%d", "%Y-%m-%d",
"%Y-%m",
"%Y",
]: ]:
try: try:
v = datetime.strptime(v, fmt).timestamp() v = calendar.timegm(time.strptime(str(v), fmt))
break break
except: except:
pass pass
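The date filter now parses timestamps as UTC via time.strptime + calendar.timegm instead of datetime.strptime (which reads naive values in local time), and accepts coarser inputs like 2022-06 or 2022. Roughly:

    import calendar
    import re
    import time

    def parse_when(v: str) -> int:
        # sketch of the loop above; returns a unix timestamp (UTC)
        v = re.sub(r"[tzTZ, ]+", " ", v).strip()
        fmts = ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d", "%Y-%m", "%Y")
        for fmt in fmts:
            try:
                return calendar.timegm(time.strptime(v, fmt))
            except ValueError:
                pass
        raise ValueError("unparsable date: " + v)

    # parse_when("2022-06-20 01:31") == 1655688660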
@@ -191,28 +232,41 @@ class U2idx(object):
va.append(v) va.append(v)
is_key = True is_key = True
if field_end:
q += field_end
field_end = ""
# lowercase tag searches # lowercase tag searches
m = ptn_lc.search(q) m = ptn_lc.search(q)
if not m or not ptn_lcv.search(unicode(v)): zs = unicode(v)
if not m or not ptn_lcv.search(zs):
continue continue
va.pop() va.pop()
va.append(v.lower()) va.append(zs.lower())
q = q[: m.start()] q = q[: m.start()]
field, oper = m.groups() field, oper = m.groups()
if oper in ["=", "=="]: if oper in ["=", "=="]:
q += " {} like ? ".format(field) q += " {} like ? ) ".format(field)
else: else:
q += " lower({}) {} ? ".format(field, oper) q += " lower({}) {} ? ) ".format(field, oper)
try: try:
return self.run_query(vols, joins + "where " + q, va) return self.run_query(vols, q, va, have_up, have_mt, lim)
except Exception as ex: except Exception as ex:
raise Pebkac(500, repr(ex)) raise Pebkac(500, repr(ex))
def run_query(self, vols, uq, uv): def run_query(
done_flag = [] self,
vols: list[tuple[str, str, dict[str, Any]]],
uq: str,
uv: list[Union[str, int]],
have_up: bool,
have_mt: bool,
lim: int,
) -> tuple[list[dict[str, Any]], list[str]]:
done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format( self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident time.time(), threading.current_thread().ident
) )
@@ -228,16 +282,17 @@ class U2idx(object):
thr.start() thr.start()
if not uq or not uv: if not uq or not uv:
q = "select * from up" uq = "select * from up"
v = () uv = []
elif have_mt:
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
else: else:
q = "select up.* from up " + uq uq = "select up.* from up where " + uq
v = tuple(uv)
self.log("qs: {!r} {!r}".format(q, v)) self.log("qs: {!r} {!r}".format(uq, uv))
ret = [] ret = []
lim = 1000 lim = min(lim, int(self.args.srch_hits))
taglist = {} taglist = {}
for (vtop, ptop, flags) in vols: for (vtop, ptop, flags) in vols:
cur = self.get_cur(ptop) cur = self.get_cur(ptop)
@@ -246,13 +301,20 @@ class U2idx(object):
self.active_cur = cur self.active_cur = cur
vuv = []
for v in uv:
if v == "\nrd":
v = vtop + "/"
vuv.append(v)
sret = [] sret = []
fk = flags.get("fk") fk = flags.get("fk")
c = cur.execute(q, v) c = cur.execute(uq, tuple(vuv))
for hit in c: for hit in c:
w, ts, sz, rd, fn, ip, at = hit w, ts, sz, rd, fn, ip, at = hit[:7]
lim -= 1 lim -= 1
if lim <= 0: if lim < 0:
break break
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
@@ -281,7 +343,7 @@ class U2idx(object):
w = hit["w"] w = hit["w"]
del hit["w"] del hit["w"]
tags = {} tags = {}
q2 = "select k, v from mt where w = ? and k != 'x'" q2 = "select k, v from mt where w = ? and +k != 'x'"
for k, v2 in cur.execute(q2, (w,)): for k, v2 in cur.execute(q2, (w,)):
taglist[k] = True taglist[k] = True
tags[k] = v2 tags[k] = v2
@@ -292,7 +354,7 @@ class U2idx(object):
# print("[{}] {}".format(ptop, sret)) # print("[{}] {}".format(ptop, sret))
done_flag.append(True) done_flag.append(True)
self.active_id = None self.active_id = ""
# undupe hits from multiple metadata keys # undupe hits from multiple metadata keys
if len(ret) > 1: if len(ret) > 1:
@@ -306,11 +368,12 @@ class U2idx(object):
return ret, list(taglist.keys()) return ret, list(taglist.keys())
def terminator(self, identifier, done_flag): def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout): for _ in range(self.timeout):
time.sleep(1) time.sleep(1)
if done_flag: if done_flag:
return return
if identifier == self.active_id: if identifier == self.active_id:
assert self.active_cur
self.active_cur.connection.interrupt() self.active_cur.connection.interrupt()
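The terminator thread above is what enforces the search timeout: if the query is still the active one when the deadline passes, the sqlite connection is interrupted. A condensed sketch of the same pattern, with hypothetical names (copyparty tracks the active query by id rather than passing the cursor in):

    import sqlite3
    import threading
    import time

    def query_with_timeout(cur: "sqlite3.Cursor", q: str, args: tuple, timeout: int = 30):
        done: list[bool] = []

        def terminator() -> None:
            for _ in range(timeout):
                time.sleep(1)
                if done:
                    return
            cur.connection.interrupt()  # makes the running execute() raise OperationalError

        threading.Thread(target=terminator, daemon=True).start()
        try:
            return cur.execute(q, args).fetchall()
        finally:
            done.append(True)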

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -17,12 +17,11 @@ window.baguetteBox = (function () {
titleTag: false, titleTag: false,
async: false, async: false,
preload: 2, preload: 2,
animation: 'slideIn',
afterShow: null, afterShow: null,
afterHide: null, afterHide: null,
onChange: null, onChange: null,
}, },
overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose, overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
currentGallery = [], currentGallery = [],
currentIndex = 0, currentIndex = 0,
isOverlayVisible = false, isOverlayVisible = false,
@@ -30,13 +29,14 @@ window.baguetteBox = (function () {
touchFlag = false, // busy touchFlag = false, // busy
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i, re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
re_v = /.+\.(webm|mp4)(\?|$)/i, re_v = /.+\.(webm|mp4)(\?|$)/i,
anims = ['slideIn', 'fadeIn', 'none'],
data = {}, // all galleries data = {}, // all galleries
imagesElements = [], imagesElements = [],
documentLastFocus = null, documentLastFocus = null,
isFullscreen = false, isFullscreen = false,
vmute = false, vmute = false,
vloop = false, vloop = sread('vmode') == 'L',
vnext = false, vnext = sread('vmode') == 'C',
resume_mp = false; resume_mp = false;
var onFSC = function (e) { var onFSC = function (e) {
@@ -178,6 +178,7 @@ window.baguetteBox = (function () {
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' + '<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' + '<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' + '<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-anim" type="button" tt="a">-</button>' +
'<button id="bbox-rotl" type="button">↶</button>' + '<button id="bbox-rotl" type="button">↶</button>' +
'<button id="bbox-rotr" type="button">↷</button>' + '<button id="bbox-rotr" type="button">↷</button>' +
'<button id="bbox-tsel" type="button">sel</button>' + '<button id="bbox-tsel" type="button">sel</button>' +
@@ -193,6 +194,7 @@ window.baguetteBox = (function () {
btnPrev = ebi('bbox-prev'); btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next'); btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help'); btnHelp = ebi('bbox-help');
btnAnim = ebi('bbox-anim');
btnRotL = ebi('bbox-rotl'); btnRotL = ebi('bbox-rotl');
btnRotR = ebi('bbox-rotr'); btnRotR = ebi('bbox-rotr');
btnSel = ebi('bbox-tsel'); btnSel = ebi('bbox-tsel');
@@ -237,7 +239,7 @@ window.baguetteBox = (function () {
} }
function keyDownHandler(e) { function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing) if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
return; return;
var k = e.code + '', v = vid(); var k = e.code + '', v = vid();
@@ -282,6 +284,18 @@ window.baguetteBox = (function () {
tglsel(); tglsel();
else if (k == "KeyR") else if (k == "KeyR")
rotn(e.shiftKey ? -1 : 1); rotn(e.shiftKey ? -1 : 1);
else if (k == "KeyY")
dlpic();
}
function anim() {
var i = (anims.indexOf(options.animation) + 1) % anims.length,
o = options;
swrite('ganim', anims[i]);
options = {};
setOptions(o);
if (tt.en)
tt.show.bind(this)();
} }
function setVmode() { function setVmode() {
@@ -308,6 +322,7 @@ window.baguetteBox = (function () {
btnVmode.setAttribute('aria-label', msg); btnVmode.setAttribute('aria-label', msg);
btnVmode.setAttribute('tt', msg + tts); btnVmode.setAttribute('tt', msg + tts);
btnVmode.textContent = lbl; btnVmode.textContent = lbl;
swrite('vmode', lbl[0]);
v.loop = vloop v.loop = vloop
if (vloop && v.paused) if (vloop && v.paused)
@@ -329,19 +344,29 @@ window.baguetteBox = (function () {
tt.show.bind(this)(); tt.show.bind(this)();
} }
function tglsel() { function findfile() {
var thumb = currentGallery[currentIndex].imageElement, var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1].split('?')[0], name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getall(); files = msel.getall();
for (var a = 0; a < files.length; a++) for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name) if (vsplit(files[a].vp)[1] == name)
clmod(ebi(files[a].id).closest('tr'), 'sel', 't'); return [name, a, files, ebi(files[a].id)];
}
function tglsel() {
var o = findfile()[3];
clmod(o.closest('tr'), 'sel', 't');
msel.selui(); msel.selui();
selbg(); selbg();
} }
function dlpic() {
var url = findfile()[3].href;
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
dl_file(url);
}
function selbg() { function selbg() {
var img = vidimg(), var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement, thumb = currentGallery[currentIndex].imageElement,
@@ -397,6 +422,7 @@ window.baguetteBox = (function () {
bind(btnClose, 'click', hideOverlay); bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode); bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp); bind(btnHelp, 'click', halp);
bind(btnAnim, 'click', anim);
bind(btnRotL, 'click', rotl); bind(btnRotL, 'click', rotl);
bind(btnRotR, 'click', rotr); bind(btnRotR, 'click', rotr);
bind(btnSel, 'click', tglsel); bind(btnSel, 'click', tglsel);
@@ -414,6 +440,7 @@ window.baguetteBox = (function () {
unbind(btnClose, 'click', hideOverlay); unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode); unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp); unbind(btnHelp, 'click', halp);
unbind(btnAnim, 'click', anim);
unbind(btnRotL, 'click', rotl); unbind(btnRotL, 'click', rotl);
unbind(btnRotR, 'click', rotr); unbind(btnRotR, 'click', rotr);
unbind(btnSel, 'click', tglsel); unbind(btnSel, 'click', tglsel);
@@ -459,7 +486,12 @@ window.baguetteBox = (function () {
if (typeof newOptions[item] !== 'undefined') if (typeof newOptions[item] !== 'undefined')
options[item] = newOptions[item]; options[item] = newOptions[item];
} }
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
btnAnim.setAttribute('tt', 'animation: ' + an);
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .3s ease' :
options.animation === 'slideIn' ? '' : 'none'); options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
@@ -520,6 +552,7 @@ window.baguetteBox = (function () {
if (overlay.style.display === 'none') if (overlay.style.display === 'none')
return; return;
sethash('');
unbind(document, 'keydown', keyDownHandler); unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler); unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC); unbind(document, 'fullscreenchange', onFSC);
@@ -806,7 +839,7 @@ window.baguetteBox = (function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)' : slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset; slider.style.left = offset;
slider.style.opacity = 1; slider.style.opacity = 1;
}, 400); }, 100);
} else { } else {
xform ? xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' : slider.style.transform = 'translate3d(' + offset + ',0,0)' :

File diff suppressed because it is too large


@@ -6,6 +6,7 @@
<title>⇆🎉 {{ title }}</title> <title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
{%- if css %} {%- if css %}
@@ -34,6 +35,7 @@
<input type="file" name="f" multiple /><br /> <input type="file" name="f" multiple /><br />
<input type="submit" value="start upload"> <input type="submit" value="start upload">
</form> </form>
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
</div> </div>
<div id="op_mkdir" class="opview opbox act"> <div id="op_mkdir" class="opview opbox act">
@@ -66,7 +68,7 @@
<div id="op_cfg" class="opview opbox opwide"></div> <div id="op_cfg" class="opview opbox opwide"></div>
<h1 id="path"> <h1 id="path">
<a href="#" id="entree" tt="show navpane (directory tree sidebar)$NHotkey: B">🌲</a> <a href="#" id="entree">🌲</a>
{%- for n in vpnodes %} {%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a> <a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %} {%- endfor %}
@@ -118,7 +120,7 @@
<div id="epi" class="logue">{{ logues[1] }}</div> <div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="/?h">control-panel</a></h2> <h2><a href="/?h" id="goh">control-panel</a></h2>
<a href="#" id="repl">π</a> <a href="#" id="repl">π</a>
@@ -133,6 +135,10 @@
<script> <script>
var acct = "{{ acct }}", var acct = "{{ acct }}",
perms = {{ perms }}, perms = {{ perms }},
themes = {{ themes }},
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
lang = "{{ lang }}",
def_hcols = {{ def_hcols|tojson }}, def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }}, have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }}, have_tags_idx = {{ have_tags_idx|tojson }},
@@ -141,13 +147,17 @@
have_del = {{ have_del|tojson }}, have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }}, have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }}, have_zip = {{ have_zip|tojson }},
turbolvl = {{ turbolvl|tojson }},
have_emp = {{ have_emp|tojson }},
txt_ext = "{{ txt_ext }}", txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %} {% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }}; readme = {{ readme|tojson }},
ls0 = {{ ls0|tojson }};
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark"); document.documentElement.className = localStorage.theme || dtheme;
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script> <script src="/.cpr/browser.js?_={{ ts }}"></script>
<script src="/.cpr/up2k.js?_={{ ts }}"></script> <script src="/.cpr/up2k.js?_={{ ts }}"></script>
{%- if js %} {%- if js %}

File diff suppressed because it is too large


@@ -6,6 +6,7 @@
<title>{{ title }}</title> <title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<style> <style>
html{font-family:sans-serif} html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px} td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
@@ -44,7 +45,9 @@
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr> <tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
{%- for f in files %} {%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr> <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{
'&' + url_suf[1:] if url_suf[:1] == '?' and '?' in f.href else url_suf
}}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %} {%- endfor %}
</tbody> </tbody>


@@ -161,7 +161,7 @@ blink {
height: 1.05em; height: 1.05em;
margin: -.2em .3em -.2em -.4em; margin: -.2em .3em -.2em -.4em;
display: inline-block; display: inline-block;
border: 1px solid rgba(0,0,0,0.2); border: 1px solid rgba(154,154,154,0.6);
border-width: .2em .2em 0 0; border-width: .2em .2em 0 0;
transform: rotate(45deg); transform: rotate(45deg);
} }
@@ -219,48 +219,45 @@ blink {
html.dark, html.z,
html.dark body { html.z body {
background: #222; background: #222;
color: #ccc; color: #ccc;
} }
html.dark #toc a { html.z #toc a {
color: #ccc; color: #ccc;
border-left: .4em solid #444; border-left: .4em solid #444;
border-bottom: .1em solid #333; border-bottom: .1em solid #333;
} }
html.dark #toc a.act { html.z #toc a.act {
color: #fff; color: #fff;
border-left: .4em solid #3ad; border-left: .4em solid #3ad;
} }
html.dark #toc li { html.z #toc li {
border-width: 0; border-width: 0;
} }
html.dark #mn a:not(:last-child)::after { html.z #mn a {
border-color: rgba(255,255,255,0.3);
}
html.dark #mn a {
color: #ccc; color: #ccc;
} }
html.dark #mn { html.z #mn {
border-bottom: 1px solid #333; border-bottom: 1px solid #333;
} }
html.dark #mn, html.z #mn,
html.dark #mh { html.z #mh {
background: #222; background: #222;
} }
html.dark #mh a { html.z #mh a {
color: #ccc; color: #ccc;
background: none; background: none;
} }
html.dark #mh a:hover { html.z #mh a:hover {
background: #333; background: #333;
color: #fff; color: #fff;
} }
html.dark #toolsbox { html.z #toolsbox {
background: #222; background: #222;
} }
html.dark #toolsbox.open { html.z #toolsbox.open {
box-shadow: 0 .2em .2em #069; box-shadow: 0 .2em .2em #069;
border-radius: 0 0 .4em .4em; border-radius: 0 0 .4em .4em;
} }
@@ -307,24 +304,24 @@ blink {
} }
html.dark #toc { html.z #toc {
background: #282828; background: #282828;
border-top: 1px solid #2c2c2c; border-top: 1px solid #2c2c2c;
box-shadow: 0 0 1em #181818; box-shadow: 0 0 1em #181818;
} }
html.dark #toc, html.z #toc,
html.dark #mw { html.z #mw {
scrollbar-color: #b80 #282828; scrollbar-color: #b80 #282828;
} }
html.dark #toc::-webkit-scrollbar-track { html.z #toc::-webkit-scrollbar-track {
background: #282828; background: #282828;
} }
html.dark #toc::-webkit-scrollbar { html.z #toc::-webkit-scrollbar {
background: #282828; background: #282828;
width: .8em; width: .8em;
} }
html.dark #toc::-webkit-scrollbar-thumb { html.z #toc::-webkit-scrollbar-thumb {
background: #b80; background: #b80;
} }
} }
@@ -431,17 +428,17 @@ blink {
} }
html.dark .mdo a { html.z .mdo a {
color: #000; color: #000;
} }
html.dark .mdo pre, html.z .mdo pre,
html.dark .mdo code { html.z .mdo code {
color: #240; color: #240;
} }
html.dark .mdo p>em, html.z .mdo p>em,
html.dark .mdo li>em, html.z .mdo li>em,
html.dark .mdo td>em { html.z .mdo td>em {
color: #940; color: #940;
} }
} }


@@ -3,6 +3,7 @@
<title>📝🎉 {{ title }}</title> <title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7"> <meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}"> <link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}"> <link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
{%- if edit %} {%- if edit %}
@@ -10,7 +11,7 @@
{%- endif %} {%- endif %}
</head> </head>
<body> <body>
<div id="mn">navbar</div> <div id="mn"></div>
<div id="mh"> <div id="mh">
<a id="lightswitch" href="#">go dark</a> <a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a> <a id="navtoggle" href="#">hide nav</a>
@@ -126,30 +127,31 @@ write markdown (most html is 🙆 too)
<script> <script>
var last_modified = {{ lastmod }}; var last_modified = {{ lastmod }},
have_emp = {{ have_emp|tojson }};
var md_opt = { var md_opt = {
link_md_as_html: false, link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }} modpoll_freq: {{ md_chk_rate }}
}; };
(function () { (function () {
var l = localStorage, var l = localStorage,
drk = l.lightmode != 1, drk = l.light != 1,
btn = document.getElementById("lightswitch"), btn = document.getElementById("lightswitch"),
f = function (e) { f = function (e) {
if (e) { e.preventDefault(); drk = !drk; } if (e) { e.preventDefault(); drk = !drk; }
document.documentElement.setAttribute("class", drk? "dark":"light"); document.documentElement.className = drk? "z":"y";
btn.innerHTML = "go " + (drk ? "light":"dark"); btn.innerHTML = "go " + (drk ? "light":"dark");
l.lightmode = drk? 0:1; l.light = drk? 0:1;
}; };
btn.onclick = f; btn.onclick = f;
f(); f();
})(); })();
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script> <script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/md.js?_={{ ts }}"></script> <script src="/.cpr/md.js?_={{ ts }}"></script>
{%- if edit %} {%- if edit %}


@@ -20,10 +20,6 @@ var dbg = function () { };
// dbg = console.log // dbg = console.log
// plugins
var md_plug = {};
// dodge browser issues // dodge browser issues
(function () { (function () {
var ua = navigator.userAgent; var ua = navigator.userAgent;
@@ -39,20 +35,14 @@ var md_plug = {};
// add navbar // add navbar
(function () { (function () {
var n = document.location + '';
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
n[0] = 'top';
var loc = [];
var nav = [];
for (var a = 0; a < n.length; a++) {
if (a > 0)
loc.push(n[a]);
var dec = esc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}
dom_nav.innerHTML = nav.join('');
var parts = get_evpath().split('/'), link = '', o;
for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
link += parts[a] + (a < aa ? '/' : '');
o = mknod('a');
o.setAttribute('href', link);
o.textContent = uricom_dec(parts[a])[0] || 'top';
dom_nav.appendChild(o);
}
})(); })();
@@ -91,13 +81,13 @@ function copydom(src, dst, lv) {
var rpl = []; var rpl = [];
for (var a = sc.length - 1; a >= 0; a--) { for (var a = sc.length - 1; a >= 0; a--) {
var st = sc[a].tagName, var st = sc[a].tagName || sc[a].nodeType,
dt = dc[a].tagName; dt = dc[a].tagName || dc[a].nodeType;
if (st !== dt) { if (st !== dt) {
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt); dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
rpl.push(a); dst.innerHTML = src.innerHTML;
continue; return;
} }
var sa = sc[a].attributes || [], var sa = sc[a].attributes || [],
@@ -146,8 +136,11 @@ function copydom(src, dst, lv) {
// repl is reversed; build top-down // repl is reversed; build top-down
var nbytes = 0; var nbytes = 0;
for (var a = rpl.length - 1; a >= 0; a--) { for (var a = rpl.length - 1; a >= 0; a--) {
var html = sc[rpl[a]].outerHTML; var i = rpl[a],
dc[rpl[a]].outerHTML = html; prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
var html = sc[i][prop];
dc[i][prop] = html;
nbytes += html.length; nbytes += html.length;
} }
if (nbytes > 0) if (nbytes > 0)
@@ -163,7 +156,7 @@ function copydom(src, dst, lv) {
} }
function md_plug_err(ex, js) { md_plug_err = function (ex, js) {
qsr('#md_errbox'); qsr('#md_errbox');
if (!ex) if (!ex)
return; return;
@@ -200,50 +193,12 @@ function md_plug_err(ex, js) {
} }
function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins)
return md_text;
var find = '\n```copyparty_' + plug_type + '\n';
var ofs = md_text.indexOf(find);
if (ofs === -1)
return md_text;
var ofs2 = md_text.indexOf('\n```', ofs + 1);
if (ofs2 == -1)
return md_text;
var js = md_text.slice(ofs + find.length, ofs2 + 1);
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;';
try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) {
x['ctor']();
delete x['ctor'];
}
md_plug[plug_type] = [x, js];
}
return md;
}
function convert_markdown(md_text, dest_dom) { function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, ''); md_text = md_text.replace(/\r/g, '');
md_plug_err(null); md_plug_err(null);
md_text = load_plug(md_text, 'pre'); md_text = load_md_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post'); md_text = load_md_plug(md_text, 'post');
var marked_opts = { var marked_opts = {
//headerPrefix: 'h-', //headerPrefix: 'h-',
@@ -251,12 +206,12 @@ function convert_markdown(md_text, dest_dom) {
gfm: true gfm: true
}; };
var ext = md_plug['pre']; var ext = md_plug.pre;
if (ext) if (ext)
Object.assign(marked_opts, ext[0]); Object.assign(marked_opts, ext[0]);
try { try {
var md_html = marked(md_text, marked_opts); var md_html = marked.parse(md_text, marked_opts);
} }
catch (ex) { catch (ex) {
if (ext) if (ext)
@@ -281,7 +236,7 @@ function convert_markdown(md_text, dest_dom) {
if (!txt) if (!txt)
nodes[a].textContent = href; nodes[a].textContent = href;
else if (href !== txt) else if (href !== txt)
nodes[a].setAttribute('class', 'vis'); nodes[a].className = 'vis';
} }
// todo-lists (should probably be a marked extension) // todo-lists (should probably be a marked extension)
@@ -297,7 +252,7 @@ function convert_markdown(md_text, dest_dom) {
var clas = done ? 'done' : 'pend'; var clas = done ? 'done' : 'pend';
var char = done ? 'Y' : 'N'; var char = done ? 'Y' : 'N';
dom_li.setAttribute('class', 'task-list-item'); dom_li.className = 'task-list-item';
dom_li.style.listStyleType = 'none'; dom_li.style.listStyleType = 'none';
var html = dom_li.innerHTML; var html = dom_li.innerHTML;
dom_li.innerHTML = dom_li.innerHTML =
@@ -352,7 +307,7 @@ function convert_markdown(md_text, dest_dom) {
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>'; el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
} }
ext = md_plug['post']; ext = md_plug.post;
if (ext && ext[0].render) if (ext && ext[0].render)
try { try {
ext[0].render(md_dom); ext[0].render(md_dom);
@@ -471,11 +426,11 @@ function init_toc() {
for (var a = 0; a < anchors.length; a++) { for (var a = 0; a < anchors.length; a++) {
if (anchors[a].active) { if (anchors[a].active) {
anchors[a].active = false; anchors[a].active = false;
links[a].setAttribute('class', ''); links[a].className = '';
} }
} }
anchors[hit].active = true; anchors[hit].active = true;
links[hit].setAttribute('class', 'act'); links[hit].className = 'act';
} }
var pane_height = parseInt(getComputedStyle(dom_toc).height); var pane_height = parseInt(getComputedStyle(dom_toc).height);


@@ -61,7 +61,7 @@
position: relative; position: relative;
scrollbar-color: #eb0 #f7f7f7; scrollbar-color: #eb0 #f7f7f7;
} }
html.dark #mt { html.z #mt {
color: #eee; color: #eee;
background: #222; background: #222;
border: 1px solid #777; border: 1px solid #777;
@@ -77,7 +77,7 @@ html.dark #mt {
background: #f97; background: #f97;
border-radius: .15em; border-radius: .15em;
} }
html.dark #save.force-save { html.z #save.force-save {
color: #fca; color: #fca;
background: #720; background: #720;
} }
@@ -102,7 +102,7 @@ html.dark #save.force-save {
#helpclose { #helpclose {
display: block; display: block;
} }
html.dark #helpbox { html.z #helpbox {
box-shadow: 0 .5em 2em #444; box-shadow: 0 .5em 2em #444;
background: #222; background: #222;
border: 1px solid #079; border: 1px solid #079;

View File

@@ -144,16 +144,16 @@ redraw = (function () {
map_pre = genmap(dom_pre, map_pre); map_pre = genmap(dom_pre, map_pre);
} }
function setsbs() { function setsbs() {
dom_wrap.setAttribute('class', ''); dom_wrap.className = '';
dom_swrap.setAttribute('class', ''); dom_swrap.className = '';
onresize(); onresize();
} }
function modetoggle() { function modetoggle() {
var mode = dom_nsbs.innerHTML; var mode = dom_nsbs.innerHTML;
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor'; dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
mode += ' single'; mode += ' single';
dom_wrap.setAttribute('class', mode); dom_wrap.className = mode;
dom_swrap.setAttribute('class', mode); dom_swrap.className = mode;
onresize(); onresize();
} }
@@ -255,10 +255,10 @@ function Modpoll() {
console.log('modpoll...'); console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now(); var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest(); var xhr = new XHR();
xhr.open('GET', url, true); xhr.open('GET', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.onreadystatechange = r.cb; xhr.onload = xhr.onerror = r.cb;
xhr.send(); xhr.send();
}; };
@@ -268,9 +268,6 @@ function Modpoll() {
return; return;
} }
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) { if (this.status !== 200) {
console.log('modpoll err ' + this.status + ": " + this.responseText); console.log('modpoll err ' + this.status + ": " + this.responseText);
return; return;
@@ -309,7 +306,7 @@ var modpoll = new Modpoll();
window.onbeforeunload = function (e) { window.onbeforeunload = function (e) {
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0) if ((ebi("save").className + '').indexOf('disabled') >= 0)
return; //nice (todo) return; //nice (todo)
e.preventDefault(); //ff e.preventDefault(); //ff
@@ -321,7 +318,7 @@ window.onbeforeunload = function (e) {
function save(e) { function save(e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
var save_btn = ebi("save"), var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + ''; save_cls = save_btn.className + '';
if (save_cls.indexOf('disabled') >= 0) if (save_cls.indexOf('disabled') >= 0)
return toast.inf(2, "no changes"); return toast.inf(2, "no changes");
@@ -336,10 +333,10 @@ function save(e) {
fd.append("body", txt); fd.append("body", txt);
var url = (document.location + '').split('?')[0]; var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest(); var xhr = new XHR();
xhr.open('POST', url, true); xhr.open('POST', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.onreadystatechange = save_cb; xhr.onload = xhr.onerror = save_cb;
xhr.btn = save_btn; xhr.btn = save_btn;
xhr.txt = txt; xhr.txt = txt;
@@ -356,9 +353,6 @@ function save(e) {
} }
function save_cb() { function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, "")); return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
@@ -397,10 +391,10 @@ function save_cb() {
function run_savechk(lastmod, txt, btn, ntry) { function run_savechk(lastmod, txt, btn, ntry) {
// download the saved doc from the server and compare // download the saved doc from the server and compare
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now(); var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest(); var xhr = new XHR();
xhr.open('GET', url, true); xhr.open('GET', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.onreadystatechange = savechk_cb; xhr.onload = xhr.onerror = savechk_cb;
xhr.lastmod = lastmod; xhr.lastmod = lastmod;
xhr.txt = txt; xhr.txt = txt;
xhr.btn = btn; xhr.btn = btn;
@@ -409,9 +403,6 @@ function run_savechk(lastmod, txt, btn, ntry) {
} }
function savechk_cb() { function savechk_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, "")); return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
@@ -678,7 +669,7 @@ function reLastIndexOf(txt, ptn, end) {
// table formatter // table formatter
function fmt_table(e) { function fmt_table(e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
//dom_tbox.setAttribute('class', ''); //dom_tbox.className = '';
var txt = dom_src.value, var txt = dom_src.value,
ofs = dom_src.selectionStart, ofs = dom_src.selectionStart,
@@ -829,7 +820,7 @@ function fmt_table(e) {
// show unicode // show unicode
function mark_uni(e) { function mark_uni(e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
dom_tbox.setAttribute('class', ''); dom_tbox.className = '';
var txt = dom_src.value, var txt = dom_src.value,
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'), ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
@@ -989,14 +980,14 @@ var set_lno = (function () {
ebi('tools').onclick = function (e) { ebi('tools').onclick = function (e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
var is_open = dom_tbox.getAttribute('class') != 'open'; var is_open = dom_tbox.className != 'open';
dom_tbox.setAttribute('class', is_open ? 'open' : ''); dom_tbox.className = is_open ? 'open' : '';
}; };
ebi('help').onclick = function (e) { ebi('help').onclick = function (e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
dom_tbox.setAttribute('class', ''); dom_tbox.className = '';
var dom = ebi('helpbox'); var dom = ebi('helpbox');
var dtxt = dom.getElementsByTagName('textarea'); var dtxt = dom.getElementsByTagName('textarea');


@@ -84,24 +84,24 @@ html .editor-toolbar>button.save.force-save {
/* darkmode */ /* darkmode */
html.dark .mdo, html.z .mdo,
html.dark .CodeMirror { html.z .CodeMirror {
border-color: #222; border-color: #222;
} }
html.dark, html.z,
html.dark body, html.z body,
html.dark .CodeMirror { html.z .CodeMirror {
background: #222; background: #222;
color: #ccc; color: #ccc;
} }
html.dark .CodeMirror-cursor { html.z .CodeMirror-cursor {
border-color: #fff; border-color: #fff;
} }
html.dark .CodeMirror-selected { html.z .CodeMirror-selected {
box-shadow: 0 0 1px #0cf inset; box-shadow: 0 0 1px #0cf inset;
} }
html.dark .CodeMirror-selected, html.z .CodeMirror-selected,
html.dark .CodeMirror-selectedtext { html.z .CodeMirror-selectedtext {
border-radius: .1em; border-radius: .1em;
background: #246; background: #246;
color: #fff; color: #fff;
@@ -109,37 +109,37 @@ html.dark .CodeMirror-selectedtext {
html.dark #mn a { html.z #mn a {
color: #ccc; color: #ccc;
} }
html.dark #mn a:not(:last-child):after { html.z #mn a:not(:last-child):after {
border-color: rgba(255,255,255,0.3); border-color: rgba(255,255,255,0.3);
} }
html.dark .editor-toolbar { html.z .editor-toolbar {
border-color: #2c2c2c; border-color: #2c2c2c;
background: #1c1c1c; background: #1c1c1c;
} }
html.dark .editor-toolbar>i.separator { html.z .editor-toolbar>i.separator {
border-left: 1px solid #444; border-left: 1px solid #444;
border-right: 1px solid #111; border-right: 1px solid #111;
} }
html.dark .editor-toolbar>button { html.z .editor-toolbar>button {
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1); margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
color: #aaa; color: #aaa;
} }
html.dark .editor-toolbar>button:hover { html.z .editor-toolbar>button:hover {
color: #333; color: #333;
} }
html.dark .editor-toolbar>button.active { html.z .editor-toolbar>button.active {
color: #333; color: #333;
border-color: #ec1; border-color: #ec1;
background: #c90; background: #c90;
} }
html.dark .editor-toolbar::after, html.z .editor-toolbar::after,
html.dark .editor-toolbar::before { html.z .editor-toolbar::before {
background: none; background: none;
} }
@@ -150,6 +150,6 @@ html.dark .editor-toolbar::before {
padding: 1em; padding: 1em;
background: #f7f7f7; background: #f7f7f7;
} }
html.dark .mdo { html.z .mdo {
background: #1c1c1c; background: #1c1c1c;
} }


@@ -3,6 +3,7 @@
<title>📝🎉 {{ title }}</title> <title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7"> <meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}"> <link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}"> <link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}"> <link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
@@ -24,27 +25,28 @@
<a href="#" id="repl">π</a> <a href="#" id="repl">π</a>
<script> <script>
var last_modified = {{ lastmod }}; var last_modified = {{ lastmod }},
have_emp = {{ have_emp|tojson }};
var md_opt = { var md_opt = {
link_md_as_html: false, link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }} modpoll_freq: {{ md_chk_rate }}
}; };
var lightswitch = (function () { var lightswitch = (function () {
var l = localStorage, var l = localStorage,
drk = l.lightmode != 1, drk = l.light != 1,
f = function (e) { f = function (e) {
if (e) drk = !drk; if (e) drk = !drk;
document.documentElement.setAttribute("class", drk? "dark":"light"); document.documentElement.className = drk? "z":"y";
l.lightmode = drk? 0:1; l.light = drk? 0:1;
}; };
f(); f();
return f; return f;
})(); })();
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script> <script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script> <script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="/.cpr/mde.js?_={{ ts }}"></script> <script src="/.cpr/mde.js?_={{ ts }}"></script>


@@ -114,10 +114,10 @@ function save(mde) {
fd.append("body", txt); fd.append("body", txt);
var url = (document.location + '').split('?')[0]; var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest(); var xhr = new XHR();
xhr.open('POST', url, true); xhr.open('POST', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.onreadystatechange = save_cb; xhr.onload = xhr.onerror = save_cb;
xhr.btn = save_btn; xhr.btn = save_btn;
xhr.mde = mde; xhr.mde = mde;
xhr.txt = txt; xhr.txt = txt;
@@ -133,9 +133,6 @@ function save(mde) {
} }
function save_cb() { function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, "")); return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
@@ -170,10 +167,10 @@ function save_cb() {
// download the saved doc from the server and compare // download the saved doc from the server and compare
var url = (document.location + '').split('?')[0] + '?raw'; var url = (document.location + '').split('?')[0] + '?raw';
var xhr = new XMLHttpRequest(); var xhr = new XHR();
xhr.open('GET', url, true); xhr.open('GET', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.onreadystatechange = save_chk; xhr.onload = xhr.onerror = save_chk;
xhr.btn = this.save_btn; xhr.btn = this.save_btn;
xhr.mde = this.mde; xhr.mde = this.mde;
xhr.txt = this.txt; xhr.txt = this.txt;
@@ -182,9 +179,6 @@ function save_cb() {
} }
function save_chk() { function save_chk() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, "")); return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));


@@ -2,48 +2,49 @@
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>copyparty</title> <title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}"> {{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head> </head>
<body> <body>
<div id="box"> <div id="box">
{%- if h1 %}
<h1>{{ h1 }}</h1>
{%- endif %}
{%- if h2 %}
<h2>{{ h2 }}</h2>
{%- endif %}
{%- if p %}
<p>{{ p }}</p>
{%- endif %}
{%- if pre %} {%- if h1 %}
<pre>{{ pre }}</pre> <h1>{{ h1 }}</h1>
{%- endif %} {%- endif %}
{%- if html %} {%- if h2 %}
{{ html }} <h2>{{ h2 }}</h2>
{%- endif %} {%- endif %}
{%- if click %} {%- if p %}
<script>document.getElementsByTagName("a")[0].click()</script> <p>{{ p }}</p>
{%- endif %} {%- endif %}
</div>
{%- if redir %} {%- if pre %}
<script> <pre>{{ pre }}</pre>
setTimeout(function() { {%- endif %}
window.location.replace("{{ redir }}");
}, 1000); {%- if html %}
</script> {{ html }}
{%- endif %} {%- endif %}
{%- if click %}
<script>document.getElementsByTagName("a")[0].click()</script>
{%- endif %}
</div>
{%- if redir %}
<script>
setTimeout(function() {
window.location.replace("{{ redir }}");
}, 1000);
</script>
{%- endif %}
</body> </body>
</html> </html>


@@ -1,9 +1,7 @@
html, body, #wrap { html {
color: #333; color: #333;
background: #f7f7f7; background: #f7f7f7;
font-family: sans-serif; font-family: sans-serif;
}
html {
touch-action: manipulation; touch-action: manipulation;
} }
#wrap { #wrap {
@@ -37,7 +35,8 @@ a+a {
float: right; float: right;
margin: -.2em 0 0 .5em; margin: -.2em 0 0 .5em;
} }
.logout { .logout,
a.r {
color: #c04; color: #c04;
border-color: #c7a; border-color: #c7a;
} }
@@ -78,27 +77,32 @@ table {
margin-top: .3em; margin-top: .3em;
text-align: right; text-align: right;
} }
blockquote {
margin: 0 0 1.6em .6em;
padding: .7em 1em 0 1em;
border-left: .3em solid rgba(128,128,128,0.5);
border-radius: 0 0 0 .25em;
}
html.dark, html.z {
html.dark body,
html.dark #wrap {
background: #222; background: #222;
color: #ccc; color: #ccc;
} }
html.dark h1 { html.z h1 {
border-color: #777; border-color: #777;
} }
html.dark a { html.z a {
color: #fff; color: #fff;
background: #057; background: #057;
border-color: #37a; border-color: #37a;
} }
html.dark .logout { html.z .logout,
html.z a.r {
background: #804; background: #804;
border-color: #c28; border-color: #c28;
} }
html.dark input { html.z input {
color: #fff; color: #fff;
background: #626; background: #626;
border: 1px solid #c2c; border: 1px solid #c2c;
@@ -107,6 +111,12 @@ html.dark input {
padding: .5em .7em; padding: .5em .7em;
margin: 0 .5em 0 0; margin: 0 .5em 0 0;
} }
html.dark .num { html.z .num {
border-color: #777; border-color: #777;
} }
html.bz {
color: #bbd;
background: #11121d;
}


@@ -2,93 +2,106 @@
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>copyparty</title> <title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}"> {{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
</head> </head>
<body> <body>
<div id="wrap"> <div id="wrap">
<a href="/?h" class="refresh">refresh</a> <a id="a" href="/?h" class="refresh">refresh</a>
{%- if this.uname == '*' %} {%- if this.uname == '*' %}
<p>howdy stranger &nbsp; <small>(you're not logged in)</small></p> <p id="b">howdy stranger &nbsp; <small>(you're not logged in)</small></p>
{%- else %} {%- else %}
<a href="/?pw=x" class="logout">logout</a> <a id="c" href="/?pw=x" class="logout">logout</a>
<p>welcome back, <strong>{{ this.uname }}</strong></p> <p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
{%- endif %} {%- endif %}
{%- if msg %} {%- if msg %}
<div id="msg"> <div id="msg">
{{ msg }} {{ msg }}
</div> </div>
{%- endif %} {%- endif %}
{%- if avol %} {%- if avol %}
<h1>admin panel:</h1> <h1>admin panel:</h1>
<table><tr><td> <!-- hehehe --> <table><tr><td> <!-- hehehe -->
<table class="num"> <table class="num">
<tr><td>scanning</td><td>{{ scanning }}</td></tr> <tr><td>scanning</td><td>{{ scanning }}</td></tr>
<tr><td>hash-q</td><td>{{ hashq }}</td></tr> <tr><td>hash-q</td><td>{{ hashq }}</td></tr>
<tr><td>tag-q</td><td>{{ tagq }}</td></tr> <tr><td>tag-q</td><td>{{ tagq }}</td></tr>
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr> <tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
</table> </table>
</td><td> </td><td>
<table class="vols"> <table class="vols">
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead> <thead><tr><th>vol</th><th id="t">action</th><th>status</th></tr></thead>
<tbody> <tbody>
{% for mp in avol %} {% for mp in avol %}
{%- if mp in vstate and vstate[mp] %} {%- if mp in vstate and vstate[mp] %}
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr> <tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
{%- endif %} {%- endif %}
{% endfor %} {% endfor %}
</tbody> </tbody>
</table> </table>
</td></tr></table> </td></tr></table>
<div class="btns"> <div class="btns">
<a href="/?stack" tt="shows the state of all active threads">dump stack</a> <a id="d" href="/?stack" tt="shows the state of all active threads">dump stack</a>
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a> <a id="e" href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
</div> </div>
{%- endif %} {%- endif %}
{%- if rvol %} {%- if rvol %}
<h1>you can browse these:</h1> <h1 id="f">you can browse:</h1>
<ul> <ul>
{% for mp in rvol %} {% for mp in rvol %}
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li> <li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %} {% endfor %}
</ul> </ul>
{%- endif %} {%- endif %}
{%- if wvol %} {%- if wvol %}
<h1>you can upload to:</h1> <h1 id="g">you can upload to:</h1>
<ul> <ul>
{% for mp in wvol %} {% for mp in wvol %}
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li> <li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %} {% endfor %}
</ul> </ul>
{%- endif %} {%- endif %}
<h1>login for more:</h1> <h1 id="cc">client config:</h1>
<ul> <ul>
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}"> {% if k304 %}
<input type="hidden" name="act" value="login" /> <li><a id="h" href="/?k304=n">disable k304</a> (currently enabled)
<input type="password" name="cppwd" /> {%- else %}
<input type="submit" value="Login" /> <li><a id="i" href="/?k304=y" class="r">enable k304</a> (currently disabled)
</form> {% endif %}
</ul> <blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
</div>
<li><a id="k" href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
</ul>
<h1 id="l">login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />
</form>
</ul>
</div>
<a href="#" id="repl">π</a> <a href="#" id="repl">π</a>
<script> <script>
if (localStorage.lightmode != 1) var lang="{{ this.args.lang }}";
document.documentElement.setAttribute("class", "dark"); document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script>tt.init();</script> <script src="/.cpr/splash.js?_={{ ts }}"></script>
</body> </body>
</html> </html>

copyparty/web/splash.js Normal file

@@ -0,0 +1,44 @@
var Ls = {
"nor": {
"a1": "oppdater",
"b1": "halloien &nbsp; <small>(du er ikke logget inn)</small>",
"c1": "logg ut",
"d1": "tilstand",
"d2": "vis tilstanden til alle tråder",
"e1": "last innst.",
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer",
"f1": "du kan betrakte:",
"g1": "du kan laste opp til:",
"cc1": "klient-konfigurasjon",
"h1": "skru av k304",
"i1": "skru på k304",
"j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
"k1": "nullstill innstillinger",
"l1": "logg inn:",
"m1": "velkommen tilbake,",
"n1": "404: filen finnes ikke &nbsp;┐( ´ -`)┌",
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="/?h">gå hjem</a>',
"p1": "403: tilgang nektet &nbsp;~┻━┻",
"q1": 'du må logge inn eller <a href="/?h">gå hjem</a>',
"r1": "gå hjem",
".s1": "kartlegg",
"t1": "handling",
}
},
d = Ls[sread("lang") || lang];
for (var k in (d || {})) {
var f = k.slice(-1),
i = k.slice(0, -1),
o = QSA(i.startsWith('.') ? i : '#' + i);
for (var a = 0; a < o.length; a++)
if (f == 1)
o[a].innerHTML = d[k];
else if (f == 2)
o[a].setAttribute("tt", d[k]);
}
tt.init();
if (!ebi('c'))
QS('input[name="cppwd"]').focus();


@@ -11,6 +11,7 @@ html {
max-width: 34em; max-width: 34em;
max-width: min(34em, 90%); max-width: min(34em, 90%);
max-width: min(34em, calc(100% - 7em)); max-width: min(34em, calc(100% - 7em));
color: #ddd;
background: #333; background: #333;
border: 0 solid #777; border: 0 solid #777;
box-shadow: 0 .2em .5em #111; box-shadow: 0 .2em .5em #111;
@@ -74,6 +75,9 @@ html {
margin-right: -1.2em; margin-right: -1.2em;
padding-right: .7em; padding-right: .7em;
} }
#toast.r #toastb {
text-align: right;
}
#toast pre { #toast pre {
margin: 0; margin: 0;
} }
@@ -116,6 +120,20 @@ html {
#toast.err #toastc { #toast.err #toastc {
background: #d06; background: #d06;
} }
#tth {
color: #fff;
background: #111;
font-size: .9em;
padding: 0 .26em;
line-height: .97em;
border-radius: 1em;
position: absolute;
display: none;
}
#tth.act {
display: block;
z-index: 9001;
}
#tt.b { #tt.b {
padding: 0 2em; padding: 0 2em;
border-radius: .5em; border-radius: .5em;
@@ -133,7 +151,8 @@ html {
} }
#modalc code, #modalc code,
#tt code { #tt code {
background: #3c3c3c; color: #eee;
background: #444;
padding: .1em .3em; padding: .1em .3em;
border-top: 1px solid #777; border-top: 1px solid #777;
border-radius: .3em; border-radius: .3em;
@@ -142,22 +161,35 @@ html {
#tt em { #tt em {
color: #f6a; color: #f6a;
} }
html.light #tt { html.y #tt {
color: #333;
background: #fff; background: #fff;
border-color: #888 #000 #777 #000; border-color: #888 #000 #777 #000;
} }
html.light #tt, html.bz #tt {
html.light #toast { background: #202231;
border-color: #3b3f58;
}
html.y #tt,
html.y #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4); box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
} }
#modalc code, html.y #tt code {
html.light #tt code {
background: #060; background: #060;
color: #fff; color: #fff;
} }
html.light #tt em { #modalc code {
color: #060;
background: transparent;
border: 1px solid #ccc;
}
html.y #tt em {
color: #d38; color: #d38;
} }
html.y #tth {
color: #000;
background: #fff;
}
#modal { #modal {
position: fixed; position: fixed;
overflow: auto; overflow: auto;
@@ -249,24 +281,28 @@ html.light #tt em {
max-width: 24em; max-width: 24em;
} }
*:focus, *:focus,
*:focus+label,
#pctl *:focus, #pctl *:focus,
.btn:focus { .btn:focus {
box-shadow: 0 .1em .2em #fc0 inset; box-shadow: 0 .1em .2em #fc0 inset;
outline: #fc0 solid .1em;
border-radius: .2em; border-radius: .2em;
} }
html.light *:focus, html.y *:focus,
html.light #pctl *:focus, html.y *:focus+label,
html.light .btn:focus { html.y #pctl *:focus,
html.y .btn:focus {
box-shadow: 0 .1em .2em #037 inset; box-shadow: 0 .1em .2em #037 inset;
outline: #037 solid .1em;
} }
input[type="text"]:focus, input[type="text"]:focus,
input:not([type]):focus, input:not([type]):focus,
textarea:focus { textarea:focus {
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0; box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
} }
html.light input[type="text"]:focus, html.y input[type="text"]:focus,
html.light input:not([type]):focus, html.y input:not([type]):focus,
html.light textarea:focus { html.y textarea:focus {
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037; box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
} }
@@ -335,13 +371,22 @@ html.light textarea:focus {
} }
.mdo ul, .mdo ul,
.mdo ol { .mdo ol {
padding-left: 1em;
}
.mdo ul ul,
.mdo ul ol,
.mdo ol ul,
.mdo ol ol {
padding-left: 2em;
border-left: .3em solid #ddd; border-left: .3em solid #ddd;
} }
.mdo ul>li, .mdo ul>li {
.mdo ol>li {
margin: .7em 0; margin: .7em 0;
list-style-type: disc; list-style-type: disc;
} }
.mdo ol>li {
margin: .7em 0 .7em 2em;
}
.mdo strong { .mdo strong {
color: #000; color: #000;
} }
@@ -388,7 +433,7 @@ html.light textarea:focus {
overflow-wrap: break-word; overflow-wrap: break-word;
word-wrap: break-word; /*ie*/ word-wrap: break-word; /*ie*/
} }
html.light .mdo a, html.y .mdo a,
.mdo a { .mdo a {
color: #fff; color: #fff;
background: #39b; background: #39b;
@@ -417,48 +462,58 @@ html.light textarea:focus {
html.dark .mdo a { html.z .mdo a {
background: #057; background: #057;
} }
html.dark .mdo h1 a, html.dark .mdo h4 a, html.z .mdo h1 a, html.z .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a, html.z .mdo h2 a, html.z .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a { html.z .mdo h3 a, html.z .mdo h6 a {
color: inherit; color: inherit;
background: none; background: none;
} }
html.dark .mdo pre, html.z .mdo pre,
html.dark .mdo code { html.z .mdo code {
color: #8c0; color: #8c0;
background: #1a1a1a; background: #1a1a1a;
border: .07em solid #333; border: .07em solid #333;
} }
html.dark .mdo ul, html.z .mdo ul,
html.dark .mdo ol { html.z .mdo ol {
border-color: #444; border-color: #444;
} }
html.dark .mdo strong { html.z .mdo strong {
color: #fff; color: #fff;
} }
html.dark .mdo p>em, html.z .mdo p>em,
html.dark .mdo li>em, html.z .mdo li>em,
html.dark .mdo td>em { html.z .mdo td>em {
color: #f94; color: #f94;
border-color: #666; border-color: #666;
} }
html.dark .mdo h1 { html.z .mdo h1 {
background: #383838; background: #383838;
border-top: .4em solid #b80; border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c; border-bottom: .4em solid #4c4c4c;
} }
html.dark .mdo h2 { html.bz .mdo h1 {
background: #202231;
border: 1px solid #2d2f45;
border-width: 0 0 .4em 0;
}
html.z .mdo h2 {
background: #444; background: #444;
border-bottom: .22em solid #555; border-bottom: .22em solid #555;
} }
html.dark .mdo td, html.bz .mdo h2,
html.dark .mdo th { html.bz .mdo h3 {
background: transparent;
border-color: #3b3f58;
}
html.z .mdo td,
html.z .mdo th {
border-color: #444; border-color: #444;
} }
html.dark .mdo blockquote { html.z .mdo blockquote {
background: #282828; background: #282828;
border: .07em dashed #444; border: .07em dashed #444;
} }


@@ -135,7 +135,7 @@ function up2k_flagbus() {
} }
function U2pvis(act, btns) { function U2pvis(act, btns, uc) {
var r = this; var r = this;
r.act = act; r.act = act;
r.ctr = { "ok": 0, "ng": 0, "bz": 0, "q": 0 }; r.ctr = { "ok": 0, "ng": 0, "bz": 0, "q": 0 };
@@ -425,7 +425,9 @@ function U2pvis(act, btns) {
html.push(r.genrow(a, true).replace(/><td>/, "><td>b ")); html.push(r.genrow(a, true).replace(/><td>/, "><td>b "));
} }
} }
ebi('u2tab').tBodies[0].innerHTML = html.join('\n'); var el = ebi('u2tab');
el.tBodies[0].innerHTML = html.join('\n');
el.className = (uc.fsearch ? 'srch ' : 'up ') + r.act;
}; };
r.genrow = function (nfile, as_html) { r.genrow = function (nfile, as_html) {
@@ -525,13 +527,15 @@ function Donut(uc, st) {
} }
r.on = function (ya) { r.on = function (ya) {
r.fc = 99; r.fc = r.tc = 99;
r.eta = null; r.eta = null;
r.base = pos(); r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico'); optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
el = QS('#ops a .donut'); el = QS('#ops a .donut');
if (!ya) if (!ya) {
favico.upd(); favico.upd();
wintitle();
}
}; };
r.do = function () { r.do = function () {
if (!el) if (!el)
@@ -541,6 +545,11 @@ function Donut(uc, st) {
v = pos() - r.base, v = pos() - r.base,
ofs = el.style.strokeDashoffset = o - o * v / t; ofs = el.style.strokeDashoffset = o - o * v / t;
if (++r.tc >= 10) {
wintitle(f2f(v * 100 / t, 1) + '%, ' + r.eta + 's, ', true);
r.tc = 0;
}
if (favico.txt) { if (favico.txt) {
if (++r.fc < 10 && r.eta && r.eta > 99) if (++r.fc < 10 && r.eta && r.eta > 99)
return; return;
@@ -559,12 +568,12 @@ function Donut(uc, st) {
function fsearch_explain(n) { function fsearch_explain(n) {
if (n) if (n)
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"')); return toast.inf(60, L.ue_ro + (acct == '*' ? L.ue_nl : L.ue_la).format(acct));
if (bcfg_get('fsearch', false)) if (bcfg_get('fsearch', false))
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and then refresh\n\nsorry'); return toast.inf(60, L.ue_sr);
return toast.inf(60, 'refresh the page and try again, it should work now'); return toast.inf(60, L.ue_ta);
} }
@@ -578,19 +587,12 @@ function up2k_init(subtle) {
function unmodal() { function unmodal() {
ebi('u2notbtn').style.display = 'none'; ebi('u2notbtn').style.display = 'none';
ebi('u2btn').style.display = 'block';
ebi('u2conf').style.opacity = '1'; ebi('u2conf').style.opacity = '1';
ebi('u2btn').style.display = '';
ebi('u2notbtn').innerHTML = ''; ebi('u2notbtn').innerHTML = '';
} }
var suggest_up2k = 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better'; var suggest_up2k = L.u_su2k;
var shame = 'your browser <a href="https://www.chromium.org/blink/webcrypto">disables sha512</a> unless you <a href="' + (window.location + '').replace(':', 's:') + '">use https</a>',
is_https = (window.location + '').indexOf('https:') === 0;
if (is_https)
// chrome<37 firefox<34 edge<12 opera<24 safari<7
shame = 'your browser is impressively ancient';
function got_deps() { function got_deps() {
return subtle || window.asmCrypto || window.hashwasm; return subtle || window.asmCrypto || window.hashwasm;
@@ -599,15 +601,18 @@ function up2k_init(subtle) {
var loading_deps = false; var loading_deps = false;
function init_deps() { function init_deps() {
if (!loading_deps && !got_deps()) { if (!loading_deps && !got_deps()) {
var fn = 'sha512.' + sha_js + '.js'; var fn = 'sha512.' + sha_js + '.js',
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>'); m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
showmodal('<h1>loading ' + fn + '</h1>');
import_js('/.cpr/deps/' + fn, unmodal); import_js('/.cpr/deps/' + fn, unmodal);
if (is_https) if (is_https) {
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500 KiB/s at best'; // chrome<37 firefox<34 edge<12 opera<24 safari<7
else m = L.u_ancient;
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' + setmsg('');
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>'; }
ebi('u2foot').innerHTML = '<big>' + m + '</big>';
} }
loading_deps = true; loading_deps = true;
} }
@@ -617,11 +622,11 @@ function up2k_init(subtle) {
function setmsg(msg, type) { function setmsg(msg, type) {
if (msg !== undefined) { if (msg !== undefined) {
ebi('u2err').setAttribute('class', type); ebi('u2err').className = type;
ebi('u2err').innerHTML = msg; ebi('u2err').innerHTML = msg;
} }
else { else {
ebi('u2err').setAttribute('class', ''); ebi('u2err').className = '';
ebi('u2err').innerHTML = ''; ebi('u2err').innerHTML = '';
} }
if (msg == suggest_up2k) { if (msg == suggest_up2k) {
@@ -637,24 +642,8 @@ function up2k_init(subtle) {
return false; return false;
} }
ebi('u2nope').onclick = function (e) {
ev(e);
setmsg(suggest_up2k, 'msg');
goto('bup');
};
setmsg(suggest_up2k, 'msg'); setmsg(suggest_up2k, 'msg');
if (!String.prototype.format) {
String.prototype.format = function () {
var args = arguments;
return this.replace(/{(\d+)}/g, function (match, number) {
return typeof args[number] != 'undefined' ?
args[number] : match;
});
};
}
var parallel_uploads = icfg_get('nthread'), var parallel_uploads = icfg_get('nthread'),
uc = {}, uc = {},
fdom_ctr = 0, fdom_ctr = 0,
@@ -664,11 +653,12 @@ function up2k_init(subtle) {
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false); bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg); bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false); bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false); bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo, false);
bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false); bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null, false);
var st = { var st = {
"files": [], "files": [],
"seen": {},
"todo": { "todo": {
"head": [], "head": [],
"hash": [], "hash": [],
@@ -703,7 +693,7 @@ function up2k_init(subtle) {
}); });
} }
var pvis = new U2pvis("bz", '#u2cards'), var pvis = new U2pvis("bz", '#u2cards', uc),
donut = new Donut(uc, st); donut = new Donut(uc, st);
var bobslice = null; var bobslice = null;
@@ -711,7 +701,7 @@ function up2k_init(subtle) {
bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice; bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
if (!bobslice || !window.FileReader || !window.FileList) if (!bobslice || !window.FileReader || !window.FileList)
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1"); return un2k(L.u_ever);
var flag = false; var flag = false;
apply_flag_cfg(); apply_flag_cfg();
@@ -727,21 +717,20 @@ function up2k_init(subtle) {
if (++nenters <= 0) if (++nenters <= 0)
nenters = 1; nenters = 1;
//console.log(nenters, Date.now(), 'enter', this, e.target);
if (onover.bind(this)(e)) if (onover.bind(this)(e))
return true; return true;
var mup, up = QS('#up_zd'); var mup, up = QS('#up_zd');
var msr, sr = QS('#srch_zd'); var msr, sr = QS('#srch_zd');
if (!has(perms, 'write')) if (!has(perms, 'write'))
mup = 'you do not have write-access to this folder'; mup = L.u_ewrite;
if (!has(perms, 'read')) if (!has(perms, 'read'))
msr = 'you do not have read-access to this folder'; msr = L.u_eread;
if (!have_up2k_idx) if (!have_up2k_idx)
msr = 'file-search is not enabled in server config'; msr = L.u_enoi;
up.querySelector('span').textContent = mup || 'drop it here'; up.querySelector('span').textContent = mup || L.udt_drop;
sr.querySelector('span').textContent = msr || 'drop it here'; sr.querySelector('span').textContent = msr || L.udt_drop;
clmod(up, 'err', mup); clmod(up, 'err', mup);
clmod(sr, 'err', msr); clmod(sr, 'err', msr);
clmod(up, 'ok', !mup); clmod(up, 'ok', !mup);
@@ -749,12 +738,19 @@ function up2k_init(subtle) {
ebi('up_dz').setAttribute('err', mup || ''); ebi('up_dz').setAttribute('err', mup || '');
ebi('srch_dz').setAttribute('err', msr || ''); ebi('srch_dz').setAttribute('err', msr || '');
} }
function onoverb(e) {
// zones are alive; disable cuo2duo branch
document.body.ondragover = document.body.ondrop = null;
return onover.bind(this)(e);
}
function onover(e) { function onover(e) {
try { try {
var ok = false, dt = e.dataTransfer.types; var ok = false, dt = e.dataTransfer.types;
for (var a = 0; a < dt.length; a++) for (var a = 0; a < dt.length; a++)
if (dt[a] == 'Files') if (dt[a] == 'Files')
ok = true; ok = true;
else if (dt[a] == 'text/uri-list')
return true;
if (!ok) if (!ok)
return true; return true;
@@ -780,17 +776,20 @@ function up2k_init(subtle) {
clmod(ebi('drops'), 'vis'); clmod(ebi('drops'), 'vis');
clmod(ebi('up_dz'), 'hl'); clmod(ebi('up_dz'), 'hl');
clmod(ebi('srch_dz'), 'hl'); clmod(ebi('srch_dz'), 'hl');
// cuo2duo:
document.body.ondragover = onover;
document.body.ondrop = gotfile;
} }
//console.log(nenters, Date.now(), 'leave', this, e && e.target);
} }
document.body.ondragenter = ondrag; document.body.ondragenter = ondrag;
document.body.ondragleave = offdrag; document.body.ondragleave = offdrag;
document.body.ondragover = onover;
document.body.ondrop = gotfile;
var drops = [ebi('up_dz'), ebi('srch_dz')]; var drops = [ebi('up_dz'), ebi('srch_dz')];
for (var a = 0; a < 2; a++) { for (var a = 0; a < 2; a++) {
drops[a].ondragenter = ondrag; drops[a].ondragenter = ondrag;
drops[a].ondragover = onover; drops[a].ondragover = onoverb;
drops[a].ondragleave = offdrag; drops[a].ondragleave = offdrag;
drops[a].ondrop = gotfile; drops[a].ondrop = gotfile;
} }
@@ -800,7 +799,10 @@ function up2k_init(subtle) {
ev(e); ev(e);
nenters = 0; nenters = 0;
offdrag.bind(this)(); offdrag.bind(this)();
var dz = (this && this.getAttribute('id')); var dz = this && this.getAttribute('id');
if (!dz && e && e.clientY)
// cuo2duo fallback
dz = e.clientY < window.innerHeight / 2 ? 'up_dz' : 'srch_dz';
var err = this.getAttribute('err'); var err = this.getAttribute('err');
if (err) if (err)
@@ -954,22 +956,22 @@ function up2k_init(subtle) {
function gotallfiles(good_files, nil_files, bad_files) { function gotallfiles(good_files, nil_files, bad_files) {
var ntot = good_files.concat(nil_files, bad_files).length; var ntot = good_files.concat(nil_files, bad_files).length;
if (bad_files.length) { if (bad_files.length) {
var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot); var msg = L.u_badf.format(bad_files.length, ntot);
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++) for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
msg += '-- ' + bad_files[a][1] + '\n'; msg += '-- ' + bad_files[a][1] + '\n';
msg += '\nMaybe it works better if you select just one file'; msg += L.u_just1;
return modal.alert(msg, function () { return modal.alert(msg, function () {
gotallfiles(good_files, nil_files, []); gotallfiles(good_files, nil_files, []);
}); });
} }
if (nil_files.length) { if (nil_files.length) {
var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot); var msg = L.u_blankf.format(nil_files.length, ntot);
for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++) for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
msg += '-- ' + nil_files[a][1] + '\n'; msg += '-- ' + nil_files[a][1] + '\n';
msg += '\nMaybe it works better if you select just one file'; msg += L.u_just1;
return modal.confirm(msg, function () { return modal.confirm(msg, function () {
gotallfiles(good_files.concat(nil_files), [], []); gotallfiles(good_files.concat(nil_files), [], []);
}, function () { }, function () {
@@ -983,24 +985,23 @@ function up2k_init(subtle) {
return a < b ? -1 : a > b ? 1 : 0; return a < b ? -1 : a > b ? 1 : 0;
}); });
var msg = ['{0} these {1} files?<ul>'.format(uc.fsearch ? 'search' : 'upload', good_files.length)]; var msg = [L.u_asku.format(good_files.length, esc(get_vpath())) + '<ul>'];
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++) for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
msg.push('<li>' + esc(good_files[a][1]) + '</li>'); msg.push('<li>' + esc(good_files[a][1]) + '</li>');
if (uc.ask_up && !uc.fsearch) if (uc.ask_up && !uc.fsearch)
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null); return modal.confirm(msg.join('') + '</ul>', function () {
up_them(good_files);
toast.inf(15, L.u_unpt);
}, null);
up_them(good_files); up_them(good_files);
} }
function up_them(good_files) { function up_them(good_files) {
var seen = {}, var evpath = get_evpath(),
evpath = get_evpath(),
draw_each = good_files.length < 50; draw_each = good_files.length < 50;
for (var a = 0; a < st.files.length; a++)
seen[st.files[a].name + '\n' + st.files[a].size] = 1;
for (var a = 0; a < good_files.length; a++) { for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0], var fobj = good_files[a][0],
name = good_files[a][1], name = good_files[a][1],
@@ -1026,20 +1027,25 @@ function up2k_init(subtle) {
"bytes_uploaded": 0, "bytes_uploaded": 0,
"hash": [] "hash": []
}, },
key = entry.name + '\n' + entry.size; key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch;
if (uc.fsearch) if (uc.fsearch)
entry.srch = 1; entry.srch = 1;
if (seen[key]) try {
continue; if (st.seen[fdir][key])
continue;
}
catch (ex) {
st.seen[fdir] = {};
}
seen[key] = 1; st.seen[fdir][key] = 1;
pvis.addfile([ pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit( uc.fsearch ? esc(entry.name) : linksplit(
entry.purl + uricom_enc(entry.name)).join(' '), entry.purl + uricom_enc(entry.name)).join(' '),
'📐 hash', '📐 ' + L.u_hashing,
'' ''
], fobj.size, draw_each); ], fobj.size, draw_each);
@@ -1061,27 +1067,13 @@ function up2k_init(subtle) {
function more_one_file() { function more_one_file() {
fdom_ctr++; fdom_ctr++;
var elm = mknod('div'); var elm = mknod('div');
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr); elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" tabindex="-1" />'.format(fdom_ctr);
ebi('u2form').appendChild(elm); ebi('u2form').appendChild(elm);
ebi('file' + fdom_ctr).onchange = gotfile; ebi('file' + fdom_ctr).onchange = gotfile;
} }
more_one_file(); more_one_file();
function u2cleanup(e) { var etaref = 0, etaskip = 0, utw_minh = 0;
ev(e);
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.done && t.name) {
if (!qsr('#f' + t.n))
continue;
t.name = undefined;
}
}
}
ebi('u2cleanup').onclick = u2cleanup;
var etaref = 0, etaskip = 0, op_minh = 0;
function etafun() { function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length, var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length, nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1094,21 +1086,18 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1); //ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
var op = ebi('op_up2k'), var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0;
uff = ebi('u2footfoot'), if (utw_minh < minh || !utw_minh) {
minh = QS('#op_up2k.act') ? Math.max(op_minh, uff.offsetTop + uff.offsetHeight - op.offsetTop + 32) : 0; utw_minh = minh;
ebi('u2tabw').style.minHeight = utw_minh + 'px';
if (minh > op_minh || !op_minh) {
op_minh = minh;
op.style.minHeight = op_minh + 'px';
} }
if (!nhash) if (!nhash)
ebi('u2etah').innerHTML = 'Done ({0}, {1} files)'.format(humansize(st.bytes.hashed), pvis.ctr["ok"] + pvis.ctr["ng"]); ebi('u2etah').innerHTML = L.u_etadone.format(humansize(st.bytes.hashed), pvis.ctr["ok"] + pvis.ctr["ng"]);
if (!nsend && !nhash) if (!nsend && !nhash)
ebi('u2etau').innerHTML = ebi('u2etat').innerHTML = ( ebi('u2etau').innerHTML = ebi('u2etat').innerHTML = (
'Done ({0}, {1} files)'.format(humansize(st.bytes.uploaded), pvis.ctr["ok"] + pvis.ctr["ng"])); L.u_etadone.format(humansize(st.bytes.uploaded), pvis.ctr["ok"] + pvis.ctr["ng"]));
if (!st.busy.hash.length && !hashing_permitted()) if (!st.busy.hash.length && !hashing_permitted())
nhash = 0; nhash = 0;
@@ -1129,7 +1118,7 @@ function up2k_init(subtle) {
} }
if ((nhash || nsend) && !uc.fsearch) { if ((nhash || nsend) && !uc.fsearch) {
if (!st.bytes.finished) { if (!st.bytes.finished) {
ebi('u2etat').innerHTML = '(preparing to upload)'; ebi('u2etat').innerHTML = L.u_etaprep;
} }
else { else {
st.time.busy += td; st.time.busy += td;
@@ -1142,7 +1131,7 @@ function up2k_init(subtle) {
eta = Math.floor(rem / bps); eta = Math.floor(rem / bps);
if (t[a][1] < 1024 || t[a][3] < 0.1) { if (t[a][1] < 1024 || t[a][3] < 0.1) {
ebi(t[a][0]).innerHTML = '(preparing to upload)'; ebi(t[a][0]).innerHTML = L.u_etaprep;
continue; continue;
} }
@@ -1169,7 +1158,7 @@ function up2k_init(subtle) {
var t = st.todo.handshake[0], var t = st.todo.handshake[0],
cd = t.cooldown; cd = t.cooldown;
if (cd && cd - Date.now() > 0) if (cd && cd > Date.now())
return false; return false;
// keepalive or verify // keepalive or verify
@@ -1223,15 +1212,16 @@ function up2k_init(subtle) {
running = true; running = true;
while (true) { while (true) {
var now = Date.now(), var now = Date.now(),
is_busy = 0 != oldest_active = Math.min( // gzip take the wheel
st.todo.head.length + st.todo.head.length ? st.todo.head[0].n : st.files.length,
st.todo.hash.length + st.todo.hash.length ? st.todo.hash[0].n : st.files.length,
st.todo.handshake.length + st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length,
st.todo.upload.length + st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length,
st.busy.head.length + st.busy.head.length ? st.busy.head[0].n : st.files.length,
st.busy.hash.length + st.busy.hash.length ? st.busy.hash[0].n : st.files.length,
st.busy.handshake.length + st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length,
st.busy.upload.length; st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length),
is_busy = oldest_active < st.files.length;
if (was_busy && !is_busy) { if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) { for (var a = 0; a < st.files.length; a++) {
@@ -1251,7 +1241,7 @@ function up2k_init(subtle) {
} }
if (was_busy != is_busy) { if (was_busy != is_busy) {
was_busy = is_busy; st.is_busy = was_busy = is_busy;
window[(is_busy ? "add" : "remove") + window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy); "EventListener"]("beforeunload", warn_uploader_busy);
@@ -1259,28 +1249,25 @@ function up2k_init(subtle) {
donut.on(is_busy); donut.on(is_busy);
if (!is_busy) { if (!is_busy) {
var k = uc.fsearch ? 'searches' : 'uploads', var sr = uc.fsearch,
ks = uc.fsearch ? 'Search' : 'Upload',
tok = uc.fsearch ? 'successful (found on server)' : 'completed successfully',
tng = uc.fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
ok = pvis.ctr["ok"], ok = pvis.ctr["ok"],
ng = pvis.ctr["ng"], ng = pvis.ctr["ng"],
t = uc.ask_up ? 0 : 10; t = uc.ask_up ? 0 : 10;
if (ok && ng) if (ok && ng)
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng)); toast.warn(t, (sr ? L.ur_sm : L.ur_um).format(ok, ng));
else if (ok > 1) else if (ok > 1)
toast.ok(t, 'All {1} {0} {2}'.format(k, ok, tok)); toast.ok(t, (sr ? L.ur_aso : L.ur_auo).format(ok));
else if (ok) else if (ok)
toast.ok(t, '{0} {1}'.format(ks, tok)); toast.ok(t, sr ? L.ur_1so : L.ur_1uo);
else if (ng > 1) else if (ng > 1)
toast.err(t, 'All {1} {0} {2}'.format(k, ng, tng)); toast.err(t, (sr ? L.ur_asn : L.ur_aun).format(ng));
else if (ng) else if (ng)
toast.err(t, '{0} {1}'.format(ks, tng)); toast.err(t, sr ? L.ur_1sn : L.ur_1un);
timer.rm(etafun); timer.rm(etafun);
timer.rm(donut.do); timer.rm(donut.do);
op_minh = 0; utw_minh = 0;
} }
else { else {
timer.add(donut.do); timer.add(donut.do);
@@ -1332,7 +1319,8 @@ function up2k_init(subtle) {
} }
if (st.todo.head.length && if (st.todo.head.length &&
st.busy.head.length < parallel_uploads) { st.busy.head.length < parallel_uploads &&
(!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) {
exec_head(); exec_head();
mou_ikkai = true; mou_ikkai = true;
} }
@@ -1344,7 +1332,8 @@ function up2k_init(subtle) {
} }
if (st.todo.upload.length && if (st.todo.upload.length &&
st.busy.upload.length < parallel_uploads) { st.busy.upload.length < parallel_uploads &&
can_upload_next()) {
exec_upload(); exec_upload();
mou_ikkai = true; mou_ikkai = true;
} }
@@ -1364,6 +1353,14 @@ function up2k_init(subtle) {
return taskerd; return taskerd;
})(); })();
function chill(t) {
var now = Date.now();
if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);
t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
}
///// /////
//// ////
/// hashing /// hashing
@@ -1462,7 +1459,6 @@ function up2k_init(subtle) {
min_filebuf = 1; min_filebuf = 1;
var td = Date.now() - t0; var td = Date.now() - t0;
if (td > 50) { if (td > 50) {
ebi('u2foot').innerHTML += "<p>excessive filereader latency (" + td + " ms), increasing readahead</p>";
min_filebuf = 32 * 1024 * 1024; min_filebuf = 32 * 1024 * 1024;
} }
} }
@@ -1479,7 +1475,8 @@ function up2k_init(subtle) {
err.indexOf('NotFoundError') !== -1 // macos-firefox permissions err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
) { ) {
pvis.seth(t.n, 1, 'OS-error'); pvis.seth(t.n, 1, 'OS-error');
pvis.seth(t.n, 2, err); pvis.seth(t.n, 2, err + ' @ ' + car);
console.log('OS-error', reader.error, '@', car);
handled = true; handled = true;
} }
@@ -1520,7 +1517,7 @@ function up2k_init(subtle) {
t.t_hashed = Date.now(); t.t_hashed = Date.now();
pvis.seth(t.n, 2, 'hashing done'); pvis.seth(t.n, 2, L.u_hashdone);
pvis.seth(t.n, 1, '📦 wait'); pvis.seth(t.n, 1, '📦 wait');
apop(st.busy.hash, t); apop(st.busy.hash, t);
st.todo.handshake.push(t); st.todo.handshake.push(t);
@@ -1648,8 +1645,8 @@ function up2k_init(subtle) {
if (!response || !response.hits || !response.hits.length) { if (!response || !response.hits || !response.hits.length) {
smsg = '404'; smsg = '404';
msg = ('not found on server <a href="#" onclick="fsearch_explain(' + msg = (L.u_s404 + ' <a href="#" onclick="fsearch_explain(' +
(has(perms, 'write') ? '0' : '1') + ')" class="fsearch_explain">(explain)</a>'); (has(perms, 'write') ? '0' : '1') + ')" class="fsearch_explain">(' + L.u_expl + ')</a>');
} }
else { else {
smsg = 'found'; smsg = 'found';
@@ -1677,6 +1674,8 @@ function up2k_init(subtle) {
return; return;
} }
t.sprs = response.sprs;
var rsp_purl = url_enc(response.purl); var rsp_purl = url_enc(response.purl);
if (rsp_purl !== t.purl || response.name !== t.name) { if (rsp_purl !== t.purl || response.name !== t.name) {
// server renamed us (file exists / path restrictions) // server renamed us (file exists / path restrictions)
@@ -1724,7 +1723,7 @@ function up2k_init(subtle) {
'npart': t.postlist[a] 'npart': t.postlist[a]
}); });
msg = 'uploading'; msg = L.u_upping;
done = false; done = false;
if (sort) if (sort)
@@ -1749,8 +1748,12 @@ function up2k_init(subtle) {
pvis.move(t.n, 'ok'); pvis.move(t.n, 'ok');
} }
else t.t_uploaded = undefined; else {
if (t.t_uploaded)
chill(t);
t.t_uploaded = undefined;
}
tasker(); tasker();
} }
else { else {
@@ -1797,11 +1800,8 @@ function up2k_init(subtle) {
tasker(); tasker();
return; return;
} }
toast.err(0, "server broke; hs-err {0} on file [{1}]:\n".format( err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
xhr.status, t.name) + ( xhrchk(xhr, err + ";\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found");
(xhr.response && xhr.response.err) ||
(xhr.responseText && xhr.responseText) ||
"no further information"));
} }
} }
xhr.onload = function (e) { xhr.onload = function (e) {
@@ -1827,6 +1827,20 @@ function up2k_init(subtle) {
/// upload /// upload
// //
function can_upload_next() {
var upt = st.todo.upload[0],
upf = st.files[upt.nfile];
if (upf.sprs)
return true;
for (var a = 0, aa = st.busy.upload.length; a < aa; a++)
if (st.busy.upload[a].nfile == upt.nfile)
return false;
return true;
}
function exec_upload() { function exec_upload() {
var upt = st.todo.upload.shift(); var upt = st.todo.upload.shift();
st.busy.upload.push(upt); st.busy.upload.push(upt);
@@ -1855,14 +1869,18 @@ function up2k_init(subtle) {
st.bytes.uploaded += cdr - car; st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car; t.bytes_uploaded += cdr - car;
} }
else if (txt.indexOf('already got that') !== -1) { else if (txt.indexOf('already got that') + 1 ||
txt.indexOf('already being written') + 1) {
console.log("ignoring dupe-segment error", t); console.log("ignoring dupe-segment error", t);
} }
else { else {
toast.err(0, "server broke; cu-err {0} on file [{1}]:\n".format( xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)");
xhr.status, t.name) + (txt || "no further information"));
return; chill(t);
} }
orz2(xhr);
}
function orz2(xhr) {
apop(st.busy.upload, upt); apop(st.busy.upload, upt);
apop(t.postlist, npart); apop(t.postlist, npart);
if (!t.postlist.length) { if (!t.postlist.length) {
@@ -1884,9 +1902,11 @@ function up2k_init(subtle) {
if (crashed) if (crashed)
return; return;
toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name); if (!toast.visible)
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name));
console.log('chunkpit onerror,', ++tries, t); console.log('chunkpit onerror,', ++tries, t);
setTimeout(do_send, 10 * 1000); orz2(xhr);
}; };
xhr.open('POST', t.purl, true); xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]); xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
@@ -1907,31 +1927,29 @@ function up2k_init(subtle) {
// //
function onresize(e) { function onresize(e) {
// 10x faster than matchMedia('(min-width
var bar = ebi('ops'), var bar = ebi('ops'),
wpx = window.innerWidth, wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']), fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx, wem = wpx * 1.0 / fpx,
wide = wem > 54 ? 'w' : '',
write = has(perms, 'write'), write = has(perms, 'write'),
wide = write && wem > 54 ? 'w' : '',
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'), parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn'); btn = ebi('u2btn');
//console.log([wpx, fpx, wem]);
if (btn.parentNode !== parent) { if (btn.parentNode !== parent) {
parent.appendChild(btn); parent.appendChild(btn);
ebi('u2conf').setAttribute('class', wide); ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
ebi('u2cards').setAttribute('class', wide);
ebi('u2etaw').setAttribute('class', wide);
} }
wide = wem > 78 ? 'ww' : wide; wide = write && wem > 78 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t'); parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')]; var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) { if (its[0].parentNode !== parent) {
ebi('u2conf').setAttribute('class', wide); ebi('u2conf').className = wide;
for (var a = 0; a < 2; a++) { for (var a = 0; a < 2; a++) {
parent.appendChild(its[a]); parent.appendChild(its[a]);
its[a].setAttribute('class', wide); its[a].className = wide;
} }
} }
} }
@@ -1998,17 +2016,18 @@ function up2k_init(subtle) {
} }
function draw_turbo() { function draw_turbo() {
var msgu = '<p class="warn">WARNING: turbo enabled, <span>&nbsp;client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>', var msg = uc.fsearch ? L.u_ts : L.u_tu,
msgs = '<p class="warn">WARNING: turbo enabled, <span>&nbsp;search results can be incorrect; see turbo-button tooltip</span></p>', omsg = uc.fsearch ? L.u_tu : L.u_ts,
msg = uc.fsearch ? msgs : msgu,
omsg = uc.fsearch ? msgu : msgs,
html = ebi('u2foot').innerHTML, html = ebi('u2foot').innerHTML,
ohtml = html; ohtml = html;
if (uc.turbo && html.indexOf(msg) === -1) if (turbolvl || !uc.turbo)
msg = null;
if (msg && html.indexOf(msg) === -1)
html = html.replace(omsg, '') + msg; html = html.replace(omsg, '') + msg;
else if (!uc.turbo) else if (!msg)
html = html.replace(msgu, '').replace(msgs, ''); html = html.replace(L.u_tu, '').replace(L.u_ts, '');
if (html !== ohtml) if (html !== ohtml)
ebi('u2foot').innerHTML = html; ebi('u2foot').innerHTML = html;
@@ -2026,7 +2045,7 @@ function up2k_init(subtle) {
new_state = true; new_state = true;
fixed = true; fixed = true;
} }
if (!has(perms, 'read')) { if (!has(perms, 'read') || !have_up2k_idx) {
new_state = false; new_state = false;
fixed = true; fixed = true;
} }
@@ -2044,13 +2063,15 @@ function up2k_init(subtle) {
try { try {
var ico = uc.fsearch ? '🔎' : '🚀', var ico = uc.fsearch ? '🔎' : '🚀',
desc = uc.fsearch ? 'Search' : 'Upload'; desc = uc.fsearch ? L.ul_btns : L.ul_btnu;
clmod(ebi('op_up2k'), 'srch', uc.fsearch); clmod(ebi('op_up2k'), 'srch', uc.fsearch);
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>'; ebi('u2bm').innerHTML = ico + '&nbsp; <sup>' + desc + '</sup>';
} }
catch (ex) { } catch (ex) { }
ebi('u2tab').className = (uc.fsearch ? 'srch ' : 'up ') + pvis.act;
draw_turbo(); draw_turbo();
onresize(); onresize();
} }
@@ -2101,7 +2122,7 @@ function up2k_init(subtle) {
if (parallel_uploads < 1) if (parallel_uploads < 1)
bumpthread(1); bumpthread(1);
return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis } return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis, "st": st, "uc": uc }
} }


@@ -7,15 +7,16 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window, var is_touch = 'ontouchstart' in window,
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent), is_https = (window.location + '').indexOf('https:') === 0,
ANDROID = /android/i.test(navigator.userAgent), IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent); WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
var ebi = document.getElementById.bind(document), var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document), QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document), QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document); mknod = document.createElement.bind(document),
XHR = XMLHttpRequest;
function qsr(sel) { function qsr(sel) {
@@ -84,9 +85,18 @@ catch (ex) {
} }
var crashed = false, ignexd = {}; var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) { function vis_exh(msg, url, lineNo, columnNo, error) {
if ((msg + '').indexOf('ResizeObserver') !== -1) if ((msg + '').indexOf('ResizeObserver') + 1)
return; // chrome issue 809574 (benign, from <video>) return; // chrome issue 809574 (benign, from <video>)
if ((msg + '').indexOf('l2d.js') + 1)
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
if (!/\.js($|\?)/.exec('' + url))
return; // chrome debugger
if ((url + '').indexOf(' > eval') + 1)
return; // md timer
var ekey = url + '\n' + lineNo + '\n' + msg; var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed) if (ignexd[ekey] || crashed)
return; return;
@@ -181,6 +191,7 @@ function ignex(all) {
if (!all) if (!all)
window.onerror = vis_exh; window.onerror = vis_exh;
} }
window.onerror = vis_exh;
function noop() { } function noop() { }
@@ -219,15 +230,15 @@ if (!String.prototype.endsWith)
return this.substring(this_len - search.length, this_len) === search; return this.substring(this_len - search.length, this_len) === search;
}; };
if (!String.startsWith) if (!String.prototype.startsWith)
String.prototype.startsWith = function (s, i) { String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0; i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s; return this.substring(i, i + s.length) === s;
}; };
if (!String.trimEnd) if (!String.prototype.trimEnd)
String.prototype.trimEnd = String.prototype.trimRight = function () { String.prototype.trimEnd = String.prototype.trimRight = function () {
return this.replace(/[ \t\r\n]+$/m, ''); return this.replace(/[ \t\r\n]+$/, '');
}; };
if (!Element.prototype.matches) if (!Element.prototype.matches)
@@ -246,6 +257,14 @@ if (!Element.prototype.closest)
} while (el !== null && el.nodeType === 1); } while (el !== null && el.nodeType === 1);
}; };
if (!String.prototype.format)
String.prototype.format = function () {
var args = arguments;
return this.replace(/{(\d+)}/g, function (match, number) {
return typeof args[number] != 'undefined' ?
args[number] : match;
});
};
// https://stackoverflow.com/a/950146 // https://stackoverflow.com/a/950146
function import_js(url, cb) { function import_js(url, cb) {
@@ -286,15 +305,19 @@ function crc32(str) {
function clmod(el, cls, add) { function clmod(el, cls, add) {
if (!el)
return false;
if (el.classList) { if (el.classList) {
var have = el.classList.contains(cls); var have = el.classList.contains(cls);
if (add == 't') if (add == 't')
add = !have; add = !have;
if (add != have) if (!add == !have)
el.classList[add ? 'add' : 'remove'](cls); return false;
return; el.classList[add ? 'add' : 'remove'](cls);
return true;
} }
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'), var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
@@ -305,28 +328,75 @@ function clmod(el, cls, add) {
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : ''); var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2) if (n1 == n2)
el.className = n2; return false;
el.className = n2;
return true;
} }
function clgot(el, cls) { function clgot(el, cls) {
if (!el)
return;
if (el.classList) if (el.classList)
return el.classList.contains(cls); return el.classList.contains(cls);
var lst = (el.getAttribute('class') + '').split(/ /g); var lst = (el.className + '').split(/ /g);
return has(lst, cls); return has(lst, cls);
} }
var ANIM = true;
if (window.matchMedia) {
var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
mq.onchange = function () {
ANIM = !mq.matches;
};
ANIM = !mq.matches;
}
function showsort(tab) {
var v, vn, v1, v2, th = tab.tHead,
sopts = jread('fsort', [["href", 1, ""]]);
th && (th = th.rows[0]) && (th = th.cells);
for (var a = sopts.length - 1; a >= 0; a--) {
if (!sopts[a][0])
continue;
v2 = v1;
v1 = sopts[a];
}
v = [v1, v2];
vn = [v1 ? v1[0] : '', v2 ? v2[0] : ''];
var ga = QSA('#ghead a[s]');
for (var a = 0; a < ga.length; a++)
ga[a].className = '';
for (var a = 0; a < th.length; a++) {
var n = vn.indexOf(th[a].getAttribute('name')),
cl = n < 0 ? ' ' : ' s' + n + (v[n][1] > 0 ? ' ' : 'r ');
th[a].className = th[a].className.replace(/ *s[01]r? */, ' ') + cl;
if (n + 1) {
ga = QS('#ghead a[s="' + vn[n] + '"]');
if (ga)
ga.className = cl;
}
}
}
function sortTable(table, col, cb) { function sortTable(table, col, cb) {
var tb = table.tBodies[0], var tb = table.tBodies[0],
th = table.tHead.rows[0].cells, th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0), tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1; i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort'); var stype = th[col].getAttribute('sort');
try { try {
var nrules = [], rules = jread("fsort", []); var nrules = [], rules = jread("fsort", []);
@@ -344,6 +414,7 @@ function sortTable(table, col, cb) {
break; break;
} }
jwrite("fsort", nrules); jwrite("fsort", nrules);
try { showsort(table); } catch (ex) { }
} }
catch (ex) { catch (ex) {
console.log("failed to persist sort rules, resetting: " + ex); console.log("failed to persist sort rules, resetting: " + ex);
@@ -392,7 +463,7 @@ function makeSortable(table, cb) {
} }
function linksplit(rp) { function linksplit(rp, id) {
var ret = [], var ret = [],
apath = '/', apath = '/',
q = null; q = null;
@@ -403,7 +474,7 @@ function linksplit(rp) {
q = '?' + q[1]; q = '?' + q[1];
} }
if (rp && rp.charAt(0) == '/') if (rp && rp[0] == '/')
rp = rp.slice(1); rp = rp.slice(1);
while (rp) { while (rp) {
@@ -422,8 +493,13 @@ function linksplit(rp) {
vlink = vlink.slice(0, -1) + '<span>/</span>'; vlink = vlink.slice(0, -1) + '<span>/</span>';
} }
if (!rp && q) if (!rp) {
link += q; if (q)
link += q;
if (id)
link += '" id="' + id;
}
ret.push('<a href="' + apath + link + '">' + vlink + '</a>'); ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
apath += link; apath += link;
@@ -742,6 +818,14 @@ function sethash(hv) {
} }
} }
function dl_file(url) {
console.log('DL [%s]', url);
var o = mknod('a');
o.setAttribute('href', url);
o.setAttribute('download', '');
o.click();
}
var timer = (function () { var timer = (function () {
var r = {}; var r = {};
@@ -782,13 +866,18 @@ var timer = (function () {
var tt = (function () { var tt = (function () {
var r = { var r = {
"tt": mknod("div"), "tt": mknod("div"),
"th": mknod("div"),
"en": true, "en": true,
"el": null, "el": null,
"skip": false "skip": false,
"lvis": 0
}; };
r.th.innerHTML = '?';
r.tt.setAttribute('id', 'tt'); r.tt.setAttribute('id', 'tt');
r.th.setAttribute('id', 'tth');
document.body.appendChild(r.tt); document.body.appendChild(r.tt);
document.body.appendChild(r.th);
var prev = null; var prev = null;
r.cshow = function () { r.cshow = function () {
@@ -798,19 +887,42 @@ var tt = (function () {
prev = this; prev = this;
}; };
r.show = function () { var tev;
if (r.skip) { r.dshow = function (e) {
r.skip = false; clearTimeout(tev);
if (!r.getmsg(this))
return; return;
}
if (QS('body.bbox-open')) if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
tev = setTimeout(r.show.bind(this), 800);
if (is_touch)
return;
this.addEventListener('mousemove', r.move);
clmod(r.th, 'act', 1);
r.move(e);
};
r.getmsg = function (el) {
if (IPHONE && QS('body.bbox-open'))
return; return;
var cfg = sread('tooltips'); var cfg = sread('tooltips');
if (cfg !== null && cfg != '1') if (cfg !== null && cfg != '1')
return; return;
var msg = this.getAttribute('tt'); return el.getAttribute('tt');
};
r.show = function () {
clearTimeout(tev);
if (r.skip) {
r.skip = false;
return;
}
var msg = r.getmsg(this);
if (!msg) if (!msg)
return; return;
@@ -824,6 +936,7 @@ var tt = (function () {
if (dir.indexOf('u') + 1) top = false; if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true; if (dir.indexOf('d') + 1) top = true;
clmod(r.th, 'act');
clmod(r.tt, 'b', big); clmod(r.tt, 'b', big);
r.tt.style.left = '0'; r.tt.style.left = '0';
r.tt.style.top = '0'; r.tt.style.top = '0';
@@ -849,14 +962,27 @@ var tt = (function () {
r.hide = function (e) { r.hide = function (e) {
ev(e); ev(e);
clearTimeout(tev);
window.removeEventListener('scroll', r.hide); window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
clmod(r.tt, 'b'); clmod(r.tt, 'b');
clmod(r.th, 'act');
if (clmod(r.tt, 'show'))
r.lvis = Date.now();
if (r.el) if (r.el)
r.el.removeEventListener('mouseleave', r.hide); r.el.removeEventListener('mouseleave', r.hide);
if (e && e.target)
e.target.removeEventListener('mousemove', r.move);
}; };
if (is_touch && IPHONE) { r.move = function (e) {
r.th.style.left = (e.pageX + 12) + 'px';
r.th.style.top = (e.pageY + 12) + 'px';
};
if (IPHONE) {
var f1 = r.show, var f1 = r.show,
f2 = r.hide, f2 = r.hide,
q = []; q = [];
@@ -882,14 +1008,14 @@ var tt = (function () {
r.att = function (ctr) { r.att = function (ctr) {
var _cshow = r.en ? r.cshow : null, var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null, _dshow = r.en ? r.dshow : null,
_hide = r.en ? r.hide : null, _hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]'); o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) { for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _cshow; o[a].onfocus = _cshow;
o[a].onblur = _hide; o[a].onblur = _hide;
o[a].onmouseenter = _show; o[a].onmouseenter = _dshow;
o[a].onmouseleave = _hide; o[a].onmouseleave = _hide;
} }
r.hide(); r.hide();
@@ -1080,6 +1206,9 @@ var modal = (function () {
return ok(); return ok();
} }
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
return (ae == eok ? eng : eok).focus() || ev(e);
if (k == 'Escape') if (k == 'Escape')
return ng(); return ng();
} }
@@ -1229,6 +1358,49 @@ if (ebi('repl'))
 ebi('repl').onclick = repl;
+
+var md_plug = {};
+var md_plug_err = function (ex, js) {
+if (ex)
+console.log(ex, js);
+};
+
+function load_md_plug(md_text, plug_type) {
+if (!have_emp)
+return md_text;
+
+var find = '\n```copyparty_' + plug_type + '\n';
+var ofs = md_text.indexOf(find);
+if (ofs === -1)
+return md_text;
+
+var ofs2 = md_text.indexOf('\n```', ofs + 1);
+if (ofs2 == -1)
+return md_text;
+
+var js = md_text.slice(ofs + find.length, ofs2 + 1);
+var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
+
+var old_plug = md_plug[plug_type];
+if (!old_plug || old_plug[1] != js) {
+js = 'const x = { ' + js + ' }; x;';
+try {
+var x = eval(js);
+if (x['ctor']) {
+x['ctor']();
+delete x['ctor'];
+}
+}
+catch (ex) {
+md_plug[plug_type] = null;
+md_plug_err(ex, js);
+return md;
+}
+md_plug[plug_type] = [x, js];
+}
+return md;
+}
 var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
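For context (not part of the diff above): a minimal sketch of the kind of markdown that load_md_plug() scans for. The plug_type value 'pre', the document text, and the property names inside the fence are illustrative assumptions; only the ctor key is special-cased by the loader, and the global have_emp must be truthy for the parser to do anything at all.

var doc = [
    '# my folder',
    '',
    '```copyparty_pre',
    '"ctor": function () { console.log("plugin loaded"); },',
    '"greet": function () { return "hello"; }',
    '```',
    'regular markdown continues below the fence'
].join('\n');

// strips the fence, evals its body as "const x = { ... }; x;", caches the
// result in md_plug, and returns the markdown with the fence removed
var md_without_plug = load_md_plug(doc, 'pre');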
@@ -1281,3 +1453,18 @@ var favico = (function () {
 r.to = setTimeout(r.init, 100);
 return r;
 })();
+
+
+function xhrchk(xhr, prefix, e404) {
+if (xhr.status < 400 && xhr.status >= 200)
+return true;
+
+if (xhr.status == 403)
+return toast.err(0, prefix + (window.L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"));
+
+if (xhr.status == 404)
+return toast.err(0, prefix + e404);
+
+return toast.err(0, prefix + xhr.status + ": " + (
+(xhr.response && xhr.response.err) || xhr.responseText));
+}
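For context (not part of the diff above): a sketch of how the new xhrchk() helper would typically be called from an XHR completion handler; the URL and the message strings here are illustrative, not taken from the repo.

var xhr = new XMLHttpRequest();
xhr.open('GET', '/some-folder/?ls', true);
xhr.onload = function () {
    // xhrchk returns true on any 2xx/3xx status; otherwise it shows a toast
    // (a fixed message for 403, the caller-provided text for 404) and returns
    // the toast's falsy return value
    if (!xhrchk(this, 'could not list folder:\n', '404: folder not found'))
        return;

    var res = JSON.parse(this.responseText);
    console.log(res);
};
xhr.send();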


@@ -2,29 +2,20 @@
-# example resource files
-can be provided to copyparty to tweak things
+# utilities
+
+## [`multisearch.html`](multisearch.html)
+* takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those
+* use it by putting it somewhere on the server and opening it as an html page
+* also serves as an extendable template for other specific search behaviors
-
-## example `.epilogue.html`
-save one of these as `.epilogue.html` inside a folder to customize it:
-* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
-
-## example browser-css
-point `--css-browser` to one of these by URL:
-* [`browser.css`](browser.css) changes the background
-* [`browser-icons.css`](browser-icons.css) adds filetype icons
 
 # other stuff
+
+## [`changelog.md`](changelog.md)
+* occasionally grabbed from github release notes
+
 ## [`rclone.md`](rclone.md)
 * notes on using rclone as a fuse client/server


@@ -1,30 +0,0 @@
html {
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
}
#files th {
background: rgba(32, 32, 32, 0.9) !important;
}
#ops,
#tree,
#files td {
background: rgba(32, 32, 32, 0.3) !important;
}
html.light {
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
}
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light .logue,
html.light #ops,
html.light #tree,
html.light #files td {
background: rgba(248, 248, 248, 0.8) !important;
}
#files * {
background: transparent !important;
}

docs/changelog.md (new file; diff suppressed because it is too large)

docs/multisearch.html (new file)

@@ -0,0 +1,124 @@
<!DOCTYPE html><html lang="en"><head>
<meta charset="utf-8">
<title>multisearch</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
html, body {
margin: 0;
padding: 0;
color: #ddd;
background: #222;
font-family: sans-serif;
}
body {
padding: 1em;
}
a {
color: #fc5;
}
ul {
line-height: 1.5em;
}
code {
color: #fc5;
border: 1px solid #444;
padding: .1em .2em;
font-family: sans-serif, sans-serif;
}
#src {
display: block;
width: calc(100% - 1em);
padding: .5em;
margin: 0;
}
td {
padding-left: 1em;
}
.hit,
.miss {
font-weight: bold;
padding-left: 0;
padding-top: 1em;
}
.hit {color: #af0;}
.miss {color: #f0c;}
.hit:before {content: '✅';}
.miss:before {content: '❌';}
</style></head><body>
<ul>
<li>paste a list of filenames (youtube rips) below and hit search</li>
<li>it will grab the youtube-id from the filenames and search for each id</li>
<li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li>
</ul>
<textarea id="src"></textarea>
<button id="go">search</button>
<div id="res"></div>
<script>
var ebi = document.getElementById.bind(document);
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
ebi('go').onclick = async function() {
var queries = [];
for (var ln of ebi('src').value.split(/\n/g)) {
// filter the list of input files,
// only keeping youtube videos,
// meaning the filename ends with either
// [YOUTUBEID].EXTENSION or
// -YOUTUBEID.EXTENSION
var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln);
if (!m || !(m = m[1]))
continue;
// create a search query for each line: name like *youtubeid*
queries.push([ln, `name like *${m}*`]);
}
var a = 0, html = ['<table>'], hits = [], misses = [];
for (var [fn, q] of queries) {
var r = await fetch('/?srch', {
method: 'POST',
body: JSON.stringify({'q': q})
});
r = await r.json();
var cl, tab2;
if (r.hits.length) {
tab2 = hits;
cl = 'hit';
}
else {
tab2 = misses;
cl = 'miss';
}
var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`;
tab2.push(h);
html.push(h);
for (var h of r.hits) {
var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`;
html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`);
}
ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`;
}
html.push('<tr><td><h1>hits:</h1></td></tr>');
html = html.concat(hits);
html.push('<tr><td><h1>miss:</h1></td></tr>');
html = html.concat(misses);
html.push('</table>');
ebi('res').innerHTML = html.join('\n');
};
</script></body></html>
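For context (not part of the file above): the shape of the /?srch response that the script relies on, inferred from how it reads the result object; the field meanings are assumptions and the real response may carry additional fields.

// what the click handler expects back from fetch('/?srch', ...).json()
var example_response = {
    hits: [
        {
            rp: 'music/some%20video%20-dQw4w9WgXcQ.webm', // server path, URI-encoded, used as the link
            sz: 12345678                                  // size value shown in the first table column
        }
    ]
};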

docs/notes.bat (new file)

@@ -0,0 +1,4 @@
rem appending a static ip to a dhcp nic on windows 10-1703 or later
netsh interface ipv4 show interface
netsh interface ipv4 set interface interface="Ethernet 2" dhcpstaticipcoexistence=enabled
netsh interface ipv4 add address "Ethernet 2" 10.1.2.4 255.255.255.0


@@ -3,6 +3,12 @@ echo not a script
 exit 1
+
+##
+## add index.html banners
+
+find -name index.html | sed -r 's/index.html$//' | while IFS= read -r dir; do f="$dir/.prologue.html"; [ -e "$f" ] || echo '<h1><a href="index.html">open index.html</a></h1>' >"$f"; done
+
 ##
 ## delete all partial uploads
 ## (supports linux/macos, probably windows+msys2)
@@ -80,6 +86,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
 command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
+
+##
+## track an up2k upload and print all chunks in file-order
+
+grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
+
 ##
 ## js oneliners
@@ -89,6 +101,7 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
 # debug md-editor line tracking
 var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
+##
 ##
 ## bash oneliners
@@ -169,7 +182,7 @@ brew install python@2
 pip install virtualenv
 # readme toc
-cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
+cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
 # fix firefox phantom breakpoints,
 # suggestions from bugtracker, doesnt work (debugger is not attachable)
@@ -185,7 +198,16 @@ about:config >> devtools.debugger.prefs-schema-version = -1
 git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
 
 # download all sfx versions
-curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
+curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
+
+# convert releasenotes to changelog
+curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | "▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ \n# \(.created_at) `\(.tag_name)` \(.name)\n\n\(.body)\n\n\n"' | sed -r 's/^# ([0-9]{4}-)([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})Z /# \1\2\3-\4\5 /' > changelog.md
+
+# push to multiple git remotes
+git config -l | grep '^remote'
+git remote add all git@github.com:9001/copyparty.git
+git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
+git remote set-url --add --push all git@github.com:9001/copyparty.git
 ##


@@ -12,21 +12,18 @@ set -e
 #
 # output summary (filesizes and contents):
 #
-# 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
 # 550760 copyparty-extras/sfx-full/copyparty-sfx.py
 # `- original unmodified sfx from github
 #
 # 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
 # `- unmodified but recompressed from bzip2 to gzip
 #
-# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
 # 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
 # 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
 # `- removed iOS ogg/opus/vorbis audio decoder,
 # removed the audio tray mouse cursor,
 # "enterprise edition"
 #
-# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
 # 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
 # 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
 # `- also removed the codemirror markdown editor
@@ -81,7 +78,7 @@ cache="$od/.copyparty-repack.cache"
 # fallback to awk (sorry)
 awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
 ) |
-grep -E '(sfx\.(sh|py)|tar\.gz)$' |
+grep -E '(sfx\.py|tar\.gz)$' |
 tee /dev/stderr |
 tr -d '\r' | tr '\n' '\0' |
 xargs -0 bash -c 'dl_files "$@"' _
@@ -139,11 +136,11 @@ repack() {
 )
 }
-repack sfx-full "re gz no-sh"
-repack sfx-ent "re no-dd no-ogv"
-repack sfx-ent "re no-dd no-ogv gz no-sh"
-repack sfx-lite "re no-dd no-ogv no-cm no-hl"
-repack sfx-lite "re no-dd no-ogv no-cm no-hl gz no-sh"
+repack sfx-full "re gz"
+repack sfx-ent "re no-dd"
+repack sfx-ent "re no-dd gz"
+repack sfx-lite "re no-dd no-cm no-hl"
+repack sfx-lite "re no-dd no-cm no-hl gz"
 # move fuse and up2k clients into copyparty-extras/,


@@ -1,21 +1,19 @@
-FROM alpine:3.14
+FROM alpine:3.16
 WORKDIR /z
 ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
 ver_hashwasm=4.9.0 \
-ver_marked=3.0.4 \
-ver_ogvjs=1.8.4 \
-ver_mde=2.15.0 \
-ver_codemirror=5.62.3 \
+ver_marked=4.0.17 \
+ver_mde=2.16.1 \
+ver_codemirror=5.65.5 \
 ver_fontawesome=5.13.0 \
 ver_zopfli=1.0.3
 
 # download;
-# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
+# the scp url is regular latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
 RUN mkdir -p /z/dist/no-pk \
 && wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
 && apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
-&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
 && wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
 && wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
 && wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
 && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
 && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
 && wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
-&& unzip ogvjs.zip \
 && (mkdir hash-wasm \
 && cd hash-wasm \
 && unzip ../hash-wasm.zip) \
@@ -35,7 +32,7 @@ RUN mkdir -p /z/dist/no-pk \
 && npm install \
 && npm i grunt uglify-js -g ) \
 && (tar -xf codemirror.tgz \
-&& cd CodeMirror-$ver_codemirror \
+&& cd codemirror5-$ver_codemirror \
 && npm install ) \
 && (tar -xf mde.tgz \
 && cd easy-markdown-editor* \
@@ -46,8 +43,6 @@ RUN mkdir -p /z/dist/no-pk \
 # todo
-# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
-# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
 # https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
@@ -77,21 +72,6 @@ RUN cd hash-wasm \
 && mv sha512.umd.min.js /z/dist/sha512.hw.js
 
-# build ogvjs
-RUN cd ogvjs-$ver_ogvjs \
-&& cp -pv \
-ogv-worker-audio.js \
-ogv-demuxer-ogg-wasm.js \
-ogv-demuxer-ogg-wasm.wasm \
-ogv-decoder-audio-opus-wasm.js \
-ogv-decoder-audio-opus-wasm.wasm \
-ogv-decoder-audio-vorbis-wasm.js \
-ogv-decoder-audio-vorbis-wasm.wasm \
-/z/dist \
-&& cp -pv \
-ogv-es2017.js /z/dist/ogv.js
-
 # build marked
 COPY marked.patch /z/
 COPY marked-ln.patch /z/
@@ -100,7 +80,6 @@ RUN cd marked-$ver_marked \
 && patch -p1 < /z/marked.patch \
 && npm run build \
 && cp -pv marked.min.js /z/dist/marked.js \
-&& cp -pv lib/marked.js /z/dist/marked.full.js \
 && mkdir -p /z/nodepkgs \
 && ln -s $(pwd) /z/nodepkgs/marked
 # && npm run test \
@@ -108,7 +87,7 @@ RUN cd marked-$ver_marked \
 # build codemirror
 COPY codemirror.patch /z/
-RUN cd CodeMirror-$ver_codemirror \
+RUN cd codemirror5-$ver_codemirror \
 && patch -p1 < /z/codemirror.patch \
 && sed -ri '/^var urlRE = /d' mode/gfm/gfm.js \
 && npm run build \
@@ -120,9 +99,10 @@ COPY easymde.patch /z/
 RUN cd easy-markdown-editor-$ver_mde \
 && patch -p1 < /z/easymde.patch \
 && sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
+&& sed -ri 's`https://registry.npmjs.org/codemirror/-/codemirror-[0-9\.]+.tgz`file:/z/nodepkgs/codemirror`' package-lock.json \
 && sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
 && sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
-&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
+&& sed -ri 's`^var marked = require\(.marked.\).marked;$`var marked = window.marked;`' src/js/easymde.js \
 && npm install
 COPY easymde-ln.patch /z/
@@ -136,6 +116,7 @@ RUN cd easy-markdown-editor-$ver_mde \
 # build fontawesome and scp
 COPY mini-fa.sh /z
 COPY mini-fa.css /z
+COPY shiftbase.py /z
 RUN /bin/ash /z/mini-fa.sh


@@ -23,4 +23,4 @@ purge:
 sh:
 @printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
-docker run --rm -it `docker images -aq | head -n 1` /bin/bash
+docker run --rm -it `docker images -aq | head -n 1` /bin/ash


@@ -1,6 +1,6 @@
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js diff -wNarU2 codemirror-5.65.1-orig/mode/gfm/gfm.js codemirror-5.65.1/mode/gfm/gfm.js
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/mode/gfm/gfm.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000 +++ codemirror-5.65.1/mode/gfm/gfm.js 2022-02-09 22:50:18.145862052 +0100
@@ -97,5 +97,5 @@ @@ -97,5 +97,5 @@
} }
} }
@@ -15,9 +15,9 @@ diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gf
+ }*/ + }*/
stream.next(); stream.next();
return null; return null;
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js diff -wNarU2 codemirror-5.65.1-orig/mode/meta.js codemirror-5.65.1/mode/meta.js
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/mode/meta.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000 +++ codemirror-5.65.1/mode/meta.js 2022-02-09 22:50:18.145862052 +0100
@@ -13,4 +13,5 @@ @@ -13,4 +13,5 @@
CodeMirror.modeInfo = [ CodeMirror.modeInfo = [
@@ -62,10 +62,10 @@ diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
+ */ + */
]; ];
// Ensure all modes have a mime property for backwards compatibility // Ensure all modes have a mime property for backwards compatibility
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js diff -wNarU2 codemirror-5.65.1-orig/src/display/selection.js codemirror-5.65.1/src/display/selection.js
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/display/selection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000 +++ codemirror-5.65.1/src/display/selection.js 2022-02-09 22:50:18.145862052 +0100
@@ -84,29 +84,21 @@ @@ -96,29 +96,21 @@
let order = getOrder(lineObj, doc.direction) let order = getOrder(lineObj, doc.direction)
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => { iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
- let ltr = dir == "ltr" - let ltr = dir == "ltr"
@@ -105,24 +105,24 @@ diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/sr
+ botRight = openEnd && last ? rightSide : toPos.right + botRight = openEnd && last ? rightSide : toPos.right
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom) add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top) if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js diff -wNarU2 codemirror-5.65.1-orig/src/input/ContentEditableInput.js codemirror-5.65.1/src/input/ContentEditableInput.js
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/input/ContentEditableInput.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000 +++ codemirror-5.65.1/src/input/ContentEditableInput.js 2022-02-09 22:50:18.145862052 +0100
@@ -399,4 +399,5 @@ @@ -400,4 +400,5 @@
let info = mapFromLineView(view, line, pos.line) let info = mapFromLineView(view, line, pos.line)
+ /* + /*
let order = getOrder(line, cm.doc.direction), side = "left" let order = getOrder(line, cm.doc.direction), side = "left"
if (order) { if (order) {
@@ -404,4 +405,5 @@ @@ -405,4 +406,5 @@
side = partPos % 2 ? "right" : "left" side = partPos % 2 ? "right" : "left"
} }
+ */ + */
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side) let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
result.offset = result.collapse == "right" ? result.end : result.start result.offset = result.collapse == "right" ? result.end : result.start
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js diff -wNarU2 codemirror-5.65.1-orig/src/input/movement.js codemirror-5.65.1/src/input/movement.js
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/input/movement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000 +++ codemirror-5.65.1/src/input/movement.js 2022-02-09 22:50:18.145862052 +0100
@@ -15,4 +15,5 @@ @@ -15,4 +15,5 @@
export function endOfLine(visually, cm, lineObj, lineNo, dir) { export function endOfLine(visually, cm, lineObj, lineNo, dir) {
@@ -146,9 +146,16 @@ diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/i
return null return null
+ */ + */
} }
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js diff -wNarU2 codemirror-5.65.1-orig/src/line/line_data.js codemirror-5.65.1/src/line/line_data.js
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/line/line_data.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000 +++ codemirror-5.65.1/src/line/line_data.js 2022-02-09 22:54:11.542722046 +0100
@@ -3,5 +3,5 @@
import { elt, eltP, joinClasses } from "../util/dom.js"
import { eventMixin, signal } from "../util/event.js"
-import { hasBadBidiRects, zeroWidthElement } from "../util/feature_detection.js"
+import { zeroWidthElement } from "../util/feature_detection.js"
import { lst, spaceStr } from "../util/misc.js"
@@ -79,6 +79,6 @@ @@ -79,6 +79,6 @@
// Optionally wire in some hacks into the token-rendering // Optionally wire in some hacks into the token-rendering
// algorithm, to deal with browser quirks. // algorithm, to deal with browser quirks.
@@ -158,10 +165,10 @@ diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/l
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order) + // builder.addToken = buildTokenBadBidi(builder.addToken, order)
builder.map = [] builder.map = []
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line) let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js diff -wNarU2 codemirror-5.65.1-orig/src/measurement/position_measurement.js codemirror-5.65.1/src/measurement/position_measurement.js
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/measurement/position_measurement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000 +++ codemirror-5.65.1/src/measurement/position_measurement.js 2022-02-09 22:50:18.145862052 +0100
@@ -380,5 +380,6 @@ @@ -382,5 +382,6 @@
sticky = "after" sticky = "after"
} }
- if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before") - if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
@@ -169,39 +176,39 @@ diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codem
+ /* + /*
function getBidi(ch, partPos, invert) { function getBidi(ch, partPos, invert) {
@@ -391,4 +392,5 @@ @@ -393,4 +394,5 @@
if (other != null) val.other = getBidi(ch, other, sticky != "before") if (other != null) val.other = getBidi(ch, other, sticky != "before")
return val return val
+ */ + */
} }
@@ -468,4 +470,5 @@ @@ -470,4 +472,5 @@
let begin = 0, end = lineObj.text.length, ltr = true let begin = 0, end = lineObj.text.length, ltr = true
+ /* + /*
let order = getOrder(lineObj, cm.doc.direction) let order = getOrder(lineObj, cm.doc.direction)
// If the line isn't plain left-to-right text, first figure out // If the line isn't plain left-to-right text, first figure out
@@ -482,4 +485,5 @@ @@ -484,4 +487,5 @@
end = ltr ? part.to : part.from - 1 end = ltr ? part.to : part.from - 1
} }
+ */ + */
// A binary search to find the first character whose bounding box // A binary search to find the first character whose bounding box
@@ -526,4 +530,5 @@ @@ -528,4 +532,5 @@
} }
+/* +/*
function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) { function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) {
// Bidi parts are sorted left-to-right, and in a non-line-wrapping // Bidi parts are sorted left-to-right, and in a non-line-wrapping
@@ -580,4 +585,5 @@ @@ -582,4 +587,5 @@
return part return part
} }
+*/ +*/
let measureText let measureText
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js diff -wNarU2 codemirror-5.65.1-orig/src/util/bidi.js codemirror-5.65.1/src/util/bidi.js
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/util/bidi.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000 +++ codemirror-5.65.1/src/util/bidi.js 2022-02-09 22:50:18.145862052 +0100
@@ -4,5 +4,5 @@ @@ -4,5 +4,5 @@
export function iterateBidiSections(order, from, to, f) { export function iterateBidiSections(order, from, to, f) {
@@ -259,9 +266,9 @@ diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/b
- return order - return order
+ return false; + return false;
} }
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js diff -wNarU2 codemirror-5.65.1-orig/src/util/feature_detection.js codemirror-5.65.1/src/util/feature_detection.js
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000 --- codemirror-5.65.1-orig/src/util/feature_detection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000 +++ codemirror-5.65.1/src/util/feature_detection.js 2022-02-09 22:50:18.145862052 +0100
@@ -25,4 +25,5 @@ @@ -25,4 +25,5 @@
} }


@@ -0,0 +1,12 @@
diff --git a/src/js/easymde.js b/src/js/easymde.js
--- a/src/js/easymde.js
+++ b/src/js/easymde.js
@@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) {
marked.setOptions(markedOptions);
// Convert the markdown to HTML
- var htmlText = marked(text);
+ var htmlText = marked.parse(text);
// Sanitize HTML
if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') {


@@ -1,52 +1,52 @@
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js diff -wNarU2 easy-markdown-editor-2.16.1-orig/gulpfile.js easy-markdown-editor-2.16.1/gulpfile.js
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000 --- easy-markdown-editor-2.16.1-orig/gulpfile.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000 +++ easy-markdown-editor-2.16.1/gulpfile.js 2022-02-09 23:06:01.694592535 +0100
@@ -25,5 +25,4 @@ @@ -25,5 +25,4 @@
'./node_modules/codemirror/lib/codemirror.css', './node_modules/codemirror/lib/codemirror.css',
'./src/css/*.css', './src/css/*.css',
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css', - './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
]; ];
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json diff -wNarU2 easy-markdown-editor-2.16.1-orig/package.json easy-markdown-editor-2.16.1/package.json
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000 --- easy-markdown-editor-2.16.1-orig/package.json 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000 +++ easy-markdown-editor-2.16.1/package.json 2022-02-09 23:06:24.778501888 +0100
@@ -21,5 +21,4 @@ @@ -23,5 +23,4 @@
"dependencies": { "@types/marked": "^4.0.1",
"codemirror": "^5.59.2", "codemirror": "^5.63.1",
- "codemirror-spell-checker": "1.1.2", - "codemirror-spell-checker": "1.1.2",
"marked": "^2.0.0" "marked": "^4.0.10"
}, },
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js diff -wNarU2 easy-markdown-editor-2.16.1-orig/src/js/easymde.js easy-markdown-editor-2.16.1/src/js/easymde.js
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000 --- easy-markdown-editor-2.16.1-orig/src/js/easymde.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000 +++ easy-markdown-editor-2.16.1/src/js/easymde.js 2022-02-09 23:07:21.203131415 +0100
@@ -12,5 +12,4 @@ @@ -12,5 +12,4 @@
require('codemirror/mode/gfm/gfm.js'); require('codemirror/mode/gfm/gfm.js');
require('codemirror/mode/xml/xml.js'); require('codemirror/mode/xml/xml.js');
-var CodeMirrorSpellChecker = require('codemirror-spell-checker'); -var CodeMirrorSpellChecker = require('codemirror-spell-checker');
var marked = require('marked/lib/marked'); var marked = require('marked').marked;
@@ -1762,9 +1761,4 @@ @@ -1816,9 +1815,4 @@
options.autosave.uniqueId = options.autosave.unique_id; options.autosave.uniqueId = options.autosave.unique_id;
- // If overlay mode is specified and combine is not provided, default it to true - // If overlay mode is specified and combine is not provided, default it to true
- if (options.overlayMode && options.overlayMode.combine === undefined) { - if (options.overlayMode && options.overlayMode.combine === undefined) {
- options.overlayMode.combine = true; - options.overlayMode.combine = true;
- } - }
- -
// Update this options // Update this options
this.options = options; this.options = options;
@@ -2003,28 +1997,7 @@ @@ -2057,34 +2051,7 @@
var mode, backdrop; var mode, backdrop;
- // CodeMirror overlay mode - // CodeMirror overlay mode
- if (options.overlayMode) { - if (options.overlayMode) {
- CodeMirror.defineMode('overlay-mode', function(config) { - CodeMirror.defineMode('overlay-mode', function (config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine); - return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- }); - });
- -
- mode = 'overlay-mode'; - mode = 'overlay-mode';
- backdrop = options.parsingConfig; - backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false; - backdrop.gitHubSpice = false;
- } else { - } else {
mode = options.parsingConfig; mode = options.parsingConfig;
mode.name = 'gfm'; mode.name = 'gfm';
@@ -58,31 +58,35 @@ diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-edi
- backdrop.name = 'gfm'; - backdrop.name = 'gfm';
- backdrop.gitHubSpice = false; - backdrop.gitHubSpice = false;
- -
- CodeMirrorSpellChecker({ - if (typeof options.spellChecker === 'function') {
- codeMirrorInstance: CodeMirror, - options.spellChecker({
- }); - codeMirrorInstance: CodeMirror,
- });
- } else {
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- }
- } - }
// eslint-disable-next-line no-unused-vars // eslint-disable-next-line no-unused-vars
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts diff -wNarU2 easy-markdown-editor-2.16.1-orig/types/easymde.d.ts easy-markdown-editor-2.16.1/types/easymde.d.ts
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000 --- easy-markdown-editor-2.16.1-orig/types/easymde.d.ts 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000 +++ easy-markdown-editor-2.16.1/types/easymde.d.ts 2022-02-09 23:07:55.427605243 +0100
@@ -160,9 +160,4 @@ @@ -167,9 +167,4 @@
} }
- interface OverlayModeOptions { - interface OverlayModeOptions {
- mode: CodeMirror.Mode<any> - mode: CodeMirror.Mode<any>;
- combine?: boolean - combine?: boolean;
- } - }
- -
interface Options { interface SpellCheckerOptions {
autoDownloadFontAwesome?: boolean; codeMirrorInstance: CodeMirror.Editor;
@@ -214,7 +209,5 @@ @@ -229,6 +224,4 @@
syncSideBySidePreviewScroll?: boolean;
promptTexts?: PromptTexts; - overlayMode?: OverlayModeOptions;
- syncSideBySidePreviewScroll?: boolean;
- -
- overlayMode?: OverlayModeOptions direction?: 'ltr' | 'rtl';
+ syncSideBySidePreviewScroll?: boolean
} }
}


@@ -1,15 +1,15 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v3.0.4; adds linetracking to marked.js v4.0.17;
add data-ln="%d" to most tags, %d is the source markdown line add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -50,4 +50,5 @@ function mangle(text) { @@ -52,4 +52,5 @@ function mangle(text) {
module.exports = class Lexer { export class Lexer {
constructor(options) { constructor(options) {
+ this.ln = 1; // like most editors, start couting from 1 + this.ln = 1; // like most editors, start couting from 1
this.tokens = []; this.tokens = [];
this.tokens.links = Object.create(null); this.tokens.links = Object.create(null);
@@ -127,4 +128,15 @@ module.exports = class Lexer { @@ -128,4 +129,15 @@ export class Lexer {
} }
+ set_ln(token, ln = this.ln) { + set_ln(token, ln = this.ln) {
@@ -25,9 +25,9 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ +
/** /**
* Lexing * Lexing
@@ -134,7 +146,11 @@ module.exports = class Lexer { @@ -140,7 +152,11 @@ export class Lexer {
src = src.replace(/^ +$/gm, '');
} }
- let token, lastToken, cutSrc, lastParagraphClipped; - let token, lastToken, cutSrc, lastParagraphClipped;
+ let token, lastToken, cutSrc, lastParagraphClipped, ln; + let token, lastToken, cutSrc, lastParagraphClipped, ln;
@@ -38,120 +38,121 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ +
if (this.options.extensions if (this.options.extensions
&& this.options.extensions.block && this.options.extensions.block
@@ -142,4 +158,5 @@ module.exports = class Lexer { @@ -148,4 +164,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) { if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
return true; return true;
@@ -153,4 +170,5 @@ module.exports = class Lexer { @@ -159,4 +176,5 @@ export class Lexer {
if (token = this.tokenizer.space(src)) { if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); // is \n if not type + this.set_ln(token, ln); // is \n if not type
if (token.type) { if (token.raw.length === 1 && tokens.length > 0) {
tokens.push(token); // if there's a single \n as a spacer, it's terminating the last line,
@@ -162,4 +180,5 @@ module.exports = class Lexer { @@ -172,4 +190,5 @@ export class Lexer {
if (token = this.tokenizer.code(src)) { if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
// An indented code block cannot interrupt a paragraph. // An indented code block cannot interrupt a paragraph.
@@ -177,4 +196,5 @@ module.exports = class Lexer { @@ -187,4 +206,5 @@ export class Lexer {
if (token = this.tokenizer.fences(src)) { if (token = this.tokenizer.fences(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -184,4 +204,5 @@ module.exports = class Lexer { @@ -194,4 +214,5 @@ export class Lexer {
if (token = this.tokenizer.heading(src)) { if (token = this.tokenizer.heading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -191,4 +212,5 @@ module.exports = class Lexer { @@ -201,4 +222,5 @@ export class Lexer {
if (token = this.tokenizer.hr(src)) { if (token = this.tokenizer.hr(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -198,4 +220,5 @@ module.exports = class Lexer { @@ -208,4 +230,5 @@ export class Lexer {
if (token = this.tokenizer.blockquote(src)) { if (token = this.tokenizer.blockquote(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -205,4 +228,5 @@ module.exports = class Lexer { @@ -215,4 +238,5 @@ export class Lexer {
if (token = this.tokenizer.list(src)) { if (token = this.tokenizer.list(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -212,4 +236,5 @@ module.exports = class Lexer { @@ -222,4 +246,5 @@ export class Lexer {
if (token = this.tokenizer.html(src)) { if (token = this.tokenizer.html(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -219,4 +244,5 @@ module.exports = class Lexer { @@ -229,4 +254,5 @@ export class Lexer {
if (token = this.tokenizer.def(src)) { if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) { if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
@@ -236,4 +262,5 @@ module.exports = class Lexer { @@ -246,4 +272,5 @@ export class Lexer {
if (token = this.tokenizer.table(src)) { if (token = this.tokenizer.table(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -243,4 +270,5 @@ module.exports = class Lexer { @@ -253,4 +280,5 @@ export class Lexer {
if (token = this.tokenizer.lheading(src)) { if (token = this.tokenizer.lheading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -263,4 +291,5 @@ module.exports = class Lexer { @@ -273,4 +301,5 @@ export class Lexer {
} }
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) { if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
+ this.set_ln(token, ln); + this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastParagraphClipped && lastToken.type === 'paragraph') { if (lastParagraphClipped && lastToken.type === 'paragraph') {
@@ -280,4 +309,6 @@ module.exports = class Lexer { @@ -290,4 +319,6 @@ export class Lexer {
if (token = this.tokenizer.text(src)) { if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token, ln); + this.set_ln(token, ln);
+ this.ln++; + this.ln++;
lastToken = tokens[tokens.length - 1]; lastToken = tokens[tokens.length - 1];
if (lastToken && lastToken.type === 'text') { if (lastToken && lastToken.type === 'text') {
@@ -355,4 +386,5 @@ module.exports = class Lexer { @@ -365,4 +396,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) { if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
tokens.push(token); tokens.push(token);
return true; return true;
@@ -420,4 +452,6 @@ module.exports = class Lexer { @@ -430,4 +462,6 @@ export class Lexer {
if (token = this.tokenizer.br(src)) { if (token = this.tokenizer.br(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ // no need to reset (no more blockTokens anyways) + // no need to reset (no more blockTokens anyways)
+ token.ln = this.ln++; + token.ln = this.ln++;
tokens.push(token); tokens.push(token);
continue; continue;
@@ -462,4 +496,5 @@ module.exports = class Lexer { @@ -472,4 +506,5 @@ export class Lexer {
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
prevChar = token.raw.slice(-1); prevChar = token.raw.slice(-1);
diff --git a/src/Parser.js b/src/Parser.js diff --git a/src/Parser.js b/src/Parser.js
index a22a2bc..884ad66 100644
--- a/src/Parser.js --- a/src/Parser.js
+++ b/src/Parser.js +++ b/src/Parser.js
@@ -18,4 +18,5 @@ module.exports = class Parser { @@ -18,4 +18,5 @@ export class Parser {
this.textRenderer = new TextRenderer(); this.textRenderer = new TextRenderer();
this.slugger = new Slugger(); this.slugger = new Slugger();
+ this.ln = 0; // error indicator; should always be set >=1 from tokens + this.ln = 0; // error indicator; should always be set >=1 from tokens
} }
@@ -64,4 +65,8 @@ module.exports = class Parser { @@ -64,4 +65,8 @@ export class Parser {
for (i = 0; i < l; i++) { for (i = 0; i < l; i++) {
token = tokens[i]; token = tokens[i];
+ // take line-numbers from tokens whenever possible + // take line-numbers from tokens whenever possible
@@ -160,7 +161,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ this.renderer.tag_ln(this.ln); + this.renderer.tag_ln(this.ln);
// Run any renderer extensions // Run any renderer extensions
@@ -124,7 +129,10 @@ module.exports = class Parser { @@ -124,7 +129,10 @@ export class Parser {
} }
- body += this.renderer.tablerow(cell); - body += this.renderer.tablerow(cell);
@@ -173,7 +174,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).table(header, body); + out += this.renderer.tag_ln(token.ln).table(header, body);
continue; continue;
} }
@@ -167,8 +175,12 @@ module.exports = class Parser { @@ -167,8 +175,12 @@ export class Parser {
itemBody += this.parse(item.tokens, loose); itemBody += this.parse(item.tokens, loose);
- body += this.renderer.listitem(itemBody, task, checked); - body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +189,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start); + out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
continue; continue;
} }
@@ -179,5 +191,6 @@ module.exports = class Parser { @@ -179,5 +191,6 @@ export class Parser {
} }
case 'paragraph': { case 'paragraph': {
- out += this.renderer.paragraph(this.parseInline(token.tokens)); - out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,7 +197,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).paragraph(t); + out += this.renderer.tag_ln(token.ln).paragraph(t);
continue; continue;
} }
@@ -221,4 +234,7 @@ module.exports = class Parser { @@ -221,4 +234,7 @@ export class Parser {
token = tokens[i]; token = tokens[i];
+ // another thing that only affects <br/> and other inlines + // another thing that only affects <br/> and other inlines
@@ -205,22 +206,23 @@ diff --git a/src/Parser.js b/src/Parser.js
// Run any renderer extensions // Run any renderer extensions
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) { if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
index 7c36a75..aa1a53a 100644
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -11,6 +11,12 @@ module.exports = class Renderer { @@ -11,6 +11,12 @@ export class Renderer {
constructor(options) { constructor(options) {
this.options = options || defaults; this.options = options || defaults;
+ this.ln = ""; + this.ln = "";
} }
+ tag_ln(n) { + tag_ln(n) {
+ this.ln = ' data-ln="' + n + '"'; + this.ln = ` data-ln="${n}"`;
+ return this; + return this;
+ }; + };
+ +
code(code, infostring, escaped) { code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0]; const lang = (infostring || '').match(/\S*/)[0];
@@ -26,10 +32,10 @@ module.exports = class Renderer { @@ -26,10 +32,10 @@ export class Renderer {
if (!lang) { if (!lang) {
- return '<pre><code>' - return '<pre><code>'
@@ -233,65 +235,65 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ return '<pre' + this.ln + '><code class="' + return '<pre' + this.ln + '><code class="'
+ this.options.langPrefix + this.options.langPrefix
+ escape(lang, true) + escape(lang, true)
@@ -40,5 +46,5 @@ module.exports = class Renderer { @@ -43,5 +49,5 @@ export class Renderer {
*/
blockquote(quote) { blockquote(quote) {
- return '<blockquote>\n' + quote + '</blockquote>\n'; - return `<blockquote>\n${quote}</blockquote>\n`;
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n'; + return `<blockquote${this.ln}>\n${quote}</blockquote>\n`;
} }
@@ -51,4 +57,5 @@ module.exports = class Renderer { @@ -59,9 +65,9 @@ export class Renderer {
return '<h' if (this.options.headerIds) {
+ level const id = this.options.headerPrefix + slugger.slug(raw);
+ + this.ln - return `<h${level} id="${id}">${text}</h${level}>\n`;
+ ' id="' + return `<h${level}${this.ln} id="${id}">${text}</h${level}>\n`;
+ this.options.headerPrefix
@@ -61,5 +68,5 @@ module.exports = class Renderer {
} }
// ignore IDs // ignore IDs
- return '<h' + level + '>' + text + '</h' + level + '>\n'; - return `<h${level}>${text}</h${level}>\n`;
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n'; + return `<h${level}${this.ln}>${text}</h${level}>\n`;
} }
@@ -75,5 +82,5 @@ module.exports = class Renderer { @@ -80,5 +86,5 @@ export class Renderer {
*/
listitem(text) { listitem(text) {
- return '<li>' + text + '</li>\n'; - return `<li>${text}</li>\n`;
+ return '<li' + this.ln + '>' + text + '</li>\n'; + return `<li${this.ln}>${text}</li>\n`;
} }
@@ -87,5 +94,5 @@ module.exports = class Renderer { @@ -95,5 +101,5 @@ export class Renderer {
*/
paragraph(text) { paragraph(text) {
- return '<p>' + text + '</p>\n'; - return `<p>${text}</p>\n`;
+ return '<p' + this.ln + '>' + text + '</p>\n'; + return `<p${this.ln}>${text}</p>\n`;
} }
@@ -102,5 +109,5 @@ module.exports = class Renderer { @@ -117,5 +123,5 @@ export class Renderer {
*/
tablerow(content) { tablerow(content) {
- return '<tr>\n' + content + '</tr>\n'; - return `<tr>\n${content}</tr>\n`;
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n'; + return `<tr${this.ln}>\n${content}</tr>\n`;
} }
@@ -127,5 +134,5 @@ module.exports = class Renderer { @@ -151,5 +157,5 @@ export class Renderer {
br() { br() {
- return this.options.xhtml ? '<br/>' : '<br>'; - return this.options.xhtml ? '<br/>' : '<br>';
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>'; + return this.options.xhtml ? `<br${this.ln}/>` : `<br${this.ln}>`;
} }
@@ -153,5 +160,5 @@ module.exports = class Renderer { @@ -190,5 +196,5 @@ export class Renderer {
} }
- let out = '<img src="' + href + '" alt="' + text + '"'; - let out = `<img src="${href}" alt="${text}"`;
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"'; + let out = `<img${this.ln} src="${href}" alt="${text}"`;
if (title) { if (title) {
out += ' title="' + title + '"'; out += ` title="${title}"`;
diff --git a/src/Tokenizer.js b/src/Tokenizer.js diff --git a/src/Tokenizer.js b/src/Tokenizer.js
index e8a69b6..2cc772b 100644
--- a/src/Tokenizer.js --- a/src/Tokenizer.js
+++ b/src/Tokenizer.js +++ b/src/Tokenizer.js
@@ -301,4 +301,7 @@ module.exports = class Tokenizer { @@ -302,4 +302,7 @@ export class Tokenizer {
const l = list.items.length; const l = list.items.length;
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad + // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad
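For context (not part of the patch above): an illustrative before/after of what the line-tracking patch is meant to produce, assuming the patched marked package is importable locally; the exact output is an expectation, not captured from an actual build.

var marked = require('marked').marked;  // patched build, as wired up in the Dockerfile above
console.log(marked.parse('# intro\n\nhello world'));
// expected shape: block-level tags carry the source markdown line in data-ln, e.g.
// <h1 data-ln="1" id="intro">intro</h1>
// <p data-ln="3">hello world</p>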


@@ -1,7 +1,7 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js'); @@ -6,5 +6,5 @@ import { repeatString } from './helpers.js';
/** /**
* smartypants text replacement * smartypants text replacement
- */ - */
@@ -15,21 +15,21 @@ diff --git a/src/Lexer.js b/src/Lexer.js
+ * + *
function mangle(text) { function mangle(text) {
let out = '', let out = '',
@@ -465,5 +465,5 @@ module.exports = class Lexer { @@ -466,5 +466,5 @@ export class Lexer {
// autolink // autolink
- if (token = this.tokenizer.autolink(src, mangle)) { - if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) { + if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -472,5 +472,5 @@ module.exports = class Lexer { @@ -473,5 +473,5 @@ export class Lexer {
// url (gfm) // url (gfm)
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) { - if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) { + if (!this.state.inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -493,5 +493,5 @@ module.exports = class Lexer { @@ -494,5 +494,5 @@ export class Lexer {
} }
} }
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { - if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
@@ -39,14 +39,14 @@ diff --git a/src/Lexer.js b/src/Lexer.js
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -142,5 +142,5 @@ module.exports = class Renderer { @@ -142,5 +142,5 @@ export class Renderer {
link(href, title, text) { link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href); + href = cleanUrl(this.options.baseUrl, href);
if (href === null) { if (href === null) {
return text; return text;
@@ -155,5 +155,5 @@ module.exports = class Renderer { @@ -155,5 +155,5 @@ export class Renderer {
image(href, title, text) { image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,7 +56,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js --- a/src/Tokenizer.js
+++ b/src/Tokenizer.js +++ b/src/Tokenizer.js
@@ -321,14 +321,7 @@ module.exports = class Tokenizer { @@ -320,14 +320,7 @@ export class Tokenizer {
type: 'html', type: 'html',
raw: cap[0], raw: cap[0],
- pre: !this.options.sanitizer - pre: !this.options.sanitizer
@@ -72,7 +72,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- } - }
return token; return token;
} }
@@ -477,15 +470,9 @@ module.exports = class Tokenizer { @@ -476,15 +469,9 @@ export class Tokenizer {
return { return {
- type: this.options.sanitize - type: this.options.sanitize
@@ -90,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0] + text: cap[0]
}; };
} }
@@ -672,10 +659,10 @@ module.exports = class Tokenizer { @@ -671,10 +658,10 @@ export class Tokenizer {
} }
- autolink(src, mangle) { - autolink(src, mangle) {
@@ -103,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]); + text = escape(cap[1]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -700,10 +687,10 @@ module.exports = class Tokenizer { @@ -699,10 +686,10 @@ export class Tokenizer {
} }
- url(src, mangle) { - url(src, mangle) {
@@ -116,7 +116,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]); + text = escape(cap[0]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -737,12 +724,12 @@ module.exports = class Tokenizer { @@ -736,12 +723,12 @@ export class Tokenizer {
} }
- inlineText(src, smartypants) { - inlineText(src, smartypants) {
@@ -135,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js --- a/src/defaults.js
+++ b/src/defaults.js +++ b/src/defaults.js
@@ -9,12 +9,8 @@ function getDefaults() { @@ -9,12 +9,8 @@ export function getDefaults() {
highlight: null, highlight: null,
langPrefix: 'language-', langPrefix: 'language-',
- mangle: true, - mangle: true,
@@ -151,10 +151,10 @@ diff --git a/src/defaults.js b/src/defaults.js
diff --git a/src/helpers.js b/src/helpers.js diff --git a/src/helpers.js b/src/helpers.js
--- a/src/helpers.js --- a/src/helpers.js
+++ b/src/helpers.js +++ b/src/helpers.js
@@ -64,18 +64,5 @@ function edit(regex, opt) { @@ -64,18 +64,5 @@ export function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g; const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
-function cleanUrl(sanitize, base, href) { -export function cleanUrl(sanitize, base, href) {
- if (sanitize) { - if (sanitize) {
- let prot; - let prot;
- try { - try {
@@ -168,36 +168,30 @@ diff --git a/src/helpers.js b/src/helpers.js
- return null; - return null;
- } - }
- } - }
+function cleanUrl(base, href) { +export function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) { if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href); href = resolveUrl(base, href);
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) { @@ -227,10 +214,4 @@ export function findClosingBracket(str, b) {
} }
-function checkSanitizeDeprecation(opt) { -export function checkSanitizeDeprecation(opt) {
- if (opt && opt.sanitize && !opt.silent) { - if (opt && opt.sanitize && !opt.silent) {
- console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options'); - console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
- } - }
-} -}
- -
// copied from https://stackoverflow.com/a/5450113/806777 // copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) { export function repeatString(pattern, count) {
@@ -260,5 +241,4 @@ module.exports = {
rtrim,
findClosingBracket,
- checkSanitizeDeprecation,
repeatString
};
diff --git a/src/marked.js b/src/marked.js diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js --- a/src/marked.js
+++ b/src/marked.js +++ b/src/marked.js
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js'); @@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
const { import {
merge, merge,
- checkSanitizeDeprecation, - checkSanitizeDeprecation,
escape escape
} = require('./helpers.js'); } from './helpers.js';
@@ -35,5 +34,4 @@ function marked(src, opt, callback) { @@ -35,5 +34,4 @@ export function marked(src, opt, callback) {
opt = merge({}, marked.defaults, opt || {}); opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt); - checkSanitizeDeprecation(opt);
@@ -219,37 +213,37 @@ diff --git a/src/marked.js b/src/marked.js
diff --git a/test/bench.js b/test/bench.js diff --git a/test/bench.js b/test/bench.js
--- a/test/bench.js --- a/test/bench.js
+++ b/test/bench.js +++ b/test/bench.js
@@ -33,5 +33,4 @@ async function runBench(options) { @@ -37,5 +37,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -45,5 +44,4 @@ async function runBench(options) { @@ -49,5 +48,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -58,5 +56,4 @@ async function runBench(options) { @@ -62,5 +60,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -70,5 +67,4 @@ async function runBench(options) { @@ -74,5 +71,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -83,5 +79,4 @@ async function runBench(options) { @@ -87,5 +83,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
}); });
@@ -95,5 +90,4 @@ async function runBench(options) { @@ -99,5 +94,4 @@ export async function runBench(options) {
breaks: false, breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
@@ -258,7 +252,7 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js --- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js +++ b/test/specs/run-spec.js
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) { @@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
} }
- if (spec.options.sanitizer) { - if (spec.options.sanitizer) {
@@ -268,77 +262,77 @@ diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
- -
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => { (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
const before = process.hrtime(); const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true }); @@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new'); runSpecs('New', './new');
runSpecs('ReDOS', './redos'); runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js --- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js +++ b/test/unit/Lexer-spec.js
@@ -589,5 +589,5 @@ paragraph @@ -635,5 +635,5 @@ paragraph
}); });
- it('sanitize', () => { - it('sanitize', () => {
+ /*it('sanitize', () => { + /*it('sanitize', () => {
expectTokens({ expectTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -607,5 +607,5 @@ paragraph @@ -653,5 +653,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -652,5 +652,5 @@ paragraph @@ -698,5 +698,5 @@ paragraph
}); });
- it('html sanitize', () => { - it('html sanitize', () => {
+ /*it('html sanitize', () => { + /*it('html sanitize', () => {
expectInlineTokens({ expectInlineTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -660,5 +660,5 @@ paragraph @@ -706,5 +706,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('link', () => { it('link', () => {
@@ -971,5 +971,5 @@ paragraph @@ -1017,5 +1017,5 @@ paragraph
}); });
- it('autolink mangle email', () => { - it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => { + /*it('autolink mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: '<test@example.com>', md: '<test@example.com>',
@@ -991,5 +991,5 @@ paragraph @@ -1037,5 +1037,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('url', () => { it('url', () => {
@@ -1028,5 +1028,5 @@ paragraph @@ -1074,5 +1074,5 @@ paragraph
}); });
- it('url mangle email', () => { - it('url mangle email', () => {
+ /*it('url mangle email', () => { + /*it('url mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: 'test@example.com', md: 'test@example.com',
@@ -1048,5 +1048,5 @@ paragraph @@ -1094,5 +1094,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -1064,5 +1064,5 @@ paragraph @@ -1110,5 +1110,5 @@ paragraph
}); });
- describe('smartypants', () => { - describe('smartypants', () => {
+ /*describe('smartypants', () => { + /*describe('smartypants', () => {
it('single quotes', () => { it('single quotes', () => {
expectInlineTokens({ expectInlineTokens({
@@ -1134,5 +1134,5 @@ paragraph @@ -1180,5 +1180,5 @@ paragraph
}); });
}); });
- }); - });


@@ -29,3 +29,10 @@ pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicode
 # scp is easier, just want basic latin
 pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
+exit 0
+
+# kinda works but ruins hinting on windows, just use the old version of the font which has correct baseline
+python3 shiftbase.py /z/dist/no-pk/scp.woff2
+cd /z/dist/no-pk/
+mv scp.woff2.woff2 scp.woff2


@@ -0,0 +1,27 @@
#!/usr/bin/env python3
import sys
from fontTools.ttLib import TTFont, newTable
def main():
woff = sys.argv[1]
font = TTFont(woff)
print(repr(font["hhea"].__dict__))
print(repr(font["OS/2"].__dict__))
# font["hhea"].ascent = round(base_asc * mul)
# font["hhea"].descent = round(base_desc * mul)
# font["OS/2"].usWinAscent = round(base_asc * mul)
font["OS/2"].usWinDescent = round(font["OS/2"].usWinDescent * 1.1)
font["OS/2"].sTypoDescender = round(font["OS/2"].sTypoDescender * 1.1)
try:
del font["post"].mapping["Delta#1"]
except:
pass
font.save(woff + ".woff2")
if __name__ == "__main__":
main()
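Since shiftbase.py only scales the OS/2 descent values (the hhea lines stay commented out), a quick way to confirm the adjustment took effect is to compare the metrics of the input and output fonts with fontTools; a minimal sketch, with example paths matching the subsetting script above:

# compare descent metrics before/after shiftbase.py (paths are examples)
from fontTools.ttLib import TTFont

before = TTFont("/z/scp.woff2")
after = TTFont("/z/dist/no-pk/scp.woff2")  # output of shiftbase.py after the mv rename

for attr in ("usWinDescent", "sTypoDescender"):
    print(attr, getattr(before["OS/2"], attr), "->", getattr(after["OS/2"], attr))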


@@ -86,12 +86,19 @@ function have() {
python -c "import $1; $1; $1.__version__" python -c "import $1; $1; $1.__version__"
} }
mv copyparty/web/deps/marked.full.js.gz srv/ || true
. buildenv/bin/activate . buildenv/bin/activate
have setuptools have setuptools
have wheel have wheel
have twine have twine
# remove type hints to support python < 3.9
rm -rf build/pypi
mkdir -p build/pypi
cp -pR setup.py README.md LICENSE copyparty tests bin scripts/strip_hints build/pypi/
cd build/pypi
tar --strip-components=2 -xf ../strip-hints-0.1.10.tar.gz strip-hints-0.1.10/src/strip_hints
python3 -c 'from strip_hints.a import uh; uh("copyparty")'
./setup.py clean2 ./setup.py clean2
./setup.py sdist bdist_wheel --universal ./setup.py sdist bdist_wheel --universal


@@ -14,12 +14,10 @@ help() { exec cat <<'EOF'
# #
# `gz` creates a gzip-compressed python sfx instead of bzip2 # `gz` creates a gzip-compressed python sfx instead of bzip2
# #
# `no-sh` makes just the python sfx, skips the sh/unix sfx # `lang` limits which languages/translations to include,
# for example `lang eng` or `lang eng|nor`
# #
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs # `no-cm` saves ~82k by removing easymde/codemirror
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~92k by removing easymde/codemirror
# (the fancy markdown editor) # (the fancy markdown editor)
# #
# `no-hl` saves ~41k by removing syntax hilighting in the text viewer # `no-hl` saves ~41k by removing syntax hilighting in the text viewer
@@ -66,23 +64,20 @@ pybin=$(command -v python3 || command -v python) || {
exit 1 exit 1
} }
langs=
use_gz= use_gz=
do_sh=1
do_py=1
zopf=2560 zopf=2560
while [ ! -z "$1" ]; do while [ ! -z "$1" ]; do
case $1 in case $1 in
clean) clean=1 ; ;; clean) clean=1 ; ;;
re) repack=1 ; ;; re) repack=1 ; ;;
gz) use_gz=1 ; ;; gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;; no-fnt) no_fnt=1 ; ;;
no-hl) no_hl=1 ; ;; no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;; no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;; no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;; fast) zopf= ; ;;
no-py) do_py= ; ;; lang) shift;langs="$1"; ;;
fast) zopf=100 ; ;;
*) help ; ;; *) help ; ;;
esac esac
shift shift
@@ -111,7 +106,7 @@ tmpdir="$(
[ $repack ] && { [ $repack ] && {
old="$tmpdir/pe-copyparty" old="$tmpdir/pe-copyparty"
echo "repack of files in $old" echo "repack of files in $old"
cp -pR "$old/"*{dep-j2,copyparty} . cp -pR "$old/"*{j2,ftp,copyparty} .
} }
[ $repack ] || { [ $repack ] || {
@@ -135,8 +130,47 @@ tmpdir="$(
mv MarkupSafe-*/src/markupsafe . mv MarkupSafe-*/src/markupsafe .
rm -rf MarkupSafe-* markupsafe/_speedups.c rm -rf MarkupSafe-* markupsafe/_speedups.c
mkdir dep-j2/ mkdir j2/
mv {markupsafe,jinja2} dep-j2/ mv {markupsafe,jinja2} j2/
echo collecting pyftpdlib
f="../build/pyftpdlib-1.5.6.tar.gz"
[ -e "$f" ] ||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.6.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv pyftpdlib-release-*/pyftpdlib .
rm -rf pyftpdlib-release-* pyftpdlib/test
mkdir ftp/
mv pyftpdlib ftp/
echo collecting asyncore, asynchat
for n in asyncore.py asynchat.py; do
f=../build/$n
[ -e "$f" ] ||
(url=https://raw.githubusercontent.com/python/cpython/c4d45ee670c09d4f6da709df072ec80cb7dfad22/Lib/$n;
wget -O$f "$url" || curl -L "$url" >$f)
done
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
true || (
echo collecting strip-hints
f=../build/strip-hints-0.1.10.tar.gz
[ -e $f ] ||
(url=https://files.pythonhosted.org/packages/9c/d4/312ddce71ee10f7e0ab762afc027e07a918f1c0e1be5b0069db5b0e7542d/strip-hints-0.1.10.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv strip-hints-0.1.10/src/strip_hints .
rm -rf strip-hints-* strip_hints/import_hooks*
sed -ri 's/[a-z].* as import_hooks$/"""a"""/' strip_hints/*.py
cp -pR ../scripts/strip_hints/ .
)
cp -pR ../scripts/py2/ .
# msys2 tar is bad, make the best of it # msys2 tar is bad, make the best of it
echo collecting source echo collecting source
@@ -148,6 +182,15 @@ tmpdir="$(
(cd .. && tar -cf tar copyparty) && tar -xf ../tar (cd .. && tar -cf tar copyparty) && tar -xf ../tar
} }
rm -f ../tar rm -f ../tar
# insert asynchat
mkdir copyparty/vend
for n in asyncore.py asynchat.py; do
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
done
# remove type hints before build instead
(cd copyparty; python3 ../../scripts/strip_hints/a.py; rm uh)
} }
ver= ver=
@@ -218,9 +261,6 @@ cat have | while IFS= read -r x; do
done done
rm have rm have
[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
[ $no_cm ] && { [ $no_cm ] && {
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde* rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html echo h > copyparty/web/mde.html
@@ -248,17 +288,30 @@ rm have
tmv "$f" tmv "$f"
} }
[ $repack ] || [ $langs ] &&
find | grep -E '\.py$' | for f in copyparty/web/{browser.js,splash.js}; do
grep -vE '__version__' | gzip -d "$f.gz" || true
tr '\n' '\0' | awk '/^\}/{l=0} !l; /^var Ls =/{l=1;next} o; /^\t["}]/{o=0} /^\t"'"$langs"'"/{o=1;print}' <$f >t
xargs -0 $pybin ../scripts/uncomment.py tmv "$f"
done
f=dep-j2/jinja2/constants.py [ $repack ] || {
# uncomment
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 "$pybin" ../scripts/uncomment.py
# py2-compat
#find | grep -E '\.py$' | while IFS= read -r x; do
# sed -ri '/: TypeAlias = /d' "$x"; done
}
f=j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f" tmv "$f"
grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 | grep -rLE '^#[^a-z]*coding: utf-8' j2 |
while IFS= read -r f; do while IFS= read -r f; do
(echo "# coding: utf-8"; cat "$f") >t (echo "# coding: utf-8"; cat "$f") >t
tmv "$f" tmv "$f"
@@ -287,7 +340,7 @@ find | grep -E '\.(js|html)$' | while IFS= read -r f; do
done done
gzres() { gzres() {
command -v pigz && command -v pigz && [ $zopf ] &&
pk="pigz -11 -I $zopf" || pk="pigz -11 -I $zopf" ||
pk='gzip' pk='gzip'
@@ -328,7 +381,8 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
} }
[ $use_zdir ] && { [ $use_zdir ] && {
arcs=("$zdir"/arc.*) arcs=("$zdir"/arc.*)
arc="${arcs[$RANDOM % ${#arcs[@]} ] }" n=$(( $RANDOM % ${#arcs[@]} ))
arc="${arcs[n]}"
echo "using $arc" echo "using $arc"
tar -xf "$arc" tar -xf "$arc"
for f in copyparty/web/*.gz; do for f in copyparty/web/*.gz; do
@@ -338,11 +392,18 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
echo gen tarlist echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done | for d in copyparty j2 ftp py2; do find $d -type f; done | # strip_hints
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort | sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true for n in {1..50}; do
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
s=$(md5sum list | cut -c-16)
grep -q $s "$zdir/h" && continue
echo $s >> "$zdir/h"
break
done
[ $n -eq 50 ] && exit
echo creating tar echo creating tar
args=(--owner=1000 --group=1000) args=(--owner=1000 --group=1000)
@@ -357,41 +418,27 @@ pe=bz2
echo compressing tar echo compressing tar
# detect best level; bzip2 -7 is usually better than -9 # detect best level; bzip2 -7 is usually better than -9
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2; } for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
rm t.* || true rm t.* || true
exts=() exts=()
[ $do_sh ] && { echo creating sfx
exts+=(.sh)
echo creating unix sfx py=../scripts/sfx.py
( suf=
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh | [ $use_gz ] && {
grep -E '^sfx_eof$' -B 9001; sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
cat tar.xz py=$py.t
) >$sfx_out.sh suf=-gz
} }
"$pybin" $py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out$suf.py
[ $do_py ] && { exts+=($suf.py)
echo creating generic sfx [ $use_gz ] &&
rm $py
py=../scripts/sfx.py
suf=
[ $use_gz ] && {
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
py=$py.t
suf=-gz
}
$pybin $py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out$suf.py
exts+=($suf.py)
[ $use_gz ] &&
rm $py
}
chmod 755 $sfx_out* chmod 755 $sfx_out*
@@ -402,4 +449,4 @@ for ext in ${exts[@]}; do
done done
# apk add bash python3 tar xz bzip2 # apk add bash python3 tar xz bzip2
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done # while true; do ./make-sfx.sh; f=../dist/copyparty-sfx.py; mv $f $f.$(wc -c <$f | awk '{print$1}'); done
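One detail worth calling out from the compression step above: instead of trusting a fixed bzip2 level, the script compresses the tar at every level from 2 through 9 in parallel and keeps whichever output is smallest (the comment notes that -7 often beats -9). A rough, sequential Python sketch of the same idea, shown only for illustration:

# try every bzip2 level and keep the smallest result,
# mirroring "for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv $(ls -1S ...)"
import bz2

def smallest_bz2(data):
    level, blob = min(((n, bz2.compress(data, n)) for n in range(2, 10)),
                      key=lambda t: len(t[1]))
    return level, blob

# usage:
#   level, blob = smallest_bz2(open("tar", "rb").read())
#   open("tar.bz2", "wb").write(blob)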


@@ -35,8 +35,6 @@ ver="$1"
    exit 1
 }
-mv copyparty/web/deps/marked.full.js.gz srv/ || true
 mkdir -p dist
 zip_path="$(pwd)/dist/copyparty-$ver.zip"
 tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"


@@ -4,33 +4,31 @@ set -e
 cd ~/dev/copyparty/scripts
 v=$1
-printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
-grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
-git tag v$v
-git push origin --tags
-rm -rf ../dist
-./make-pypi-release.sh u
-(cd .. && python3 ./setup.py clean2)
-./make-tgz-release.sh $v
+[ "$v" = sfx ] || {
+    printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
+    grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
+    git push all
+    git tag v$v
+    git push all --tags
+    rm -rf ../dist
+    ./make-pypi-release.sh u
+    (cd .. && python3 ./setup.py clean2)
+    ./make-tgz-release.sh $v
+}
 rm -f ../dist/copyparty-sfx.*
-./make-sfx.sh no-sh
-../dist/copyparty-sfx.py -h
+f=../dist/copyparty-sfx.py
+./make-sfx.sh
+$f -h
+ar=
 while true; do
-    for ((a=0; a<100; a++)); do
-        for f in ../dist/copyparty-sfx.{py,sh}; do
-            [ -e $f ] || continue;
-            mv $f $f.$(wc -c <$f | awk '{print$1}')
-        done
-        ./make-sfx.sh re $ar
-    done
-    ar=no-sh
+    mv $f $f.$(wc -c <$f | awk '{print$1}')
+    ./make-sfx.sh re $ar
 done
 # git tag -d v$v; git push --delete origin v$v


@@ -1,13 +1,23 @@
 #!/bin/bash
 set -ex
+rm -rf unt
+mkdir -p unt/srv
+cp -pR copyparty tests unt/
+cd unt
+python3 ../scripts/strip_hints/a.py
 pids=()
 for py in python{2,3}; do
+    PYTHONPATH=
+    [ $py = python2 ] && PYTHONPATH=../scripts/py2
+    export PYTHONPATH
     nice $py -m unittest discover -s tests >/dev/null &
     pids+=($!)
 done
-python3 scripts/test/smoketest.py &
+python3 ../scripts/test/smoketest.py &
 pids+=($!)
 for pid in ${pids[@]}; do

@@ -11,6 +11,8 @@ copyparty/broker_mp.py,
 copyparty/broker_mpw.py,
 copyparty/broker_thr.py,
 copyparty/broker_util.py,
+copyparty/fsutil.py,
+copyparty/ftpd.py,
 copyparty/httpcli.py,
 copyparty/httpconn.py,
 copyparty/httpsrv.py,
@@ -31,6 +33,9 @@ copyparty/th_srv.py,
 copyparty/u2idx.py,
 copyparty/up2k.py,
 copyparty/util.py,
+copyparty/vend,
+copyparty/vend/asynchat.py,
+copyparty/vend/asyncore.py,
 copyparty/web,
 copyparty/web/baguettebox.js,
 copyparty/web/browser.css,
@@ -49,14 +54,6 @@ copyparty/web/deps/easymde.js,
 copyparty/web/deps/marked.js,
 copyparty/web/deps/mini-fa.css,
 copyparty/web/deps/mini-fa.woff,
-copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
-copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
-copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
-copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
-copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
-copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
-copyparty/web/deps/ogv-worker-audio.js,
-copyparty/web/deps/ogv.js,
 copyparty/web/deps/prism.js,
 copyparty/web/deps/prism.css,
 copyparty/web/deps/prismd.css,
@@ -75,6 +72,7 @@ copyparty/web/msg.css,
 copyparty/web/msg.html,
 copyparty/web/splash.css,
 copyparty/web/splash.html,
+copyparty/web/splash.js,
 copyparty/web/ui.css,
 copyparty/web/up2k.js,
 copyparty/web/util.js,


@@ -1,10 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# coding: latin-1 # coding: latin-1
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
import subprocess as sp import subprocess as sp
""" """
to edit this file, use HxD or "vim -b" to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end) (there is compressed stuff at the end)
@@ -20,6 +20,7 @@ the archive data is attached after the b"\n# eof\n" archive marker,
b"\n# " decodes to b"" b"\n# " decodes to b""
""" """
# set by make-sfx.sh # set by make-sfx.sh
VER = None VER = None
SIZE = None SIZE = None
@@ -341,14 +342,15 @@ def get_payload():
def utime(top): def utime(top):
# avoid cleaners
i = 0 i = 0
files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df] files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
while WINDOWS: while WINDOWS or os.path.exists("/etc/systemd"):
t = int(time.time()) t = int(time.time())
if i: if i:
msg("utime {}, {}".format(i, t)) msg("utime {}, {}".format(i, t))
for f in files: for f in [top] + files:
os.utime(f, (t, t)) os.utime(f, (t, t))
i += 1 i += 1
@@ -367,28 +369,29 @@ def confirm(rv):
sys.exit(rv or 1) sys.exit(rv or 1)
def run(tmp, j2): def run(tmp, j2, ftp):
msg("jinja2:", j2 or "bundled") msg("jinja2:", j2 or "bundled")
msg("pyftpd:", ftp or "bundled")
msg("sfxdir:", tmp) msg("sfxdir:", tmp)
msg() msg()
# block systemd-tmpfiles-clean.timer
try:
import fcntl
fd = os.open(tmp, os.O_RDONLY)
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception as ex:
if not WINDOWS:
msg("\033[31mflock:{!r}\033[0m".format(ex))
t = threading.Thread(target=utime, args=(tmp,)) t = threading.Thread(target=utime, args=(tmp,))
t.daemon = True t.daemon = True
t.start() t.start()
ld = [tmp, os.path.join(tmp, "dep-j2")] ld = (("", ""), (j2, "j2"), (ftp, "ftp"), (not PY2, "py2"))
if j2: ld = [os.path.join(tmp, b) for a, b in ld if not a]
del ld[-1]
# skip 1
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
if sys.version_info < (3, 10) and False:
sys.path.insert(0, ld[0])
from strip_hints.a import uh
uh(tmp + "/copyparty")
# skip 0
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]): if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
run_s(ld) run_s(ld)
@@ -461,7 +464,12 @@ def main():
j2 = None j2 = None
try: try:
run(tmp, j2) from pyftpdlib.__init__ import __ver__ as ftp
except:
ftp = None
try:
run(tmp, j2, ftp)
except SystemExit as ex: except SystemExit as ex:
c = ex.code c = ex.code
if c not in [0, -15]: if c not in [0, -15]:
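Two of the sfx.py additions above exist only to keep the extracted payload directory alive between runs: the utime thread now refreshes mtimes on systemd hosts too (and on the top directory itself), and the new flock block leans on systemd-tmpfiles skipping entries that another process holds a BSD file lock on. A minimal standalone sketch of the flock part (Linux-only; the path is just an example):

# hold a BSD flock on a directory so systemd-tmpfiles-clean leaves it alone;
# the protection lasts only while the returned fd stays open
import fcntl
import os

def pin_dir(path="/tmp/pe-copyparty"):  # example path
    fd = os.open(path, os.O_RDONLY)
    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)  # raises if something else holds the lock
    return fd  # keep a reference around; closing it releases the lock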

View File

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
 printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
 $_py -c 'import jinja2' 2>/dev/null || continue
 printf '%s\n' "$_py"
-mv $dir/{,x.}dep-j2
+mv $dir/{,x.}j2
 break
 done)"

scripts/strip_hints/a.py (new file, 72 lines)

@@ -0,0 +1,72 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
from strip_hints import strip_file_to_string
# list unique types used in hints:
# rm -rf unt && cp -pR copyparty unt && (cd unt && python3 ../scripts/strip_hints/a.py)
# diff -wNarU1 copyparty unt | grep -E '^\-' | sed -r 's/[^][, ]+://g; s/[^][, ]+[[(]//g; s/[],()<>{} -]/\n/g' | grep -E .. | sort | uniq -c | sort -n
def pr(m):
sys.stderr.write(m)
sys.stderr.flush()
def uh(top):
if os.path.exists(top + "/uh"):
return
# pr("building support for your python ver")
pr("unhinting")
files = []
for (dp, _, fns) in os.walk(top):
for fn in fns:
if not fn.endswith(".py"):
continue
fp = os.path.join(dp, fn)
files.append(fp)
try:
import multiprocessing as mp
with mp.Pool(os.cpu_count()) as pool:
pool.map(uh1, files)
except Exception as ex:
print("\nnon-mp fallback due to {}\n".format(ex))
for fp in files:
uh1(fp)
pr("k\n\n")
with open(top + "/uh", "wb") as f:
f.write(b"a")
def uh1(fp):
pr(".")
cs = strip_file_to_string(fp, no_ast=True, to_empty=True)
libs = "typing|types|collections\.abc"
ptn = re.compile(r"^(\s*)(from (?:{0}) import |import (?:{0})\b).*".format(libs))
# remove expensive imports too
lns = []
for ln in cs.split("\n"):
m = ptn.match(ln)
if m:
ln = m.group(1) + "raise Exception()"
lns.append(ln)
cs = "\n".join(lns)
with open(fp, "wb") as f:
f.write(cs.encode("utf-8"))
if __name__ == "__main__":
uh(".")

Some files were not shown because too many files have changed in this diff.