Compare commits


288 Commits

Author SHA1 Message Date
ed
d30001d23d v1.0.3 2021-09-18 17:50:40 +02:00
ed
06bbf0d656 filekeys in search results 2021-09-18 17:26:13 +02:00
ed
6ddd952e04 return filekeys in upload summary if read-access 2021-09-18 15:57:43 +02:00
ed
027ad0c3ee misc 2021-09-18 15:38:13 +02:00
ed
3abad2b87b fix navpane nowrap 2021-09-18 14:18:23 +02:00
ed
32a1c7c5d5 cosmetic 2021-09-18 02:07:29 +02:00
ed
f06e165bd4 retro 2021-09-18 02:07:09 +02:00
ed
1c843b24f7 ensure ffmpeg doesn't transcode video 2021-09-17 23:50:54 +02:00
ed
2ace9ed380 fix filekeys appearing in filenames 2021-09-17 23:12:32 +02:00
ed
5f30c0ae03 fix button hover bg 2021-09-17 22:49:49 +02:00
ed
ef60adf7e2 optional navpane wordwrap diasble 2021-09-17 22:49:26 +02:00
ed
7354b462e8 easymde: use extenral marked.js 2021-09-17 09:32:30 +02:00
ed
da904d6be8 upgrade marked.js from v1.1.0 to v3.0.4 2021-09-17 09:10:33 +02:00
ed
c5fbbbbb5c show current line number in md-editor 2021-09-17 01:36:06 +02:00
ed
5010387d8a markdown modpoll at an interval 2021-09-16 09:31:58 +02:00
ed
f00c54a7fb nice 2021-09-16 09:00:36 +02:00
ed
9f52c169d0 more python3 shebangs 2021-09-16 00:28:38 +02:00
ed
bf18339404 change sfx shebang to python3 2021-09-16 00:26:52 +02:00
ed
2ad12b074b return 404 on browsing folders with g 2021-09-16 00:17:27 +02:00
ed
a6788ffe8d mention e2ts deps 2021-09-16 00:06:19 +02:00
ed
0e884df486 keep empty folders after deleting all files 2021-09-15 23:31:49 +02:00
ed
ef1c55286f add filekeys 2021-09-15 23:17:02 +02:00
ed
abc0424c26 show login prompt on 404 2021-09-15 21:53:30 +02:00
ed
44e5c82e6d more aggressively no-cache 2021-09-15 20:49:02 +02:00
ed
5849c446ed new access level g 2021-09-15 01:01:20 +02:00
ed
12b7317831 wget: delete url file 2021-09-15 00:18:58 +02:00
ed
fe323f59af update readme 2021-09-14 23:05:32 +02:00
ed
a00e56f219 lol it works 2021-09-14 22:44:56 +02:00
ed
1a7852794f dry boolean configs 2021-09-14 00:50:27 +02:00
ed
22b1373a57 accessibility: always hilight focused elements 2021-09-14 00:46:53 +02:00
ed
17d78b1469 set max-width for readme.md 2021-09-14 00:46:03 +02:00
ed
4d8b32b249 prevent tooltips on alt-tab 2021-09-14 00:45:30 +02:00
ed
b65bea2550 show toast with stack on rejected promises 2021-09-14 00:42:46 +02:00
ed
0b52ccd200 fqdn makes more sense 2021-09-12 23:49:37 +02:00
ed
3006a07059 cfssl: mention arg 3 2021-09-12 23:38:38 +02:00
ed
801dbc7a9a readme: add motivations / future plans 2021-09-12 23:25:34 +02:00
ed
4f4e895fb7 update vscode launch args 2021-09-11 19:59:59 +02:00
ed
cc57c3b655 bump deps 2021-09-11 19:59:41 +02:00
ed
ca6ec9c5c7 v1.0.2 2021-09-09 09:21:30 +02:00
ed
633b1f0a78 v1.0.1 2021-09-09 00:59:55 +02:00
ed
6136b9bf9c don't double-eof 2021-09-09 00:54:09 +02:00
ed
524a3ba566 actually this is better 2021-09-09 00:41:23 +02:00
ed
58580320f9 make the primary tabs toggle-buttons 2021-09-09 00:35:07 +02:00
ed
759b0a994d alternative equalizer tuning 2021-09-09 00:27:18 +02:00
ed
d2800473e4 less aggressive searching, especially on phones 2021-09-08 23:24:32 +02:00
ed
f5b1a2065e multipart-parser needs exact reads 2021-09-08 21:07:34 +00:00
ed
5e62532295 minimal-up2k: remove filesearch dropzone 2021-09-08 09:16:02 +02:00
ed
c1bee96c40 fix filedrop trying to upload without write access 2021-09-08 00:19:48 +02:00
ed
f273253a2b ( ´ w `) 2021-09-08 00:16:08 +02:00
ed
012bbcf770 v1.0.0 2021-09-07 23:18:54 +02:00
ed
b54cb47b2e listen for filedrops in all tabs/modes 2021-09-07 22:44:48 +02:00
ed
1b15f43745 crashpage: add github-issue link 2021-09-07 22:30:50 +02:00
ed
96771bf1bd linken 2021-09-07 22:12:28 +02:00
ed
580078bddb more readme stuff 2021-09-07 22:10:59 +02:00
ed
c5c7080ec6 more readme fixup 2021-09-07 21:57:33 +02:00
ed
408339b51d mention the new dropzones 2021-09-07 21:49:00 +02:00
ed
02e3d44998 fix move/delete without -e2d (thx exci) 2021-09-07 21:20:34 +02:00
ed
156f13ded1 add 10-minute indicators to seekbar 2021-09-07 21:10:50 +02:00
ed
d288467cb7 separate dropzones for upload/search 2021-09-07 20:52:06 +02:00
ed
21662c9f3f error-message cleanup 2021-09-07 20:51:07 +02:00
ed
9149fe6cdd lightmode fix 2021-09-07 00:44:09 +02:00
ed
9a146192b7 don't unwrap single folders in zip/tar downloads 2021-09-07 00:43:51 +02:00
ed
3a9d3b7b61 rip hls 2021-09-07 00:05:51 +02:00
ed
f03f0973ab Create branch-rename.md 2021-09-06 23:42:42 +02:00
ed
7ec0881e8c Create CODE_OF_CONDUCT.md 2021-09-06 23:31:57 +02:00
ed
59e1ab42ff Create CONTRIBUTING.md 2021-09-06 22:18:41 +02:00
ed
722216b901 Update issue templates 2021-09-06 22:11:06 +02:00
ed
bd8f3dc368 Update issue templates 2021-09-06 22:09:10 +02:00
ed
33cd94a141 update TOC 2021-09-06 08:36:18 +02:00
ed
053ac74734 v0.13.14 2021-09-06 01:06:16 +02:00
ed
cced99fafa replace SCP with Consolas on no-fnt repack 2021-09-06 01:04:12 +02:00
ed
a009ff53f7 show README.md in directory listings 2021-09-06 00:23:35 +02:00
ed
ca16c4108d add options to disallow renaming/moving dotfiles 2021-09-06 00:17:35 +02:00
ed
d1b6c67dc3 fix misnomer 2021-09-06 00:13:52 +02:00
ed
a61f8133d5 add option to disable logues 2021-09-05 22:33:42 +02:00
ed
38d797a544 remove duplicate code 2021-09-05 22:32:34 +02:00
ed
16c1877f50 fix markdown scrollmap desync on offsite images 2021-09-05 21:44:17 +02:00
ed
da5f15a778 move general markdown to ui.css 2021-09-05 21:42:41 +02:00
ed
396c64ecf7 move sourcecodepro to ui.css 2021-09-05 18:55:28 +02:00
ed
252c3a7985 faster turbo 2021-09-05 18:51:01 +02:00
ed
a3ecbf0ae7 better fix for the up2k bounce 2021-09-05 18:50:24 +02:00
ed
314327d8f2 support alternative python impls 2021-09-05 18:48:58 +02:00
ed
bfacd06929 mention some more features 2021-09-04 21:40:22 +02:00
ed
4f5e8f8cf5 toc tweaks 2021-09-04 21:21:18 +02:00
ed
1fbb4c09cc readme/doc cleanup 2021-09-04 21:07:45 +02:00
ed
b332e1992b sfx-repack: fix git version numbers 2021-09-04 17:43:49 +02:00
ed
5955940b82 fix upload eta going bad after inactivity 2021-09-04 03:10:54 +02:00
ed
231a03bcfd v0.13.13 2021-09-03 21:21:17 +02:00
ed
bc85723657 more intense compressino 2021-09-03 21:20:40 +02:00
ed
be32b743c6 repl: select default text on load 2021-09-03 20:48:41 +02:00
ed
83c9843059 make-sfx: correct version number on repack 2021-09-03 20:38:41 +02:00
ed
11cf43626d make-sfx: fix no-dd css modifier 2021-09-03 20:38:14 +02:00
ed
a6dc5e2ce3 add some missing preventdefaults 2021-09-03 20:37:30 +02:00
ed
38593a0394 move column hider buttons above the header 2021-09-03 20:19:17 +02:00
ed
95309afeea fix file-list jumping around during uploads 2021-09-03 20:17:44 +02:00
ed
c2bf6fe2a3 add basic authentication 2021-09-03 20:15:24 +02:00
ed
99ac324fbd tweaks 2021-09-02 19:06:08 +02:00
ed
5562de330f slightly smaller jpeg thumbnails 2021-09-02 18:51:15 +02:00
ed
95014236ac js-repl presets 2021-09-02 18:50:47 +02:00
ed
6aa7386138 modals: onDisplay callback 2021-09-02 18:46:51 +02:00
ed
3226a1f588 crashpage: show recent console messages 2021-09-02 18:45:42 +02:00
ed
b4cf890cd8 emphasis 2021-09-02 18:42:53 +02:00
ed
ce09e323af ok/cancel buttons in platform-defined order 2021-09-02 18:42:12 +02:00
ed
941aedb177 v0.13.12 2021-09-01 23:48:01 +02:00
ed
87a0d502a3 crashpage: add useragent 2021-09-01 23:32:27 +02:00
ed
cab7c1b0b8 browser-icons: centered play button 2021-09-01 22:35:27 +02:00
ed
d5892341b6 prevent vertical toast overflow 2021-09-01 22:34:48 +02:00
ed
646557a43e crashpage: better localstore dump 2021-09-01 22:34:04 +02:00
ed
ed8d34ab43 dont try to play audio if js crashed 2021-09-01 22:28:15 +02:00
ed
5e34463c77 support massive cut/paste ops 2021-09-01 22:27:39 +02:00
ed
1b14eb7959 fix thumbnail-zoom hotkeys 2021-09-01 22:26:18 +02:00
ed
ed48c2d0ed v0.13.11 2021-08-30 22:32:16 +02:00
ed
26fe84b660 smaller sfx 2021-08-30 22:27:10 +02:00
ed
5938230270 more tray ui nitpicks 2021-08-30 22:25:07 +02:00
ed
1a33a047fa fix listening on single interface 2021-08-30 21:39:44 +02:00
ed
43a8bcefb9 v0.13.10 2021-08-30 03:02:11 +02:00
ed
2e740e513f cheap performance fix 2021-08-30 02:38:48 +02:00
ed
8a21a86b61 better iOS error-handling 2021-08-30 02:29:38 +02:00
ed
f600116205 login returns to volume listing 2021-08-30 01:55:24 +02:00
ed
1c03705de8 upload filedrops in alphabetical order 2021-08-30 01:50:12 +02:00
ed
f7e461fac6 add humantime 2021-08-30 01:16:20 +02:00
ed
03ce6c97ff better crash-handler ui 2021-08-30 01:15:37 +02:00
ed
ffd9e76e07 select all text in modal.prompt 2021-08-30 01:11:00 +02:00
ed
fc49cb1e67 add js repl 2021-08-30 01:09:27 +02:00
ed
f5712d9f25 v0.13.9 2021-08-29 02:24:09 +02:00
ed
161d57bdda v0.13.8 2021-08-29 01:38:06 +02:00
ed
bae0d440bf upgrade ogvjs to 1.8.4 2021-08-29 01:11:44 +02:00
ed
fff052dde1 explain the magic 2021-08-29 00:11:06 +02:00
ed
73b06eaa02 coerce iOS into playing opus in the background 2021-08-29 00:05:14 +02:00
ed
08a8ebed17 minor cleanup 2021-08-28 22:40:59 +02:00
ed
74d07426b3 make tray tab smaller 2021-08-28 22:37:39 +02:00
ed
69a2bba99a fix ogv.js crashing iOS 2021-08-28 22:35:47 +02:00
ed
4d685d78ee v0.13.7 2021-08-28 04:55:06 +02:00
ed
5845ec3f49 nevermind, nailed it 2021-08-28 04:08:22 +02:00
ed
13373426fe alright fine apple you win 2021-08-28 03:44:07 +02:00
ed
8e55551a06 positioning fixes 2021-08-28 03:27:14 +02:00
ed
12a3f0ac31 update the filetype icons example 2021-08-28 02:56:07 +02:00
ed
18e33edc88 hide tooltips on scroll 2021-08-28 02:46:06 +02:00
ed
c72c5ad4ee make the ellipsis more visible 2021-08-28 02:38:31 +02:00
ed
0fbc81ab2f missed some 2021-08-28 02:37:28 +02:00
ed
af0a34cf82 improve iphone fix 2021-08-28 02:11:40 +02:00
ed
b4590c5398 horizontally centered tooltips 2021-08-28 01:49:21 +02:00
ed
f787a66230 that was dumb 2021-08-28 01:47:36 +02:00
ed
b21a99fd62 only tooltip the ellipsed thumbnails 2021-08-28 01:25:27 +02:00
ed
eb16306cde misc cleanup 2021-08-28 00:03:30 +02:00
ed
7bc23687e3 this kinda broke ellipsing, hopefully not too expensive 2021-08-28 00:02:59 +02:00
ed
e1eaa057f2 optimize clmod 2021-08-27 23:58:23 +02:00
ed
97c264ca3e snappy taps 2021-08-27 23:57:46 +02:00
ed
cf848ab1f7 add ellipsing of thumbnail filename, fixes #3 (+ clamp zoom level) 2021-08-27 23:50:09 +02:00
ed
cf83f9b0fd v0.13.6 2021-08-27 00:09:36 +02:00
ed
d98e361083 quick debounce 2021-08-26 23:59:17 +02:00
ed
ce7f5309c7 tweak toast bg 2021-08-26 23:46:04 +02:00
ed
75c485ced7 misc toast rice and html escaping 2021-08-26 23:45:28 +02:00
ed
9c6e2ec012 misc modal rice and html escaping 2021-08-26 23:23:56 +02:00
ed
1a02948a61 prevent text selection on most buttons 2021-08-26 23:01:24 +02:00
ed
8b05ba4ba1 stop counting eta when we don't hold the flag 2021-08-26 22:51:07 +02:00
ed
21e2874cb7 warning when another browser tab holds the flag 2021-08-26 22:50:22 +02:00
ed
360ed5c46c release the up2k flag when disabling it 2021-08-26 22:48:57 +02:00
ed
5099bc365d better eta for fsearch 2021-08-26 22:47:43 +02:00
ed
12986da147 might be useful some time 2021-08-26 22:45:50 +02:00
ed
23e72797bc remove some more ansi escapes on win7 2021-08-26 22:45:36 +02:00
ed
ac7b6f8f55 update turbo hint for fsearch 2021-08-26 20:44:36 +02:00
ed
981b9ff11e more accurate eta 2021-08-26 20:43:52 +02:00
ed
4186906f4c pause hashing as well when parallel uploads is 0 2021-08-26 20:43:27 +02:00
ed
0850d24e0c improve spacing on narrow screens 2021-08-26 20:42:20 +02:00
ed
7ab8334c96 remove debug 2021-08-26 01:16:59 +02:00
ed
a4d7329ab7 revert to fixed MiB/s in upload tab 2021-08-26 01:13:20 +02:00
ed
3f4eae6bce yolo search + show in bz + md search 2021-08-26 00:57:49 +02:00
ed
518cf4be57 set fsearch tag on tasks 2021-08-26 00:54:00 +02:00
ed
71096182be toFixed is busted, workaround 2021-08-26 00:51:35 +02:00
ed
6452e927ea download-eta accuracy + misc ux 2021-08-26 00:40:12 +02:00
ed
bc70cfa6f0 fix tmi 2021-08-25 09:02:34 +02:00
ed
2b6e5ebd2d update minimal-up2k 2021-08-25 08:26:38 +02:00
ed
c761bd799a add pane with total eta for all uploads 2021-08-25 02:06:29 +02:00
ed
2f7c2fdee4 add colors to status column in up2k ui 2021-08-24 00:32:53 +02:00
ed
70a76ec343 add toast on upload/fsearch completion 2021-08-24 00:31:01 +02:00
ed
7c3f64abf2 fix navpane h.scroll bug 2021-08-24 00:29:11 +02:00
ed
f5f38f195c use scp.woff in browser too 2021-08-24 00:28:16 +02:00
ed
7e84f4f015 fence focus inside modals 2021-08-24 00:26:54 +02:00
ed
4802f8cf07 better msg when unposting a deleted file 2021-08-24 00:24:50 +02:00
ed
cc05e67d8f add summaries to readme toc 2021-08-22 17:23:42 +02:00
ed
2b6b174517 the smallest nitpick 2021-08-20 19:25:57 +02:00
ed
a1d05e6e12 folder thumbnail fix 2021-08-20 19:22:25 +02:00
ed
f95ceb6a9b fix toc 2021-08-17 08:54:19 +02:00
ed
8f91b0726d add missing hotkey hint 2021-08-17 00:24:27 +02:00
ed
97807f4383 update screenshots 2021-08-17 00:23:12 +02:00
ed
5f42237f2c v0.13.5 2021-08-16 08:40:26 +02:00
ed
68289cfa54 v0.13.4 2021-08-16 08:18:52 +02:00
ed
42ea30270f up2k-ui: post absolute URLs 2021-08-16 08:16:52 +02:00
ed
ebbbbf3d82 misc old-browser support 2021-08-16 00:22:30 +02:00
ed
27516e2d16 scroll navpane to open folder on load 2021-08-16 00:07:31 +02:00
ed
84bb6f915e fix unpost ui for nonroot volumes 2021-08-16 00:03:05 +02:00
ed
46752f758a fix bup into volumes with upload rules 2021-08-15 23:59:41 +02:00
ed
34c4c22e61 v0.13.3 2021-08-14 22:46:15 +02:00
ed
af2d0b8421 upgrade permsets in smoketest 2021-08-14 22:45:33 +02:00
ed
638b05a49a fix image-viewer touch handler 2021-08-14 22:40:54 +02:00
ed
7a13e8a7fc clear transform on 0deg rotate 2021-08-14 21:13:15 +02:00
ed
d9fa74711d cheaper shadows 2021-08-14 18:17:40 +02:00
ed
41867f578f image viewer: add rotation 2021-08-14 18:06:53 +02:00
ed
0bf41ed4ef exif orientation for thumbnails 2021-08-14 17:45:44 +02:00
ed
d080b4a731 v0.13.2 2021-08-12 22:42:36 +02:00
ed
ca4232ada9 move sortfiles from util to browser 2021-08-12 22:42:17 +02:00
ed
ad348f91c9 fix button placement in large modals 2021-08-12 22:31:28 +02:00
ed
990f915f42 ui tweaks 2021-08-12 22:31:07 +02:00
ed
53d720217b open videos in gallery 2021-08-12 22:30:52 +02:00
ed
7a06ff480d fix cut/paste on old chromes 2021-08-12 22:30:41 +02:00
ed
3ef551f788 selection-toggle in image viewer 2021-08-12 22:20:32 +02:00
ed
f0125cdc36 prevent massive stacks in chrome 2021-08-12 22:12:05 +02:00
ed
ed5f6736df add prisonparty systemd example 2021-08-10 23:29:14 +02:00
ed
15d8be0fae no more loops 2021-08-10 02:56:48 +02:00
ed
46f3e61360 no actually that is a terrible location 2021-08-09 23:53:09 +02:00
ed
87ad8c98d4 /var/empty is a good location 2021-08-09 23:37:01 +02:00
ed
9bbdc4100f fix permission flags in service scripts 2021-08-09 23:26:30 +02:00
ed
c80307e8ff v0.13.1 2021-08-09 22:28:54 +02:00
ed
c1d77e1041 add upload lifetimes 2021-08-09 22:17:41 +02:00
ed
d9e83650dc handle invalid XDG_CONFIG_HOME on linux 2021-08-09 22:13:16 +02:00
ed
f6d635acd9 sfx: return 1 on exception 2021-08-09 22:13:00 +02:00
ed
0dbd8a01ff mount PWD into chroot for config files 2021-08-09 22:12:39 +02:00
ed
8d755d41e0 per-volume rescan interval 2021-08-09 01:31:20 +02:00
ed
190473bd32 up2k-ui: fix hash-ahead button 2021-08-09 01:16:09 +02:00
ed
030d1ec254 no wait thats too much 2021-08-09 01:15:51 +02:00
ed
5a2b91a084 handle more exceptions + sanitize fs paths in msgs 2021-08-09 01:09:20 +02:00
ed
a50a05e4e7 git: set 0755 on binary 2021-08-09 00:44:19 +02:00
ed
6cb5a87c79 add chroot wrapper (tested on debian only) 2021-08-09 00:42:21 +02:00
ed
b9f89ca552 shared password for providers 2021-08-08 23:05:00 +02:00
ed
26c9fd5dea add converter to freg / yta-raw 2021-08-08 22:48:02 +02:00
ed
e81a9b6fe0 better error handling 2021-08-08 20:48:24 +02:00
ed
452450e451 improve youtube parser 2021-08-08 20:30:12 +02:00
ed
419dd2d1c7 v0.13.0 2021-08-08 04:14:59 +02:00
ed
ee86b06676 compat + perf + ux 2021-08-08 04:02:58 +02:00
ed
953183f16d add help sections and vt100 stripper 2021-08-08 02:47:42 +02:00
ed
228f71708b improve youtube collector/parser 2021-08-08 02:47:04 +02:00
ed
621471a7cb add streaming upload compression 2021-08-08 02:45:50 +02:00
ed
8b58e951e3 metadata search with keys containing _- 2021-08-07 21:38:52 +02:00
ed
1db489a0aa port changes to mde 2021-08-07 21:35:24 +02:00
ed
be65c3c6cf cleanup 2021-08-07 21:11:01 +02:00
ed
46e7fa31fe up2k-cli: handle subfolders better 2021-08-07 20:43:24 +02:00
ed
66e21bd499 up2k-ui: prevent accidentally showing huge lists 2021-08-07 20:08:41 +02:00
ed
8cab4c01fd chrome optimizations 2021-08-07 20:08:02 +02:00
ed
d52038366b reinventing alert/confirm/prompt was exactly what i had in mind for the weekend, thanks google 2021-08-07 18:41:06 +02:00
ed
4fcfd87f5b fix transfer limit 2021-08-07 18:40:28 +02:00
ed
f893c6baa4 add youtube manifest parser 2021-08-07 04:29:55 +02:00
ed
9a45549b66 adding upload rules 2021-08-07 03:45:50 +02:00
ed
ae3a01038b v0.12.12 2021-08-06 11:10:04 +02:00
ed
e47a2a4ca2 hyperlinks 2021-08-06 01:48:34 +02:00
ed
95ea6d5f78 v0.12.11 2021-08-06 00:53:44 +02:00
ed
7d290f6b8f fix volflag syntax in examples 2021-08-06 00:50:29 +02:00
ed
9db617ed5a new mtp: media-hash 2021-08-06 00:49:42 +02:00
ed
514456940a tooltips, examples, fwd ng in lpad 2021-08-05 23:56:09 +02:00
ed
33feefd9cd sup merge conflict 2021-08-05 23:14:19 +02:00
ed
65e14cf348 batch-rename: add functions and presets 2021-08-05 23:11:06 +02:00
ed
1d61bcc4f3 every time 2021-08-05 21:56:52 +02:00
ed
c38bbaca3c mention batch-rename in readme 2021-08-05 21:53:51 +02:00
ed
246d245ebc make it better 2021-08-05 21:53:08 +02:00
ed
f269a710e2 suspiciously working first attempt at batch-rename 2021-08-05 20:49:49 +02:00
ed
051998429c fix argv compat on windows paths 2021-08-05 20:46:08 +02:00
ed
432cdd640f video-thumbs: take first video stream + better errors 2021-08-05 20:44:04 +02:00
ed
9ed9b0964e nice race 2021-08-03 22:53:13 +00:00
ed
6a97b3526d why was that there 2021-08-03 21:16:26 +00:00
ed
451d757996 fix renaming single symlinks 2021-08-03 20:12:51 +02:00
ed
f9e9eba3b1 sfx-repack: fix no-fnt, no-dd 2021-08-03 20:12:21 +02:00
ed
2a9a6aebd9 systemd fun 2021-08-03 09:22:16 +02:00
ed
adbb6c449e v0.12.10 2021-08-02 00:49:31 +02:00
ed
3993605324 add -mth (deafult-hidden columns) 2021-08-02 00:47:07 +02:00
ed
0ae574ec2c better mutagen codec detection 2021-08-02 00:40:40 +02:00
ed
c56ded828c v0.12.9 2021-08-01 00:40:15 +02:00
ed
02c7061945 v0.12.8 2021-08-01 00:17:05 +02:00
ed
9209e44cd3 heh 2021-08-01 00:08:50 +02:00
ed
ebed37394e better rename ui 2021-08-01 00:04:53 +02:00
ed
4c7a2a7ec3 uridec alerts 2021-07-31 22:05:31 +02:00
ed
0a25a88a34 add mojibake fixer 2021-07-31 14:31:39 +02:00
ed
6aa9025347 v0.12.7 2021-07-31 13:21:43 +02:00
ed
a918cc67eb only drop tags when its safe 2021-07-31 13:19:02 +02:00
ed
08f4695283 v0.12.6 2021-07-31 12:38:53 +02:00
ed
44e76d5eeb optimize make-sfx 2021-07-31 12:38:17 +02:00
ed
cfa36fd279 phone-friendly toast positioning 2021-07-31 10:56:03 +02:00
ed
3d4166e006 dont thumbnail thumbnails 2021-07-31 10:51:18 +02:00
ed
07bac1c592 add option to show dotfiles 2021-07-31 10:44:35 +02:00
ed
755f2ce1ba more url encoding fun 2021-07-31 10:24:34 +02:00
ed
cca2844deb fix mode display for move 2021-07-31 07:19:10 +00:00
ed
24a2f760b7 v0.12.5 2021-07-30 19:28:14 +02:00
ed
79bbd8fe38 systemd: line-buffered logging 2021-07-30 10:39:46 +02:00
ed
35dce1e3e4 v0.12.4 2021-07-30 08:52:15 +02:00
ed
f886fdf913 mention unpost in the readme 2021-07-30 00:53:15 +02:00
ed
4476f2f0da v0.12.3 orz 2021-07-30 00:32:21 +02:00
ed
160f161700 v0.12.2 (1000GET) 2021-07-29 23:56:25 +02:00
ed
c164fc58a2 add unpost 2021-07-29 23:53:08 +02:00
ed
0c625a4e62 store upload ip and time 2021-07-29 00:30:10 +02:00
85 changed files with 6849 additions and 2708 deletions

.github/ISSUE_TEMPLATE/bug_report.md (new file, +40 lines)

@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: '9001'
---
NOTE:
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
**Describe the bug**
a description of what the bug is
**To Reproduce**
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
**Expected behavior**
a description of what you expected to happen
**Screenshots**
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
**Server details**
if the issue is possibly on the server-side, then mention some of the following:
* server OS / version:
* python version:
* copyparty arguments:
* filesystem (`lsblk -f` on linux):
**Client details**
if the issue is possibly on the client-side, then mention some of the following:
* the device type and model:
* OS version:
* browser version:
**Additional context**
any other context about the problem here


@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: '9001'
---
all of the below are optional, consider them as inspiration, delete and rewrite at will
**is your feature request related to a problem? Please describe.**
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`
**Describe the idea / solution you'd like**
a description of what you want to happen
**Describe any alternatives you've considered**
a description of any alternative solutions or features you've considered
**Additional context**
add any other context or screenshots about the feature request here


@@ -0,0 +1,10 @@
---
name: Something else
about: "┐(゚∀゚)┌"
title: ''
labels: ''
assignees: ''
---

.github/branch-rename.md (new file, +7 lines)

@@ -0,0 +1,7 @@
modernize your local checkout of the repo like so,
```sh
git branch -m master hovudstraum
git fetch origin
git branch -u origin/hovudstraum hovudstraum
git remote set-head origin -a
```

.vscode/launch.json (2 changed lines)

@@ -17,7 +17,7 @@
"-mtp", "-mtp",
".bpm=f,bin/mtag/audio-bpm.py", ".bpm=f,bin/mtag/audio-bpm.py",
"-aed:wark", "-aed:wark",
"-vsrv::r:aed:cnodupe", "-vsrv::r:rw,ed:c,dupe",
"-vdist:dist:r" "-vdist:dist:r"
] ]
}, },


@@ -55,4 +55,5 @@
"py27" "py27"
], ],
"python.linting.enabled": true, "python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
} }

CODE_OF_CONDUCT.md (new file, +24 lines)

@@ -0,0 +1,24 @@
in the words of Abraham Lincoln:
> Be excellent to each other... and... PARTY ON, DUDES!
more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy
## Examples of unacceptable behavior
* intimidation, harassment, trolling
* insulting, derogatory, harmful or prejudicial comments
* posting private information without permission
* political or personal attacks
## Examples of expected behavior
* being nice, friendly, welcoming, inclusive, mindful and empathetic
* acting considerate, modest, respectful
* using polite and inclusive language
* criticize constructively and accept constructive criticism
* respect different points of view
## finally and even more specifically,
* parse opinions and feedback objectively without prejudice
* it's the message that matters, not who said it
aaand that's how you say `be nice` in a way that fills half a floppy w

CONTRIBUTING.md (new file, +3 lines)

@@ -0,0 +1,3 @@
* do something cool
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight

README.md (558 changed lines)

@@ -6,90 +6,103 @@
## summary
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
* server only needs `py2.7` or `py3.3+`, all dependencies optional
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
## readme toc
* top
* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
* [on servers](#on-servers) - you may also want these, especially on servers
* [on debian](#on-debian) - recommended additional steps on debian
* [notes](#notes) - general notes
* [status](#status) - feature summary
* [testimonials](#testimonials) - small collection of user feedback
* [motivations](#motivations) - project goals / philosophy
* [future plans](#future-plans) - some improvement ideas
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [FAQ](#FAQ) - "frequently" asked questions
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
* [tabs](#tabs) - the main tabs in the ui
* [hotkeys](#hotkeys) - the browser has the following hotkeys
* [navpane](#navpane) - switching between breadcrumbs or navpane
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
* [uploading](#uploading) - drag files/folders into the web-browser to upload
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
* [unpost](#unpost) - undo/delete accidental uploads
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config)
* [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
* [complete examples](#complete-examples)
* [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected
* [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - there are two self-contained "binaries"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android)
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
* [complete release](#complete-release)
* [todo](#todo) - roughly sorted by priority
* [discarded ideas](#discarded-ideas)
## quickstart
download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; see `-h` for help if you want [accounts and volumes](#accounts-and-volumes) etc
some recommended options:
* `-e2dsa` enables general [file indexing](#file-indexing)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
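putting those recommended options together, a full launch could look something like the sketch below (the paths, account name and password are placeholders, not part of the docs):

```sh
# share /mnt/music at /music: readable by anyone, read-write for user foo,
# with general file indexing (-e2dsa) and audio-tag indexing (-e2ts) enabled,
# and refuse to start if any volume contains a symlink escaping it
python3 copyparty-sfx.py -e2dsa -e2ts \
  -a foo:bar \
  -v /mnt/music:/music:r:rw,foo \
  --ls '**,*,ln,p,r'
```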
### on servers
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
### on debian
recommended additional steps on debian which enable audio metadata and thumbnails (from images and videos):
* as root, run the following:
  `apt install python3 python3-pip python3-dev ffmpeg`
@@ -102,7 +115,7 @@ recommended steps to enable audio metadata and thumbnails (from images and video
## notes
general notes:
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
@@ -111,12 +124,12 @@ browser-specific:
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* Desktop-Firefox: may stop you from deleting files you've uploaded until you visit `about:memory` and click `Minimize memory usage`
## status
feature summary
* backend stuff
* ☑ sanic multipart parser
@@ -125,29 +138,31 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ [accounts](#accounts-and-volumes)
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ [up2k](#uploading): js, resumable, multithreaded
* ☑ stash: simple PUT filedropper
* ☑ [unpost](#unpost): undo/delete accidental uploads
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
* ☑ [folders as zip / tar files](#zip-downloads)
* ☑ FUSE client (read-only)
* browser
* ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls)
* ☑ image gallery with webm player
* ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow
* ☑ ...of videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
* server indexing
* ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size
* ☑ [search by ID3-tags etc.](#searching)
* markdown
* ☑ [viewer](#markdown-viewer)
* ☑ editor (sure why not)
@@ -158,17 +173,51 @@ small collection of user feedback
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
# motivations
project goals / philosophy
* inverse linux philosophy -- do all the things, and do an *okay* job
* quick drop-in service to get a lot of features in a pinch
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
* run anywhere, support everything
* as many web-browsers and python versions as possible
* every browser should at least be able to browse, download, upload files
* be a good emergency solution for transferring stuff between ancient boxes
* minimal dependencies
* but optional dependencies adding bonus-features are ok
* everything being plaintext makes it possible to proofread for malicious code
* no preparations / setup necessary, just run the sfx (which is also plaintext)
* adaptable, malleable, hackable
* no build steps; modify the js/python without needing node.js or anything like that
## future plans
some improvement ideas
* the JS is a mess -- a preact rewrite would be nice
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
* good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
* the UX is a mess -- a proper design would be nice
* very organic (much like the python/js), everything was an afterthought
* true for both the layout and the visual flair
* something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
* some of the python files are way too big
* `up2k.py` ended up doing all the file indexing / db management
* `httpcli.py` should be separated into modules in general
# bugs
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* probably more, pls let me know
## not my bugs
@@ -180,23 +229,37 @@ small collection of user feedback
* this is an msys2 bug, the regular windows edition of python is fine
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
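for example, either of these should keep the up2k database on a disk inside the VM (the `/mnt/shared` and `/tmp/foo` paths are just examples, and this assumes `--hist` takes the destination folder as its argument, like the volflag form does):

```sh
# global: keep all volume databases under /tmp/foo
python3 copyparty-sfx.py --hist /tmp/foo -v /mnt/shared::r

# per-volume: same idea using the hist volflag
python3 copyparty-sfx.py -v /mnt/shared::r:c,hist=/tmp/foo
```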
# FAQ
"frequently" asked questions
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
# accounts and volumes
per-folder, per-user permissions
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
* granting the same permissions to multiple accounts:
  `-v .::r,usr1,usr2:rw,usr3,usr4` = usr1/2 read-only, 3/4 read-write
permissions:
* `r` (read): browse folder contents, download files, download as zip/tar
* `w` (write): upload files, move files *into* this folder
* `m` (move): move files/folders *from* this folder
* `d` (delete): delete files/folders
* `g` (get): only download files, cannot see folder contents or zip/tar
examples:
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
* make folder `/srv` the root of the filesystem, read-only by anyone: `-v /srv::r`
* make folder `/mnt/music` available at `/music`, read-only for u1 and u2, read-write for u3: `-v /mnt/music:music:r,u1,u2:rw,u3`
@@ -205,35 +268,43 @@ example:
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable accesskeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
* `c,fk=4` sets the `fk` volume-flag to 4, meaning each file gets a 4-character accesskey
* `u1` can upload files, browse the folder, and see the generated accesskeys
* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
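as a combined sketch, the examples listed above could be glued into one command like this (u1/u2/u3, the passwords, and the paths are the same placeholders used in the bullets):

```sh
# webroot is /srv (read-only), /music is rw for u3,
# and /i hands out 4-character accesskeys per file
python3 copyparty-sfx.py \
  -a u1:p1 -a u2:p2 -a u3:p3 \
  -v /srv::r \
  -v /mnt/music:music:r,u1,u2:rw,u3 \
  -v /mnt/ss:i:rw,u1:g:c,fk=4
```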
# the browser
accessing a copyparty server using a web-browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/129635359-d6dd9b07-8079-4020-ad77-2bfdb9ebd8d5.png)
## tabs
the main tabs in the ui
* `[🔎]` [search](#searching) by size, date, path/name, mp3-tags ...
* `[🧯]` [unpost](#unpost): undo/delete accidental uploads
* `[🚀]` and `[🎈]` are the [uploaders](#uploading)
* `[📂]` mkdir: create directories
* `[📝]` new-md: create a new markdown document
* `[📟]` send-msg: either to server-log or into textfiles if `--urlform save`
* `[🎺]` audio-player config options
* `[⚙️]` general client config options
## hotkeys
the browser has the following hotkeys (always qwerty)
* `B` toggle breadcrumbs / [navpane](#navpane)
* `I/K` prev/next folder
* `M` parent folder (or unexpand current)
* `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons
* `ctrl-X` cut selected files/folders
* `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder
* when a file/folder is selected (in not-grid-view):
* `Up/Down` move cursor
* shift+`Up/Down` select and move cursor
@@ -248,17 +319,19 @@ the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `S` toggle selection
* `R` rotate clockwise (shift=ccw)
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `V` loop
* `M` mute
* when the navpane is open:
* `A/D` adjust tree width
* in the [grid view](#thumbnails):
* `S` toggle multiselect
* shift+`A/D` zoom
* in the markdown editor:
@@ -272,14 +345,20 @@ the browser has the following hotkeys (assumes qwerty, ignores actual layout)
## navpane
switching between breadcrumbs or navpane
click the `🌲` or press the `B` hotkey to toggle between the breadcrumbs path (default) and the navpane (a tree-browser sidebar thing)
* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder
* `[a]` toggles automatic widening as you go deeper
## thumbnails
press `g` to toggle grid-view instead of the file listing, and `t` toggles icons / thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
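a quick sketch of the relevant switches (all mentioned elsewhere in this readme; pick the one that fits):

```sh
python3 copyparty-sfx.py --no-vthumb   # keep image thumbnails, skip video thumbnails
python3 copyparty-sfx.py --no-thumb    # disable thumbnails entirely
python3 copyparty-sfx.py --th-ff-jpg   # workaround if video thumbnails come out broken (see bugs)
```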
@@ -290,7 +369,9 @@ in the grid/thumbnail view, if the audio player panel is open, songs will start
## zip downloads
download folders (or file selections) as `zip` or `tar` files
select which type of archive you want in the `[⚙️] config` tab:
| name | url-suffix | description |
|--|--|--|
@@ -307,13 +388,18 @@ the `zip` link next to folders can produce various types of zip/tar files using
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/129635374-e5136e01-470a-49b1-a762-848e8a4c9cdc.png)
## uploading
drag files/folders into the web-browser to upload
this initiates an upload using `up2k`; there are two uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
* `[🚀] up2k`, the fancy one
you can also undo/delete uploads by using `[🧯]` [unpost](#unpost)
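outside the browser there's also the plain PUT filedropper listed under [status](#status); a minimal sketch, assuming the server listens on `127.0.0.1:3923` and the destination folder is writable for you (URL, port and filename are examples, adjust to your setup):

```sh
# upload one file with curl; the target is the folder URL
curl -T some-song.mp3 http://127.0.0.1:3923/music/
```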
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
@@ -327,50 +413,126 @@ up2k has several advantages:
see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[💭]` ask for confirmation before files are added to the queue
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser
and then there's the tabs below it,
* `[ok]` is the files which completed successfully
* `[ng]` is the ones that failed / got rejected (already exists, ...)
* `[done]` shows a combined list of `[ok]` and `[ng]`, chronological order
* `[busy]` files which are currently hashing, pending-upload, or uploading
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued
note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD)
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
### file-search
dropping files into the browser also lets you see if they exist on the server
![copyparty-fsearch-fs8](https://user-images.githubusercontent.com/241032/129635361-c79286f0-b8f1-440e-aaf4-6e929428fac9.png)
when you drag/drop files into the browser, you will see two dropzones: `Upload` and `Search`
> on a phone? toggle the `[🔎]` switch green before tapping the big yellow Search button to select your files
the files will be hashed on the client-side, and each hash is sent to the server, which checks if that file exists somewhere
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
### unpost
undo/delete accidental uploads
![copyparty-unpost-fs8](https://user-images.githubusercontent.com/241032/129635368-3afa6634-c20f-418c-90dc-ec411f3b3897.png)
you can unpost even if you don't have regular move/delete access, however only for files uploaded within the past `--unpost` seconds (default 12 hours) and the server must be running with `-e2d`
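a sketch of the matching server invocation, assuming `--unpost` takes the time-window in seconds (the 3600 below is an example value):

```sh
# keep the upload database (-e2d, required for unpost)
# and let uploaders undo their own uploads for up to one hour
python3 copyparty-sfx.py -e2d --unpost 3600
```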
## file manager
cut/paste, rename, and delete files/folders (if you have permission)
file selection: click somewhere on the line (not the link itsef), then:
* `space` to toggle
* `up/down` to move
* `shift-up/down` to move-and-select
* `ctrl-shift-up/down` to also scroll
* cut: select some files and `ctrl-x`
* paste: `ctrl-v` in another folder
* rename: `F2`
you can move files across browser tabs (cut in one tab, paste in another)
## batch rename
select some files and press `F2` to bring up the rename UI
![batch-rename-fs8](https://user-images.githubusercontent.com/241032/128434204-eb136680-3c07-4ec7-92e0-ae86af20c241.png)
quick explanation of the buttons,
* `[✅ apply rename]` confirms and begins renaming
* `[❌ cancel]` aborts and closes the rename window
* `[↺ reset]` reverts any filename changes back to the original name
* `[decode]` does a URL-decode on the filename, fixing stuff like `&` and `%20`
* `[advanced]` toggles advanced mode
advanced mode: rename files based on rules to decide the new names, based on the original name (regex), or based on the tags collected from the file (artist/title/...), or a mix of both
in advanced mode,
* `[case]` toggles case-sensitive regex
* `regex` is the regex pattern to apply to the original filename; any files which don't match will be skipped
* `format` is the new filename, taking values from regex capturing groups and/or from file tags
* very loosely based on foobar2000 syntax
* `presets` lets you save rename rules for later
available functions:
* `$lpad(text, length, pad_char)`
* `$rpad(text, length, pad_char)`
so,
say you have a file named [`meganeko - Eclipse - 07 Sirius A.mp3`](https://www.youtube.com/watch?v=-dtb0vDPruI) (absolutely fantastic album btw) and the tags are: `Album:Eclipse`, `Artist:meganeko`, `Title:Sirius A`, `tn:7`
you could use just regex to rename it:
* `regex` = `(.*) - (.*) - ([0-9]{2}) (.*)`
* `format` = `(3). (1) - (4)`
* `output` = `07. meganeko - Sirius A.mp3`
or you could use just tags:
* `format` = `$lpad((tn),2,0). (artist) - (title).(ext)`
* `output` = `7. meganeko - Sirius A.mp3`
or a mix of both:
* `regex` = ` - ([0-9]{2}) `
* `format` = `(1). (artist) - (title).(ext)`
* `output` = `07. meganeko - Sirius A.mp3`
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
## markdown viewer

and there are *two* editors

![copyparty-md-read-fs8](https://user-images.githubusercontent.com/241032/115978057-66419080-a57d-11eb-8539-d2be843991aa.png)

* the document preview has a max-width which is the same as an A4 paper when printed
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
* click the bottom-left `π` to open a javascript prompt for debugging
* files named `.prologue.html` / `.epilogue.html` will be rendered before/after directory listings unless `--no-logues`
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
## searching
search by size, date, path/name, mp3-tags, ...
![copyparty-search-fs8](https://user-images.githubusercontent.com/241032/129635365-c0ff2a9f-0ee5-4fc3-8bb6-006033cf67b8.png)
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
path/name queries are space-separated, AND'ed together, and words are negated with a leading `-`:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
add the argument `-e2ts` to also scan/index tags from music files, which brings us over to:
# server config

## file indexing
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` also scans writable folders for new files on startup
* `-e2dsa` also scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:c,e2dsa:c,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`

note:
* the parser currently can't handle `c,e2dsa,e2tsr` so you have to `c,e2dsa:c,e2tsr`
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected

if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
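putting a few of these together, a hedged example (the paths are made up):

```sh
# scan and tag-index everything on startup; skip content-hashing globally,
# but re-enable it for ~/music so dupe-detection and file-search work there
copyparty -e2dsa -e2ts --no-hash \
  -v ~/music::r:c,ehash \
  -v /mnt/nas/backups::r
```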
## upload rules
set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
* `:c,nosub` disallows uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
* if someone uploads to `/foo/bar` the path would be rewritten to `/foo/bar/2021/08/06/23` for example
* but the actual value is not verified, just the structure, so the uploader can choose any values which conform to the format string
* just to avoid additional complexity in up2k which is enough of a mess already
* `:c,lifetime=300` deletes uploaded files when they become 5 minutes old
you can also set transaction limits which apply per-IP and per-volume, but these assume `-j 1` (default); with other values the limits will be off, for example `-j 4` would allow anywhere between 1x and 4x the limits you set, depending on which processing node the client gets routed to
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
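for example, a sketch of an anonymous dump-folder combining several of the rules above (path and numbers are arbitrary):

```sh
# write-only volume: files must be 1 KiB..3 MiB, are sorted into date-based
# subfolders, and each IP gets at most 250 files / 1 GiB per hour
copyparty -v /srv/inc:inc:w:c,sz=1k-3m:c,rotf=%Y/%m/%d:c,maxn=250,3600:c,maxb=1g,3600
```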
## compress uploads
files can be autocompressed on upload, either on user-request (if config allows) or forced by server-config
* volume flag `gz` allows gz compression
* volume flag `xz` allows lzma compression
* volume flag `pk` **forces** compression on all files
* url parameter `pk` requests compression with server-default algorithm
* url parameter `gz` or `xz` requests compression with a specific algorithm
things to note,
* the `gz` and `xz` arguments take a single optional argument, the compression level (range 0 to 9)
* the `pk` volume flag takes the optional argument `ALGORITHM,LEVEL` which will then be forced for all uploads, for example `gz,9` or `xz,0`
* default compression is gzip level 9
* all upload methods except up2k are supported
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected
some examples,
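for instance, a rough sketch (assuming a writable volume at `/inc` with the `gz` volflag; the server picks the filename for stdin uploads, as in [client examples](#client-examples)):

```sh
# upload a logfile from stdin and ask the server to gzip it at level 9
curl -T- "http://127.0.0.1:3923/inc/?gz=9" <access.log
```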
## database location

in-volume (`.hist/up2k.db`, default) or somewhere else

copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff

this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
* `--hist ~/.cache/copyparty -v ~/music::r:c,hist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)

note:
* markdown edits are always stored in a local `.hist` subdirectory * markdown edits are always stored in a local `.hist` subdirectory
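as a concrete sketch of the `--hist` + `hist=` combo above (paths are made up):

```sh
# keep databases/thumbnails for all volumes under ~/.cache/copyparty,
# except ~/music which keeps its regular in-volume .hist folder
copyparty --hist ~/.cache/copyparty -v /mnt/nas/pics::r -v ~/music::r:c,hist=-
```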
## metadata from audio files

set `-e2t` to index tags on upload

`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::r:c,mte=title,artist` indexes and displays *title* followed by *artist*

if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected

but instead of using `-mte`, `-mth` is a better way to hide tags in the browser: these tags will not be displayed by default, but they still get indexed and become searchable, and users can choose to unhide them in the `[⚙️] config` pane

`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`

tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value

see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
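the `qux` example as one full command (a sketch; `foo`/`bar`/`qux` are the placeholder tag names from the paragraph above):

```sh
# media files carry "foo"/"bar" tags; index them and show them as "qux"
copyparty -e2ts -mtm qux=foo,bar -mte artist,title,qux
```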
`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
* is about 20x slower than Mutagen
## file parser plugins

provide custom parsers to index additional tags

copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec

* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:c,mtp=.bpm=~/bin/audio-bpm.py:c,mtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly

*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
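a hedged sketch of a trivial parser, assuming the same convention as the bundled ones (file path as argument 1, tag value printed to stdout); the tag name and script path are made up:

```sh
#!/bin/sh
# hypothetical parser: report the file size in KiB as the tag "kib"
# register with something like:  -mtp kib=ad,~/bin/kib.sh
du -k -- "$1" | cut -f1
```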
# browser support

TLDR: yes

![copyparty-ie4-fs8](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png)

`ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
| --------------- | --- | ---- | ---- | ---- | ----- | ---- | --- | ---- |
| browse files | yep | yep | yep | yep | yep | yep | yep | yep |
| thumbnail view | - | yep | yep | yep | yep | yep | yep | yep |
| basic uploader | yep | yep | yep | yep | yep | yep | yep | yep |
| up2k | - | - | `*1` | `*1` | yep | yep | yep | yep |
| make directory | yep | yep | yep | yep | yep | yep | yep | yep |
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| file rename | - | yep | yep | yep | yep | yep | yep | yep |
| file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
| navpane | - | `*2` | yep | yep | yep | yep | yep | yep |
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
| video player | - | yep | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | yep | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*2` causes a full-page refresh on each navigation
* `*3` using a wasm decoder which consumes a bit more power
quick summary of more eccentric web-browsers trying to view a directory index:

| browser | will it blend |
| ------- | ------------- |
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
# client examples

interact with copyparty using non-browser clients

* javascript: dump some state into a file (two separate examples)
  * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
  * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
  * `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
    `post movie.mkv`
  * `post(){ wget --header='Cookie: cppwd=wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
    `post movie.mkv`
  * `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
    `chunk <movie.mkv`
* FUSE: mount a copyparty server as a local filesystem * FUSE: mount a copyparty server as a local filesystem
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally like so:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv
you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
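for example, three equivalent ways to fetch a file with curl (path and password are made up):

```sh
# cookie, url parameter, and basic-auth all carry the same password
curl -o song.mp3 -b cppwd=hunter2 http://127.0.0.1:3923/music/song.mp3
curl -o song.mp3 "http://127.0.0.1:3923/music/song.mp3?pw=hunter2"
curl -o song.mp3 -u ignored:hunter2 http://127.0.0.1:3923/music/song.mp3
```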
# up2k # up2k
quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
## why chunk-hashes
a single sha512 would be better, right?
this is due to `crypto.subtle` not providing a streaming api (or the option to seed the sha512 hasher with a starting hash)
as a result, the hashes are much less useful than they could have been (search the server by sha512, provide the sha512 in the response http headers, ...)
hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported
# performance

defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload

you can ignore the `cannot efficiently use multiple CPU cores` message, it's very unlikely to be a problem

below are some tweaks roughly ordered by usefulness:
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
# security
some notes on hardening
on public copyparty instances with anonymous upload enabled:
* users can upload html/css/js which will evaluate for other visitors in a few ways,
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
other misc:
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
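a hedged sketch of such a setup (account, password and path are made up):

```sh
# ed gets read-write; everyone else can only fetch direct links,
# and those links must include the per-file key (?k=...)
copyparty -a ed:hunter2 -v /srv/pub:pub:g:rw,ed:c,fk
```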
## gotchas
behavior that might be unexpected
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
# dependencies

mandatory deps:
* `jinja2` (is built into the SFX)


## optional dependencies

install these to enable bonus features

enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)

enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` (requires py2.7 or py3.5+)
* **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pillow-avif-plugin`
## install recommended deps
# sfx

there are two self-contained "binaries":
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
## sfx repack

reduce the size of an sfx by removing features

if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `525k` size of original sfx.py as of v0.11.30
* `315k` after `./scripts/make-sfx.sh re no-ogv`
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`

the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
* `cm`/easymde, the "fancy" markdown editor, saves ~92k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k

for the `re`pack to work, first run one of the sfx'es once to unpack it
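a fully-stripped repack could then look like this (hedged: assuming the remaining features use the same `no-<name>` switches as `no-ogv` and `no-cm`):

```sh
./scripts/make-sfx.sh re no-ogv no-cm no-fnt no-dd
```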
## just the sfx

first grab the web-dependencies from a previous sfx (assuming you don't need to modify something in those):

```sh
rm -rf copyparty/web/deps
```
## complete release

also builds the sfx so skip the sfx section above

in the `scripts` folder:

* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create github release with `make-tgz-release.sh`
* create sfx with `make-sfx.sh`
roughly sorted by priority roughly sorted by priority
* nothing! currently

## discarded ideas
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction * indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
* blank hashlist when up-ok to skip handshake * blank hashlist when up-ok to skip handshake
* too many confusing side-effects * too many confusing side-effects
* hls framework for Someone Else to drop code into :^)
* probably not, too much stuff to consider -- seeking, start at offset, task stitching (probably np-hard), conditional passthru, rate-control (especially multi-consumer), session keepalive, cache mgmt...

```
cd /mnt/nas/music/.hist
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
```
# [`prisonparty.sh`](prisonparty.sh)
* run copyparty in a chroot, preventing any accidental file access
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`

note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg

get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem

standalone programs which take an audio file as argument

**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`

some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
these do not have any problematic dependencies:
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
# dependencies
run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs
# usage from copyparty

`copyparty -e2dsa -e2ts` followed by any combination of these:
* `-mtp key=f,audio-key.py`
* `-mtp .bpm=f,audio-bpm.py`
* `-mtp ahash,vhash=f,media-hash.py`

* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
## usage with volume-flags

instead of affecting all volumes, you can set the options for just one volume like so:

`copyparty -v /mnt/nas/music:/music:r:c,e2dsa:c,e2ts` immediately followed by any combination of these:
* `:c,mtp=key=f,audio-key.py`
* `:c,mtp=.bpm=f,audio-bpm.py`
* `:c,mtp=ahash,vhash=f,media-hash.py`

def det(tf):
"-v", "fatal",
"-ss", "13",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-ac", "1",
"-ar", "22050",
"-t", "300",

def det(tf):
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-t", "300",
"-sample_fmt", "s16",
tf

# install dependencies for audio-*.py
#
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# win64: requires msys2-mingw64 environment
# macos: requires macports
#

bin/mtag/media-hash.py (new file)
#!/usr/bin/env python
import re
import sys
import json
import time
import base64
import hashlib
import subprocess as sp
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p
"""
dep: ffmpeg
"""
def det():
# fmt: off
cmd = [
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-i", fsenc(sys.argv[1]),
"-f", "framemd5",
"-"
]
# fmt: on
p = sp.Popen(cmd, stdout=sp.PIPE)
# ps = io.TextIOWrapper(p.stdout, encoding="utf-8")
ps = p.stdout
chans = {}
for ln in ps:
if ln.startswith(b"#stream#"):
break
m = re.match(r"^#media_type ([0-9]): ([a-zA-Z])", ln.decode("utf-8"))
if m:
chans[m.group(1)] = m.group(2)
hashers = [hashlib.sha512(), hashlib.sha512()]
for ln in ps:
n = int(ln[:1])
v = ln.rsplit(b",", 1)[-1].strip()
hashers[n].update(v)
r = {}
for k, v in chans.items():
dg = hashers[int(k)].digest()[:12]
dg = base64.urlsafe_b64encode(dg).decode("ascii")
r[v[0].lower() + "hash"] = dg
print(json.dumps(r, indent=4))
def main():
try:
det()
except:
pass # mute
if __name__ == "__main__":
main()

bin/mtag/res/yt-ipr.conf (new file)
# example config file to use copyparty as a youtube manifest collector,
# use with copyparty like: python copyparty.py -c yt-ipr.conf
#
# see docs/example.conf for a better explanation of the syntax, but
# newlines are block separators, so adding blank lines inside a volume definition is bad
# (use comments as separators instead)
# create user ed, password wark
u ed:wark
# create a volume at /ytm which stores files at ./srv/ytm
./srv/ytm
/ytm
# write-only, but read-write for user ed
w
rw ed
# rescan the volume on startup
c e2dsa
# collect tags from all new files since last scan
c e2ts
# optionally enable compression to make the files 50% smaller
c pk
# only allow uploads which are between 16k and 1m large
c sz=16k-1m
# allow up to 10 uploads over 5 minutes from each ip
c maxn=10,300
# move uploads into subfolders: YEAR-MONTH / DAY-HOUR / <upload>
c rotf=%Y-%m/%d-%H
# delete uploads when they are 24 hours old
c lifetime=86400
# add the parser and tell copyparty what tags it can expect from it
c mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
# decide which tags we want to index and in what order
c mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
# create any other volumes you'd like down here, or merge this with an existing config file

bin/mtag/res/yt-ipr.user.js (new file)
// ==UserScript==
// @name youtube-playerdata-hub
// @match https://youtube.com/*
// @match https://*.youtube.com/*
// @version 1.0
// @grant GM_addStyle
// ==/UserScript==
function main() {
var server = 'https://127.0.0.1:3923/ytm?pw=wark',
interval = 60; // sec
var sent = {};
function send(txt, mf_url, desc) {
if (sent[mf_url])
return;
fetch(server + '&_=' + Date.now(), { method: "PUT", body: txt });
console.log('[yt-pdh] yeet %d bytes, %s', txt.length, desc);
sent[mf_url] = 1;
}
function collect() {
try {
var pd = document.querySelector('ytd-watch-flexy');
if (!pd)
return console.log('[yt-pdh] no video found');
pd = pd.playerData;
var mu = pd.streamingData.dashManifestUrl || pd.streamingData.hlsManifestUrl;
if (!mu || !mu.length)
return console.log('[yt-pdh] no manifest found');
var desc = pd.videoDetails.videoId + ', ' + pd.videoDetails.title;
send(JSON.stringify(pd), mu, desc);
}
catch (ex) {
console.log("[yt-pdh]", ex);
}
}
setInterval(collect, interval * 1000);
}
var scr = document.createElement('script');
scr.textContent = '(' + main.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
console.log('[yt-pdh] a');

bin/mtag/wget.py (new file)
#!/usr/bin/env python3
"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)
example copyparty config to use this:
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts:c,mtp=title=ebin,t300,ad,bin/mtag/wget.py
explained:
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
do this on all uploads with the file extension "bin",
t300 = 300 seconds timeout for each download,
ad = parse file regardless if FFmpeg thinks it is audio or not
PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""
import os
import sys
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
def main():
fp = os.path.abspath(sys.argv[1])
fdir = os.path.dirname(fp)
fname = os.path.basename(fp)
if not fname.startswith("put-") or not fname.endswith(".bin"):
raise Exception("not a post file")
buf = b""
with open(fp, "rb") as f:
while True:
b = f.read(4096)
buf += b
if len(buf) > 4096:
raise Exception("too big")
if not b:
break
if not buf:
raise Exception("file is empty")
buf = unquote(buf.replace(b"+", b" "))
url = buf.decode("utf-8")
if not url.startswith("msg="):
raise Exception("does not start with msg=")
url = url[4:]
if "://" not in url:
url = "https://" + url
os.chdir(fdir)
name = url.split("?")[0].split("/")[-1]
tfn = "-- DOWNLOADING " + name
open(tfn, "wb").close()
cmd = ["wget", "--trust-server-names", "--", url]
try:
sp.check_call(cmd)
# OPTIONAL:
# on success, delete the .bin file which contains the URL
os.unlink(fp)
except:
open("-- FAILED TO DONWLOAD " + name, "wb").close()
os.unlink(tfn)
print(url)
if __name__ == "__main__":
main()

bin/mtag/yt-ipr.py (new file)
#!/usr/bin/env python
import re
import os
import sys
import gzip
import json
import base64
import string
import urllib.request
from datetime import datetime
"""
youtube initial player response
it's probably best to use this through a config file; see res/yt-ipr.conf
but if you want to use plain arguments instead then:
-v srv/ytm:ytm:w:rw,ed
:c,e2ts:c,e2dsa
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
see res/yt-ipr.user.js for the example userscript to go with this
"""
def main():
try:
with gzip.open(sys.argv[1], "rt", encoding="utf-8", errors="replace") as f:
txt = f.read()
except:
with open(sys.argv[1], "r", encoding="utf-8", errors="replace") as f:
txt = f.read()
txt = "{" + txt.split("{", 1)[1]
try:
pd = json.loads(txt)
except json.decoder.JSONDecodeError as ex:
pd = json.loads(txt[: ex.pos])
# print(json.dumps(pd, indent=2))
if "videoDetails" in pd:
parse_youtube(pd)
else:
parse_freg(pd)
def get_expiration(url):
et = re.search(r"[?&]expire=([0-9]+)", url).group(1)
et = datetime.utcfromtimestamp(int(et))
return et.strftime("%Y-%m-%d, %H:%M")
def parse_youtube(pd):
vd = pd["videoDetails"]
sd = pd["streamingData"]
et = sd["adaptiveFormats"][0]["url"]
et = get_expiration(et)
mf = []
if "dashManifestUrl" in sd:
mf.append("dash")
if "hlsManifestUrl" in sd:
mf.append("hls")
r = {
"yt-id": vd["videoId"],
"yt-title": vd["title"],
"yt-author": vd["author"],
"yt-channel": vd["channelId"],
"yt-views": vd["viewCount"],
"yt-private": vd["isPrivate"],
# "yt-expires": sd["expiresInSeconds"],
"yt-manifest": ",".join(mf),
"yt-expires": et,
}
print(json.dumps(r))
freg_conv(pd)
def parse_freg(pd):
md = pd["metadata"]
r = {
"yt-id": md["id"],
"yt-title": md["title"],
"yt-author": md["channelName"],
"yt-channel": md["channelURL"].strip("/").split("/")[-1],
"yt-expires": get_expiration(list(pd["video"].values())[0]),
}
print(json.dumps(r))
def freg_conv(pd):
# based on getURLs.js v1.5 (2021-08-07)
# fmt: off
priority = {
"video": [
337, 315, 266, 138, # 2160p60
313, 336, # 2160p
308, # 1440p60
271, 264, # 1440p
335, 303, 299, # 1080p60
248, 169, 137, # 1080p
334, 302, 298, # 720p60
247, 136 # 720p
],
"audio": [
251, 141, 171, 140, 250, 249, 139
]
}
vid_id = pd["videoDetails"]["videoId"]
chan_id = pd["videoDetails"]["channelId"]
try:
thumb_url = pd["microformat"]["playerMicroformatRenderer"]["thumbnail"]["thumbnails"][0]["url"]
start_ts = pd["microformat"]["playerMicroformatRenderer"]["liveBroadcastDetails"]["startTimestamp"]
except:
thumb_url = f"https://img.youtube.com/vi/{vid_id}/maxresdefault.jpg"
start_ts = ""
# fmt: on
metadata = {
"title": pd["videoDetails"]["title"],
"id": vid_id,
"channelName": pd["videoDetails"]["author"],
"channelURL": "https://www.youtube.com/channel/" + chan_id,
"description": pd["videoDetails"]["shortDescription"],
"thumbnailUrl": thumb_url,
"startTimestamp": start_ts,
}
if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
print(f"malicious json", file=sys.stderr)
return
basepath = os.path.dirname(sys.argv[1])
thumb_fn = f"{basepath}/{vid_id}.jpg"
tmp_fn = f"{thumb_fn}.{os.getpid()}"
if not os.path.exists(thumb_fn) and (
thumb_url.startswith("https://img.youtube.com/vi/")
or thumb_url.startswith("https://i.ytimg.com/vi/")
):
try:
with urllib.request.urlopen(thumb_url) as fi:
with open(tmp_fn, "wb") as fo:
fo.write(fi.read())
os.rename(tmp_fn, thumb_fn)
except:
if os.path.exists(tmp_fn):
os.unlink(tmp_fn)
try:
with open(thumb_fn, "rb") as f:
thumb = base64.b64encode(f.read()).decode("ascii")
except:
thumb = "/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k="
metadata["thumbnail"] = "data:image/jpeg;base64," + thumb
ret = {
"metadata": metadata,
"version": "1.5",
"createTime": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
}
for stream, itags in priority.items():
for itag in itags:
url = None
for afmt in pd["streamingData"]["adaptiveFormats"]:
if itag == afmt["itag"]:
url = afmt["url"]
break
if url:
ret[stream] = {itag: url}
break
fn = f"{basepath}/{vid_id}.urls.json"
with open(fn, "w", encoding="utf-8", errors="replace") as f:
f.write(json.dumps(ret, indent=4))
if __name__ == "__main__":
try:
main()
except:
# raise
pass

bin/prisonparty.sh (new file)
#!/bin/bash
set -e
# runs copyparty (or any other program really) in a chroot
#
# assumption: these directories, and everything within, are owned by root
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
# error-handler
help() { cat <<'EOF'
usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]
example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd
EOF
exit 1
}
# read arguments
trap help EXIT
jail="$(realpath "$1")"; shift
uid="$1"; shift
gid="$1"; shift
vols=()
while true; do
v="$1"; shift
[ "$v" = -- ] && break # end of volumes
[ "$#" -eq 0 ] && break # invalid usage
vols+=( "$(realpath "$v")" )
done
pybin="$1"; shift
pybin="$(realpath "$pybin")"
cpp="$1"; shift
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
trap - EXIT
# debug/vis
echo
echo "chroot-dir = $jail"
echo "user:group = $uid:$gid"
echo " copyparty = $cpp"
echo
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
for v in "${vols[@]}"; do
printf '\033[36m ├─\033[0m %s \033[36m ── added by (You)\033[0m\n' "$v"
done
printf '\033[36m ├─\033[0m %s \033[36m ── where the copyparty binary is\033[0m\n' "$cppdir"
printf '\033[36m ╰─\033[0m %s \033[36m ── the folder you are currently in\033[0m\n' "$PWD"
vols+=("$cppdir" "$PWD")
echo
# remove any trailing slashes
jail="${jail%/}"
cppdir="${cppdir%/}"
# bind-mount system directories and volumes
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
while IFS= read -r v; do
[ -e "$v" ] || {
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
continue
}
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
[ $i1 = $i2 ] && continue
mkdir -p "$jail$v"
mount --bind "$v" "$jail$v"
done
# create a tmp
mkdir -p "$jail/tmp"
chmod 777 "$jail/tmp"
# run copyparty
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
{
mount | grep -F " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
}
exit $rv

# OS integration

init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
* [`openrc/copyparty`](openrc/copyparty)

# Reverse-proxy

#!/bin/bash
set -e
# ca-name and server-fqdn
ca_name="$1"
srv_fqdn="$2"
[ -z "$srv_fqdn" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server fqdn"
echo "optional arg 3: if set, write cert into copyparty cfg"
exit 1
}
gen_srv() {
(tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_fqdn"}]}
EOF
)|
cfssl gencert -ca ca.pem -ca-key ca.key \
-profile=www -hostname="$srv_fqdn" - |
cfssljson -bare "$srv_fqdn"
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
rm "$srv_fqdn.csr"
}
show() {
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_fqdn.pem"
# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}

#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::rw' to another location or permission-set
name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::rw"

#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::rw' to another location or permission-set
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
# But note that journalctl will get the timestamps wrong due to
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
#
# enable line-buffering for realtime logging (slight performance cost):
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
# but some systemd versions require this instead (higher performance cost):
# inside the [Service] block, add the following line:
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server

[Service]
Type=notify
SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

[Install]

systemd/prisonparty.service (new file)
# this will start `/usr/local/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere
# and share '/mnt' with anonymous read+write
#
# installation:
# 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
#
# you may want to:
# change '/mnt::rw' to another location or permission-set
# (remember to change the '/mnt' chroot arg too)
#
# enable line-buffering for realtime logging (slight performance cost):
# inside the [Service] block, add the following line:
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server
[Service]
SyslogIdentifier=prisonparty
WorkingDirectory=/usr/local/bin
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
[Install]
WantedBy=multi-user.target

ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
def get_unix_home():
try:
v = os.environ["XDG_CONFIG_HOME"]
if not v:
raise Exception()
ret = os.path.normpath(v)
os.listdir(ret)
return ret
except:
pass
try:
v = os.path.expanduser("~/.config")
if v.startswith("~"):
raise Exception()
ret = os.path.normpath(v)
os.listdir(ret)
return ret
except:
return "/tmp"
class EnvParams(object):
def __init__(self):
self.t0 = time.time()
elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else:
self.cfg = get_unix_home() + "/copyparty"
self.cfg = self.cfg.replace("\\", "/")
try:

#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
from .authsrv import re_vol
HAVE_SSL = True
try:
def lprint(*a, **ka):
global printed
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed += txt
if not VT100:
txt = ansi_re.sub("", txt)
print(txt, **ka)
def warn(msg):
def ensure_cert():
cert_insec = os.path.join(E.mod, "res/insecure.pem")
cert_cfg = os.path.join(E.cfg, "cert.pem")
if not os.path.exists(cert_cfg):
shutil.copy(cert_insec, cert_cfg)
try:
if filecmp.cmp(cert_cfg, cert_insec):
def run_argparse(argv, formatter):
formatter_class=formatter,
prog="copyparty",
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
)
try:
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
except:
fk_salt = "hunter2"
sects = [
[
"accounts",
"accounts and volumes",
dedent(
""" """
-a takes username:password, -a takes username:password,
-v takes src:dst:perm1:perm2:permN:cflag1:cflag2:cflagN:... -v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
where "perm" is "accesslevels,username1,username2,..." where "perm" is "permissions,username1,username2,..."
and "cflag" is config flags to set on this volume and "volflag" is config flags to set on this volume
list of accesslevels: list of permissions:
"r" (read): list folder contents, download files "r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads "w" (write): upload files; need "r" to see the uploads
"m" (move): move files and folders; need "w" at destination "m" (move): move files and folders; need "w" at destination
"d" (delete): permanently delete files and folders "d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
list of cflags: too many volflags to list here, see the other sections
"c,nodupe" rejects existing files (instead of symlinking them)
"c,e2d" sets -e2d (all -e2* args can be set using ce2* cflags)
"c,d2t" disables metadata collection, overrides -e2t*
"c,d2d" disables all database stuff, overrides -e2*
example:\033[35m example:\033[35m
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m -a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
@@ -230,29 +243,90 @@ def run_argparse(argv, formatter):
consider the config file for more flexible account/volume management,
including dynamic reload at runtime (and being more readable w)
"""
),
],
[
"flags",
"list of volflags",
dedent(
"""
volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
\033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission
\033[0m"""
),
],
[
"urlform",
"",
dedent(
"""
values for --urlform:
- "stash" dumps the data to file and returns length + checksum
\033[36mstash\033[35m dumps the data to file and returns length + checksum
- "save,get" dumps to file and returns the page like a GET
\033[36msave,get\033[35m dumps to file and returns the page like a GET
- "print,get" prints the data in the log and returns GET
\033[36mprint,get\033[35m prints the data in the log and returns GET
(leave out the ",get" to return an error instead)
"""
),
],
- values for --ls:
- "USR" is a user to browse as; * is anonymous, ** is all users
- "VOL" is a single volume to scan, default is * (all vols)
- "FLAG" is flags;
- "v" in addition to realpaths, print usernames and vpaths
- "ln" only prints symlinks leaving the volume mountpoint
- "p" exits 1 if any such symlinks are found
- "r" resumes startup after the listing
[
"ls",
"volume inspection",
dedent(
"""
\033[35m--ls USR,VOL,FLAGS
\033[36mUSR\033[0m is a user to browse as; * is anonymous, ** is all users
\033[36mVOL\033[0m is a single volume to scan, default is * (all vols)
\033[36mFLAG\033[0m is flags;
\033[36mv\033[0m in addition to realpaths, print usernames and vpaths
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
\033[36mp\033[0m exits 1 if any such symlinks are found
\033[36mr\033[0m resumes startup after the listing
examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe
\033[0m
"""
),
- )
],
]
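
(note: a hypothetical volume definition combining several of the volflags documented in the help sections above -- a write-only inbox capped at 250 uploads per 600-second window per IP, 1 KiB to 3 MiB each, sorted into date folders and auto-deleted after an hour; the paths are placeholders:)
-v /mnt/inc:inc:w:c,nosub:c,maxn=250,600:c,sz=1k-3m:c,rotf=%Y-%m/%d-%H:c,lifetime=3600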
# fmt: off
u = unicode
ap2 = ap.add_argument_group('general options')
@@ -264,9 +338,12 @@ def run_argparse(argv, formatter):
ap2.add_argument("-ed", action="store_true", help="enable ?dots") ap2.add_argument("-ed", action="store_true", help="enable ?dots")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins") ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)") ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]") ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2 = ap.add_argument_group('network options') ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)") ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -288,10 +365,16 @@ def run_argparse(argv, formatter):
ap2.add_argument("-nih", action="store_true", help="no info hostname") ap2.add_argument("-nih", action="store_true", help="no info hostname")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage") ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar") ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
ap2 = ap.add_argument_group('safety options') ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]") ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt") ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
@@ -310,6 +393,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails") ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails") ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res") ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
@@ -319,25 +403,29 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
- ap2 = ap.add_argument_group('database options')
ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead") ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism") ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader") ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval (0=off)")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping") ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)", ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps") default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
default=".vq,.aq,vc,ac,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin") ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('appearance options') ap2 = ap.add_argument_group('appearance options')
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
@@ -349,10 +437,22 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead") ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second") ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC") ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
return ap.parse_args(args=argv[1:])
# fmt: on # fmt: on
ap2 = ap.add_argument_group("help sections")
for k, h, _ in sects:
ap2.add_argument("--help-" + k, action="store_true", help=h)
ret = ap.parse_args(args=argv[1:])
for k, h, t in sects:
k2 = "help_" + k.replace("-", "_")
if vars(ret)[k2]:
lprint("# {} help page".format(k))
lprint(t + "\033[0m")
sys.exit(0)
return ret
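
(note: with the help sections registered above, each sect key becomes its own flag, so running e.g. `copyparty-sfx.py --help-flags` should print just the volflags page and exit; `--help-accounts`, `--help-urlform` and `--help-ls` work the same way.)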
def main(argv=None):
time.strptime("19970815", "%Y%m%d") # python#7980
@@ -391,14 +491,20 @@ def main(argv=None):
nstrs = []
anymod = False
for ostr in al.v or []:
m = re_vol.match(ostr)
if not m:
# not our problem
nstrs.append(ostr)
continue
src, dst, perms = m.groups()
na = [src, dst]
mod = False
- oa = ostr.split(":")
- na = oa[:2]
- for opt in oa[2:]:
for opt in perms.split(":"):
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
- elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":

View File

@@ -1,8 +1,8 @@
# coding: utf-8
- VERSION = (0, 12, 1)
VERSION = (1, 0, 3)
- CODENAME = "fil\033[33med"
CODENAME = "sufficient"
- BUILD_DT = (2021, 7, 28)
BUILD_DT = (2021, 9, 18)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -5,31 +5,196 @@ import re
import os
import sys
import stat
import time
import base64
import hashlib
import threading
from datetime import datetime
from .__init__ import WINDOWS
- from .util import IMPLICATIONS, uncyg, undot, absreal, Pebkac, fsdec, fsenc, statdir
from .util import (
IMPLICATIONS,
uncyg,
undot,
unhumanize,
absreal,
Pebkac,
fsenc,
statdir,
)
from .bos import bos
LEELOO_DALLAS = "leeloo_dallas"
class AXS(object):
- def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
self.uread = {} if uread is None else {k: 1 for k in uread}
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
self.umove = {} if umove is None else {k: 1 for k in umove}
self.udel = {} if udel is None else {k: 1 for k in udel}
self.uget = {} if uget is None else {k: 1 for k in uget}
def __repr__(self):
return "AXS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
- for k in "uread uwrite umove udel".split()
for k in "uread uwrite umove udel uget".split()
)
)
class Lim(object):
def __init__(self):
self.nups = {} # num tracker
self.bups = {} # byte tracker list
self.bupc = {} # byte tracker cache
self.nosub = False # disallow subdirectories
self.smin = None # filesize min
self.smax = None # filesize max
self.bwin = None # bytes window
self.bmax = None # bytes max
self.nwin = None # num window
self.nmax = None # num max
self.rotn = None # rot num files
self.rotl = None # rot depth
self.rotf = None # rot datefmt
self.rot_re = None # rotf check
def set_rotf(self, fmt):
self.rotf = fmt
r = re.escape(fmt).replace("%Y", "[0-9]{4}").replace("%j", "[0-9]{3}")
r = re.sub("%[mdHMSWU]", "[0-9]{2}", r)
self.rot_re = re.compile("(^|/)" + r + "$")
def all(self, ip, rem, sz, abspath):
self.chk_nup(ip)
self.chk_bup(ip)
self.chk_rem(rem)
if sz != -1:
self.chk_sz(sz)
ap2, vp2 = self.rot(abspath)
if abspath == ap2:
return ap2, rem
return ap2, ("{}/{}".format(rem, vp2) if rem else vp2)
def chk_sz(self, sz):
if self.smin is not None and sz < self.smin:
raise Pebkac(400, "file too small")
if self.smax is not None and sz > self.smax:
raise Pebkac(400, "file too big")
def chk_rem(self, rem):
if self.nosub and rem:
raise Pebkac(500, "no subdirectories allowed")
def rot(self, path):
if not self.rotf and not self.rotn:
return path, ""
if self.rotf:
path = path.rstrip("/\\")
if self.rot_re.search(path.replace("\\", "/")):
return path, ""
suf = datetime.utcnow().strftime(self.rotf)
if path:
path += "/"
return path + suf, suf
ret = self.dive(path, self.rotl)
if not ret:
raise Pebkac(500, "no available slots in volume")
d = ret[len(path) :].strip("/\\").replace("\\", "/")
return ret, d
def dive(self, path, lvs):
items = bos.listdir(path)
if not lvs:
# at leaf level
return None if len(items) >= self.rotn else ""
dirs = [int(x) for x in items if x and all(y in "1234567890" for y in x)]
dirs.sort()
if not dirs:
# no branches yet; make one
sub = os.path.join(path, "0")
bos.mkdir(sub)
else:
# try newest branch only
sub = os.path.join(path, str(dirs[-1]))
ret = self.dive(sub, lvs - 1)
if ret is not None:
return os.path.join(sub, ret)
if len(dirs) >= self.rotn:
# full branch or root
return None
# make a branch
sub = os.path.join(path, str(dirs[-1] + 1))
bos.mkdir(sub)
ret = self.dive(sub, lvs - 1)
if ret is None:
raise Pebkac(500, "rotation bug")
return os.path.join(sub, ret)
def nup(self, ip):
try:
self.nups[ip].append(time.time())
except:
self.nups[ip] = [time.time()]
def bup(self, ip, nbytes):
v = [time.time(), nbytes]
try:
self.bups[ip].append(v)
self.bupc[ip] += nbytes
except:
self.bups[ip] = [v]
self.bupc[ip] = nbytes
def chk_nup(self, ip):
if not self.nmax or ip not in self.nups:
return
nups = self.nups[ip]
cutoff = time.time() - self.nwin
while nups and nups[0] < cutoff:
nups.pop(0)
if len(nups) >= self.nmax:
raise Pebkac(429, "too many uploads")
def chk_bup(self, ip):
if not self.bmax or ip not in self.bups:
return
bups = self.bups[ip]
cutoff = time.time() - self.bwin
mark = self.bupc[ip]
while bups and bups[0][0] < cutoff:
mark -= bups.pop(0)[1]
self.bupc[ip] = mark
if mark >= self.bmax:
raise Pebkac(429, "ingress saturated")
class VFS(object):
"""single level in the virtual fs"""
@@ -42,6 +207,7 @@ class VFS(object):
self.nodes = {} # child nodes
self.histtab = None # all realpath->histpath
self.dbv = None # closest full/non-jump parent
self.lim = None # type: Lim # upload limits; only set for dbv
if realpath:
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
@@ -50,6 +216,7 @@ class VFS(object):
self.awrite = {}
self.amove = {}
self.adel = {}
self.aget = {}
else:
self.histpath = None
self.all_vols = None
@@ -57,6 +224,7 @@ class VFS(object):
self.awrite = None
self.amove = None
self.adel = None
self.aget = None
def __repr__(self):
return "VFS({})".format(
@@ -143,7 +311,7 @@ class VFS(object):
def can_access(self, vpath, uname):
# type: (str, str) -> tuple[bool, bool, bool, bool]
- """can Read,Write,Move,Delete"""
"""can Read,Write,Move,Delete,Get"""
vn, _ = self._find(vpath)
c = vn.axs
return [
@@ -151,10 +319,20 @@ class VFS(object):
uname in c.uwrite or "*" in c.uwrite,
uname in c.umove or "*" in c.umove,
uname in c.udel or "*" in c.udel,
uname in c.uget or "*" in c.uget,
]
- def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
- # type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
def get(
self,
vpath,
uname,
will_read,
will_write,
will_move=False,
will_del=False,
will_get=False,
):
# type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
c = vn.axs
@@ -164,14 +342,16 @@ class VFS(object):
[will_write, c.uwrite, "write"],
[will_move, c.umove, "move"],
[will_del, c.udel, "delete"],
[will_get, c.uget, "get"],
]:
- if req and (uname not in d and "*" not in d):
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
m = "you don't have {}-access for this location"
raise Pebkac(403, m.format(msg))
return vn, rem
def get_dbv(self, vrem):
# type: (str) -> tuple[VFS, str]
dbv = self.dbv
if not dbv:
return self, vrem
@@ -202,7 +382,7 @@ class VFS(object):
for name, vn2 in sorted(self.nodes.items()):
ok = False
axs = vn2.axs
- axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
for pset in permsets:
ok = True
for req, lst in zip(pset, axs):
@@ -268,7 +448,11 @@ class VFS(object):
f2a = os.sep + "dir.txt"
f2b = "{0}.hist{0}".format(os.sep)
- g = self.walk("", vrem, [], uname, [[True]], dots, scandir, False)
# if multiselect: add all items to archive root
# if single folder: the folder itself is the top-level item
folder = "" if flt else (vrem.split("/")[-1] or "top")
g = self.walk(folder, vrem, [], uname, [[True]], dots, scandir, False)
for _, _, vpath, apath, files, rd, vd in g:
if flt:
files = [x for x in files if x[0] in flt]
@@ -310,6 +494,12 @@ class VFS(object):
yield f
if WINDOWS:
re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
else:
re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
class AuthSrv(object):
"""verifies users against given paths"""
@@ -319,11 +509,6 @@ class AuthSrv(object):
self.warn_anonwrite = warn_anonwrite
self.line_ctr = 0
- if WINDOWS:
- self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
- else:
- self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
self.mutex = threading.Lock()
self.reload()
@@ -390,6 +575,9 @@ class AuthSrv(object):
def _read_vol_str(self, lvl, uname, axs, flags):
# type: (str, str, AXS, any) -> None
if lvl.strip("crwmdg"):
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
if lvl == "c":
cval = True
if "=" in uname:
@@ -401,17 +589,21 @@ class AuthSrv(object):
if uname == "": if uname == "":
uname = "*" uname = "*"
for un in uname.split(","):
if "r" in lvl: if "r" in lvl:
axs.uread[uname] = 1 axs.uread[un] = 1
if "w" in lvl: if "w" in lvl:
axs.uwrite[uname] = 1 axs.uwrite[un] = 1
if "m" in lvl: if "m" in lvl:
axs.umove[uname] = 1 axs.umove[un] = 1
if "d" in lvl: if "d" in lvl:
axs.udel[uname] = 1 axs.udel[un] = 1
if "g" in lvl:
axs.uget[un] = 1
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
@@ -450,9 +642,9 @@ class AuthSrv(object):
if self.args.v:
# list of src:dst:permset:permset:...
- # permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
# permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
for v_str in self.args.v:
- m = self.re_vol.match(v_str)
m = re_vol.match(v_str)
if not m:
raise Exception("invalid -v argument: [{}]".format(v_str))
@@ -517,20 +709,21 @@ class AuthSrv(object):
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
- for perm in "read write move del".split():
for perm in "read write move del get".split():
axs_key = "u" + perm
unames = ["*"] + list(acct.keys())
umap = {x: [] for x in unames}
for usr in unames:
for mp, vol in vfs.all_vols.items():
- if usr in getattr(vol.axs, axs_key):
axs = getattr(vol.axs, axs_key)
if usr in axs or "*" in axs:
umap[usr].append(mp)
setattr(vfs, "a" + perm, umap)
all_users = {}
missing_users = {}
for axs in daxs.values():
- for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
for usr in d.keys():
all_users[usr] = 1
if usr != "*" and usr not in acct:
@@ -544,6 +737,9 @@ class AuthSrv(object):
)
raise Exception("invalid config")
if LEELOO_DALLAS in all_users:
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
promote = []
demote = []
for vol in vfs.all_vols.values():
@@ -602,6 +798,56 @@ class AuthSrv(object):
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
for vol in vfs.all_vols.values():
lim = Lim()
use = False
if vol.flags.get("nosub"):
use = True
lim.nosub = True
v = vol.flags.get("sz")
if v:
use = True
lim.smin, lim.smax = [unhumanize(x) for x in v.split("-")]
v = vol.flags.get("rotn")
if v:
use = True
lim.rotn, lim.rotl = [int(x) for x in v.split(",")]
v = vol.flags.get("rotf")
if v:
use = True
lim.set_rotf(v)
v = vol.flags.get("maxn")
if v:
use = True
lim.nmax, lim.nwin = [int(x) for x in v.split(",")]
v = vol.flags.get("maxb")
if v:
use = True
lim.bmax, lim.bwin = [unhumanize(x) for x in v.split(",")]
if use:
vol.lim = lim
for vol in vfs.all_vols.values():
fk = vol.flags.get("fk")
if fk:
vol.flags["fk"] = int(fk) if fk is not True else 8
for vol in vfs.all_vols.values():
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
vol.flags["gz"] = False # def.pk
if "scan" in vol.flags:
vol.flags["scan"] = int(vol.flags["scan"])
elif self.args.re_maxage:
vol.flags["scan"] = self.args.re_maxage
all_mte = {}
errors = False
for vol in vfs.all_vols.values():
@@ -623,9 +869,11 @@ class AuthSrv(object):
if k1 in vol.flags:
vol.flags[k2] = True
- # default tag-list if unset
# default tag cfgs if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
if "mth" not in vol.flags:
vol.flags["mth"] = self.args.mth
# append parsers from argv to volume-flags
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
@@ -705,6 +953,7 @@ class AuthSrv(object):
[" write", "uwrite"], [" write", "uwrite"],
[" move", "umove"], [" move", "umove"],
["delete", "udel"], ["delete", "udel"],
[" get", "uget"],
]: ]:
u = list(sorted(getattr(v.axs, attr).keys())) u = list(sorted(getattr(v.axs, attr).keys()))
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u) u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
@@ -772,10 +1021,10 @@ class AuthSrv(object):
raise Exception("volume not found: " + v) raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags}) self.log({"users": users, "vols": vols, "flags": flags})
m = "/{}: read({}) write({}) move({}) del({})" m = "/{}: read({}) write({}) move({}) del({}) get({})"
for k, v in self.vfs.all_vols.items(): for k, v in self.vfs.all_vols.items():
vc = v.axs vc = v.axs
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel)) self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))
flag_v = "v" in flags flag_v = "v" in flags
flag_ln = "ln" in flags flag_ln = "ln" in flags
@@ -789,13 +1038,13 @@ class AuthSrv(object):
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
- vn, _ = self.vfs.get(v, u, True, False, False, False)
vn, _ = self.vfs.get(v, u, True, False, False, False, False)
except:
continue
atop = vn.realpath
g = vn.walk(
- "", "", [], u, True, [[True]], not self.args.no_scandir, False
vn.vpath, "", [], u, [[True]], True, not self.args.no_scandir, False
)
for _, _, vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]

View File

@@ -1,6 +1,5 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
- from copyparty.authsrv import AuthSrv
import sys
import signal
@@ -9,6 +8,7 @@ import threading
from .broker_util import ExceptionalQueue
from .httpsrv import HttpSrv
from .util import FAKE_MP
from copyparty.authsrv import AuthSrv
class MpWorker(object):

View File

@@ -7,12 +7,18 @@ import gzip
import time
import copy
import json
import base64
import string
import socket
import ctypes
from datetime import datetime
import calendar
try:
import lzma
except:
pass
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .bos import bos
@@ -22,7 +28,6 @@ from .star import StreamTar
NO_CACHE = {"Cache-Control": "no-cache"} NO_CACHE = {"Cache-Control": "no-cache"}
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
class HttpCli(object): class HttpCli(object):
@@ -48,7 +53,10 @@ class HttpCli(object):
self.bufsz = 1024 * 32
self.hint = None
self.absolute_urls = False
- self.out_headers = {"Access-Control-Allow-Origin": "*"}
self.out_headers = {
"Access-Control-Allow-Origin": "*",
"Cache-Control": "no-store; max-age=0",
}
def log(self, msg, c=0):
ptn = self.asrv.re_pwd
@@ -61,7 +69,10 @@ class HttpCli(object):
a, b = m.groups()
return "=\033[7m {} \033[27m{}".format(self.asrv.iacct[a], b)
- def _check_nonfatal(self, ex):
def _check_nonfatal(self, ex, post):
if post:
return ex.code < 300
return ex.code < 400 or ex.code in [404, 429]
def _assert_safe_rem(self, rem):
@@ -103,8 +114,8 @@ class HttpCli(object):
self.req = "[junk]" self.req = "[junk]"
self.http_ver = "HTTP/1.1" self.http_ver = "HTTP/1.1"
# self.log("pebkac at httpcli.run #1: " + repr(ex)) # self.log("pebkac at httpcli.run #1: " + repr(ex))
self.keepalive = self._check_nonfatal(ex) self.keepalive = False
self.loud_reply(unicode(ex), status=ex.code) self.loud_reply(unicode(ex), status=ex.code, volsan=True)
return self.keepalive return self.keepalive
# time.sleep(0.4) # time.sleep(0.4)
@@ -177,16 +188,34 @@ class HttpCli(object):
if kc in cookies and ku not in uparam:
uparam[ku] = cookies[kc]
if len(uparam) > 10 or len(cookies) > 50:
raise Pebkac(400, "u wot m8")
self.uparam = uparam
self.cookies = cookies
- self.vpath = unquotep(vpath)
self.vpath = unquotep(vpath) # not query, so + means +
pwd = uparam.get("pw") pwd = None
ba = self.headers.get("authorization")
if ba:
try:
ba = ba.split(" ")[1].encode("ascii")
ba = base64.b64decode(ba).decode("utf-8")
# try "pwd", "x:pwd", "pwd:x"
for ba in [ba] + ba.split(":", 1)[::-1]:
if self.asrv.iacct.get(ba):
pwd = ba
break
except:
pass
pwd = uparam.get("pw") or pwd
self.uname = self.asrv.iacct.get(pwd, "*")
self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname]
self.dvol = self.asrv.vfs.adel[self.uname]
self.gvol = self.asrv.vfs.aget[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
@@ -201,6 +230,9 @@ class HttpCli(object):
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
x = self.asrv.vfs.can_access(self.vpath, self.uname)
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
try:
if self.mode in ["GET", "HEAD"]:
return self.handle_get() and self.keepalive
@@ -213,18 +245,24 @@ class HttpCli(object):
else:
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
- except Pebkac as ex:
except Exception as ex:
pex = ex
if not hasattr(ex, "code"):
pex = Pebkac(500)
try:
- # self.log("pebkac at httpcli.run #2: " + repr(ex))
- if not self._check_nonfatal(ex):
post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
if not self._check_nonfatal(pex, post):
self.keepalive = False
- self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = str(ex) if pex == ex else min_ex()
self.log("{}\033[0m, {}".format(msg, self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
if self.hint:
msg += "hint: {}\r\n".format(self.hint)
- self.reply(msg.encode("utf-8", "replace"), status=ex.code)
self.reply(msg.encode("utf-8", "replace"), status=pex.code, volsan=True)
return self.keepalive
except Pebkac:
return False
@@ -257,8 +295,12 @@ class HttpCli(object):
except:
raise Pebkac(400, "client d/c while replying headers")
- def reply(self, body, status=200, mime=None, headers=None):
def reply(self, body, status=200, mime=None, headers=None, volsan=False):
# TODO something to reply with user-supplied values safely
if volsan:
body = vol_san(self.asrv.vfs.all_vols.values(), body)
self.send_headers(len(body), status, mime, headers)
try:
@@ -315,8 +357,7 @@ class HttpCli(object):
).encode("utf-8", "replace") ).encode("utf-8", "replace")
if use302: if use302:
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"} self.reply(html, status=302, headers={"Location": "/" + vpath})
self.reply(html, status=302, headers=h)
else: else:
self.reply(html, status=status) self.reply(html, status=status)
@@ -342,12 +383,35 @@ class HttpCli(object):
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
if not self.can_read and not self.can_write and not self.can_get:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
return self.tx_404()
self.uparam["h"] = False
if "tree" in self.uparam: if "tree" in self.uparam:
return self.tx_tree() return self.tx_tree()
if "delete" in self.uparam:
return self.handle_rm()
if "move" in self.uparam:
return self.handle_mv()
if "scan" in self.uparam:
return self.scanvol()
if not self.vpath:
if "stack" in self.uparam: if "stack" in self.uparam:
return self.tx_stack() return self.tx_stack()
if "ups" in self.uparam:
return self.tx_ups()
if "h" in self.uparam:
return self.tx_mounts()
# conditional redirect to single volumes
if self.vpath == "" and not self.ouparam:
nread = len(self.rvol)
@@ -362,28 +426,6 @@ class HttpCli(object):
self.redirect(vpath, flavor="redirecting to", use302=True)
return True
- x = self.asrv.vfs.can_access(self.vpath, self.uname)
- self.can_read, self.can_write, self.can_move, self.can_delete = x
- if not self.can_read and not self.can_write:
- if self.vpath:
- self.log("inaccessible: [{}]".format(self.vpath))
- raise Pebkac(404)
- self.uparam = {"h": False}
- if "delete" in self.uparam:
- return self.handle_rm()
- if "move" in self.uparam:
- return self.handle_mv()
- if "h" in self.uparam:
- self.vpath = None
- return self.tx_mounts()
- if "scan" in self.uparam:
- return self.scanvol()
return self.tx_browser()
def handle_options(self):
@@ -466,7 +508,7 @@ class HttpCli(object):
if "get" in opt: if "get" in opt:
return self.handle_get() return self.handle_get()
raise Pebkac(405, "POST({}) is disabled".format(ctype)) raise Pebkac(405, "POST({}) is disabled in server config".format(ctype))
raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
@@ -484,7 +526,11 @@ class HttpCli(object):
def dump_to_file(self):
reader, remains = self.get_body_reader()
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
lim = vfs.get_dbv(rem)[0].lim
fdir = os.path.join(vfs.realpath, rem)
if lim:
fdir, rem = lim.all(self.ip, rem, remains, fdir)
bos.makedirs(fdir)
addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
@@ -492,13 +538,81 @@ class HttpCli(object):
if self.args.nw:
path = os.devnull
- with open(fsenc(path), "wb", 512 * 1024) as f:
open_f = open
open_a = [fsenc(path), "wb", 512 * 1024]
open_ka = {}
# user-request || config-force
if ("gz" in vfs.flags or "xz" in vfs.flags) and (
"pk" in vfs.flags
or "pk" in self.uparam
or "gz" in self.uparam
or "xz" in self.uparam
):
fb = {"gz": 9, "xz": 0} # default/fallback level
lv = {} # selected level
alg = None # selected algo (gz=preferred)
# user-prefs first
if "gz" in self.uparam or "pk" in self.uparam: # def.pk
alg = "gz"
if "xz" in self.uparam:
alg = "xz"
if alg:
v = self.uparam.get(alg)
lv[alg] = fb[alg] if v is None else int(v)
if alg not in vfs.flags:
alg = "gz" if "gz" in vfs.flags else "xz"
# then server overrides
pk = vfs.flags.get("pk")
if pk is not None:
# config-forced on
alg = alg or "gz" # def.pk
try:
# config-forced opts
alg, lv = pk.split(",")
lv[alg] = int(lv)
except:
pass
lv[alg] = lv.get(alg) or fb.get(alg)
self.log("compressing with {} level {}".format(alg, lv.get(alg)))
if alg == "gz":
open_f = gzip.GzipFile
open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
elif alg == "xz":
open_f = lzma.open
open_a = [fsenc(path), "wb"]
open_ka = {"preset": lv[alg]}
else:
self.log("fallthrough? thats a bug", 1)
with open_f(*open_a, **open_ka) as f:
post_sz, _, sha_b64 = hashcopy(reader, f)
if lim:
lim.nup(self.ip)
lim.bup(self.ip, post_sz)
try:
lim.chk_sz(post_sz)
except:
bos.unlink(path)
raise
if not self.args.nw:
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
- False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
False,
"up2k.hash_file",
vfs.realpath,
vfs.flags,
vrem,
fn,
self.ip,
time.time(),
)
return post_sz, sha_b64, remains, path
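
(note: a hypothetical request that would exercise the compression path above -- uploading with ?gz or ?xz (optionally with a level, e.g. ?gz=9) onto a volume that carries the gz/xz volflag should make dump_to_file write through GzipFile / lzma.open instead of a plain file; assuming plain PUT uploads are routed here as the "put-*" filename suggests, something like `curl -T notes.txt 'http://127.0.0.1:3923/inc/?pw=hunter2&gz=9'`, where /inc and the credentials are placeholders.)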
@@ -569,7 +683,7 @@ class HttpCli(object):
try:
remains = int(self.headers["content-length"])
except:
- raise Pebkac(400, "you must supply a content-length for JSON POST")
raise Pebkac(411)
if remains > 1024 * 1024:
raise Pebkac(413, "json 2big")
@@ -592,17 +706,17 @@ class HttpCli(object):
if "srch" in self.uparam or "srch" in body: if "srch" in self.uparam or "srch" in body:
return self.handle_search(body) return self.handle_search(body)
if "delete" in self.uparam:
return self.handle_rm(body)
# up2k-php compat # up2k-php compat
for k in "chunkpit.php", "handshake.php": for k in "chunkpit.php", "handshake.php":
if self.vpath.endswith(k): if self.vpath.endswith(k):
self.vpath = self.vpath[: -len(k)] self.vpath = self.vpath[: -len(k)]
sub = None
name = undot(body["name"]) name = undot(body["name"])
if "/" in name: if "/" in name:
sub, name = name.rsplit("/", 1) raise Pebkac(400, "your client is old; press CTRL-SHIFT-R and try again")
self.vpath = "/".join([self.vpath, sub]).strip("/")
body["name"] = name
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
dbv, vrem = vfs.get_dbv(rem) dbv, vrem = vfs.get_dbv(rem)
@@ -613,7 +727,7 @@ class HttpCli(object):
body["addr"] = self.ip body["addr"] = self.ip
body["vcfg"] = dbv.flags body["vcfg"] = dbv.flags
if sub: if rem:
try: try:
dst = os.path.join(vfs.realpath, rem) dst = os.path.join(vfs.realpath, rem)
if not bos.path.isdir(dst): if not bos.path.isdir(dst):
@@ -633,9 +747,6 @@ class HttpCli(object):
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
ret = x.get() ret = x.get()
if sub:
ret["name"] = "/".join([sub, ret["name"]])
ret = json.dumps(ret) ret = json.dumps(ret)
self.log(ret) self.log(ret)
self.reply(ret.encode("utf-8"), mime="application/json") self.reply(ret.encode("utf-8"), mime="application/json")
@@ -779,8 +890,12 @@ class HttpCli(object):
pwd = self.parser.require("cppwd", 64)
self.parser.drop()
dst = "/?h"
if self.vpath:
dst = "/" + quotep(self.vpath)
ck, msg = self.get_pwd_cookie(pwd)
- html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
html = self.j2("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
@@ -863,6 +978,15 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
upload_vpath = self.vpath
lim = vfs.get_dbv(rem)[0].lim
fdir_base = os.path.join(vfs.realpath, rem)
if lim:
fdir_base, rem = lim.all(self.ip, rem, -1, fdir_base)
upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/")
if not nullwrite:
bos.makedirs(fdir_base)
files = []
errmsg = ""
t0 = time.time()
@@ -872,12 +996,9 @@ class HttpCli(object):
self.log("discarding incoming file without filename") self.log("discarding incoming file without filename")
# fallthrough # fallthrough
fdir = fdir_base
fname = sanitize_fn(p_file, "", [".prologue.html", ".epilogue.html"])
if p_file and not nullwrite: if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fname = sanitize_fn(
p_file, "", [".prologue.html", ".epilogue.html"]
)
if not bos.path.isdir(fdir): if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist") raise Pebkac(404, "that folder does not exist")
@@ -888,14 +1009,28 @@ class HttpCli(object):
fname = os.devnull
fdir = ""
if lim:
lim.chk_bup(self.ip)
lim.chk_nup(self.ip)
try:
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
f, fname = f["orz"]
- self.log("writing to {}/{}".format(fdir, fname))
abspath = os.path.join(fdir, fname)
self.log("writing to {}".format(abspath))
sz, sha512_hex, _ = hashcopy(p_data, f)
if sz == 0:
raise Pebkac(400, "empty files in post")
if lim:
lim.nup(self.ip)
lim.bup(self.ip, sz)
try:
lim.chk_sz(sz)
except:
bos.unlink(abspath)
raise
files.append([sz, sha512_hex, p_file, fname])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
@@ -905,6 +1040,8 @@ class HttpCli(object):
dbv.flags,
vrem,
fname,
self.ip,
time.time(),
)
self.conn.nbyte += sz
@@ -925,7 +1062,9 @@ class HttpCli(object):
raise
except Pebkac as ex:
- errmsg = unicode(ex)
errmsg = vol_san(
self.asrv.vfs.all_vols.values(), unicode(ex).encode("utf-8")
).decode("utf-8")
td = max(0.1, time.time() - t0)
sz_total = sum(x[0] for x in files)
@@ -945,9 +1084,18 @@ class HttpCli(object):
errmsg = "ERROR: " + errmsg errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files: for sz, sha512, ofn, lfn in files:
vpath = (self.vpath + "/" if self.vpath else "") + lfn vsuf = ""
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format( if self.can_read and "fk" in vfs.flags:
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True) vsuf = "?k=" + gen_filekey(
self.args.fk_salt,
abspath,
sz,
0 if ANYWIN else bos.stat(os.path.join(vfs.realpath, lfn)).st_ino,
)[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
) )
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
@@ -955,13 +1103,13 @@ class HttpCli(object):
"url": "{}://{}/{}".format( "url": "{}://{}/{}".format(
"https" if self.tls else "http", "https" if self.tls else "http",
self.headers.get("host", "copyparty"), self.headers.get("host", "copyparty"),
vpath, vpath + vsuf,
), ),
"sha512": sha512[:56], "sha512": sha512[:56],
"sz": sz, "sz": sz,
"fn": lfn, "fn": lfn,
"fn_orig": ofn, "fn_orig": ofn,
"path": vpath, "path": vpath + vsuf,
} }
jmsg["files"].append(jpart) jmsg["files"].append(jpart)
@@ -1004,6 +1152,20 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
clen = int(self.headers.get("content-length", -1))
if clen == -1:
raise Pebkac(411)
rp, fn = vsplit(rem)
fp = os.path.join(vfs.realpath, rp)
lim = vfs.get_dbv(rem)[0].lim
if lim:
fp, rp = lim.all(self.ip, rp, clen, fp)
bos.makedirs(fp)
fp = os.path.join(fp, fn)
rem = "{}/{}".format(rp, fn).strip("/")
if not rem.endswith(".md"): if not rem.endswith(".md"):
raise Pebkac(400, "only markdown pls") raise Pebkac(400, "only markdown pls")
@@ -1015,7 +1177,6 @@ class HttpCli(object):
self.reply(response.encode("utf-8"))
return True
- fp = os.path.join(vfs.realpath, rem)
srv_lastmod = srv_lastmod3 = -1
try:
st = bos.stat(fp)
@@ -1069,6 +1230,15 @@ class HttpCli(object):
with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(p_data, f)
if lim:
lim.nup(self.ip)
lim.bup(self.ip, sz)
try:
lim.chk_sz(sz)
except:
bos.unlink(fp)
raise
new_lastmod = bos.stat(fp).st_mtime
new_lastmod3 = int(new_lastmod * 1000)
sha512 = sha512[:56]
@@ -1123,7 +1293,7 @@ class HttpCli(object):
break
if not editions:
- raise Pebkac(404)
return self.tx_404()
#
# if-modified
@@ -1236,6 +1406,8 @@ class HttpCli(object):
if not is_compressed and "cache" not in self.uparam:
self.out_headers.update(NO_CACHE)
else:
self.out_headers.pop("Cache-Control")
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
@@ -1291,11 +1463,9 @@ class HttpCli(object):
else:
fn = self.headers.get("host", "hey")
- afn = "".join(
- [x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
- )
- bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
safe = (string.ascii_letters + string.digits).replace("%", "")
afn = "".join([x if x in safe.replace('"', "") else "_" for x in fn])
bascii = unicode(safe).encode("utf-8")
ufn = fn.encode("utf-8", "xmlcharrefreplace")
if PY2:
ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
@@ -1310,6 +1480,7 @@ class HttpCli(object):
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}" cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
cdis = cdis.format(afn, fmt, ufn, fmt) cdis = cdis.format(afn, fmt, ufn, fmt)
self.log(cdis)
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis}) self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir) fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
@@ -1443,6 +1614,7 @@ class HttpCli(object):
html = self.j2(
"splash",
this=self,
qvpath=quotep(self.vpath),
rvol=rvol,
wvol=wvol,
avol=avol,
@@ -1453,7 +1625,13 @@ class HttpCli(object):
mtpq=vs["mtpq"], mtpq=vs["mtpq"],
url_suf=suf, url_suf=suf,
) )
self.reply(html.encode("utf-8"), headers=NO_STORE) self.reply(html.encode("utf-8"))
return True
def tx_404(self):
m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
self.reply(html.encode("utf-8"), status=404)
return True
def scanvol(self):
@@ -1461,7 +1639,7 @@ class HttpCli(object):
raise Pebkac(403, "not allowed for user " + self.uname) raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_rescan: if self.args.no_rescan:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "the rescan feature is disabled in server config")
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True) vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
@@ -1480,7 +1658,7 @@ class HttpCli(object):
raise Pebkac(403, "not allowed for user " + self.uname) raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_stack: if self.args.no_stack:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "the stackdump feature is disabled in server config")
ret = "<pre>{}\n{}".format(time.time(), alltrace()) ret = "<pre>{}\n{}".format(time.time(), alltrace())
self.reply(ret.encode("utf-8")) self.reply(ret.encode("utf-8"))
@@ -1542,14 +1720,52 @@ class HttpCli(object):
ret["a"] = dirs ret["a"] = dirs
return ret return ret
def handle_rm(self): def tx_ups(self):
if not self.can_delete: if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
filt = self.uparam.get("filter")
lm = "ups [{}]".format(filt)
self.log(lm)
ret = []
t0 = time.time()
idx = self.conn.get_u2idx()
lim = time.time() - self.args.unpost
for vol in self.asrv.vfs.all_vols.values():
cur = idx.get_cur(vol.realpath)
if not cur:
continue
q = "select sz, rd, fn, at from up where ip=? and at>?"
for sz, rd, fn, at in cur.execute(q, (self.ip, lim)):
vp = "/" + "/".join(x for x in [vol.vpath, rd, fn] if x)
if filt and filt not in vp:
continue
ret.append({"vp": vp, "sz": sz, "at": at})
if len(ret) > 3000:
ret.sort(key=lambda x: x["at"], reverse=True)
ret = ret[:2000]
ret.sort(key=lambda x: x["at"], reverse=True)
ret = ret[:2000]
jtxt = json.dumps(ret, indent=2, sort_keys=True).encode("utf-8", "replace")
self.log("{} #{} {:.2f}sec".format(lm, len(ret), time.time() - t0))
self.reply(jtxt, mime="application/json")
def handle_rm(self, req=None):
if not req and not self.can_delete:
raise Pebkac(403, "not allowed for user " + self.uname) raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_del: if self.args.no_del:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "the delete feature is disabled in server config")
x = self.conn.hsrv.broker.put(True, "up2k.handle_rm", self.uname, self.vpath) if not req:
req = [self.vpath]
x = self.conn.hsrv.broker.put(True, "up2k.handle_rm", self.uname, self.ip, req)
self.loud_reply(x.get()) self.loud_reply(x.get())
def handle_mv(self): def handle_mv(self):
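Regarding the two new methods above: tx_ups lists the caller's own recent uploads by filtering the volume database on uploader IP and upload time, and handle_rm now accepts a list of vpaths so unpost can batch-delete them. A minimal standalone sketch of the listing query against the v5 "up" schema introduced further down in this diff; the database path, IP and window below are placeholder values:

import sqlite3, time

def recent_uploads(db_path, ip, unpost_window, vtop=""):
    # same filter as tx_ups: uploads from this IP, newer than the unpost window
    lim = time.time() - unpost_window
    hits = []
    with sqlite3.connect(db_path) as db:
        q = "select sz, rd, fn, at from up where ip=? and at>?"
        for sz, rd, fn, at in db.execute(q, (ip, lim)):
            vp = "/" + "/".join(x for x in [vtop, rd, fn] if x)
            hits.append({"vp": vp, "sz": sz, "at": at})
    hits.sort(key=lambda x: x["at"], reverse=True)
    return hits[:2000]

# recent_uploads("/srv/music/.hist/up2k.db", "10.1.2.3", 3600 * 12)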
@@ -1557,13 +1773,16 @@ class HttpCli(object):
raise Pebkac(403, "not allowed for user " + self.uname) raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_mv: if self.args.no_mv:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "the rename/move feature is disabled in server config")
# full path of new loc (incl filename) # full path of new loc (incl filename)
dst = self.uparam.get("move") dst = self.uparam.get("move")
if not dst: if not dst:
raise Pebkac(400, "need dst vpath") raise Pebkac(400, "need dst vpath")
# x-www-form-urlencoded (url query part) uses
# either + or %20 for 0x20 so handle both
dst = unquotep(dst.replace("+", " "))
x = self.conn.hsrv.broker.put( x = self.conn.hsrv.broker.put(
True, "up2k.handle_mv", self.uname, self.vpath, dst True, "up2k.handle_mv", self.uname, self.vpath, dst
) )
@@ -1588,22 +1807,22 @@ class HttpCli(object):
try: try:
st = bos.stat(abspath) st = bos.stat(abspath)
except: except:
raise Pebkac(404) return self.tx_404()
if self.can_read:
if rem.startswith(".hist/up2k.") or ( if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/") rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
): ):
raise Pebkac(403) raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode) is_dir = stat.S_ISDIR(st.st_mode)
if self.can_read:
th_fmt = self.uparam.get("th") th_fmt = self.uparam.get("th")
if th_fmt is not None: if th_fmt is not None:
if is_dir: if is_dir:
for fn in self.args.th_covers.split(","): for fn in self.args.th_covers.split(","):
fp = os.path.join(abspath, fn) fp = os.path.join(abspath, fn)
if bos.path.exists(fp): if bos.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn) vrem = "{}/{}".format(vrem.rstrip("/"), fn).strip("/")
is_dir = False is_dir = False
break break
@@ -1621,12 +1840,24 @@ class HttpCli(object):
return self.tx_ico(rem) return self.tx_ico(rem)
if not is_dir: if not is_dir and (self.can_read or self.can_get):
if not self.can_read and "fk" in vn.flags:
correct = gen_filekey(
self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
)[: vn.flags["fk"]]
got = self.uparam.get("k")
if got != correct:
self.log("wrong filekey, want {}, got {}".format(correct, got))
return self.tx_404()
if abspath.endswith(".md") and "raw" not in self.uparam: if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath) return self.tx_md(abspath)
return self.tx_file(abspath) return self.tx_file(abspath)
elif is_dir and not self.can_read and not self.can_write:
return self.tx_404()
srv_info = [] srv_info = []
try: try:
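The access check added above lets get-only (g) visitors fetch a file when their ?k= parameter matches a key derived from the server salt, the absolute path, the file size and (on non-Windows) the inode, truncated to the volume's fk length. A self-contained sketch of that check, reusing the gen_filekey definition this diff adds to util.py; the salt, path and key length here are placeholders:

import base64, hashlib, os

def gen_filekey(salt, fspath, fsize, inode):
    # sha512 over "salt path size inode", urlsafe-base64 encoded (as in util.py below)
    return base64.urlsafe_b64encode(
        hashlib.sha512(
            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
        ).digest()
    ).decode("ascii")

def check_filekey(salt, abspath, got, fk_len=8, windows=False):
    st = os.stat(abspath)
    inode = 0 if windows else st.st_ino  # the inode is not used on windows
    return got == gen_filekey(salt, abspath, st.st_size, inode)[:fk_len]

# check_filekey("hunter2", "/srv/pics/cat.jpg", "aB3dEf9_")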
@@ -1665,6 +1896,8 @@ class HttpCli(object):
perms.append("move") perms.append("move")
if self.can_delete: if self.can_delete:
perms.append("delete") perms.append("delete")
if self.can_get:
perms.append("get")
url_suf = self.urlq({}, []) url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
@@ -1674,12 +1907,22 @@ class HttpCli(object):
tpl = "browser2" tpl = "browser2"
logues = ["", ""] logues = ["", ""]
if not self.args.no_logues:
for n, fn in enumerate([".prologue.html", ".epilogue.html"]): for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn) fn = os.path.join(abspath, fn)
if bos.path.exists(fn): if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8") logues[n] = f.read().decode("utf-8")
readme = ""
if not self.args.no_readme and not logues[1]:
for fn in ["README.md", "readme.md"]:
fn = os.path.join(abspath, fn)
if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f:
readme = f.read().decode("utf-8")
break
ls_ret = { ls_ret = {
"dirs": [], "dirs": [],
"files": [], "files": [],
@@ -1688,6 +1931,7 @@ class HttpCli(object):
"acct": self.uname, "acct": self.uname,
"perms": perms, "perms": perms,
"logues": logues, "logues": logues,
"readme": readme,
} }
j2a = { j2a = {
"vdir": quotep(self.vpath), "vdir": quotep(self.vpath),
@@ -1696,33 +1940,34 @@ class HttpCli(object):
"acct": self.uname, "acct": self.uname,
"perms": json.dumps(perms), "perms": json.dumps(perms),
"taglist": [], "taglist": [],
"tag_order": [], "def_hcols": [],
"have_up2k_idx": ("e2d" in vn.flags), "have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags), "have_tags_idx": ("e2t" in vn.flags),
"have_mv": (not self.args.no_mv), "have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del), "have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip), "have_zip": (not self.args.no_zip),
"have_unpost": (self.args.unpost > 0),
"have_b_u": (self.can_write and self.uparam.get("b") == "u"), "have_b_u": (self.can_write and self.uparam.get("b") == "u"),
"url_suf": url_suf, "url_suf": url_suf,
"logues": logues, "logues": logues,
"readme": readme,
"title": html_escape(self.vpath, crlf=True), "title": html_escape(self.vpath, crlf=True),
"srv_info": srv_info, "srv_info": srv_info,
} }
if not self.can_read: if not self.can_read:
if is_ls: if is_ls:
ret = json.dumps(ls_ret) ret = json.dumps(ls_ret)
self.reply( self.reply(ret.encode("utf-8", "replace"), mime="application/json")
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
return True return True
if not stat.S_ISDIR(st.st_mode): if not stat.S_ISDIR(st.st_mode):
raise Pebkac(404) return self.tx_404()
if "zip" in self.uparam or "tar" in self.uparam:
raise Pebkac(403)
html = self.j2(tpl, **j2a) html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE) self.reply(html.encode("utf-8", "replace"))
return True return True
for k in ["zip", "tar"]: for k in ["zip", "tar"]:
@@ -1766,6 +2011,8 @@ class HttpCli(object):
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
icur = idx.get_cur(dbv.realpath) icur = idx.get_cur(dbv.realpath)
add_fk = vn.flags.get("fk")
dirs = [] dirs = []
files = [] files = []
for fn in vfs_ls: for fn in vfs_ls:
@@ -1811,9 +2058,19 @@ class HttpCli(object):
except: except:
ext = "%" ext = "%"
if add_fk:
href = "{}?k={}".format(
quotep(href),
gen_filekey(
self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
)[:add_fk],
)
else:
href = quotep(href)
item = { item = {
"lead": margin, "lead": margin,
"href": quotep(href), "href": href,
"name": fn, "name": fn,
"sz": sz, "sz": sz,
"ext": ext, "ext": ext,
@@ -1881,23 +2138,19 @@ class HttpCli(object):
ls_ret["files"] = files ls_ret["files"] = files
ls_ret["taglist"] = taglist ls_ret["taglist"] = taglist
ret = json.dumps(ls_ret) ret = json.dumps(ls_ret)
self.reply( self.reply(ret.encode("utf-8", "replace"), mime="application/json")
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
return True return True
j2a["files"] = dirs + files j2a["files"] = dirs + files
j2a["logues"] = logues j2a["logues"] = logues
j2a["taglist"] = taglist j2a["taglist"] = taglist
if "mte" in vn.flags: if "mth" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(",")) j2a["def_hcols"] = vn.flags["mth"].split(",")
if self.args.css_browser: if self.args.css_browser:
j2a["css"] = self.args.css_browser j2a["css"] = self.args.css_browser
html = self.j2(tpl, **j2a) html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE) self.reply(html.encode("utf-8", "replace"))
return True return True

View File

@@ -174,25 +174,26 @@ class HttpSrv(object):
now = time.time() now = time.time()
if now - (self.tp_time or now) > 300: if now - (self.tp_time or now) > 300:
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
self.tp_time = None
self.tp_q = None self.tp_q = None
if self.tp_q:
self.tp_q.put((sck, addr))
with self.mutex: with self.mutex:
self.ncli += 1 self.ncli += 1
if self.tp_q:
self.tp_time = self.tp_time or now self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli + 1) self.tp_ncli = max(self.tp_ncli, self.ncli)
if self.tp_nthr < self.ncli + 4: if self.tp_nthr < self.ncli + 4:
self.start_threads(8) self.start_threads(8)
self.tp_q.put((sck, addr))
return return
if not self.args.no_htp: if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n" m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1) self.log(self.name, m, 1)
with self.mutex:
self.ncli += 1
thr = threading.Thread( thr = threading.Thread(
target=self.thr_client, target=self.thr_client,
args=(sck, addr), args=(sck, addr),
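The rewritten accept path above adds a watchdog: it remembers when a connection was first handed to the threadpool (tp_time), and if that timestamp is still pending five minutes later it assumes the pool died, logs the state, and drops back to one-thread-per-client. A simplified sketch of that pattern, not the actual HttpSrv logic (the real code clears tp_time elsewhere and also grows the pool on demand):

import queue, threading, time

class Pool(object):
    def __init__(self, nthr=4):
        self.q = queue.Queue()
        self.tp_time = None  # when the oldest still-unclaimed job was queued
        self.mutex = threading.Lock()
        for _ in range(nthr):
            threading.Thread(target=self._worker, daemon=True).start()

    def _worker(self):
        while True:
            job = self.q.get()
            with self.mutex:
                self.tp_time = None  # a worker is alive and picking up jobs
            job()

    def submit(self, job):
        now = time.time()
        if self.q and now - (self.tp_time or now) > 300:
            print("threadpool looks dead; falling back to thread-per-client")
            self.q = None
        if self.q:
            with self.mutex:
                self.tp_time = self.tp_time or now
            self.q.put(job)
        else:
            threading.Thread(target=job, daemon=True).start()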

View File

@@ -433,6 +433,14 @@ class MTag(object):
]: ]:
try: try:
v = getattr(md.info, attr) v = getattr(md.info, attr)
except:
if k != "ac":
continue
try:
v = str(md.info).split(".")[1]
if v.startswith("ogg"):
v = v[3:]
except: except:
continue continue

View File

@@ -1,3 +1,5 @@
# coding: utf-8
""" """
This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
handler of Python 3. handler of Python 3.
@@ -171,7 +173,7 @@ FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and not PY3: if WINDOWS and not PY3:
# py2 thinks win* is mbcs, probably a bug? anyways this works # py2 thinks win* is mbcs, probably a bug? anyways this works
FS_ENCODING = 'utf-8' FS_ENCODING = "utf-8"
# normalize the filesystem encoding name. # normalize the filesystem encoding name.

View File

@@ -18,8 +18,7 @@ def errdesc(errors):
tf_path = tf.name tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace")) tf.write("\r\n".join(report).encode("utf-8", "replace"))
dt = datetime.utcfromtimestamp(time.time()) dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
dt = dt.strftime("%Y-%m%d-%H%M%S")
bos.chmod(tf_path, 0o444) bos.chmod(tf_path, 0o444)
return { return {

View File

@@ -14,7 +14,7 @@ from datetime import datetime, timedelta
import calendar import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
from .util import mp, start_log_thrs, start_stackmon, min_ex from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
@@ -41,7 +41,6 @@ class SvcHub(object):
self.stop_cond = threading.Condition() self.stop_cond = threading.Condition()
self.httpsrv_up = 0 self.httpsrv_up = 0
self.ansi_re = re.compile("\033\\[[^m]*m")
self.log_mutex = threading.Lock() self.log_mutex = threading.Lock()
self.next_day = 0 self.next_day = 0
@@ -111,7 +110,7 @@ class SvcHub(object):
thr.start() thr.start()
def _logname(self): def _logname(self):
dt = datetime.utcfromtimestamp(time.time()) dt = datetime.utcnow()
fn = self.args.lo fn = self.args.lo
for fs in "YmdHMS": for fs in "YmdHMS":
fs = "%" + fs fs = "%" + fs
@@ -244,8 +243,7 @@ class SvcHub(object):
return return
with self.log_mutex: with self.log_mutex:
ts = datetime.utcfromtimestamp(time.time()) ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
self.logf.write("@{} [{}] {}\n".format(ts, src, msg)) self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
now = time.time() now = time.time()
@@ -257,7 +255,7 @@ class SvcHub(object):
self.logf.close() self.logf.close()
self._setup_logfile("") self._setup_logfile("")
dt = datetime.utcfromtimestamp(time.time()) dt = datetime.utcnow()
# unix timestamp of next 00:00:00 (leap-seconds safe) # unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day day_now = dt.day
@@ -280,9 +278,9 @@ class SvcHub(object):
if not VT100: if not VT100:
fmt = "{} {:21} {}\n" fmt = "{} {:21} {}\n"
if "\033" in msg: if "\033" in msg:
msg = self.ansi_re.sub("", msg) msg = ansi_re.sub("", msg)
if "\033" in src: if "\033" in src:
src = self.ansi_re.sub("", src) src = ansi_re.sub("", src)
elif c: elif c:
if isinstance(c, int): if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg) msg = "\033[3{}m{}".format(c, msg)

View File

@@ -177,7 +177,7 @@ class TcpSrv(object):
eps = self.ips_linux() eps = self.ips_linux()
if "0.0.0.0" not in listen_ips: if "0.0.0.0" not in listen_ips:
eps = {k: v for k, v in eps if k in listen_ips} eps = {k: v for k, v in eps.items() if k in listen_ips}
default_route = None default_route = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

View File

@@ -26,6 +26,9 @@ class ThumbCli(object):
if is_vid and self.args.no_vthumb: if is_vid and self.args.no_vthumb:
return None return None
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem)
if fmt == "j" and self.args.th_no_jpg: if fmt == "j" and self.args.th_no_jpg:
fmt = "w" fmt = "w"

View File

@@ -21,7 +21,7 @@ HAVE_AVIF = False
HAVE_WEBP = False HAVE_WEBP = False
try: try:
from PIL import Image, ImageOps from PIL import Image, ImageOps, ExifTags
HAVE_PIL = True HAVE_PIL = True
try: try:
@@ -105,7 +105,10 @@ class ThumbSrv(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.busy = {} self.busy = {}
self.stopping = False self.stopping = False
self.nthr = self.args.th_mt
if not self.nthr:
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4 self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4) self.q = Queue(self.nthr * 4)
for n in range(self.nthr): for n in range(self.nthr):
t = threading.Thread( t = threading.Thread(
@@ -205,8 +208,8 @@ class ThumbSrv(object):
try: try:
fun(abspath, tpath) fun(abspath, tpath)
except: except:
msg = "{} failed on {}\n{}" msg = "{} could not create thumbnail of {}\n{}"
self.log(msg.format(fun.__name__, abspath, min_ex()), 3) self.log(msg.format(fun.__name__, abspath, min_ex()), "1;30")
with open(tpath, "wb") as _: with open(tpath, "wb") as _:
pass pass
@@ -221,21 +224,38 @@ class ThumbSrv(object):
with self.mutex: with self.mutex:
self.nthr -= 1 self.nthr -= 1
def conv_pil(self, abspath, tpath): def fancy_pillow(self, im):
with Image.open(fsenc(abspath)) as im: # exif_transpose is expensive (loads full image + unconditional copy)
crop = not self.args.th_no_crop r = max(*self.res) * 2
res2 = self.res im.thumbnail((r, r), resample=Image.LANCZOS)
if crop:
res2 = (res2[0] * 2, res2[1] * 2)
try: try:
im.thumbnail(res2, resample=Image.LANCZOS) k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
if crop: exif = im.getexif()
rot = int(exif[k])
del exif[k]
except:
rot = 1
rots = {8: Image.ROTATE_90, 3: Image.ROTATE_180, 6: Image.ROTATE_270}
if rot in rots:
im = im.transpose(rots[rot])
if self.args.th_no_crop:
im.thumbnail(self.res, resample=Image.LANCZOS)
else:
iw, ih = im.size iw, ih = im.size
dw, dh = self.res dw, dh = self.res
res = (min(iw, dw), min(ih, dh)) res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS) im = ImageOps.fit(im, res, method=Image.LANCZOS)
except:
return im
def conv_pil(self, abspath, tpath):
with Image.open(fsenc(abspath)) as im:
try:
im = self.fancy_pillow(im)
except Exception as ex:
self.log("fancy_pillow {}".format(ex), "1;30")
im.thumbnail(self.res) im.thumbnail(self.res)
fmts = ["RGB", "L"] fmts = ["RGB", "L"]
@@ -250,13 +270,14 @@ class ThumbSrv(object):
fmts += ["RGBA", "LA"] fmts += ["RGBA", "LA"]
args["method"] = 6 args["method"] = 6
else: else:
pass # default q = 75 # default q = 75
args["progressive"] = True
if im.mode not in fmts: if im.mode not in fmts:
# print("conv {}".format(im.mode)) # print("conv {}".format(im.mode))
im = im.convert("RGB") im = im.convert("RGB")
im.save(tpath, quality=40, method=6) im.save(tpath, **args)
def conv_ffmpeg(self, abspath, tpath): def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath) ret, _ = ffprobe(abspath)
@@ -286,8 +307,10 @@ class ThumbSrv(object):
cmd += seek cmd += seek
cmd += [ cmd += [
b"-i", fsenc(abspath), b"-i", fsenc(abspath),
b"-map", b"0:v:0",
b"-vf", scale, b"-vf", scale,
b"-vframes", b"1", b"-frames:v", b"1",
b"-metadata:s:v:0", b"rotate=0",
] ]
# fmt: on # fmt: on
@@ -305,11 +328,13 @@ class ThumbSrv(object):
] ]
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd) ret, sout, serr = runcmd(cmd)
if ret != 0: if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")] m = "FFmpeg failed (probably a corrupt video file):\n"
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30") m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1])) raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def poke(self, tdir): def poke(self, tdir):
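fancy_pillow above downsizes first, then reads the EXIF Orientation tag itself and applies the matching transpose, instead of calling exif_transpose up front (which decodes the full-size image and always copies). A stripped-down sketch of that orientation handling; like the code above it only covers the three pure-rotation values (3, 6, 8) and ignores the mirrored ones, and the path and size are placeholders:

from PIL import Image, ExifTags

def load_rotated(path, longest=256):
    im = Image.open(path)
    im.thumbnail((longest, longest), resample=Image.LANCZOS)  # shrink before rotating
    try:
        k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
        rot = int(im.getexif()[k])
    except Exception:
        rot = 1  # no EXIF, or no orientation tag
    rots = {8: Image.ROTATE_90, 3: Image.ROTATE_180, 6: Image.ROTATE_270}
    if rot in rots:
        im = im.transpose(rots[rot])
    return im

# load_rotated("/srv/pics/cat.jpg").save("/tmp/cat.webp")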

View File

@@ -7,8 +7,8 @@ import time
import threading import threading
from datetime import datetime from datetime import datetime
from .__init__ import unicode from .__init__ import ANYWIN, unicode
from .util import s3dec, Pebkac, min_ex from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey
from .bos import bos from .bos import bos
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
@@ -88,7 +88,7 @@ class U2idx(object):
is_date = False is_date = False
kw_key = ["(", ")", "and ", "or ", "not "] kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "] kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z]+$") ptn_mt = re.compile(r"^\.?[a-z_-]+$")
mt_ctr = 0 mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)" mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None mt_keycmp2 = None
@@ -242,9 +242,10 @@ class U2idx(object):
self.active_cur = cur self.active_cur = cur
sret = [] sret = []
fk = flags.get("fk")
c = cur.execute(q, v) c = cur.execute(q, v)
for hit in c: for hit in c:
w, ts, sz, rd, fn = hit w, ts, sz, rd, fn, ip, at = hit
lim -= 1 lim -= 1
if lim <= 0: if lim <= 0:
break break
@@ -252,6 +253,20 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn) rd, fn = s3dec(rd, fn)
if fk:
try:
ap = absreal(os.path.join(ptop, rd, fn))
inf = bos.stat(ap)
except:
continue
fn += (
"?k="
+ gen_filekey(
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
)[:fk]
)
rp = "/".join([x for x in [vtop, rd, fn] if x]) rp = "/".join([x for x in [vtop, rd, fn] if x])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]}) sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

View File

@@ -36,7 +36,7 @@ from .util import (
min_ex, min_ex,
) )
from .bos import bos from .bos import bos
from .authsrv import AuthSrv from .authsrv import AuthSrv, LEELOO_DALLAS
from .mtag import MTag, MParser from .mtag import MTag, MParser
try: try:
@@ -45,7 +45,7 @@ try:
except: except:
HAVE_SQLITE3 = False HAVE_SQLITE3 = False
DB_VER = 4 DB_VER = 5
class Up2k(object): class Up2k(object):
@@ -176,27 +176,26 @@ class Up2k(object):
return None return None
def _sched_rescan(self): def _sched_rescan(self):
maxage = self.args.re_maxage
volage = {} volage = {}
while True: while True:
time.sleep(self.args.re_int) time.sleep(self.args.re_int)
now = time.time() now = time.time()
vpaths = list(sorted(self.asrv.vfs.all_vols.keys()))
with self.mutex: with self.mutex:
if maxage: for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
for vp in vpaths: maxage = vol.flags.get("scan")
if not maxage:
continue
if vp not in volage: if vp not in volage:
volage[vp] = now volage[vp] = now
if now - volage[vp] >= maxage: if now - volage[vp] >= maxage:
self.need_rescan[vp] = 1 self.need_rescan[vp] = 1
if not self.need_rescan:
continue
vols = list(sorted(self.need_rescan.keys())) vols = list(sorted(self.need_rescan.keys()))
self.need_rescan = {} self.need_rescan = {}
if vols:
err = self.rescan(self.asrv.vfs.all_vols, vols) err = self.rescan(self.asrv.vfs.all_vols, vols)
if err: if err:
for v in vols: for v in vols:
@@ -207,6 +206,42 @@ class Up2k(object):
for v in vols: for v in vols:
volage[v] = now volage[v] = now
if self.args.no_lifetime:
continue
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
lifetime = vol.flags.get("lifetime")
if not lifetime:
continue
cur = self.cur.get(vol.realpath)
if not cur:
continue
nrm = 0
deadline = time.time() - int(lifetime)
q = "select rd, fn from up where at > 0 and at < ? limit 100"
while True:
with self.mutex:
hits = cur.execute(q, (deadline,)).fetchall()
if not hits:
break
for rd, fn in hits:
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
fvp = "{}/{}".format(rd, fn).strip("/")
if vp:
fvp = "{}/{}".format(vp, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp, True)
nrm += 1
if nrm:
self.log("{} files graduated in {}".format(nrm, vp))
def _vis_job_progress(self, job): def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"])) perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"]) path = os.path.join(job["ptop"], job["prel"], job["name"])
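The new block at the end of _sched_rescan implements per-volume upload lifetimes: on every scheduler pass, volumes with a lifetime flag get their database queried for uploads whose "at" timestamp is older than the deadline, and those files are removed in batches of 100 via _handle_rm. A bare sketch of the expiry query only; the db path and lifetime are placeholders and the deletion step is left out:

import sqlite3, time

def expired_uploads(db_path, lifetime):
    # uploads whose "at" (upload time) is older than the volume lifetime
    deadline = time.time() - int(lifetime)
    q = "select rd, fn from up where at > 0 and at < ? limit 100"
    with sqlite3.connect(db_path) as db:
        return ["{}/{}".format(rd, fn).strip("/") for rd, fn in db.execute(q, (deadline,))]

# the real code loops, deleting each batch until the query comes back empty;
# this just returns the first batch of 100
# expired_uploads("/srv/inbox/.hist/up2k.db", 3600 * 24)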
@@ -522,7 +557,7 @@ class Up2k(object):
wark = up2k_wark_from_hashlist(self.salt, sz, hashes) wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, sz) self.db_add(dbw[0], wark, rd, fn, lmod, sz, "", 0)
dbw[1] += 1 dbw[1] += 1
ret += 1 ret += 1
td = time.time() - dbw[2] td = time.time() - dbw[2]
@@ -537,8 +572,8 @@ class Up2k(object):
rm = [] rm = []
nchecked = 0 nchecked = 0
nfiles = next(cur.execute("select count(w) from up"))[0] nfiles = next(cur.execute("select count(w) from up"))[0]
c = cur.execute("select * from up") c = cur.execute("select rd, fn from up")
for dwark, dts, dsz, drd, dfn in c: for drd, dfn in c:
nchecked += 1 nchecked += 1
if drd.startswith("//") or dfn.startswith("//"): if drd.startswith("//") or dfn.startswith("//"):
drd, dfn = s3dec(drd, dfn) drd, dfn = s3dec(drd, dfn)
@@ -803,6 +838,7 @@ class Up2k(object):
cur.connection.commit() cur.connection.commit()
if n_done: if n_done:
self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
cur.execute("vacuum") cur.execute("vacuum")
wcur.close() wcur.close()
@@ -941,6 +977,15 @@ class Up2k(object):
if not existed and ver is None: if not existed and ver is None:
return self._create_db(db_path, cur) return self._create_db(db_path, cur)
if ver == 4:
try:
m = "creating backup before upgrade: "
cur = self._backup_db(db_path, cur, ver, m)
self._upgrade_v4(cur)
ver = 5
except:
self.log("WARN: failed to upgrade from v4", 3)
if ver == DB_VER: if ver == DB_VER:
try: try:
nfiles = next(cur.execute("select count(w) from up"))[0] nfiles = next(cur.execute("select count(w) from up"))[0]
@@ -1011,9 +1056,10 @@ class Up2k(object):
idx = r"create index up_w on up(w)" idx = r"create index up_w on up(w)"
for cmd in [ for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)", r"create table up (w text, mt int, sz int, rd text, fn text, ip text, at int)",
r"create index up_rd on up(rd)", r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)", r"create index up_fn on up(fn)",
r"create index up_ip on up(ip)",
idx, idx,
r"create table mt (w text, k text, v int)", r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)", r"create index mt_w on mt(w)",
@@ -1028,6 +1074,17 @@ class Up2k(object):
self.log("created DB at {}".format(db_path)) self.log("created DB at {}".format(db_path))
return cur return cur
def _upgrade_v4(self, cur):
for cmd in [
r"alter table up add column ip text",
r"alter table up add column at int",
r"create index up_ip on up(ip)",
r"update kv set v=5 where k='sver'",
]:
cur.execute(cmd)
cur.connection.commit()
def handle_json(self, cj): def handle_json(self, cj):
with self.mutex: with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]): if not self.register_vpath(cj["ptop"], cj["vcfg"]):
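The schema bump above (DB_VER 4 to 5) adds the uploader-IP and upload-time columns that unpost, filekeys-in-search and lifetimes rely on, with an in-place upgrade path for existing v4 databases. A condensed sketch mirroring the statements in _create_db and _upgrade_v4; the mt (tags) and kv tables are omitted here, and the kv update assumes an existing v4 database that already has them:

import sqlite3

def create_v5(db_path):
    db = sqlite3.connect(db_path)
    for cmd in [
        "create table up (w text, mt int, sz int, rd text, fn text, ip text, at int)",
        "create index up_rd on up(rd)",
        "create index up_fn on up(fn)",
        "create index up_ip on up(ip)",
        "create index up_w on up(w)",
    ]:
        db.execute(cmd)
    db.commit()
    return db

def upgrade_v4_to_v5(db):
    # add the new columns to an existing v4 database and bump the stored version
    for cmd in [
        "alter table up add column ip text",
        "alter table up add column at int",
        "create index up_ip on up(ip)",
        "update kv set v=5 where k='sver'",
    ]:
        db.execute(cmd)
    db.commit()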
@@ -1051,7 +1108,7 @@ class Up2k(object):
argv = (wark[:16], wark) argv = (wark[:16], wark)
cur = cur.execute(q, argv) cur = cur.execute(q, argv)
for _, dtime, dsize, dp_dir, dp_fn in cur: for _, dtime, dsize, dp_dir, dp_fn, ip, at in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"): if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = s3dec(dp_dir, dp_fn) dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
@@ -1065,6 +1122,8 @@ class Up2k(object):
"ptop": cj["ptop"], "ptop": cj["ptop"],
"size": dsize, "size": dsize,
"lmod": dtime, "lmod": dtime,
"addr": ip,
"at": at,
"hash": [], "hash": [],
"need": [], "need": [],
} }
@@ -1119,11 +1178,22 @@ class Up2k(object):
self._symlink(src, dst) self._symlink(src, dst)
if cur: if cur:
a = [cj[x] for x in "prel name lmod size".split()] a = [cj[x] for x in "prel name lmod size addr".split()]
a += [cj.get("at") or time.time()]
self.db_add(cur, wark, *a) self.db_add(cur, wark, *a)
cur.connection.commit() cur.connection.commit()
if not job: if not job:
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
if vfs.lim:
ap1 = os.path.join(cj["ptop"], cj["prel"])
ap2, cj["prel"] = vfs.lim.all(
cj["addr"], cj["prel"], cj["size"], ap1
)
bos.makedirs(ap2)
vfs.lim.nup(cj["addr"])
vfs.lim.bup(cj["addr"], cj["size"])
job = { job = {
"wark": wark, "wark": wark,
"t0": now, "t0": now,
@@ -1154,8 +1224,12 @@ class Up2k(object):
self._new_upload(job) self._new_upload(job)
purl = "{}/{}".format(job["vtop"], job["prel"]).strip("/")
purl = "/{}/".format(purl) if purl else "/"
return { return {
"name": job["name"], "name": job["name"],
"purl": purl,
"size": job["size"], "size": job["size"],
"lmod": job["lmod"], "lmod": job["lmod"],
"hash": job["need"], "hash": job["need"],
@@ -1206,7 +1280,7 @@ class Up2k(object):
hops = len(ndst[nc:]) - 1 hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(lsrc) lsrc = "../" * hops + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst)) os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex: except Exception as ex:
self.log("cannot symlink; creating copy: " + repr(ex)) self.log("cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst)) shutil.copy2(fsenc(src), fsenc(dst))
@@ -1266,20 +1340,21 @@ class Up2k(object):
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))] a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
self.lastmod_q.put(a) self.lastmod_q.put(a)
a = [job[x] for x in "ptop wark prel name lmod size".split()] a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
a += [job.get("at") or time.time()]
if self.idx_wark(*a): if self.idx_wark(*a):
del self.registry[ptop][wark] del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads # in-memory registry is reserved for unfinished uploads
return ret, dst return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz): def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
if not cur: if not cur:
return False return False
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, lmod, sz) self.db_add(cur, wark, rd, fn, lmod, sz, ip, at)
cur.connection.commit() cur.connection.commit()
if "e2t" in self.flags[ptop]: if "e2t" in self.flags[ptop]:
@@ -1295,53 +1370,105 @@ class Up2k(object):
except: except:
db.execute(sql, s3enc(self.mem_cur, rd, fn)) db.execute(sql, s3enc(self.mem_cur, rd, fn))
def db_add(self, db, wark, rd, fn, ts, sz): def db_add(self, db, wark, rd, fn, ts, sz, ip, at):
sql = "insert into up values (?,?,?,?,?)" sql = "insert into up values (?,?,?,?,?,?,?)"
v = (wark, int(ts), sz, rd, fn) v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
try: try:
db.execute(sql, v) db.execute(sql, v)
except: except:
rd, fn = s3enc(self.mem_cur, rd, fn) rd, fn = s3enc(self.mem_cur, rd, fn)
v = (wark, int(ts), sz, rd, fn) v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
db.execute(sql, v) db.execute(sql, v)
def handle_rm(self, uname, vpath): def handle_rm(self, uname, ip, vpaths):
n_files = 0
ok = {}
ng = {}
for vp in vpaths:
a, b, c = self._handle_rm(uname, ip, vp, False)
n_files += a
for k in b:
ok[k] = 1
for k in c:
ng[k] = 1
ng = {k: 1 for k in ng if k not in ok}
ok = len(ok)
ng = len(ng)
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def _handle_rm(self, uname, ip, vpath, rm_topdir):
try:
permsets = [[True, False, False, True]] permsets = [[True, False, False, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0]) vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
unpost = False
except:
# unpost with missing permissions? try read+write and verify with db
if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
unpost = True
permsets = [[True, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
m = "you cannot delete this: "
if not dip:
m += "file not found"
elif dip != ip:
m += "not uploaded by (You)"
elif dat < time.time() - self.args.unpost:
m += "uploaded too long ago"
else:
m = None
if m:
raise Pebkac(400, m)
ptop = vn.realpath ptop = vn.realpath
atop = vn.canonical(rem) atop = vn.canonical(rem, False)
adir, fn = os.path.split(atop) adir, fn = os.path.split(atop)
try:
st = bos.lstat(atop) st = bos.lstat(atop)
except:
raise Pebkac(400, "file not found on disk (already deleted?)")
scandir = not self.args.no_scandir scandir = not self.args.no_scandir
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode): if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0]) dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0])
dbv, vrem = dbv.get_dbv(vrem) dbv, vrem = dbv.get_dbv(vrem)
g = [[dbv, vrem, os.path.dirname(vpath), adir, [[fn, 0]], [], []]] voldir = vsplit(vrem)[0]
vpath_dir = vsplit(vpath)[0]
g = [[dbv, voldir, vpath_dir, adir, [[fn, 0]], [], []]]
else: else:
g = vn.walk("", rem, [], uname, permsets, True, scandir, True) g = vn.walk("", rem, [], uname, permsets, True, scandir, True)
if unpost:
raise Pebkac(400, "cannot unpost folders")
n_files = 0 n_files = 0
for dbv, vrem, _, adir, files, rd, vd in g: for dbv, vrem, _, adir, files, rd, vd in g:
for fn in [x[0] for x in files]: for fn in [x[0] for x in files]:
n_files += 1 n_files += 1
abspath = os.path.join(adir, fn) abspath = os.path.join(adir, fn)
vpath = "{}/{}".format(vrem, fn).strip("/") volpath = "{}/{}".format(vrem, fn).strip("/")
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
self.log("rm {}\n {}".format(vpath, abspath)) self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(vrem, uname, *permsets[0]) _ = dbv.get(volpath, uname, *permsets[0])
with self.mutex: with self.mutex:
cur = None
try: try:
ptop = dbv.realpath ptop = dbv.realpath
cur, wark, _, _ = self._find_from_vpath(ptop, vrem) cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
self._forget_file(ptop, vpath, cur, wark) self._forget_file(ptop, volpath, cur, wark, True)
finally: finally:
if cur:
cur.connection.commit() cur.connection.commit()
bos.unlink(abspath) bos.unlink(abspath)
rm = rmdirs(self.log_func, scandir, True, atop) rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0)
ok = len(rm[0]) return n_files, rm[0], rm[1]
ng = len(rm[1])
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def handle_mv(self, uname, svp, dvp): def handle_mv(self, uname, svp, dvp):
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True) svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
@@ -1351,8 +1478,9 @@ class Up2k(object):
if not srem: if not srem:
raise Pebkac(400, "mv: cannot move a mountpoint") raise Pebkac(400, "mv: cannot move a mountpoint")
st = bos.stat(sabs) st = bos.lstat(sabs)
if stat.S_ISREG(st.st_mode): if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
with self.mutex:
return self._mv_file(uname, svp, dvp) return self._mv_file(uname, svp, dvp)
jail = svn.get_dbv(srem)[0] jail = svn.get_dbv(srem)[0]
@@ -1378,9 +1506,10 @@ class Up2k(object):
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp)) raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
dvpf = dvp + svpf[len(svp) :] dvpf = dvp + svpf[len(svp) :]
with self.mutex:
self._mv_file(uname, svpf, dvpf) self._mv_file(uname, svpf, dvpf)
rmdirs(self.log_func, scandir, True, sabs) rmdirs(self.log_func, scandir, True, sabs, 1)
return "k" return "k"
def _mv_file(self, uname, svp, dvp): def _mv_file(self, uname, svp, dvp):
@@ -1394,6 +1523,14 @@ class Up2k(object):
dabs = dvn.canonical(drem) dabs = dvn.canonical(drem)
drd, dfn = vsplit(drem) drd, dfn = vsplit(drem)
n1 = svp.split("/")[-1]
n2 = dvp.split("/")[-1]
if n1.startswith(".") or n2.startswith("."):
if self.args.no_dot_mv:
raise Pebkac(400, "moving dotfiles is disabled in server config")
elif self.args.no_dot_ren and n1 != n2:
raise Pebkac(400, "renaming dotfiles is disabled in server config")
if bos.path.exists(dabs): if bos.path.exists(dabs):
raise Pebkac(400, "mv2: target file exists") raise Pebkac(400, "mv2: target file exists")
@@ -1411,7 +1548,7 @@ class Up2k(object):
self.need_rescan[dvn.vpath] = 1 self.need_rescan[dvn.vpath] = 1
return "k" return "k"
c1, w, ftime, fsize = self._find_from_vpath(svn.realpath, srem) c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
c2 = self.cur.get(dvn.realpath) c2 = self.cur.get(dvn.realpath)
if ftime is None: if ftime is None:
@@ -1420,15 +1557,15 @@ class Up2k(object):
fsize = st.st_size fsize = st.st_size
if w: if w:
if c2: if c2 and c2 != c1:
self._copy_tags(c1, c2, w) self._copy_tags(c1, c2, w)
self._forget_file(svn.realpath, srem, c1, w) self._forget_file(svn.realpath, srem, c1, w, c1 != c2)
self._relink(w, svn.realpath, srem, dabs) self._relink(w, svn.realpath, srem, dabs)
c1.connection.commit() c1.connection.commit()
if c2: if c2:
self.db_add(c2, w, drd, dfn, ftime, fsize) self.db_add(c2, w, drd, dfn, ftime, fsize, ip, at)
c2.connection.commit() c2.connection.commit()
else: else:
self.log("not found in src db: [{}]".format(svp)) self.log("not found in src db: [{}]".format(svp))
@@ -1449,10 +1586,10 @@ class Up2k(object):
def _find_from_vpath(self, ptop, vrem): def _find_from_vpath(self, ptop, vrem):
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
if not cur: if not cur:
return None, None return [None] * 6
rd, fn = vsplit(vrem) rd, fn = vsplit(vrem)
q = "select w, mt, sz from up where rd=? and fn=? limit 1" q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
try: try:
c = cur.execute(q, (rd, fn)) c = cur.execute(q, (rd, fn))
except: except:
@@ -1460,20 +1597,24 @@ class Up2k(object):
hit = c.fetchone() hit = c.fetchone()
if hit: if hit:
wark, ftime, fsize = hit wark, ftime, fsize, ip, at = hit
return cur, wark, ftime, fsize return cur, wark, ftime, fsize, ip, at
return cur, None, None, None return cur, None, None, None, None, None
def _forget_file(self, ptop, vrem, cur, wark): def _forget_file(self, ptop, vrem, cur, wark, drop_tags):
"""forgets file in db, fixes symlinks, does not delete""" """forgets file in db, fixes symlinks, does not delete"""
srd, sfn = vsplit(vrem) srd, sfn = vsplit(vrem)
self.log("forgetting {}".format(vrem)) self.log("forgetting {}".format(vrem))
if wark: if wark:
self.log("found {} in db".format(wark)) self.log("found {} in db".format(wark))
self._relink(wark, ptop, vrem, None) if drop_tags:
if self._relink(wark, ptop, vrem, None):
drop_tags = False
if drop_tags:
q = "delete from mt where w=?" q = "delete from mt where w=?"
cur.execute(q, (wark[:16],)) cur.execute(q, (wark[:16],))
self.db_rm(cur, srd, sfn) self.db_rm(cur, srd, sfn)
reg = self.registry.get(ptop) reg = self.registry.get(ptop)
@@ -1510,7 +1651,7 @@ class Up2k(object):
self.log("found {} dupe: [{}] {}".format(wark, ptop, dvrem)) self.log("found {} dupe: [{}] {}".format(wark, ptop, dvrem))
if not dupes: if not dupes:
return return 0
full = {} full = {}
links = {} links = {}
@@ -1526,7 +1667,7 @@ class Up2k(object):
# deleting final remaining full copy; swap it with a symlink # deleting final remaining full copy; swap it with a symlink
slabs = list(sorted(links.keys()))[0] slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs) ptop, rem = links.pop(slabs)
self.log("linkswap [{}] and [{}]".format(sabs, dabs)) self.log("linkswap [{}] and [{}]".format(sabs, slabs))
bos.unlink(slabs) bos.unlink(slabs)
bos.rename(sabs, slabs) bos.rename(sabs, slabs)
self._symlink(slabs, sabs, False) self._symlink(slabs, sabs, False)
@@ -1547,6 +1688,8 @@ class Up2k(object):
self._symlink(dabs, alink, False) self._symlink(dabs, alink, False)
return len(full) + len(links)
def _get_wark(self, cj): def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec") raise Pebkac(400, "name or numchunks not according to spec")
@@ -1753,7 +1896,7 @@ class Up2k(object):
self.n_hashq -= 1 self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq)) # self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn = self.hashq.get() ptop, rd, fn, ip, at = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn)) # self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]: if "e2d" not in self.flags[ptop]:
continue continue
@@ -1764,12 +1907,12 @@ class Up2k(object):
hashes = self._hashlist_from_file(abspath) hashes = self._hashlist_from_file(abspath)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex: with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size) self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size, ip, at)
def hash_file(self, ptop, flags, rd, fn): def hash_file(self, ptop, flags, rd, fn, ip, at):
with self.mutex: with self.mutex:
self.register_vpath(ptop, flags) self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn]) self.hashq.put([ptop, rd, fn, ip, at])
self.n_hashq += 1 self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn)) # self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))

View File

@@ -19,7 +19,7 @@ import subprocess as sp # nosec
from datetime import datetime from datetime import datetime
from collections import Counter from collections import Counter
from .__init__ import PY2, WINDOWS, ANYWIN from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
from .stolen import surrogateescape from .stolen import surrogateescape
FAKE_MP = False FAKE_MP = False
@@ -58,6 +58,9 @@ except:
return struct.unpack(f.decode("ascii"), *a, **ka) return struct.unpack(f.decode("ascii"), *a, **ka)
ansi_re = re.compile("\033\\[[^mK]*[mK]")
surrogateescape.register_surrogateescape() surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding() FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and PY2: if WINDOWS and PY2:
@@ -77,6 +80,7 @@ HTTPCODE = {
403: "Forbidden", 403: "Forbidden",
404: "Not Found", 404: "Not Found",
405: "Method Not Allowed", 405: "Method Not Allowed",
411: "Length Required",
413: "Payload Too Large", 413: "Payload Too Large",
416: "Requested Range Not Satisfiable", 416: "Requested Range Not Satisfiable",
422: "Unprocessable Entity", 422: "Unprocessable Entity",
@@ -165,7 +169,7 @@ class Cooldown(object):
return ret return ret
class Unrecv(object): class _Unrecv(object):
""" """
undo any number of socket recv ops undo any number of socket recv ops
""" """
@@ -185,10 +189,68 @@ class Unrecv(object):
except: except:
return b"" return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf): def unrecv(self, buf):
self.buf = buf + self.buf self.buf = buf + self.buf
class _LUnrecv(object):
"""
with expensive debug logging
"""
def __init__(self, s):
self.s = s
self.buf = b""
def recv(self, nbytes):
if self.buf:
ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:]
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(ret, self.buf), end="")
return ret
try:
ret = self.s.recv(nbytes)
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
print(m.format(ret), end="")
return ret
except:
return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf):
self.buf = buf + self.buf
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(buf, self.buf), end="")
Unrecv = _Unrecv
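recv_ex above exists because socket.recv may return fewer bytes than requested; the multipart parser and the chunked-body reader further down now use it wherever a delimiter must be read exactly. A tiny standalone version of the same loop over a plain socket; the host and port in the usage comment are placeholders:

import socket

def recv_ex(sck, nbytes):
    # keep reading until exactly nbytes arrived, or the peer closed the socket
    ret = b""
    while len(ret) < nbytes:
        buf = sck.recv(nbytes - len(ret))
        if not buf:
            break
        ret += buf
    return ret

# sck = socket.create_connection(("127.0.0.1", 3923))
# tail = recv_ex(sck, 2)   # e.g. the b"\r\n" after a multipart field value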
class ProgressPrinter(threading.Thread): class ProgressPrinter(threading.Thread):
""" """
periodically print progress info without linefeeds periodically print progress info without linefeeds
@@ -203,17 +265,22 @@ class ProgressPrinter(threading.Thread):
def run(self): def run(self):
msg = None msg = None
fmt = " {}\033[K\r" if VT100 else " {} $\r"
while not self.end: while not self.end:
time.sleep(0.1) time.sleep(0.1)
if msg == self.msg or self.end: if msg == self.msg or self.end:
continue continue
msg = self.msg msg = self.msg
uprint(" {}\033[K\r".format(msg)) uprint(fmt.format(msg))
if PY2: if PY2:
sys.stdout.flush() sys.stdout.flush()
if VT100:
print("\033[K", end="") print("\033[K", end="")
elif msg:
print("------------------------")
sys.stdout.flush() # necessary on win10 even w/ stderr btw sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -340,6 +407,13 @@ def log_thrs(log, ival, name):
log(name, "\033[0m \033[33m".join(tv), 3) log(name, "\033[0m \033[33m".join(tv), 3)
def vol_san(vols, txt):
for vol in vols:
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
return txt
def min_ex(): def min_ex():
et, ev, tb = sys.exc_info() et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb) tb = traceback.extract_tb(tb)
@@ -571,19 +645,21 @@ class MultipartParser(object):
yields [fieldname, unsanitized_filename, fieldvalue] yields [fieldname, unsanitized_filename, fieldvalue]
where fieldvalue yields chunks of data where fieldvalue yields chunks of data
""" """
while True: run = True
while run:
fieldname, filename = self._read_header() fieldname, filename = self._read_header()
yield [fieldname, filename, self._read_data()] yield [fieldname, filename, self._read_data()]
tail = self.sr.recv(2) tail = self.sr.recv_ex(2)
if tail == b"--": if tail == b"--":
# EOF indicated by this immediately after final boundary # EOF indicated by this immediately after final boundary
self.sr.recv(2) tail = self.sr.recv_ex(2)
return run = False
if tail != b"\r\n": if tail != b"\r\n":
raise Pebkac(400, "protocol error after field value") m = "protocol error after field value: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(tail))
def _read_value(self, iterator, max_len): def _read_value(self, iterator, max_len):
ret = b"" ret = b""
@@ -669,6 +745,14 @@ def read_header(sr):
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n") return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def gen_filekey(salt, fspath, fsize, inode):
return base64.urlsafe_b64encode(
hashlib.sha512(
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
).digest()
).decode("ascii")
def humansize(sz, terse=False): def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]: for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024: if sz < 1024:
@@ -684,6 +768,17 @@ def humansize(sz, terse=False):
return ret.replace("iB", "").replace(" ", "") return ret.replace("iB", "").replace(" ", "")
def unhumanize(sz):
try:
return float(sz)
except:
pass
mul = sz[-1:].lower()
mul = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mul, 1)
return float(sz[:-1]) * mul
def get_spd(nbyte, t0, t=None): def get_spd(nbyte, t0, t=None):
if t is None: if t is None:
t = time.time() t = time.time()
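unhumanize above is the inverse of humansize: it accepts plain numbers as well as k/m/g suffixes, likely consumed by the new per-volume upload limits (the vfs.lim calls in the up2k hunk), though that wiring is not shown in this diff. A usage sketch of the same function, copied from the hunk above with a narrower except clause:

def unhumanize(sz):
    try:
        return float(sz)
    except ValueError:
        pass
    mul = sz[-1:].lower()
    mul = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mul, 1)
    return float(sz[:-1]) * mul

# unhumanize("64m")  ->  67108864.0
# unhumanize("1.5g") ->  1610612736.0
# unhumanize("300")  ->  300.0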
@@ -958,16 +1053,23 @@ def read_socket_chunked(sr, log=None):
raise Pebkac(400, err) raise Pebkac(400, err)
if chunklen == 0: if chunklen == 0:
sr.recv(2) # \r\n after final chunk x = sr.recv_ex(2)
if x == b"\r\n":
return return
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
if log: if log:
log("receiving {} byte chunk".format(chunklen)) log("receiving {} byte chunk".format(chunklen))
for chunk in read_socket(sr, chunklen): for chunk in read_socket(sr, chunklen):
yield chunk yield chunk
sr.recv(2) # \r\n after each chunk too x = sr.recv_ex(2)
if x != b"\r\n":
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
def yieldfile(fn): def yieldfile(fn):
@@ -1062,17 +1164,21 @@ def statdir(logger, scandir, lstat, top):
logger(src, "{} @ {}".format(repr(ex), top), 1) logger(src, "{} @ {}".format(repr(ex), top), 1)
def rmdirs(logger, scandir, lstat, top): def rmdirs(logger, scandir, lstat, top, depth):
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
top = os.path.dirname(top)
dirs = statdir(logger, scandir, lstat, top) dirs = statdir(logger, scandir, lstat, top)
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)] dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
dirs = [os.path.join(top, x) for x in dirs] dirs = [os.path.join(top, x) for x in dirs]
ok = [] ok = []
ng = [] ng = []
for d in dirs[::-1]: for d in dirs[::-1]:
a, b = rmdirs(logger, scandir, lstat, d) a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
ok += a ok += a
ng += b ng += b
if depth:
try: try:
os.rmdir(fsenc(top)) os.rmdir(fsenc(top))
ok.append(top) ok.append(top)
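rmdirs now takes a depth argument so callers can prune empty subfolders while keeping the top folder itself (depth 0 is only removed when the caller passes 1, as the lifetime path does; handle_rm passes 0 for regular deletes). A simplified sketch of the same bottom-up pruning using os.listdir instead of the statdir/scandir helpers:

import os

def prune_empty(top, depth=0):
    # remove empty directories bottom-up; keep `top` itself when depth == 0
    ok, ng = [], []
    for name in sorted(os.listdir(top), reverse=True):
        d = os.path.join(top, name)
        if os.path.isdir(d):
            a, b = prune_empty(d, depth + 1)
            ok += a
            ng += b
    if depth:
        try:
            os.rmdir(top)
            ok.append(top)
        except OSError:
            ng.append(top)
    return ok, ng

# removed, kept = prune_empty("/srv/inbox/some-folder")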

View File

@@ -22,7 +22,7 @@ window.baguetteBox = (function () {
afterHide: null, afterHide: null,
onChange: null, onChange: null,
}, },
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose, overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
currentGallery = [], currentGallery = [],
currentIndex = 0, currentIndex = 0,
isOverlayVisible = false, isOverlayVisible = false,
@@ -49,7 +49,7 @@ window.baguetteBox = (function () {
}; };
var touchstartHandler = function (e) { var touchstartHandler = function (e) {
touch.count++; touch.count = e.touches.length;
if (touch.count > 1) if (touch.count > 1)
touch.multitouch = true; touch.multitouch = true;
@@ -72,8 +72,11 @@ window.baguetteBox = (function () {
hideOverlay(); hideOverlay();
} }
}; };
var touchendHandler = function () { var touchendHandler = function (e) {
touch.count--; touch.count--;
if (e && e.touches)
touch.count = e.touches.length;
if (touch.count <= 0) if (touch.count <= 0)
touch.multitouch = false; touch.multitouch = false;
@@ -175,6 +178,9 @@ window.baguetteBox = (function () {
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' + '<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' + '<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' + '<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-rotl" type="button">↶</button>' +
'<button id="bbox-rotr" type="button">↷</button>' +
'<button id="bbox-tsel" type="button">sel</button>' +
'<button id="bbox-vmode" type="button" tt="a"></button>' + '<button id="bbox-vmode" type="button" tt="a"></button>' +
'<button id="bbox-close" type="button" aria-label="Close">X</button>' + '<button id="bbox-close" type="button" aria-label="Close">X</button>' +
'</div></div>' '</div></div>'
@@ -187,6 +193,9 @@ window.baguetteBox = (function () {
btnPrev = ebi('bbox-prev'); btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next'); btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help'); btnHelp = ebi('bbox-help');
btnRotL = ebi('bbox-rotl');
btnRotR = ebi('bbox-rotr');
btnSel = ebi('bbox-tsel');
btnVmode = ebi('bbox-vmode'); btnVmode = ebi('bbox-vmode');
btnClose = ebi('bbox-close'); btnClose = ebi('bbox-close');
bindEvents(); bindEvents();
@@ -203,11 +212,13 @@ window.baguetteBox = (function () {
['right, L', 'next file'], ['right, L', 'next file'],
['home', 'first file'], ['home', 'first file'],
['end', 'last file'], ['end', 'last file'],
['R', 'rotate (shift=ccw)'],
['S', 'toggle file selection'],
['space, P, K', 'video: play / pause'], ['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'], ['U', 'video: seek 10sec back'],
['P', 'video: seek 10sec ahead'], ['P', 'video: seek 10sec ahead'],
['M', 'video: toggle mute'], ['M', 'video: toggle mute'],
['R', 'video: toggle loop'], ['V', 'video: toggle loop'],
['C', 'video: toggle auto-next'], ['C', 'video: toggle auto-next'],
['F', 'video: toggle fullscreen'], ['F', 'video: toggle fullscreen'],
], ],
@@ -249,7 +260,7 @@ window.baguetteBox = (function () {
v.muted = vmute = !vmute; v.muted = vmute = !vmute;
mp_ctl(); mp_ctl();
} }
else if (k == "KeyR" && v) { else if (k == "KeyV" && v) {
vloop = !vloop; vloop = !vloop;
vnext = vnext && !vloop; vnext = vnext && !vloop;
setVmode(); setVmode();
@@ -267,6 +278,10 @@ window.baguetteBox = (function () {
v.requestFullscreen(); v.requestFullscreen();
} }
catch (ex) { } catch (ex) { }
else if (k == "KeyS")
tglsel();
else if (k == "KeyR")
rotn(e.shiftKey ? -1 : 1);
} }
function setVmode() { function setVmode() {
@@ -279,7 +294,7 @@ window.baguetteBox = (function () {
if (vloop) { if (vloop) {
lbl = 'Loop'; lbl = 'Loop';
msg += 'repeat it'; msg += 'repeat it';
tts = '$NHotkey: R'; tts = '$NHotkey: V';
} }
else if (vnext) { else if (vnext) {
lbl = 'Cont'; lbl = 'Cont';
@@ -314,6 +329,40 @@ window.baguetteBox = (function () {
tt.show.bind(this)(); tt.show.bind(this)();
} }
function tglsel() {
var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
files = msel.getall();
for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name)
clmod(ebi(files[a].id).closest('tr'), 'sel', 't');
msel.selui();
selbg();
}
function selbg() {
var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
files = msel.getsel(),
sel = false;
for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name)
sel = true;
ebi('bbox-overlay').style.background = sel ?
'rgba(153,34,85,0.7)' : '';
img.style.borderRadius = sel ? '1em' : '';
btnSel.style.color = sel ? '#fff' : '';
btnSel.style.background = sel ? '#d48' : '';
btnSel.style.textShadow = sel ? '1px 1px 0 #b38' : '';
btnSel.style.boxShadow = sel ? '.15em .15em 0 #502' : '';
}
function keyUpHandler(e) { function keyUpHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing) if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return; return;
@@ -348,6 +397,9 @@ window.baguetteBox = (function () {
bind(btnClose, 'click', hideOverlay); bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode); bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp); bind(btnHelp, 'click', halp);
bind(btnRotL, 'click', rotl);
bind(btnRotR, 'click', rotr);
bind(btnSel, 'click', tglsel);
bind(slider, 'contextmenu', contextmenuHandler); bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent); bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent); bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -362,11 +414,15 @@ window.baguetteBox = (function () {
unbind(btnClose, 'click', hideOverlay); unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode); unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp); unbind(btnHelp, 'click', halp);
unbind(btnRotL, 'click', rotl);
unbind(btnRotR, 'click', rotr);
unbind(btnSel, 'click', tglsel);
unbind(slider, 'contextmenu', contextmenuHandler); unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent); unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent); unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
unbind(overlay, 'touchend', touchendHandler); unbind(overlay, 'touchend', touchendHandler);
unbind(document, 'focus', trapFocusInsideOverlay, true); unbind(document, 'focus', trapFocusInsideOverlay, true);
timer.rm(rotn);
} }
function prepareOverlay(gallery, userOptions) { function prepareOverlay(gallery, userOptions) {
@@ -617,10 +673,91 @@ window.baguetteBox = (function () {
return true; return true;
} }
var prev_cw = 0, prev_ch = 0, unrot_timer = null;
function rotn(n) {
var el = vidimg(),
orot = parseInt(el.getAttribute('rot') || 0),
frot = orot + (n || 0) * 90;
if (!frot && !orot)
return; // reflow noop
var co = ebi('bbox-overlay'),
cw = co.clientWidth,
ch = co.clientHeight;
if (!n && prev_cw === cw && prev_ch === ch)
return; // reflow noop
prev_cw = cw;
prev_ch = ch;
var rot = frot,
iw = el.naturalWidth || el.videoWidth,
ih = el.naturalHeight || el.videoHeight,
magic = 4, // idk, works in enough browsers
dl = el.closest('div').querySelector('figcaption a'),
vw = cw,
vh = ch - dl.offsetHeight + magic,
pmag = Math.min(1, Math.min(vw / ih, vh / iw)),
wmag = Math.min(1, Math.min(vw / iw, vh / ih));
while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
var q = rot == 90 || rot == 270 ? 1 : 0,
mag = q ? pmag : wmag;
el.style.cssText = 'max-width:none; max-height:none; position:absolute; display:block; margin:0';
if (!orot) {
el.style.width = iw * wmag + 'px';
el.style.height = ih * wmag + 'px';
el.style.left = (vw - iw * wmag) / 2 + 'px';
el.style.top = (vh - ih * wmag) / 2 - magic + 'px';
q = el.offsetHeight;
}
el.style.width = iw * mag + 'px';
el.style.height = ih * mag + 'px';
el.style.left = (vw - iw * mag) / 2 + 'px';
el.style.top = (vh - ih * mag) / 2 - magic + 'px';
el.style.transform = 'rotate(' + frot + 'deg)';
el.setAttribute('rot', frot);
timer.add(rotn);
if (!rot) {
clearTimeout(unrot_timer);
unrot_timer = setTimeout(unrot, 300);
}
}
function rotl() {
rotn(-1);
}
function rotr() {
rotn(1);
}
function unrot() {
var el = vidimg(),
orot = el.getAttribute('rot'),
rot = parseInt(orot || 0);
while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
if (rot || orot === null)
return;
clmod(el, 'nt', 1);
el.removeAttribute('rot');
el.removeAttribute("style");
rot = el.offsetHeight;
clmod(el, 'nt');
timer.rm(rotn);
}
function vid() { function vid() {
return imagesElements[currentIndex].querySelector('video'); return imagesElements[currentIndex].querySelector('video');
} }
function vidimg() {
return imagesElements[currentIndex].querySelector('img, video');
}
function playvid(play) { function playvid(play) {
if (vid()) if (vid())
vid()[play ? 'play' : 'pause'](); vid()[play ? 'play' : 'pause']();
@@ -662,15 +799,21 @@ window.baguetteBox = (function () {
}
function updateOffset() {
-var offset = -currentIndex * 100 + '%';
+var offset = -currentIndex * 100 + '%',
+xform = slider.style.perspective !== undefined;
if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
-slider.style.transform = 'translate3d(' + offset + ',0,0)';
+xform ?
+slider.style.transform = 'translate3d(' + offset + ',0,0)' :
+slider.style.left = offset;
slider.style.opacity = 1;
}, 400);
} else {
-slider.style.transform = 'translate3d(' + offset + ',0,0)';
+xform ?
+slider.style.transform = 'translate3d(' + offset + ',0,0)' :
+slider.style.left = offset;
}
playvid(false);
var v = vid();
@@ -679,8 +822,21 @@ window.baguetteBox = (function () {
v.muted = vmute;
v.loop = vloop;
}
selbg();
mp_ctl();
setVmode();
var el = vidimg();
if (el.getAttribute('rot'))
timer.add(rotn);
else
timer.rm(rotn);
var prev = QS('.full-image.vis');
if (prev)
clmod(prev, 'vis');
clmod(el.closest('div'), 'vis', 1);
}
function preloadNext(index) {
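
The rotn() hunk above fits a rotated image or video into the overlay by swapping the media's width and height whenever the rotation lands on 90 or 270 degrees. A minimal standalone sketch of that fit calculation follows; the function and variable names are illustrative only, not part of baguettebox or copyparty:

    // sketch of the fit logic from rotn(): a quarter-turn swaps the bounding box,
    // so the scale factor is computed against the swapped dimensions;
    // never upscale beyond 1:1.
    function fitScale(deg, mediaW, mediaH, viewW, viewH) {
        var rot = ((deg % 360) + 360) % 360,
            quarter = rot == 90 || rot == 270,
            w = quarter ? mediaH : mediaW,
            h = quarter ? mediaW : mediaH;
        return Math.min(1, viewW / w, viewH / h);
    }
    // example: a 1920x1080 video rotated 90deg inside an 800x600 overlay
    // scales by 600/1920 = 0.3125 so the (now vertical) long edge fits the height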

File diff suppressed because it is too large

@@ -6,10 +6,10 @@
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
-<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
-<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
{%- if css %}
-<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
{%- endif %}
</head>
@@ -59,6 +59,8 @@
</form>
</div>
+<div id="op_unpost" class="opview opbox"></div>
<div id="op_up2k" class="opview"></div>
<div id="op_cfg" class="opview opbox opwide"></div>
@@ -112,6 +114,8 @@
<h2><a href="/?h">control-panel</a></h2>
+<a href="#" id="repl">π</a>
</div>
{%- if srv_info %}
@@ -123,12 +127,14 @@
<script>
var acct = "{{ acct }}",
perms = {{ perms }},
-tag_order_cfg = {{ tag_order }},
+def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
-have_zip = {{ have_zip|tojson }};
+have_unpost = {{ have_unpost|tojson }},
+have_zip = {{ have_zip|tojson }},
+readme = {{ readme|tojson }};
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>

File diff suppressed because it is too large

@@ -1,144 +1,17 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html, body {
color: #333;
background: #eee;
font-family: sans-serif;
line-height: 1.5em;
}
-#tt, #toast {
-position: fixed;
-max-width: 34em;
-background: #222;
-border: 0 solid #777;
-box-shadow: 0 .2em .5em #222;
-border-radius: .4em;
-z-index: 9001;
-}
-#tt {
-overflow: hidden;
-margin-top: 1em;
-padding: 0 1.3em;
-height: 0;
-opacity: .1;
-transition: opacity 0.14s, height 0.14s, padding 0.14s;
-}
-#toast {
-top: 1.4em;
-right: -1em;
-line-height: 1.5em;
-padding: 1em 1.3em;
-border-width: .4em 0;
-transform: translateX(100%);
-transition:
-transform .4s cubic-bezier(.2, 1.2, .5, 1),
-right .4s cubic-bezier(.2, 1.2, .5, 1);
-text-shadow: 1px 1px 0 #000;
-color: #fff;
-}
-#toastc {
-display: inline-block;
-position: absolute;
-overflow: hidden;
-left: 0;
-width: 0;
-opacity: 0;
-padding: .3em 0;
-margin: -.3em 0 0 0;
-line-height: 1.5em;
-color: #000;
-border: none;
-outline: none;
-text-shadow: none;
-border-radius: .5em 0 0 .5em;
-transition: left .3s, width .3s, padding .3s, opacity .3s;
-}
+#repl {
+position: absolute;
+top: 0;
+right: .5em;
+border: none;
+color: inherit;
+background: none;
+}
#toast.vis {
right: 1.3em;
transform: unset;
}
#toast.vis #toastc {
left: -2em;
width: .4em;
padding: .3em .8em;
opacity: 1;
}
#toast.inf {
background: #07a;
border-color: #0be;
}
#toast.inf #toastc {
background: #0be;
}
#toast.ok {
background: #4a0;
border-color: #8e4;
}
#toast.ok #toastc {
background: #8e4;
}
#toast.warn {
background: #970;
border-color: #fc0;
}
#toast.warn #toastc {
background: #fc0;
}
#toast.err {
background: #900;
border-color: #d06;
}
#toast.err #toastc {
background: #d06;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
html.light #tt {
background: #fff;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
#mtw {
display: none;
}
@@ -146,122 +19,12 @@ html.light #tt em {
margin: 0 auto;
padding: 0 1.5em;
}
-pre, code, a {
-color: #480;
-background: #f7f7f7;
-border: .07em solid #ddd;
-border-radius: .2em;
-padding: .1em .3em;
-margin: 0 .1em;
-}
-code {
-font-size: .96em;
-}
+#toast {
+bottom: auto;
+top: 1.4em;
+}
+a {
+text-decoration: none;
+}
pre, code, tt {
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
pre {
counter-reset: precode;
}
pre code {
counter-increment: precode;
display: inline-block;
margin: 0 -.3em;
padding: .4em .5em;
border: none;
border-bottom: 1px solid #cdc;
min-width: calc(100% - .6em);
line-height: 1.1em;
}
pre code:last-child {
border-bottom: none;
}
pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
color: #48a;
width: 4em;
padding-right: 1.5em;
margin-left: -5.5em;
}
pre code:hover {
background: #fec;
color: #360;
}
h1, h2 {
line-height: 1.5em;
}
h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
h3 {
border-bottom: .1em solid #999;
}
h1 a, h3 a, h5 a,
h2 a, h4 a, h6 a {
color: inherit;
display: block;
background: none;
border: none;
padding: 0;
margin: 0;
}
#mp ul,
#mp ol {
border-left: .3em solid #ddd;
}
#m>ul,
#m>ol {
border-color: #bbb;
}
#mp ul>li {
list-style-type: disc;
}
#mp ul>li,
#mp ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
small {
opacity: .8;
}
#toc {
margin: 0 1em;
@@ -309,14 +72,6 @@ small {
color: #6b3;
text-shadow: .02em 0 0 #6b3;
}
table {
border-collapse: collapse;
margin: 1em 0;
}
th, td {
padding: .2em .5em;
border: .12em solid #aaa;
}
blink {
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
}
@@ -329,6 +84,36 @@ blink {
}
}
.mdo pre {
counter-reset: precode;
}
.mdo pre code {
counter-increment: precode;
display: inline-block;
border: none;
border-bottom: 1px solid #cdc;
min-width: calc(100% - .6em);
}
.mdo pre code:last-child {
border-bottom: none;
}
.mdo pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
color: #48a;
width: 4em;
padding-right: 1.5em;
margin-left: -5.5em;
}
@media screen {
html, body {
margin: 0;
@@ -345,34 +130,6 @@ blink {
#mp {
max-width: 52em;
margin-bottom: 6em;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
#mn {
padding: 1.3em 0 .7em 1em;
@@ -425,6 +182,8 @@ blink {
color: #444;
background: none;
text-decoration: underline;
+margin: 0 .1em;
+padding: 0 .3em;
border: none;
}
#mh a:hover {
@@ -453,6 +212,10 @@ blink {
#toolsbox a+a {
text-decoration: none;
}
#lno {
position: absolute;
right: 0;
}
@@ -473,55 +236,6 @@ blink {
html.dark #toc li {
border-width: 0;
}
html.dark #mp a {
background: #057;
}
html.dark #mp h1 a, html.dark #mp h4 a,
html.dark #mp h2 a, html.dark #mp h5 a,
html.dark #mp h3 a, html.dark #mp h6 a {
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark #mp ul,
html.dark #mp ol {
border-color: #444;
}
html.dark #m>ul,
html.dark #m>ol {
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
}
html.dark #mn a:not(:last-child)::after {
border-color: rgba(255,255,255,0.3);
}
@@ -627,12 +341,15 @@ blink {
mso-footer-margin: .6in;
mso-paper-source: 0;
}
-a {
+.mdo a {
color: #079;
text-decoration: none;
border-bottom: .07em solid #4ac;
padding: 0 .3em;
}
#repl {
display: none;
}
#toc>ul {
border-left: .1em solid #84c4dd;
}
@@ -657,18 +374,20 @@ blink {
a[ctr]::before {
content: attr(ctr) '. ';
}
-h1 {
+.mdo h1 {
margin: 2em 0;
}
-h2 {
+.mdo h2 {
margin: 2em 0 0 0;
}
-h1, h2, h3 {
+.mdo h1,
+.mdo h2,
+.mdo h3 {
page-break-inside: avoid;
}
-h1::after,
-h2::after,
-h3::after {
+.mdo h1::after,
+.mdo h2::after,
+.mdo h3::after {
content: 'orz';
color: transparent;
display: block;
@@ -676,20 +395,20 @@ blink {
padding: 4em 0 0 0;
margin: 0 0 -5em 0;
}
-p {
+.mdo p {
page-break-inside: avoid;
}
-table {
+.mdo table {
page-break-inside: auto;
}
-tr {
+.mdo tr {
page-break-inside: avoid;
page-break-after: auto;
}
-thead {
+.mdo thead {
display: table-header-group;
}
-tfoot {
+.mdo tfoot {
display: table-footer-group;
}
#mp a.vis::after {
@@ -697,31 +416,32 @@ blink {
border-bottom: 1px solid #bbb;
color: #444;
}
-blockquote {
+.mdo blockquote {
border-color: #555;
}
-code {
+.mdo code {
border-color: #bbb;
}
-pre, pre code {
+.mdo pre,
+.mdo pre code {
border-color: #999;
}
-pre code::before {
+.mdo pre code::before {
color: #058;
}
-html.dark a {
+html.dark .mdo a {
color: #000;
}
-html.dark pre,
-html.dark code {
+html.dark .mdo pre,
+html.dark .mdo code {
color: #240;
}
-html.dark p>em,
-html.dark li>em,
-html.dark td>em {
+html.dark .mdo p>em,
+html.dark .mdo li>em,
+html.dark .mdo td>em {
color: #940;
}
}


@@ -1,11 +1,12 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
-<title>📝🎉 {{ title }}</title> <!-- 📜 -->
+<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
-<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
+<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
{%- if edit %}
-<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
+<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
{%- endif %}
</head>
<body>
@@ -25,6 +26,7 @@
<a id="cfg_uni" href="#">non-ascii: whitelist</a> <a id="cfg_uni" href="#">non-ascii: whitelist</a>
<a id="help" href="#">help</a> <a id="help" href="#">help</a>
</div> </div>
<span id="lno">L#</span>
{%- else %} {%- else %}
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a> <a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a> <a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
@@ -42,8 +44,9 @@
if you're still reading this, check that javascript is allowed
</div>
</div>
-<div id="mp"></div>
+<div id="mp" class="mdo"></div>
</div>
+<a href="#" id="repl">π</a>
{%- if edit %}
<div id="helpbox">


@@ -24,23 +24,6 @@ var dbg = function () { };
var md_plug = {};
function hesc(txt) {
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
function cls(dom, name, add) {
var re = new RegExp('(^| )' + name + '( |$)');
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
}
function statify(obj) {
return JSON.parse(JSON.stringify(obj));
}
// dodge browser issues
(function () {
var ua = navigator.userAgent;
@@ -65,7 +48,7 @@ function statify(obj) {
if (a > 0)
loc.push(n[a]);
-var dec = hesc(uricom_dec(n[a])[0]);
+var dec = esc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}
@@ -73,6 +56,26 @@ function statify(obj) {
})();
// image load handler
var img_load = (function () {
var r = {};
r.callbacks = [];
function fire() {
for (var a = 0; a < r.callbacks.length; a++)
r.callbacks[a]();
}
var timeout = null;
r.done = function () {
clearTimeout(timeout);
timeout = setTimeout(fire, 500);
};
return r;
})();
// faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
function copydom(src, dst, lv) {
var sc = src.childNodes,
@@ -185,7 +188,7 @@ function md_plug_err(ex, js) {
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {
-alert('' + ex.stack);
+modal.alert('<pre>' + esc(ex.stack) + '</pre>');
};
if (o) {
errbox.appendChild(o);
@@ -264,7 +267,14 @@ function convert_markdown(md_text, dest_dom) {
throw ex;
}
-var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
+var md_dom = dest_dom;
+try {
+md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
+}
+catch (ex) {
+md_dom.innerHTML = md_html;
+window.copydom = noop;
+}
var nodes = md_dom.getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) {
@@ -356,6 +366,10 @@ function convert_markdown(md_text, dest_dom) {
copydom(md_dom, dest_dom, 0);
var imgs = dest_dom.getElementsByTagName('img');
for (var a = 0, aa = imgs.length; a < aa; a++)
imgs[a].onload = img_load.done;
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
@@ -490,13 +504,16 @@ function init_toc() {
// "main" :p
convert_markdown(dom_src.value, dom_pre);
var toc = init_toc();
+img_load.callbacks = [toc.refresh];
// scroll handler
var redraw = (function () {
-var sbs = false;
+var sbs = true;
function onresize() {
+if (window.matchMedia)
sbs = window.matchMedia('(min-width: 64em)').matches;
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
if (sbs) {
dom_toc.style.top = y;


@@ -50,7 +50,7 @@
outline: none;
padding: 0;
margin: 0;
-font-family: 'consolas', monospace, monospace;
+font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;


@@ -98,7 +98,7 @@ var draw_md = (function () {
var src = dom_src.value;
convert_markdown(src, dom_pre);
-var lines = hesc(src).replace(/\r/g, "").split('\n');
+var lines = esc(src).replace(/\r/g, "").split('\n');
nlines = lines.length;
var html = [];
for (var a = 0; a < lines.length; a++)
@@ -108,7 +108,7 @@ var draw_md = (function () {
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
-cls(ebi('save'), 'disabled', src == server_md);
+clmod(ebi('save'), 'disabled', src == server_md);
var t1 = Date.now();
delay = t1 - t0 > 100 ? 25 : 1;
@@ -127,6 +127,12 @@ var draw_md = (function () {
})();
// discard TOC callback, just regen editor scroll map
img_load.callbacks = [function () {
map_pre = genmap(dom_pre, map_pre);
}];
// resize handler
redraw = (function () {
function onresize() {
@@ -136,7 +142,6 @@ redraw = (function () {
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
-dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
}
function setsbs() {
dom_wrap.setAttribute('class', '');
@@ -225,44 +230,40 @@ redraw = (function () {
// modification checker
function Modpoll() {
-this.skip_one = true;
-this.disabled = false;
-this.periodic = function () {
-var that = this;
-setTimeout(function () {
-that.periodic();
-}, 1000 * md_opt.modpoll_freq);
+var r = {
+skip_one: true,
+disabled: false
+};
+r.periodic = function () {
var skip = null;
if (toast.visible)
skip = 'toast';
-else if (this.skip_one)
+else if (r.skip_one)
skip = 'saved';
-else if (this.disabled)
+else if (r.disabled)
skip = 'disabled';
if (skip) {
console.log('modpoll skip, ' + skip);
-this.skip_one = false;
+r.skip_one = false;
return;
}
console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
-xhr.modpoll = this;
xhr.open('GET', url, true);
xhr.responseType = 'text';
-xhr.onreadystatechange = this.cb;
+xhr.onreadystatechange = r.cb;
xhr.send();
-}
+};
-this.cb = function () {
-if (this.modpoll.disabled || this.modpoll.skip_one) {
+r.cb = function () {
+if (r.disabled || r.skip_one) {
console.log('modpoll abort');
return;
}
@@ -283,26 +284,26 @@ function Modpoll() {
if (server_ref != server_now) {
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
-this.modpoll.disabled = true;
+r.disabled = true;
var msg = [
-"The document has changed on the server.<br />" +
+"The document has changed on the server.",
"The changes will NOT be loaded into your editor automatically.",
+"",
-"Press F5 or CTRL-R to refresh the page,<br />" +
+"Press F5 or CTRL-R to refresh the page,",
"replacing your document with the server copy.",
+"",
"You can close this message to ignore and contnue."
];
-return toast.warn(0, "<p>" + msg.join('</p>\n<p>') + '</p>');
+return toast.warn(0, msg.join('\n'));
}
console.log('modpoll eq');
-}
+};
if (md_opt.modpoll_freq > 0)
-this.periodic();
+setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
-return this;
+return r;
}
var modpoll = new Modpoll();
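
The Modpoll rewrite above swaps this-bound fields for a plain closure object r, so the XHR callback no longer needs an xhr.modpoll back-reference, and the self-re-arming setTimeout becomes a single setInterval. A minimal sketch of the same pattern follows; the fetch/notify details are hypothetical stand-ins, not copyparty's real modpoll logic:

    // closure-object poller: state lives on `r`, which every callback captures
    function makePoller(url, freqSec, onChange) {
        var r = { skip_one: true, disabled: false };

        r.periodic = function () {
            if (r.disabled || r.skip_one) {
                r.skip_one = false;          // no `this` juggling inside callbacks
                return;
            }
            var xhr = new XMLHttpRequest();
            xhr.open('GET', url + '?raw&_=' + Date.now(), true);
            xhr.responseType = 'text';
            xhr.onreadystatechange = function () {
                if (xhr.readyState == XMLHttpRequest.DONE && xhr.status == 200)
                    onChange(xhr.responseText);   // hypothetical change-handler
            };
            xhr.send();
        };

        if (freqSec > 0)
            setInterval(r.periodic, 1000 * freqSec);   // one interval, no re-arming

        return r;
    }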
@@ -326,12 +327,10 @@ function save(e) {
return toast.inf(2, "no changes");
var force = (save_cls.indexOf('force-save') >= 0);
-if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
-return toast.inf(3, 'aborted');
-var txt = dom_src.value;
-var fd = new FormData();
+function save2() {
+var txt = dom_src.value,
+fd = new FormData();
fd.append("act", "tput");
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
@@ -346,6 +345,14 @@ function save(e) {
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd);
+}
+if (!force)
+save2();
+else
+modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
+toast.inf(3, 'aborted');
+});
}
function save_cb() { function save_cb() {
@@ -353,19 +360,19 @@ function save_cb() {
return;
if (this.status !== 200)
-return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
-return alert('Failed to parse reply from server:\n\n' + this.responseText);
+return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {
-if (!this.btn.classList.contains('force-save')) {
-this.btn.classList.add('force-save');
+if (!clgot(this.btn, 'force-save')) {
+clmod(this.btn, 'force-save', 1);
var msg = [
'This file has been modified since you started editing it!\n',
'if you really want to overwrite, press save again.\n',
@@ -375,15 +382,13 @@ function save_cb() {
r.lastmod + ' lastmod on the server now,',
r.now + ' server time now,\n',
];
-alert(msg.join('\n'));
+return toast.err(0, msg.join('\n'));
}
-else {
-alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
-}
-return;
+else
+return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
-this.btn.classList.remove('force-save');
+clmod(this.btn, 'force-save');
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
run_savechk(r.lastmod, this.txt, this.btn, 0);
@@ -407,10 +412,8 @@ function savechk_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
-if (this.status !== 200) {
-alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
-return;
-}
+if (this.status !== 200)
+return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var doc1 = this.txt.replace(/\r\n/g, "\n");
var doc2 = this.responseText.replace(/\r\n/g, "\n");
@@ -423,12 +426,12 @@ function savechk_cb() {
}, 100);
return;
}
-alert(
+modal.alert(
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
'Length: yours=' + doc1.length + ', server=' + doc2.length
);
-alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
-alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
+modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
+modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
return;
}
@@ -865,15 +868,47 @@ function iter_uni(e) {
function cfg_uni(e) {
if (e) e.preventDefault();
-var reply = prompt("unicode whitelist", esc_uni_whitelist);
-if (reply === null)
-return;
+modal.prompt("unicode whitelist", esc_uni_whitelist, function (reply) {
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
+}, null);
}
var set_lno = (function () {
var t = null,
pi = null,
pv = null,
lno = ebi('lno');
function poke() {
clearTimeout(t);
t = setTimeout(fire, 20);
}
function fire() {
try {
clearTimeout(t);
var i = dom_src.selectionStart;
if (i === pi)
return;
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
if (v != pv)
lno.innerHTML = v;
pi = i;
pv = v;
}
catch (e) { }
}
timer.add(fire);
return poke;
})();
// hotkeys / toolbar
(function () {
function keydown(ev) {
@@ -892,6 +927,8 @@ function cfg_uni(e) {
if (document.activeElement != dom_src)
return true;
+set_lno();
if (ctrl(ev)) {
if (ev.code == "KeyH" || kc == 72) {
md_header(ev.shiftKey);
@@ -1086,9 +1123,9 @@ action_stack = (function () {
ref = newtxt;
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (hist.un.length > 0)
-dbg(statify(hist.un.slice(-1)[0]));
+dbg(jcp(hist.un.slice(-1)[0]));
if (hist.re.length > 0)
-dbg(statify(hist.re.slice(-1)[0]));
+dbg(jcp(hist.re.slice(-1)[0]));
}
return {


@@ -7,6 +7,8 @@ html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background:
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
html {
line-height: 1.5em;
}
@@ -18,6 +20,22 @@ html, body {
background: #f7f7f7;
color: #333;
}
#toast {
bottom: auto;
top: 1.4em;
}
#repl {
position: absolute;
top: 0;
right: .5em;
border: none;
color: inherit;
background: none;
text-decoration: none;
}
#mn {
font-weight: normal;
margin: 1.3em 0 .7em 1em;
@@ -59,148 +77,12 @@ html .editor-toolbar>button.disabled {
html .editor-toolbar>button.save.force-save {
background: #f97;
}
/* copied from md.css for now */
.mdo pre,
.mdo code,
.mdo a {
color: #480;
background: #f7f7f7;
border: .07em solid #ddd;
border-radius: .2em;
padding: .1em .3em;
margin: 0 .1em;
}
.mdo code {
font-size: .96em;
}
.mdo pre,
.mdo code {
font-family: monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
.mdo pre code {
display: block;
margin: 0 -.3em;
padding: .4em .5em;
line-height: 1.1em;
}
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
h1, h2 {
line-height: 1.5em;
}
h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
.mdo ul,
.mdo ol {
border-left: .3em solid #ddd;
}
.mdo>ul,
.mdo>ol {
border-color: #bbb;
}
.mdo ul>li {
list-style-type: disc;
}
.mdo ul>li,
.mdo ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
small {
opacity: .8;
}
table {
border-collapse: collapse;
}
td {
padding: .2em .5em;
border: .12em solid #aaa;
}
th {
border: .12em solid #aaa;
}
/* mde support */
.mdo {
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
}
.CodeMirror {
background: #f7f7f7;
}
/* darkmode */
html.dark .mdo,
html.dark .CodeMirror {
@@ -224,55 +106,6 @@ html.dark .CodeMirror-selectedtext {
background: #246;
color: #fff;
}
html.dark .mdo a {
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
}
html.dark .mdo>ul,
html.dark .mdo>ol {
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
}
@@ -309,3 +142,14 @@ html.dark .editor-toolbar::after,
html.dark .editor-toolbar::before {
background: none;
}
/* ui.css overrides */
.mdo {
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
}


@@ -3,9 +3,10 @@
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
-<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
-<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
-<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
+<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
+<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
+<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
</head>
<body>
<div id="mw">
@@ -20,6 +21,7 @@
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
</div>
</div>
+<a href="#" id="repl">π</a>
<script>
var last_modified = {{ lastmod }};
@@ -43,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
+<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="/.cpr/mde.js?_={{ ts }}"></script>
</body></html>


@@ -75,7 +75,7 @@ function set_jumpto() {
}
function jumpto(ev) {
-var tgt = ev.target || ev.srcElement;
+var tgt = ev.target;
var ln = null;
while (tgt && !ln) {
ln = tgt.getAttribute('data-ln');
@@ -96,23 +96,17 @@ function md_changed(mde, on_srv) {
var md_now = mde.value();
var save_btn = QS('.editor-toolbar button.save');
-if (md_now == window.md_saved)
-save_btn.classList.add('disabled');
-else
-save_btn.classList.remove('disabled');
+clmod(save_btn, 'disabled', md_now == window.md_saved);
set_jumpto();
}
function save(mde) {
var save_btn = QS('.editor-toolbar button.save');
-if (save_btn.classList.contains('disabled'))
+if (clgot(save_btn, 'disabled'))
return toast.inf(2, 'no changes');
-var force = save_btn.classList.contains('force-save');
-if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
-return toast.inf(3, 'aborted');
+var force = clgot(save_btn, 'force-save');
+function save2() {
var txt = mde.value();
var fd = new FormData();
@@ -129,6 +123,14 @@ function save(mde) {
xhr.mde = mde;
xhr.txt = txt;
xhr.send(fd);
}
if (!force)
save2();
else
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
toast.inf(3, 'aborted');
});
}
function save_cb() {
@@ -136,19 +138,19 @@ function save_cb() {
return;
if (this.status !== 200)
-return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
-return alert('Failed to parse reply from server:\n\n' + this.responseText);
+return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {
-if (!this.btn.classList.contains('force-save')) {
-this.btn.classList.add('force-save');
+if (!clgot(this.btn, 'force-save')) {
+clmod(this.btn, 'force-save', 1);
var msg = [
'This file has been modified since you started editing it!\n',
'if you really want to overwrite, press save again.\n',
@@ -158,15 +160,13 @@ function save_cb() {
r.lastmod + ' lastmod on the server now,',
r.now + ' server time now,\n',
];
-alert(msg.join('\n'));
+return toast.err(0, msg.join('\n'));
}
-else {
-alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
-}
-return;
+else
+return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
-this.btn.classList.remove('force-save');
+clmod(this.btn, 'force-save');
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
// download the saved doc from the server and compare
@@ -186,35 +186,23 @@ function save_chk() {
if (this.readyState != XMLHttpRequest.DONE)
return;
-if (this.status !== 200) {
-alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
-return;
-}
+if (this.status !== 200)
+return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var doc1 = this.txt.replace(/\r\n/g, "\n");
var doc2 = this.responseText.replace(/\r\n/g, "\n");
if (doc1 != doc2) {
-alert(
+modal.alert(
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
'Length: yours=' + doc1.length + ', server=' + doc2.length
);
-alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
-alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
+modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
+modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
return;
}
last_modified = this.lastmod;
md_changed(this.mde, true);
-var ok = mknod('div');
-ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
-ok.innerHTML = 'OK✔';
-var parent = ebi('m');
-document.documentElement.appendChild(ok);
-setTimeout(function () {
-ok.style.opacity = 0;
-}, 500);
-setTimeout(function () {
-ok.parentNode.removeChild(ok);
-}, 750);
+toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
}


@@ -11,14 +11,12 @@ html {
background: #333;
font-family: sans-serif;
text-shadow: 1px 1px 0px #000;
-touch-action: manipulation;
}
html, body {
margin: 0;
padding: 0;
}
-body {
-padding-bottom: 5em;
-}
#box {
padding: .5em 1em;
background: #2c2c2c;


@@ -6,7 +6,7 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
-<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head>
<body>


@@ -3,6 +3,9 @@ html, body, #wrap {
background: #f7f7f7;
font-family: sans-serif;
}
html {
touch-action: manipulation;
}
#wrap {
max-width: 40em;
margin: 2em auto;
@@ -26,6 +29,12 @@ a {
border-radius: .2em;
padding: .2em .8em;
}
#repl {
border: none;
background: none;
color: inherit;
padding: 0;
}
table {
border-collapse: collapse;
}
@@ -46,6 +55,16 @@
.btns {
margin: 1em 0;
}
#msg {
margin: 3em 0;
}
#msg h1 {
margin-bottom: 0;
}
#msg h1 + p {
margin-top: .3em;
text-align: right;
}
html.dark, html.dark,


@@ -6,12 +6,23 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
-<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
</head>
<body>
<div id="wrap">
-<p>hello {{ this.uname }}</p>
+{%- if this.uname == '*' %}
<p>howdy stranger &nbsp; <small>(you're not logged in)</small></p>
{%- else %}
<p>welcome back, <strong>{{ this.uname }}</strong></p>
{%- endif %}
{%- if msg %}
<div id="msg">
{{ msg }}
</div>
{%- endif %}
{%- if avol %}
<h1>admin panel:</h1>
@@ -59,18 +70,20 @@
<h1>login for more:</h1>
<ul>
-<form method="post" enctype="multipart/form-data" action="/">
+<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />
</form>
</ul>
</div>
+<a href="#" id="repl">π</a>
<script>
if (localStorage.getItem('lightmode') != 1)
document.documentElement.setAttribute("class", "dark");
</script>
+<script src="/.cpr/util.js?_={{ ts }}"></script>
</body>
</html>

copyparty/web/ui.css (new file, 453 lines)

@@ -0,0 +1,453 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html {
touch-action: manipulation;
}
#tt, #toast {
position: fixed;
max-width: 34em;
max-width: min(34em, 90%);
max-width: min(34em, calc(100% - 7em));
background: #222;
border: 0 solid #777;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt {
max-width: min(34em, calc(100% - 3.3em));
overflow: hidden;
margin: .7em 0;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
}
#toast {
bottom: 5em;
right: -1em;
line-height: 1.5em;
padding: 1em 1.3em;
margin-left: 3em;
border-width: .4em 0;
overflow-wrap: break-word;
transform: translateX(100%);
transition:
transform .4s cubic-bezier(.2, 1.2, .5, 1),
right .4s cubic-bezier(.2, 1.2, .5, 1);
text-shadow: 1px 1px 0 #000;
color: #fff;
}
#toast a {
color: inherit;
text-shadow: inherit;
background: rgba(0, 0, 0, 0.4);
border-radius: .3em;
padding: .2em .3em;
}
#toast a#toastc {
display: inline-block;
position: absolute;
overflow: hidden;
left: 0;
width: 0;
opacity: 0;
padding: .3em 0;
margin: -.3em 0 0 0;
line-height: 1.3em;
color: #000;
border: none;
outline: none;
text-shadow: none;
border-radius: .5em 0 0 .5em;
transition: left .3s, width .3s, padding .3s, opacity .3s;
}
#toastb {
max-height: 70vh;
overflow-y: auto;
}
#toast.scroll #toastb {
overflow-y: scroll;
margin-right: -1.2em;
padding-right: .7em;
}
#toast pre {
margin: 0;
}
#toast.vis {
right: 1.3em;
transform: unset;
}
#toast.vis #toastc {
left: -2em;
width: .4em;
padding: .3em .8em;
opacity: 1;
}
#toast.inf {
background: #07a;
border-color: #0be;
}
#toast.inf #toastc {
background: #0be;
}
#toast.ok {
background: #380;
border-color: #8e4;
}
#toast.ok #toastc {
background: #8e4;
}
#toast.warn {
background: #960;
border-color: #fc0;
}
#toast.warn #toastc {
background: #fc0;
}
#toast.err {
background: #900;
border-color: #d06;
}
#toast.err #toastc {
background: #d06;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#modalc code,
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
html.light #tt {
background: #fff;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
#modalc code,
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
#modal {
position: fixed;
overflow: auto;
top: 0;
left: 0;
right: 0;
bottom: 0;
width: 100%;
height: 100%;
z-index: 9001;
background: rgba(64,64,64,0.6);
}
#modal>table {
width: 100%;
height: 100%;
}
#modal td {
text-align: center;
}
#modalc {
position: relative;
display: inline-block;
background: #f7f7f7;
color: #333;
text-shadow: none;
text-align: left;
margin: 3em;
padding: 1em 1.1em;
border-radius: .6em;
box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
max-width: 50em;
max-height: 30em;
overflow: auto;
}
@media (min-width: 40em) {
#modalc {
min-width: 30em;
}
}
#modalc li {
margin: 1em 0;
}
#modalc h6 {
font-size: 1.3em;
border-bottom: 1px solid #999;
margin: 0;
padding: .3em;
text-align: center;
}
#modalb {
position: sticky;
text-align: right;
padding-top: 1em;
bottom: 0;
right: 0;
}
#modalb a {
color: #000;
background: #ccc;
display: inline-block;
border-radius: .3em;
padding: .5em 1em;
outline: none;
border: none;
}
#modalb a:focus,
#modalb a:hover {
background: #06d;
color: #fff;
}
#modalb a+a {
margin-left: .5em;
}
#modali {
display: block;
background: #fff;
color: #000;
width: calc(100% - 1.25em);
margin: 1em -.1em 0 -.1em;
padding: .5em;
outline: none;
border: .25em solid #ccc;
border-radius: .4em;
}
#modali:focus {
border-color: #06d;
}
#repl_pre {
max-width: 24em;
}
*:focus,
#pctl *:focus,
.btn:focus {
box-shadow: 0 .1em .2em #fc0 inset;
border-radius: .2em;
}
html.light *:focus,
html.light #pctl *:focus,
html.light .btn:focus {
box-shadow: 0 .1em .2em #037 inset;
}
.mdo pre,
.mdo code,
.mdo a {
color: #480;
background: #f7f7f7;
border: .07em solid #ddd;
border-radius: .2em;
padding: .1em .3em;
margin: 0 .1em;
}
.mdo pre,
.mdo code,
.mdo tt {
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
.mdo code {
font-size: .96em;
}
.mdo h1,
.mdo h2 {
line-height: 1.5em;
}
.mdo h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
.mdo h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
.mdo h3 {
border-bottom: .1em solid #999;
}
.mdo h1 a, .mdo h3 a, .mdo h5 a,
.mdo h2 a, .mdo h4 a, .mdo h6 a {
color: inherit;
display: block;
background: none;
border: none;
padding: 0;
margin: 0;
}
.mdo ul,
.mdo ol {
border-left: .3em solid #ddd;
}
.mdo ul>li,
.mdo ol>li {
margin: .7em 0;
list-style-type: disc;
}
.mdo strong {
color: #000;
}
.mdo p>em,
.mdo li>em,
.mdo td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
.mdo blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
.mdo small {
opacity: .8;
}
.mdo pre code {
display: block;
margin: 0 -.3em;
padding: .4em .5em;
line-height: 1.1em;
}
.mdo pre code:hover {
background: #fec;
color: #360;
}
.mdo table {
border-collapse: collapse;
margin: 1em 0;
}
.mdo th,
.mdo td {
padding: .2em .5em;
border: .12em solid #aaa;
}
@media screen {
.mdo {
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
html.light .mdo a,
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
html.dark .mdo a {
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
}
html.dark .mdo pre,
html.dark .mdo code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
}
html.dark .mdo strong {
color: #fff;
}
html.dark .mdo p>em,
html.dark .mdo li>em,
html.dark .mdo td>em {
color: #f94;
border-color: #666;
}
html.dark .mdo h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark .mdo h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark .mdo td,
html.dark .mdo th {
border-color: #444;
}
html.dark .mdo blockquote {
background: #282828;
border: .07em dashed #444;
}
}

File diff suppressed because it is too large

@@ -1,302 +0,0 @@
#op_up2k {
padding: 0 1em 1em 1em;
}
#u2form {
position: absolute;
top: 0;
left: 0;
width: 2px;
height: 2px;
overflow: hidden;
}
#u2form input {
background: #444;
border: 0px solid #444;
outline: none;
}
#u2err.err {
color: #f87;
padding: .5em;
}
#u2err.msg {
color: #999;
padding: .5em;
font-size: .9em;
}
#u2btn {
color: #eee;
background: #555;
background: -moz-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
background: -webkit-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
text-decoration: none;
line-height: 1.3em;
border: 1px solid #222;
border-radius: .4em;
text-align: center;
font-size: 1.5em;
margin: .5em auto;
padding: .8em 0;
width: 16em;
cursor: pointer;
box-shadow: .4em .4em 0 #111;
}
#op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
text-shadow: 1px 1px 1px #fc6;
color: #333;
}
#u2conf #u2btn {
margin: -1.5em 0;
padding: .8em 0;
width: 100%;
max-width: 12em;
display: inline-block;
}
#u2conf #u2btn_cw {
text-align: right;
}
#u2notbtn {
display: none;
text-align: center;
background: #333;
padding-top: 1em;
}
#u2notbtn * {
line-height: 1.3em;
}
#u2tab {
margin: 3em auto;
width: calc(100% - 2em);
max-width: 100em;
}
#op_up2k.srch #u2tab {
max-width: none;
}
#u2tab td {
border: 1px solid #ccc;
border-width: 0 0px 1px 0;
padding: .1em .3em;
}
#u2tab td:nth-child(2) {
width: 5em;
white-space: nowrap;
}
#u2tab td:nth-child(3) {
width: 40%;
}
#op_up2k.srch td.prog {
font-family: sans-serif;
font-size: 1em;
width: auto;
}
#u2tab tbody tr:hover td {
background: #222;
}
#u2cards {
padding: 1em 0 .3em 1em;
margin: 1.5em auto -2.5em auto;
white-space: nowrap;
text-align: center;
overflow: hidden;
}
#u2cards.w {
width: 45em;
text-align: left;
}
#u2cards a {
padding: .2em 1em;
border: 1px solid #777;
border-width: 0 0 1px 0;
background: linear-gradient(to bottom, #333, #222);
}
#u2cards a:first-child {
border-radius: .4em 0 0 0;
}
#u2cards a:last-child {
border-radius: 0 .4em 0 0;
}
#u2cards a.act {
padding-bottom: .5em;
border-width: 1px 1px .1em 1px;
border-radius: .3em .3em 0 0;
margin-left: -1px;
background: linear-gradient(to bottom, #464, #333 80%);
box-shadow: 0 -.17em .67em #280;
border-color: #7c5 #583 #333 #583;
position: relative;
color: #fd7;
}
#u2cards span {
color: #fff;
}
#u2conf {
margin: 1em auto;
width: 30em;
}
#u2conf.has_btn {
width: 48em;
}
#u2conf * {
text-align: center;
line-height: 1em;
margin: 0;
padding: 0;
border: none;
outline: none;
}
#u2conf .txtbox {
width: 3em;
color: #fff;
background: #444;
border: 1px solid #777;
font-size: 1.2em;
padding: .15em 0;
height: 1.05em;
}
#u2conf .txtbox.err {
background: #922;
}
#u2conf a {
color: #fff;
background: #c38;
text-decoration: none;
border-radius: .1em;
font-size: 1.5em;
padding: .1em 0;
margin: 0 -1px;
width: 1.5em;
height: 1em;
display: inline-block;
position: relative;
bottom: -0.08em;
}
#u2conf input+a {
background: #d80;
}
#u2conf label {
font-size: 1.6em;
width: 2em;
height: 1em;
padding: .4em 0;
display: block;
border-radius: .25em;
}
#u2conf input[type="checkbox"] {
position: relative;
opacity: .02;
top: 2em;
}
#u2conf input[type="checkbox"]+label {
position: relative;
background: #603;
border-bottom: .2em solid #a16;
box-shadow: 0 .1em .3em #a00 inset;
}
#u2conf input[type="checkbox"]:checked+label {
background: #6a1;
border-bottom: .2em solid #efa;
box-shadow: 0 .1em .5em #0c0;
}
#u2conf input[type="checkbox"]+label:hover {
box-shadow: 0 .1em .3em #fb0;
border-color: #fb0;
}
#op_up2k.srch #u2conf td:nth-child(1)>*,
#op_up2k.srch #u2conf td:nth-child(2)>*,
#op_up2k.srch #u2conf td:nth-child(3)>* {
background: #777;
border-color: #ccc;
box-shadow: none;
opacity: .2;
}
#u2foot {
color: #fff;
font-style: italic;
}
#u2foot .warn {
font-size: 1.3em;
padding: .5em .8em;
margin: 1em -.6em;
color: #f74;
background: #322;
border: 1px solid #633;
border-width: .1em 0;
text-align: center;
}
#u2foot .warn span {
color: #f86;
}
html.light #u2foot .warn {
color: #b00;
background: #fca;
border-color: #f70;
}
html.light #u2foot .warn span {
color: #930;
}
#u2foot span {
color: #999;
font-size: .9em;
font-weight: normal;
}
#u2footfoot {
margin-bottom: -1em;
}
.prog {
font-family: monospace, monospace;
}
#u2tab a>span {
font-weight: bold;
font-style: italic;
color: #fff;
padding-left: .2em;
}
#u2cleanup {
float: right;
margin-bottom: -.3em;
}
.fsearch_explain {
padding-left: .7em;
font-size: 1.1em;
line-height: 0;
}
html.light #u2btn {
box-shadow: .4em .4em 0 #ccc;
}
html.light #u2cards span {
color: #000;
}
html.light #u2cards a {
background: linear-gradient(to bottom, #eee, #fff);
}
html.light #u2cards a.act {
color: #037;
background: inherit;
box-shadow: 0 -.17em .67em #0ad;
border-color: #09c #05a #eee #05a;
}
html.light #u2conf .txtbox {
background: #fff;
color: #444;
}
html.light #u2conf .txtbox.err {
background: #f96;
color: #300;
}
html.light #op_up2k.srch #u2btn {
border-color: #a80;
}
html.light #u2foot {
color: #000;
}
html.light #u2tab tbody tr:hover td {
background: #fff;
}

View File

@@ -8,7 +8,8 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window,
    IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
-    ANDROID = /android/i.test(navigator.userAgent);
    ANDROID = /android/i.test(navigator.userAgent),
    WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
var ebi = document.getElementById.bind(document),
@@ -28,6 +29,44 @@ function esc(txt) {
    }[c];
});
}
window.onunhandledrejection = function (e) {
var err = e.reason;
try {
err += '\n' + e.reason.stack;
}
catch (e) { }
console.log("REJ: " + err);
try {
toast.warn(30, err);
}
catch (e) { }
};
try {
console.hist = [];
var hook = function (t) {
var orig = console[t].bind(console),
cfun = function () {
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
if (console.hist.length > 100)
console.hist = console.hist.slice(50);
orig.apply(console, arguments);
};
console['std' + t] = orig;
console[t] = cfun;
};
hook('log');
console.log('log-capture ok');
hook('debug');
hook('warn');
hook('error');
}
catch (ex) {
if (console.stdlog)
console.log = console.stdlog;
console.log(ex);
}
var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) {
    if ((msg + '').indexOf('ResizeObserver') !== -1)
@@ -39,22 +78,59 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
    crashed = true;
    window.onerror = undefined;
-    var html = ['<h1>you hit a bug!</h1><p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p><p>please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code><br />&nbsp; (and if you can, press F12 and include the "Console" tab in the screenshot too)</p><p>',
-        esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>'];
    var html = [
        '<h1>you hit a bug!</h1>',
        '<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p>',
        '<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
        '<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>',
        '<p><b>UA:</b> ' + esc(navigator.userAgent + '')
    ];
try {
var ua = '',
ad = navigator.userAgentData,
adb = ad.brands;
for (var a = 0; a < adb.length; a++)
if (!/Not.*A.*Brand/.exec(adb[a].brand))
ua += adb[a].brand + '/' + adb[a].version + ', ';
ua += ad.platform;
html.push('<br /><b>UAD:</b> ' + esc(ua.slice(0, 100)));
}
catch (e) { }
html.push('</p>');
try { try {
if (error) { if (error) {
var find = ['desc', 'stack', 'trace']; var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++) for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined') if (String(error[find[a]]) !== 'undefined')
html.push('<h3>' + find[a] + '</h3>' + html.push('<p class="b"><b>' + find[a] + ':</b><br />' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n')); esc(String(error[find[a]])).replace(/\n/g, '<br />\n') + '</p>');
} }
ignexd[ekey] = true; ignexd[ekey] = true;
html.push('<h3>localStore</h3>' + esc(JSON.stringify(localStorage)));
var ls = jcp(localStorage);
if (ls.fman_clip)
ls.fman_clip = ls.fman_clip.length + ' items';
var lsk = Object.keys(ls);
lsk.sort();
html.push('<p class="b">');
for (var a = 0; a < lsk.length; a++)
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
html.push('</p>');
} }
catch (e) { } catch (e) { }
if (console.hist.length) {
html.push('<p class="b"><b>console:</b><ul><li>' + Date.now() + ' @</li>');
for (var a = console.hist.length - 1, aa = Math.max(0, console.hist.length - 20); a >= aa; a--)
html.push('<li>' + esc(console.hist[a]) + '</li>');
html.push('</ul>')
}
try { try {
var exbox = ebi('exbox'); var exbox = ebi('exbox');
if (!exbox) { if (!exbox) {
@@ -63,10 +139,19 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
document.body.appendChild(exbox); document.body.appendChild(exbox);
var s = mknod('style'); var s = mknod('style');
s.innerHTML = '#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%} #exbox h1{margin:.5em 1em 0 0;padding:0} #exbox h3{border-top:1px solid #999;margin:1em 0 0 0} #exbox a{text-decoration:underline;color:#fc0} #exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} #exbox *{line-height:1.5em}'; s.innerHTML = (
'#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
'#exbox a{text-decoration:underline;color:#fc0} ' +
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
'#exbox b{color:#fff}'
);
document.head.appendChild(s); document.head.appendChild(s);
} }
exbox.innerHTML = html.join('\n'); exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
exbox.style.display = 'block'; exbox.style.display = 'block';
} }
catch (e) { catch (e) {
@@ -84,6 +169,9 @@ function ignex(all) {
} }
function noop() { }
function ctrl(e) { function ctrl(e) {
return e && (e.ctrlKey || e.metaKey); return e && (e.ctrlKey || e.metaKey);
} }
@@ -109,29 +197,40 @@ function ev(e) {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) { if (!String.prototype.endsWith)
String.prototype.endsWith = function (search, this_len) { String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) { if (this_len === undefined || this_len > this.length) {
this_len = this.length; this_len = this.length;
} }
return this.substring(this_len - search.length, this_len) === search; return this.substring(this_len - search.length, this_len) === search;
}; };
}
if (!String.startsWith) { if (!String.startsWith)
String.prototype.startsWith = function (s, i) { String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0; i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s; return this.substring(i, i + s.length) === s;
}; };
}
if (!Element.prototype.closest) { if (!String.trimEnd)
String.prototype.trimEnd = String.prototype.trimRight = function () {
return this.replace(/[ \t\r\n]+$/m, '');
};
if (!Element.prototype.matches)
Element.prototype.matches =
Element.prototype.oMatchesSelector ||
Element.prototype.msMatchesSelector ||
Element.prototype.mozMatchesSelector ||
Element.prototype.webkitMatchesSelector;
if (!Element.prototype.closest)
Element.prototype.closest = function (s) { Element.prototype.closest = function (s) {
var el = this; var el = this;
do { do {
if (el.msMatchesSelector(s)) return el; if (el.matches(s)) return el;
el = el.parentElement || el.parentNode; el = el.parentElement || el.parentNode;
} while (el !== null && el.nodeType === 1); } while (el !== null && el.nodeType === 1);
} };
}
// https://stackoverflow.com/a/950146 // https://stackoverflow.com/a/950146
@@ -140,10 +239,10 @@ function import_js(url, cb) {
var script = mknod('script'); var script = mknod('script');
script.type = 'text/javascript'; script.type = 'text/javascript';
script.src = url; script.src = url;
script.onreadystatechange = cb;
script.onload = cb; script.onload = cb;
script.onerror = function () {
toast.err(0, 'Failed to load module:\n' + url);
};
head.appendChild(script); head.appendChild(script);
} }
@@ -170,90 +269,37 @@ function crc32(str) {
} }
function clmod(obj, cls, add) { function clmod(el, cls, add) {
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'); if (el.classList) {
var have = el.classList.contains(cls);
if (add == 't') if (add == 't')
add = !re.test(obj.className); add = !have;
obj.className = obj.className.replace(re, ' ') + (add ? ' ' + cls : ''); if (add != have)
el.classList[add ? 'add' : 'remove'](cls);
return;
}
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
n1 = el.className;
if (add == 't')
add = !re.test(n1);
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2)
el.className = n2;
} }
function sortfiles(nodes) { function clgot(el, cls) {
var sopts = jread('fsort', [["href", 1, ""]]); if (el.classList)
return el.classList.contains(cls);
try { var lst = (el.getAttribute('class') + '').split(/ /g);
var is_srch = false; return has(lst, cls);
if (nodes[0]['rp']) {
is_srch = true;
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b].ext = nodes[b].rp.split('.').pop();
for (var b = 0; b < sopts.length; b++)
if (sopts[b][0] == 'href')
sopts[b][0] = 'rp';
}
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (!name)
continue;
if (name == 'ts')
typ = 'int';
if (name.indexOf('tags/') === 0) {
name = name.slice(5);
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b]._sv = nodes[b].tags[name];
}
else {
for (var b = 0, bb = nodes.length; b < bb; b++) {
var v = nodes[b][name];
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v) {
if (v.slice(-1) == '/')
v = '\t' + v;
v = uricom_dec(v)[0]
}
nodes[b]._sv = v;
}
}
var onodes = nodes.map(function (x) { return x; });
nodes.sort(function (n1, n2) {
var v1 = n1._sv,
v2 = n2._sv;
if (v1 === undefined) {
if (v2 === undefined) {
return onodes.indexOf(n1) - onodes.indexOf(n2);
}
return -1 * rev;
}
if (v2 === undefined) return 1 * rev;
var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
if (ret === 0)
ret = onodes.indexOf(n1) - onodes.indexOf(n2);
return ret;
});
}
for (var b = 0, bb = nodes.length; b < bb; b++) {
delete nodes[b]._sv;
if (is_srch)
delete nodes[b].ext;
}
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
console.log("resetting fsort " + sread('fsort'))
localStorage.removeItem('fsort');
}
return nodes;
} }
@@ -331,8 +377,16 @@ function makeSortable(table, cb) {
function linksplit(rp) { function linksplit(rp) {
var ret = []; var ret = [],
var apath = '/'; apath = '/',
q = null;
if (rp && rp.indexOf('?') + 1) {
q = rp.split('?', 2);
rp = q[0];
q = '?' + q[1];
}
if (rp && rp.charAt(0) == '/') if (rp && rp.charAt(0) == '/')
rp = rp.slice(1); rp = rp.slice(1);
@@ -354,6 +408,9 @@ function linksplit(rp) {
elink = elink.slice(0, -3) + '/'; elink = elink.slice(0, -3) + '/';
} }
if (!rp && q)
elink += q;
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>'); ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
apath += elink; apath += elink;
} }
@@ -386,6 +443,16 @@ function uricom_enc(txt, do_fb_enc) {
} }
} }
function url_enc(txt) {
var parts = txt.split('/'),
ret = [];
for (var a = 0; a < parts.length; a++)
ret.push(uricom_enc(parts[a]));
return ret.join('/');
}
function uricom_dec(txt) { function uricom_dec(txt) {
try { try {
@@ -398,6 +465,17 @@ function uricom_dec(txt) {
} }
function uricom_adec(arr, li) {
var ret = [];
for (var a = 0; a < arr.length; a++) {
var txt = uricom_dec(arr[a])[0];
ret.push(li ? '<li>' + esc(txt) + '</li>' : txt);
}
return ret;
}
function get_evpath() { function get_evpath() {
var ret = document.location.pathname; var ret = document.location.pathname;
@@ -437,6 +515,41 @@ function s2ms(s) {
} }
function f2f(val, nd) {
// 10.toFixed(1) returns 10.00 for certain values of 10
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
}
function humansize(b, terse) {
var i = 0, u = terse ? ['B', 'K', 'M', 'G'] : ['B', 'KB', 'MB', 'GB'];
while (b >= 1000 && i < u.length) {
b /= 1024;
i += 1;
}
return f2f(b, b >= 100 ? 0 : b >= 10 ? 1 : 2) + ' ' + u[i];
}
function humantime(v) {
if (v >= 60 * 60 * 24)
return v;
try {
return /.*(..:..:..).*/.exec(new Date(v * 1000).toUTCString())[1];
}
catch (ex) {
return v;
}
}
function clamp(v, a, b) {
return Math.min(Math.max(v, a), b);
}
function has(haystack, needle) { function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++) for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle) if (haystack[a] == needle)
@@ -534,6 +647,24 @@ function bcfg_upd_ui(name, val) {
} }
} }
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
var v = bcfg_get(cname, defval),
el = ebi(cname);
obj[oname] = v;
if (el)
el.onclick = function (e) {
if (un_ev !== false)
ev(e);
obj[oname] = bcfg_set(cname, !obj[oname]);
if (cb)
cb(obj[oname]);
};
return v;
}
function hist_push(url) { function hist_push(url) {
console.log("h-push " + url); console.log("h-push " + url);
@@ -546,6 +677,42 @@ function hist_replace(url) {
} }
var timer = (function () {
var r = {};
r.q = [];
r.last = 0;
r.add = function (fun, run) {
r.rm(fun);
r.q.push(fun);
if (run)
fun();
};
r.rm = function (fun) {
apop(r.q, fun);
};
function doevents() {
if (crashed)
return;
if (Date.now() - r.last < 69)
return;
var q = r.q.slice(0);
for (var a = 0; a < q.length; a++)
q[a]();
r.last = Date.now();
}
setInterval(doevents, 100);
return r;
})();
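
the shared `timer` above exists so small periodic jobs don't each own a `setInterval`; callbacks registered with `timer.add` run on one common ~100ms tick and are paused once `crashed` is set. a rough usage sketch (not from the repo; `poll_status` is a made-up name):

```js
// hypothetical consumer of the shared timer defined above
function poll_status() {
    // runs on the shared ~100ms tick, throttled and skipped while crashed
    console.log('tick');
}
timer.add(poll_status, true);  // true = also run it once right away
// ... and when the widget goes away:
timer.rm(poll_status);
```
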
var tt = (function () { var tt = (function () {
var r = { var r = {
"tt": mknod("div"), "tt": mknod("div"),
@@ -557,11 +724,21 @@ var tt = (function () {
r.tt.setAttribute('id', 'tt'); r.tt.setAttribute('id', 'tt');
document.body.appendChild(r.tt); document.body.appendChild(r.tt);
var prev = null;
r.cshow = function () {
if (this !== prev)
r.show.bind(this)();
prev = this;
};
r.show = function () { r.show = function () {
if (r.skip) { if (r.skip) {
r.skip = false; r.skip = false;
return; return;
} }
if (QS('body.bbox-open'))
return;
var cfg = sread('tooltips'); var cfg = sread('tooltips');
if (cfg !== null && cfg != '1') if (cfg !== null && cfg != '1')
@@ -574,54 +751,77 @@ var tt = (function () {
r.el = this; r.el = this;
var pos = this.getBoundingClientRect(), var pos = this.getBoundingClientRect(),
dir = this.getAttribute('ttd') || '', dir = this.getAttribute('ttd') || '',
left = pos.left < window.innerWidth / 2, margin = parseFloat(this.getAttribute('ttm') || 0),
top = pos.top < window.innerHeight / 2, top = pos.top < window.innerHeight / 2,
big = this.className.indexOf(' ttb') !== -1; big = this.className.indexOf(' ttb') !== -1;
if (dir.indexOf('u') + 1) top = false; if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true; if (dir.indexOf('d') + 1) top = true;
if (dir.indexOf('l') + 1) left = false;
if (dir.indexOf('r') + 1) left = true;
clmod(r.tt, 'b', big); clmod(r.tt, 'b', big);
r.tt.style.top = top ? pos.bottom + 'px' : 'auto'; r.tt.style.left = '0';
r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px'; r.tt.style.top = '0';
r.tt.style.left = left ? pos.left + 'px' : 'auto';
r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';
r.tt.innerHTML = msg.replace(/\$N/g, "<br />"); r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
r.el.addEventListener('mouseleave', r.hide); r.el.addEventListener('mouseleave', r.hide);
window.addEventListener('scroll', r.hide);
clmod(r.tt, 'show', 1); clmod(r.tt, 'show', 1);
var tw = r.tt.offsetWidth,
x = pos.left + (pos.right - pos.left) / 2 - tw / 2;
if (x + tw >= window.innerWidth - 24)
x = window.innerWidth - tw - 24;
if (x < 0)
x = 12;
r.tt.style.left = x + 'px';
r.tt.style.top = top ? (margin + pos.bottom) + 'px' : 'auto';
r.tt.style.bottom = top ? 'auto' : (margin + window.innerHeight - pos.top) + 'px';
}; };
r.hide = function (e) { r.hide = function (e) {
ev(e); ev(e);
window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show'); clmod(r.tt, 'show');
clmod(r.tt, 'b');
if (r.el) if (r.el)
r.el.removeEventListener('mouseleave', r.hide); r.el.removeEventListener('mouseleave', r.hide);
}; };
if (is_touch && IPHONE) { if (is_touch && IPHONE) {
var f1 = r.show, var f1 = r.show,
f2 = r.hide; f2 = r.hide,
q = [];
// if an onclick-handler creates a new timer,
// iOS 13.1.2 delays the entire handler by up to 401ms,
// win by using a shared timer instead
timer.add(function () {
while (q.length && Date.now() >= q[0][0])
q.shift()[1]();
});
r.show = function () { r.show = function () {
setTimeout(f1.bind(this), 301); q.push([Date.now() + 100, f1.bind(this)]);
}; };
r.hide = function () { r.hide = function () {
setTimeout(f2.bind(this), 301); q.push([Date.now() + 100, f2.bind(this)]);
}; };
} }
r.tt.onclick = r.hide; r.tt.onclick = r.hide;
r.att = function (ctr) { r.att = function (ctr) {
var _show = r.en ? r.show : null, var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null,
_hide = r.en ? r.hide : null, _hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]'); o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) { for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _show; o[a].onfocus = _cshow;
o[a].onblur = _hide; o[a].onblur = _hide;
o[a].onmouseenter = _show; o[a].onmouseenter = _show;
o[a].onmouseleave = _hide; o[a].onmouseleave = _hide;
@@ -647,52 +847,315 @@ var tt = (function () {
})(); })();
var toast = (function () { function lf2br(txt) {
var r = {},
te = null,
visible = false,
obj = mknod('div');
obj.setAttribute('id', 'toast');
document.body.appendChild(obj);;
r.hide = function (e) {
ev(e);
clearTimeout(te);
clmod(obj, 'vis');
r.visible = false;
};
r.show = function (cl, ms, txt) {
clearTimeout(te);
if (ms)
te = setTimeout(r.hide, ms * 1000);
var html = '', hp = txt.split(/(?=<.?pre>)/i); var html = '', hp = txt.split(/(?=<.?pre>)/i);
for (var a = 0; a < hp.length; a++) for (var a = 0; a < hp.length; a++)
html += hp[a].startsWith('<pre>') ? hp[a] : html += hp[a].startsWith('<pre>') ? hp[a] :
hp[a].replace(/<br ?.?>\n/g, '\n').replace(/\n<br ?.?>/g, '\n').replace(/\n/g, '<br />\n'); hp[a].replace(/<br ?.?>\n/g, '\n').replace(/\n<br ?.?>/g, '\n').replace(/\n/g, '<br />\n');
obj.innerHTML = '<a href="#" id="toastc">x</a>' + html; return html;
obj.className = cl; }
ms += obj.offsetWidth;
obj.className += ' vis';
ebi('toastc').onclick = r.hide; var toast = (function () {
r.visible = true; var r = {},
te = null,
scrolling = false,
obj = mknod('div');
obj.setAttribute('id', 'toast');
document.body.appendChild(obj);
r.visible = false;
r.txt = null;
function scrollchk() {
if (scrolling)
return;
var tb = ebi('toastb'),
vis = tb.offsetHeight,
all = tb.scrollHeight;
if (8 + vis >= all)
return;
clmod(obj, 'scroll', 1);
scrolling = true;
}
function unscroll() {
timer.rm(scrollchk);
clmod(obj, 'scroll');
scrolling = false;
}
r.hide = function (e) {
ev(e);
unscroll();
clearTimeout(te);
clmod(obj, 'vis');
r.visible = false;
}; };
r.ok = function (ms, txt) { r.show = function (cl, sec, txt) {
r.show('ok', ms, txt); clearTimeout(te);
if (sec)
te = setTimeout(r.hide, sec * 1000);
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
obj.className = cl;
sec += obj.offsetWidth;
obj.className += ' vis';
ebi('toastc').onclick = r.hide;
timer.add(scrollchk);
r.visible = true;
r.txt = txt;
}; };
r.inf = function (ms, txt) {
r.show('inf', ms, txt); r.ok = function (sec, txt) {
r.show('ok', sec, txt);
}; };
r.warn = function (ms, txt) { r.inf = function (sec, txt) {
r.show('warn', ms, txt); r.show('inf', sec, txt);
}; };
r.err = function (ms, txt) { r.warn = function (sec, txt) {
r.show('err', ms, txt); r.show('warn', sec, txt);
};
r.err = function (sec, txt) {
r.show('err', sec, txt);
}; };
return r; return r;
})(); })();
var modal = (function () {
var r = {},
q = [],
o = null,
cb_up = null,
cb_ok = null,
cb_ng = null,
prim = '<a href="#" id="modal-ok">OK</a>',
sec = '<a href="#" id="modal-ng">Cancel</a>',
ok_cancel = WINDOWS ? prim + sec : sec + prim;
r.busy = false;
r.show = function (html) {
o = mknod('div');
o.setAttribute('id', 'modal');
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
document.body.appendChild(o);
document.addEventListener('keydown', onkey);
r.busy = true;
var a = ebi('modal-ng');
if (a)
a.onclick = ng;
a = ebi('modal-ok');
a.onclick = ok;
var inp = ebi('modali');
(inp || a).focus();
if (inp)
setTimeout(function () {
inp.setSelectionRange(0, inp.value.length, "forward");
}, 0);
document.addEventListener('focus', onfocus);
timer.add(onfocus);
if (cb_up)
setTimeout(cb_up, 1);
};
r.hide = function () {
timer.rm(onfocus);
document.removeEventListener('focus', onfocus);
document.removeEventListener('keydown', onkey);
o.parentNode.removeChild(o);
r.busy = false;
setTimeout(next, 50);
};
function ok(e) {
ev(e);
var v = ebi('modali');
v = v ? v.value : true;
r.hide();
if (cb_ok)
cb_ok(v);
}
function ng(e) {
ev(e);
r.hide();
if (cb_ng)
cb_ng(null);
}
function onfocus(e) {
var ctr = ebi('modalc');
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
return;
setTimeout(function () {
ebi('modal-ok').focus();
}, 20);
ev(e);
}
function onkey(e) {
if (e.code == 'Enter') {
var a = ebi('modal-ng');
if (a && document.activeElement == a)
return ng();
return ok();
}
if (e.code == 'Escape')
return ng();
}
function next() {
if (!r.busy && q.length)
q.shift()();
}
r.alert = function (html, cb, fun) {
q.push(function () {
_alert(lf2br(html), cb, fun);
});
next();
};
function _alert(html, cb, fun) {
cb_ok = cb_ng = cb;
cb_up = fun;
html += '<div id="modalb"><a href="#" id="modal-ok">OK</a></div>';
r.show(html);
}
r.confirm = function (html, cok, cng, fun) {
q.push(function () {
_confirm(lf2br(html), cok, cng, fun);
});
next();
}
function _confirm(html, cok, cng, fun) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<div id="modalb">' + ok_cancel + '</div>';
r.show(html);
}
r.prompt = function (html, v, cok, cng, fun) {
q.push(function () {
_prompt(lf2br(html), v, cok, cng, fun);
});
next();
}
function _prompt(html, v, cok, cng, fun) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
r.show(html);
ebi('modali').value = v || '';
}
return r;
})();
function winpopup(txt) {
fetch(get_evpath(), {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
},
body: 'msg=' + uricom_enc(Date.now() + ', ' + txt)
});
}
var last_repl = null;
function repl_load() {
var ipre = ebi('repl_pre'),
tb = ebi('modali');
function getpres() {
var o, ret = jread("repl_pre", []);
if (!ret.length)
ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
'console.hist.slice(-10).join("\\n")'
];
ipre.innerHTML = '<option value=""></option>';
for (var a = 0; a < ret.length; a++) {
o = mknod('option');
o.setAttribute('value', ret[a]);
o.textContent = ret[a];
ipre.appendChild(o);
}
last_repl = ipre.value = (last_repl || (ret.length ? ret.slice(-1)[0] : ''));
return ret;
}
ebi('repl_pdel').onclick = function (e) {
var val = ipre.value,
pres = getpres();
apop(pres, val);
jwrite('repl_pre', pres);
getpres();
};
ebi('repl_pnew').onclick = function (e) {
var val = tb.value,
pres = getpres();
apop(pres, ipre.value);
pres.push(val);
jwrite('repl_pre', pres);
getpres();
ipre.value = val;
};
ipre.oninput = ipre.onchange = function () {
tb.value = last_repl = ipre.value;
};
tb.oninput = function () {
last_repl = this.value;
};
getpres();
tb.value = last_repl;
setTimeout(function () {
tb.setSelectionRange(0, tb.value.length, "forward");
}, 10);
}
function repl(e) {
ev(e);
var html = [
'<p>js repl (prefix with <code>,</code> to allow raise)</p>',
'<p><select id="repl_pre"></select>',
' &nbsp; <button id="repl_pdel">❌ del</button>',
' &nbsp; <button id="repl_pnew">💾 SAVE</button></p>'
];
modal.prompt(html.join(''), '', function (cmd) {
if (!cmd)
return toast.inf(3, 'eval aborted');
if (cmd.startsWith(','))
return modal.alert(esc(eval(cmd.slice(1)) + ''))
try {
modal.alert(esc(eval(cmd) + ''));
}
catch (ex) {
modal.alert('<h6>exception</h6>' + esc(ex + ''));
}
}, undefined, repl_load);
}
if (ebi('repl'))
ebi('repl').onclick = repl;
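
for reference, a rough usage sketch of the `toast` and `modal` helpers added above (illustration only, based on the call signatures visible in this diff); durations are in seconds, and 0 keeps a toast up until it's clicked away:

```js
// toast.<ok|inf|warn|err>(seconds, text); modals are queued so stacking is safe
toast.inf(5, 'upload complete');
toast.err(0, 'something broke;\ncheck the console');  // stays until dismissed

modal.confirm('delete 3 files?', function () {
    toast.ok(3, 'deleted');
}, null);  // null = do nothing on cancel

modal.prompt('new name:', 'a.txt', function (v) {
    toast.inf(3, 'renaming to ' + v);
}, null);
```
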

View File

@@ -1,11 +1,21 @@
-# example `.epilogue.html`
**NOTE:** there's more stuff (sharex config, service scripts, nginx configs, ...) in [`/contrib/`](/contrib/)
# example resource files
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
-# example browser-css
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
@@ -19,4 +29,23 @@ point `--css-browser` to one of these by URL:
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
-* example config file for `-c` which never really happened
* example config file for `-c` (supports accounts, volumes, and volume-flags)
# junk
alphabetical list of the remaining files
| what | why |
| -- | -- |
| [biquad.html](biquad.html) | bruteforce calibrator for the audio equalizer since im not that good at maths |
| [design.txt](design.txt) | initial brainstorming of the copyparty design, unmaintained, incorrect, sentimental value only |
| [hls.html](hls.html) | experimenting with hls playback using `hls.js`, works p well, almost became a thing |
| [music-analysis.sh](music-analysis.sh) | testing various bpm/key detection libraries before settling on the ones used in [`/bin/mtag/`](/bin/mtag/) |
| [notes.sh](notes.sh) | notepad, just scraps really |
| [nuitka.txt](nuitka.txt) | how to build a copyparty exe using nuitka (not maintained) |
| [pretend-youre-qnap.patch](pretend-youre-qnap.patch) | simulate a NAS which keeps returning old cached data even though you just modified the file yourself |
| [tcp-debug.sh](tcp-debug.sh) | looks like this was to debug stuck tcp connections? |
| [unirange.py](unirange.py) | uhh |
| [up2k.txt](up2k.txt) | initial ideas for how up2k should work, another unmaintained sentimental-value-only thing |

View File

@@ -3,6 +3,24 @@
setTimeout(location.reload.bind(location), 700); setTimeout(location.reload.bind(location), 700);
document.documentElement.scrollLeft = 0; document.documentElement.scrollLeft = 0;
var cali = (function() {
var ac = new AudioContext(),
fi = ac.createBiquadFilter(),
freqs = new Float32Array(1),
mag = new Float32Array(1),
phase = new Float32Array(1);
freqs[0] = 14000;
fi.type = 'peaking';
fi.frequency.value = 18000;
fi.Q.value = 0.8;
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
return mag[0]; // 1.0407 good, 1.0563 bad
})(),
mp = cali < 1.05;
var can = document.createElement('canvas'), var can = document.createElement('canvas'),
cc = can.getContext('2d'), cc = can.getContext('2d'),
w = 2048, w = 2048,
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
-[8000 * 1.006, 0.73, 1.24],
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
//[16000 * 1.00, 0.5, 1.75], // peak.v1
//[16000 * 1.19, 0, 1.8] // shelf.v1
-[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
-[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
-[16000 * 1.205, 0, 1.9] // shelf
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
];
var freqs = new Float32Array(22000),

View File

@@ -1,37 +1,7 @@
/* put filetype icons inline with text /* video, alternative 1:
#ggrid>a>span:before, top-left icon, just like the other formats
#ggrid>a>span.dir:before { =======================================================================
display: inline;
line-height: 0;
font-size: 1.7em;
margin: -.7em .1em -.5em -.6em;
}
*/
/* move folder icons top-left */
#ggrid>a>span.dir:before {
content: initial;
}
#ggrid>a[href$="/"]:before {
content: '📂';
}
/* put filetype icons top-left */
#ggrid>a:before {
display: block;
position: absolute;
padding: .3em 0;
margin: -.4em;
text-shadow: 0 0 .1em #000;
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
border-radius: .3em;
font-size: 2em;
}
/* video */
#ggrid>a:is( #ggrid>a:is(
[href$=".mkv"i], [href$=".mkv"i],
[href$=".mp4"i], [href$=".mp4"i],
@@ -39,6 +9,40 @@
):before { ):before {
content: '📺'; content: '📺';
} }
*/
/* video, alternative 2:
play-icon in the middle of the thumbnail
=======================================================================
*/
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
) {
position: relative;
overflow: hidden;
}
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
):before {
content: '▶';
opacity: .8;
margin: 0;
padding: 1em .5em 1em .7em;
border-radius: 9em;
line-height: 0;
color: #fff;
text-shadow: none;
background: rgba(0, 0, 0, 0.7);
left: calc(50% - 1em);
top: calc(50% - 1.4em);
}
/* audio */ /* audio */
@@ -54,6 +58,7 @@
} }
/* image */ /* image */
#ggrid>a:is( #ggrid>a:is(
[href$=".jpg"i], [href$=".jpg"i],

View File

@@ -17,6 +17,7 @@ html.light {
html.light #files th {
    background: rgba(255, 255, 255, 0.9) !important;
}
html.light .logue,
html.light #ops,
html.light #treeul,
html.light #files td {

View File

@@ -11,7 +11,9 @@
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
-#u2cards /* and the upload progress tabs */
#srch_dz, #srch_zd, /* the filesearch dropzone */
#u2cards, #u2etaw /* and the upload progress tabs */
{display: none !important} /* do it! */
@@ -19,7 +21,7 @@
/* add some margins because now it's weird */
.opview {margin-top: 2.5em}
-#op_up2k {margin-top: 3em}
#op_up2k {margin-top: 6em}
/* and embiggen the upload button */
#u2conf #u2btn, #u2btn {padding:1.5em 0}
@@ -27,6 +29,9 @@
/* adjust the button area a bit */
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
/* a */
#op_up2k {min-height: 0}
</style>
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>

View File

@@ -1,26 +0,0 @@
method = self.s.recv(4)
self.s.unrecv(method)
print("xxx unrecv'd [{}]".format(method))
# jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now
if not getattr(self.s.s, "sendall", None):
self.s.s.sendall = self.s.s.send
# TODO this is also pretty bad
have = dir(self.s)
for k in self.s.s.__dict__:
if k not in have and not k.startswith("__"):
if k == "recv":
raise Exception("wait what")
self.s.__dict__[k] = self.s.s.__dict__[k]
have = dir(self.s)
for k in dir(self.s.s):
if k not in have and not k.startswith("__"):
if k == "recv":
raise Exception("wait what")
setattr(self.s, k, getattr(self.s.s, k))

View File

@@ -41,10 +41,10 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
## ##
## bad filenames ## bad filenames
-dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh')
mkdir -p "${dirs[@]}"
-for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
# qw er+ty%20ui%%20op<as>df&gh&amp;jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
## ##
## upload mojibake ## upload mojibake
@@ -79,6 +79,8 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
# get all up2k search result URLs # get all up2k search result URLs
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n")); var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
# debug md-editor line tracking
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
## ##
## bash oneliners ## bash oneliners
@@ -122,6 +124,13 @@ e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py -v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
# generate the sine meme
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
# play icon calibration pics
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done
## ##
## vscode ## vscode
@@ -153,7 +162,7 @@ brew install python@2
pip install virtualenv pip install virtualenv
# readme toc # readme toc
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}' cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
# fix firefox phantom breakpoints, # fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable) # suggestions from bugtracker, doesnt work (debugger is not attachable)
@@ -169,7 +178,7 @@ about:config >> devtools.debugger.prefs-schema-version = -1
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions # download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | while read v t; do fn="copyparty $v $t.py"; [ -e $fn ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
## ##

View File

@@ -1,11 +1,11 @@
-FROM alpine:3.13
FROM alpine:3.14
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
-    ver_hashwasm=4.7.0 \
    ver_hashwasm=4.9.0 \
-    ver_marked=1.1.0 \
    ver_marked=3.0.4 \
-    ver_ogvjs=1.8.0 \
    ver_ogvjs=1.8.4 \
-    ver_mde=2.14.0 \
    ver_mde=2.15.0 \
-    ver_codemirror=5.59.3 \
    ver_codemirror=5.62.3 \
    ver_fontawesome=5.13.0 \
    ver_zopfli=1.0.3
@@ -74,23 +74,16 @@ RUN cd hash-wasm \
# build ogvjs # build ogvjs
RUN cd ogvjs-$ver_ogvjs \ RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \ && cp -pv \
ogv.js \
ogv-worker-audio.js \ ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \ ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \ ogv-demuxer-ogg-wasm.wasm \
ogv-demuxer-webm-wasm.js \
ogv-demuxer-webm-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \ ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \ ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \ ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \ ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist /z/dist \
&& cp -pv \
# ogv-demuxer-ogg.js \ ogv-es2017.js /z/dist/ogv.js
# ogv-demuxer-webm.js \
# ogv-decoder-audio-opus.js \
# ogv-decoder-audio-vorbis.js \
# dynamicaudio.swf \
# build marked # build marked
@@ -120,9 +113,10 @@ RUN cd CodeMirror-$ver_codemirror \
COPY easymde.patch /z/ COPY easymde.patch /z/
RUN cd easy-markdown-editor-$ver_mde \ RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde.patch \ && patch -p1 < /z/easymde.patch \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \ && sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \ && sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \ && sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
&& npm install && npm install
COPY easymde-ln.patch /z/ COPY easymde-ln.patch /z/

View File

@@ -1,15 +1,15 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v1.0.0 +git; adds linetracking to marked.js v3.0.4;
add data-ln="%d" to most tags, %d is the source markdown line add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -49,4 +49,5 @@ function mangle(text) { @@ -50,4 +50,5 @@ function mangle(text) {
module.exports = class Lexer { module.exports = class Lexer {
constructor(options) { constructor(options) {
+ this.ln = 1; // like most editors, start counting from 1 + this.ln = 1; // like most editors, start counting from 1
this.tokens = []; this.tokens = [];
this.tokens.links = Object.create(null); this.tokens.links = Object.create(null);
@@ -108,4 +109,15 @@ module.exports = class Lexer { @@ -127,4 +128,15 @@ module.exports = class Lexer {
} }
+ set_ln(token, ln = this.ln) { + set_ln(token, ln = this.ln) {
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ +
/** /**
* Lexing * Lexing
@@ -113,10 +125,15 @@ module.exports = class Lexer { @@ -134,7 +146,11 @@ module.exports = class Lexer {
blockTokens(src, tokens = [], top = true) {
src = src.replace(/^ +$/gm, ''); src = src.replace(/^ +$/gm, '');
- let token, i, l, lastToken; }
+ let token, i, l, lastToken, ln; - let token, lastToken, cutSrc, lastParagraphClipped;
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
while (src) { while (src) {
+ // this.ln will be bumped by recursive calls into this func; + // this.ln will be bumped by recursive calls into this func;
+ // reset the count and rely on the outermost token's raw only + // reset the count and rely on the outermost token's raw only
+ ln = this.ln; + ln = this.ln;
+ +
// newline if (this.options.extensions
&& this.options.extensions.block
@@ -142,4 +158,5 @@ module.exports = class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
return true;
@@ -153,4 +170,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.space(src)) { if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); // is \n if not type + this.set_ln(token, ln); // is \n if not type
if (token.type) { if (token.type) {
tokens.push(token); tokens.push(token);
@@ -128,4 +145,5 @@ module.exports = class Lexer { @@ -162,4 +180,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.code(src, tokens)) { if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
if (token.type) { lastToken = tokens[tokens.length - 1];
tokens.push(token); // An indented code block cannot interrupt a paragraph.
@@ -141,4 +159,5 @@ module.exports = class Lexer { @@ -177,4 +196,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.fences(src)) { if (token = this.tokenizer.fences(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -148,4 +167,5 @@ module.exports = class Lexer { @@ -184,4 +204,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.heading(src)) { if (token = this.tokenizer.heading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -155,4 +175,5 @@ module.exports = class Lexer { @@ -191,4 +212,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.nptable(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token);
tokens.push(token);
continue;
@@ -162,4 +183,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.hr(src)) { if (token = this.tokenizer.hr(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -170,4 +192,7 @@ module.exports = class Lexer { @@ -198,4 +220,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.blockquote(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
token.tokens = this.blockTokens(token.text, [], top);
+ // recursive call to blockTokens probably bumped this.ln,
+ // token.raw is more reliable so reset this.ln and use that
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -180,5 +205,9 @@ module.exports = class Lexer { @@ -205,4 +228,5 @@ module.exports = class Lexer {
for (i = 0; i < l; i++) { if (token = this.tokenizer.list(src)) {
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false); src = src.substring(token.raw.length);
+ // list entries don't bump the linecounter, so let's
+ this.ln++;
}
+ // then reset like blockquote
+ this.set_ln(token, ln); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -188,4 +217,5 @@ module.exports = class Lexer { @@ -212,4 +236,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.html(src)) { if (token = this.tokenizer.html(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -195,4 +225,5 @@ module.exports = class Lexer { @@ -219,4 +244,5 @@ module.exports = class Lexer {
if (top && (token = this.tokenizer.def(src))) { if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
if (!this.tokens.links[token.tag]) { lastToken = tokens[tokens.length - 1];
this.tokens.links[token.tag] = { if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
@@ -207,4 +238,5 @@ module.exports = class Lexer { @@ -236,4 +262,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.table(src)) { if (token = this.tokenizer.table(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -214,4 +246,5 @@ module.exports = class Lexer { @@ -243,4 +270,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.lheading(src)) { if (token = this.tokenizer.lheading(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); tokens.push(token);
continue; continue;
@@ -221,4 +254,5 @@ module.exports = class Lexer { @@ -263,4 +291,5 @@ module.exports = class Lexer {
if (top && (token = this.tokenizer.paragraph(src))) { }
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
if (lastParagraphClipped && lastToken.type === 'paragraph') {
@@ -280,4 +309,6 @@ module.exports = class Lexer {
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token); + this.set_ln(token, ln);
tokens.push(token); + this.ln++;
continue; lastToken = tokens[tokens.length - 1];
@@ -228,4 +262,5 @@ module.exports = class Lexer { if (lastToken && lastToken.type === 'text') {
if (token = this.tokenizer.text(src, tokens)) { @@ -355,4 +386,5 @@ module.exports = class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ this.set_ln(token);
if (token.type) {
tokens.push(token);
@@ -263,4 +298,7 @@ module.exports = class Lexer {
for (i = 0; i < l; i++) {
token = tokens[i];
+ // this.ln is at EOF when inline() is invoked;
+ // all this affects <br> tags only so no biggie if it breaks
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
switch (token.type) { tokens.push(token);
case 'paragraph': return true;
@@ -386,4 +424,6 @@ module.exports = class Lexer { @@ -420,4 +452,6 @@ module.exports = class Lexer {
if (token = this.tokenizer.br(src)) { if (token = this.tokenizer.br(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
+ // no need to reset (no more blockTokens anyways) + // no need to reset (no more blockTokens anyways)
+ token.ln = this.ln++; + token.ln = this.ln++;
tokens.push(token); tokens.push(token);
continue; continue;
@@ -462,4 +496,5 @@ module.exports = class Lexer {
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln;
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
prevChar = token.raw.slice(-1);
diff --git a/src/Parser.js b/src/Parser.js diff --git a/src/Parser.js b/src/Parser.js
--- a/src/Parser.js --- a/src/Parser.js
+++ b/src/Parser.js +++ b/src/Parser.js
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
+ this.ln = 0; // error indicator; should always be set >=1 from tokens + this.ln = 0; // error indicator; should always be set >=1 from tokens
} }
@@ -55,4 +56,9 @@ module.exports = class Parser { @@ -64,4 +65,8 @@ module.exports = class Parser {
for (i = 0; i < l; i++) { for (i = 0; i < l; i++) {
token = tokens[i]; token = tokens[i];
+ // take line-numbers from tokens whenever possible + // take line-numbers from tokens whenever possible
+ // and update the renderer's html attribute with the new value + // and update the renderer's html attribute with the new value
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
+ this.renderer.tag_ln(this.ln); + this.renderer.tag_ln(this.ln);
+
switch (token.type) { // Run any renderer extensions
case 'space': { @@ -124,7 +129,10 @@ module.exports = class Parser {
@@ -105,7 +111,10 @@ module.exports = class Parser {
} }
- body += this.renderer.tablerow(cell); - body += this.renderer.tablerow(cell);
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).table(header, body); + out += this.renderer.tag_ln(token.ln).table(header, body);
continue; continue;
} }
@@ -148,8 +157,12 @@ module.exports = class Parser { @@ -167,8 +175,12 @@ module.exports = class Parser {
itemBody += this.parse(item.tokens, loose); itemBody += this.parse(item.tokens, loose);
- body += this.renderer.listitem(itemBody, task, checked); - body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start); + out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
continue; continue;
} }
@@ -160,5 +173,6 @@ module.exports = class Parser { @@ -179,5 +191,6 @@ module.exports = class Parser {
} }
case 'paragraph': { case 'paragraph': {
- out += this.renderer.paragraph(this.parseInline(token.tokens)); - out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).paragraph(t); + out += this.renderer.tag_ln(token.ln).paragraph(t);
continue; continue;
} }
@@ -199,4 +213,6 @@ module.exports = class Parser { @@ -221,4 +234,7 @@ module.exports = class Parser {
for (i = 0; i < l; i++) {
token = tokens[i]; token = tokens[i];
+ // another thing that only affects <br/> and other inlines + // another thing that only affects <br/> and other inlines
+ this.ln = token.ln || this.ln; + this.ln = token.ln || this.ln;
switch (token.type) { +
case 'escape': { // Run any renderer extensions
@@ -229,5 +245,7 @@ module.exports = class Parser { if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
}
case 'br': {
- out += renderer.br();
+ // update the html attribute before writing each <br/>,
+ // don't care about the others
+ out += renderer.tag_ln(this.ln).br();
break;
}
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ +
code(code, infostring, escaped) { code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0]; const lang = (infostring || '').match(/\S*/)[0];
@@ -24,10 +30,10 @@ module.exports = class Renderer { @@ -26,10 +32,10 @@ module.exports = class Renderer {
if (!lang) { if (!lang) {
- return '<pre><code>' - return '<pre><code>'
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ return '<pre' + this.ln + '><code class="' + return '<pre' + this.ln + '><code class="'
+ this.options.langPrefix + this.options.langPrefix
+ escape(lang, true) + escape(lang, true)
@@ -38,5 +44,5 @@ module.exports = class Renderer { @@ -40,5 +46,5 @@ module.exports = class Renderer {
blockquote(quote) { blockquote(quote) {
- return '<blockquote>\n' + quote + '</blockquote>\n'; - return '<blockquote>\n' + quote + '</blockquote>\n';
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n'; + return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
} }
@@ -49,4 +55,5 @@ module.exports = class Renderer { @@ -51,4 +57,5 @@ module.exports = class Renderer {
return '<h' return '<h'
+ level + level
+ + this.ln + + this.ln
+ ' id="' + ' id="'
+ this.options.headerPrefix + this.options.headerPrefix
@@ -59,5 +66,5 @@ module.exports = class Renderer { @@ -61,5 +68,5 @@ module.exports = class Renderer {
} }
// ignore IDs // ignore IDs
- return '<h' + level + '>' + text + '</h' + level + '>\n'; - return '<h' + level + '>' + text + '</h' + level + '>\n';
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n'; + return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
} }
@@ -73,5 +80,5 @@ module.exports = class Renderer { @@ -75,5 +82,5 @@ module.exports = class Renderer {
listitem(text) { listitem(text) {
- return '<li>' + text + '</li>\n'; - return '<li>' + text + '</li>\n';
+ return '<li' + this.ln + '>' + text + '</li>\n'; + return '<li' + this.ln + '>' + text + '</li>\n';
} }
@@ -85,5 +92,5 @@ module.exports = class Renderer { @@ -87,5 +94,5 @@ module.exports = class Renderer {
paragraph(text) { paragraph(text) {
- return '<p>' + text + '</p>\n'; - return '<p>' + text + '</p>\n';
+ return '<p' + this.ln + '>' + text + '</p>\n'; + return '<p' + this.ln + '>' + text + '</p>\n';
} }
@@ -100,5 +107,5 @@ module.exports = class Renderer { @@ -102,5 +109,5 @@ module.exports = class Renderer {
tablerow(content) { tablerow(content) {
- return '<tr>\n' + content + '</tr>\n'; - return '<tr>\n' + content + '</tr>\n';
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n'; + return '<tr' + this.ln + '>\n' + content + '</tr>\n';
} }
@@ -125,5 +132,5 @@ module.exports = class Renderer { @@ -127,5 +134,5 @@ module.exports = class Renderer {
br() { br() {
- return this.options.xhtml ? '<br/>' : '<br>'; - return this.options.xhtml ? '<br/>' : '<br>';
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>'; + return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
} }
@@ -151,5 +158,5 @@ module.exports = class Renderer { @@ -153,5 +160,5 @@ module.exports = class Renderer {
} }
- let out = '<img src="' + href + '" alt="' + text + '"'; - let out = '<img src="' + href + '" alt="' + text + '"';
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"'; + let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
if (title) { if (title) {
out += ' title="' + title + '"'; out += ' title="' + title + '"';
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
const l = list.items.length;
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but at least it doesn't get infinitely bad
+ this.lexer.ln--;
+
// Item child tokens handled here at end because we needed to have the final item to trim it first
for (i = 0; i < l; i++) {
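
The hunks above thread a source line counter through marked: the Lexer keeps an `ln`, the Tokenizer walks it back once per list group so nesting doesn't drift, and the Renderer methods splice `this.ln` into their opening tags. A minimal Python sketch of the same idea, assuming the injected string is a data-ln attribute (the attribute name is not shown in these hunks):

from html import escape

def render_paragraphs(md_text):
    # stamp each rendered block with the 1-based source line it came from,
    # so a viewer can map HTML elements back to the markdown source
    out = []
    for ln, line in enumerate(md_text.splitlines(), 1):
        if line.strip():
            out.append('<p data-ln="%d">%s</p>' % (ln, escape(line)))
    return "\n".join(out)

print(render_paragraphs("hello\n\nworld"))
# <p data-ln="1">hello</p>
# <p data-ln="3">world</p>
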


@@ -1,52 +1,52 @@
diff --git a/src/Lexer.js b/src/Lexer.js diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js --- a/src/Lexer.js
+++ b/src/Lexer.js +++ b/src/Lexer.js
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js'); @@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
/** /**
* smartypants text replacement * smartypants text replacement
- */ - */
+ * + *
function smartypants(text) { function smartypants(text) {
return text return text
@@ -26,5 +26,5 @@ function smartypants(text) { @@ -27,5 +27,5 @@ function smartypants(text) {
/** /**
* mangle email addresses * mangle email addresses
- */ - */
+ * + *
function mangle(text) { function mangle(text) {
let out = '', let out = '',
@@ -439,5 +439,5 @@ module.exports = class Lexer { @@ -465,5 +465,5 @@ module.exports = class Lexer {
// autolink // autolink
- if (token = this.tokenizer.autolink(src, mangle)) { - if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) { + if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -446,5 +446,5 @@ module.exports = class Lexer { @@ -472,5 +472,5 @@ module.exports = class Lexer {
// url (gfm) // url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) { - if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) { + if (!this.state.inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); tokens.push(token);
@@ -453,5 +453,5 @@ module.exports = class Lexer { @@ -493,5 +493,5 @@ module.exports = class Lexer {
}
// text }
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) { - if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) { + if (token = this.tokenizer.inlineText(cutSrc)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
tokens.push(token); this.ln = token.ln || this.ln;
diff --git a/src/Renderer.js b/src/Renderer.js diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js --- a/src/Renderer.js
+++ b/src/Renderer.js +++ b/src/Renderer.js
@@ -140,5 +140,5 @@ module.exports = class Renderer { @@ -142,5 +142,5 @@ module.exports = class Renderer {
link(href, title, text) { link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href); + href = cleanUrl(this.options.baseUrl, href);
if (href === null) { if (href === null) {
return text; return text;
@@ -153,5 +153,5 @@ module.exports = class Renderer { @@ -155,5 +155,5 @@ module.exports = class Renderer {
image(href, title, text) { image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js --- a/src/Tokenizer.js
+++ b/src/Tokenizer.js +++ b/src/Tokenizer.js
@@ -287,11 +287,8 @@ module.exports = class Tokenizer { @@ -321,14 +321,7 @@ module.exports = class Tokenizer {
if (cap) { type: 'html',
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
+ type: 'html',
raw: cap[0], raw: cap[0],
- pre: !this.options.sanitizer - pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'), - && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0] + pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style', text: cap[0]
+ text: cap[0]
}; };
- if (this.options.sanitize) {
- token.type = 'paragraph';
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
- token.tokens = [];
- this.lexer.inline(token.text, token.tokens);
- }
return token;
} }
@@ -421,15 +418,9 @@ module.exports = class Tokenizer { @@ -477,15 +470,9 @@ module.exports = class Tokenizer {
return { return {
- type: this.options.sanitize - type: this.options.sanitize
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- : 'html', - : 'html',
+ type: 'html', + type: 'html',
raw: cap[0], raw: cap[0],
inLink, inLink: this.lexer.state.inLink,
inRawBlock, inRawBlock: this.lexer.state.inRawBlock,
- text: this.options.sanitize - text: this.options.sanitize
- ? (this.options.sanitizer - ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0]) - ? this.options.sanitizer(cap[0])
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0] + text: cap[0]
}; };
} }
@@ -550,10 +541,10 @@ module.exports = class Tokenizer { @@ -672,10 +659,10 @@ module.exports = class Tokenizer {
} }
- autolink(src, mangle) { - autolink(src, mangle) {
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]); + text = escape(cap[1]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -578,10 +569,10 @@ module.exports = class Tokenizer { @@ -700,10 +687,10 @@ module.exports = class Tokenizer {
} }
- url(src, mangle) { - url(src, mangle) {
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]); + text = escape(cap[0]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
@@ -615,12 +606,12 @@ module.exports = class Tokenizer { @@ -737,12 +724,12 @@ module.exports = class Tokenizer {
} }
- inlineText(src, inRawBlock, smartypants) { - inlineText(src, smartypants) {
+ inlineText(src, inRawBlock) { + inlineText(src) {
const cap = this.rules.inline.text.exec(src); const cap = this.rules.inline.text.exec(src);
if (cap) { if (cap) {
let text; let text;
if (inRawBlock) { if (this.lexer.state.inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]; - text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0]; + text = cap[0];
} else { } else {
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js --- a/src/defaults.js
+++ b/src/defaults.js +++ b/src/defaults.js
@@ -8,12 +8,8 @@ function getDefaults() { @@ -9,12 +9,8 @@ function getDefaults() {
highlight: null, highlight: null,
langPrefix: 'language-', langPrefix: 'language-',
- mangle: true, - mangle: true,
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
+function cleanUrl(base, href) { +function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) { if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href); href = resolveUrl(base, href);
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) { @@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
} }
-function checkSanitizeDeprecation(opt) { -function checkSanitizeDeprecation(opt) {
@@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
- } - }
-} -}
- -
module.exports = { // copied from https://stackoverflow.com/a/5450113/806777
escape, function repeatString(pattern, count) {
@@ -239,5 +220,4 @@ module.exports = { @@ -260,5 +241,4 @@ module.exports = {
splitCells,
rtrim, rtrim,
- findClosingBracket, findClosingBracket,
- checkSanitizeDeprecation - checkSanitizeDeprecation,
+ findClosingBracket repeatString
}; };
diff --git a/src/marked.js b/src/marked.js diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js --- a/src/marked.js
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
- checkSanitizeDeprecation(opt); - checkSanitizeDeprecation(opt);
if (callback) { if (callback) {
@@ -108,5 +106,5 @@ function marked(src, opt, callback) { @@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
return Parser.parse(tokens, opt);
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
try {
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
return Parser.parseInline(tokens, opt);
} catch (e) { } catch (e) {
- e.message += '\nPlease report this to https://github.com/markedjs/marked.'; - e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+ e.message += '\nmake issue @ https://github.com/9001/copyparty'; + e.message += '\nmake issue @ https://github.com/9001/copyparty';
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js --- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js +++ b/test/specs/run-spec.js
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) { @@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
} }
- if (spec.options.sanitizer) { - if (spec.options.sanitizer) {
- // eslint-disable-next-line no-eval - // eslint-disable-next-line no-eval
- spec.options.sanitizer = eval(spec.options.sanitizer); - spec.options.sanitizer = eval(spec.options.sanitizer);
- } - }
-
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => { (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true }); const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new'); runSpecs('New', './new');
runSpecs('ReDOS', './redos'); runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js --- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js +++ b/test/unit/Lexer-spec.js
@@ -465,5 +465,5 @@ a | b @@ -589,5 +589,5 @@ paragraph
}); });
- it('sanitize', () => { - it('sanitize', () => {
+ /*it('sanitize', () => { + /*it('sanitize', () => {
expectTokens({ expectTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b @@ -607,5 +607,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -587,5 +587,5 @@ a | b @@ -652,5 +652,5 @@ paragraph
}); });
- it('html sanitize', () => { - it('html sanitize', () => {
+ /*it('html sanitize', () => { + /*it('html sanitize', () => {
expectInlineTokens({ expectInlineTokens({
md: '<div>html</div>', md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b @@ -660,5 +660,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('link', () => { it('link', () => {
@@ -909,5 +909,5 @@ a | b @@ -971,5 +971,5 @@ paragraph
}); });
- it('autolink mangle email', () => { - it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => { + /*it('autolink mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: '<test@example.com>', md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b @@ -991,5 +991,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
it('url', () => { it('url', () => {
@@ -966,5 +966,5 @@ a | b @@ -1028,5 +1028,5 @@ paragraph
}); });
- it('url mangle email', () => { - it('url mangle email', () => {
+ /*it('url mangle email', () => { + /*it('url mangle email', () => {
expectInlineTokens({ expectInlineTokens({
md: 'test@example.com', md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b @@ -1048,5 +1048,5 @@ paragraph
] ]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -1002,5 +1002,5 @@ a | b @@ -1064,5 +1064,5 @@ paragraph
}); });
- describe('smartypants', () => { - describe('smartypants', () => {
+ /*describe('smartypants', () => { + /*describe('smartypants', () => {
it('single quotes', () => { it('single quotes', () => {
expectInlineTokens({ expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b @@ -1134,5 +1134,5 @@ paragraph
}); });
}); });
- }); - });


@@ -2,7 +2,7 @@ all: $(addsuffix .gz, $(wildcard *.*))
%.gz: % %.gz: %
#brotli -q 11 $< #brotli -q 11 $<
pigz -11 -J 34 -I 573 $< pigz -11 -I 573 $<
# pigz -11 -J 34 -I 100 -F < $< > $@.first # pigz -11 -J 34 -I 100 -F < $< > $@.first


@@ -2,6 +2,7 @@
set -e set -e
echo echo
help() { exec cat <<'EOF'
# optional args: # optional args:
# #
@@ -15,17 +16,19 @@ echo
# #
# `no-sh` makes just the python sfx, skips the sh/unix sfx # `no-sh` makes just the python sfx, skips the sh/unix sfx
# #
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs # `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support) # (only affects apple devices; everything else has native support)
# #
# `no-cm` saves ~90k by removing easymde/codemirror # `no-cm` saves ~92k by removing easymde/codemirror
# (the fancy markdown editor) # (the fancy markdown editor)
# #
# `no-fnt` saves ~9k by removing the source-code-pro font # `no-fnt` saves ~9k by removing the source-code-pro font
# (mainly used my the markdown viewer/editor) # (browsers will try to use 'Consolas' instead)
# #
# `no-dd` saves ~2k by removing the mouse cursor # `no-dd` saves ~2k by removing the mouse cursor
EOF
}
# port install gnutar findutils gsed coreutils # port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true gtar=$(command -v gtar || command -v gnutar) || true
@@ -34,6 +37,9 @@ gtar=$(command -v gtar || command -v gnutar) || true
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
shuf() { gshuf "$@"; }
nproc() { gnproc; }
sha1sum() { shasum "$@"; }
unexpand() { gunexpand "$@"; } unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null && command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; } realpath() { grealpath "$@"; }
@@ -61,6 +67,7 @@ pybin=$(command -v python3 || command -v python) || {
use_gz= use_gz=
do_sh=1 do_sh=1
do_py=1 do_py=1
zopf=2560
while [ ! -z "$1" ]; do while [ ! -z "$1" ]; do
case $1 in case $1 in
clean) clean=1 ; ;; clean) clean=1 ; ;;
@@ -72,6 +79,8 @@ while [ ! -z "$1" ]; do
no-cm) no_cm=1 ; ;; no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;; no-sh) do_sh= ; ;;
no-py) do_py= ; ;; no-py) do_py= ; ;;
fast) zopf=100 ; ;;
*) help ; ;;
esac esac
shift shift
done done
@@ -81,16 +90,23 @@ tmv() {
mv t "$1" mv t "$1"
} }
stamp=$(
for d in copyparty scripts; do
find $d -type f -printf '%TY-%Tm-%Td %TH:%TM:%TS %p\n'
done | sort | tail -n 1 | sha1sum | cut -c-16
)
rm -rf sfx/* rm -rf sfx/*
mkdir -p sfx build mkdir -p sfx build
cd sfx cd sfx
[ $repack ] && { tmpdir="$(
old="$(
printf '%s\n' "$TMPDIR" /tmp | printf '%s\n' "$TMPDIR" /tmp |
awk '/./ {print; exit}' awk '/./ {print; exit}'
)/pe-copyparty" )"
[ $repack ] && {
old="$tmpdir/pe-copyparty"
echo "repack of files in $old" echo "repack of files in $old"
cp -pR "$old/"*{dep-j2,copyparty} . cp -pR "$old/"*{dep-j2,copyparty} .
} }
@@ -122,7 +138,7 @@ cd sfx
# msys2 tar is bad, make the best of it # msys2 tar is bad, make the best of it
echo collecting source echo collecting source
[ $clean ] && { [ $clean ] && {
(cd .. && git archive master >tar) && tar -xf ../tar copyparty (cd .. && git archive hovudstraum >tar) && tar -xf ../tar copyparty
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar (cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
} }
[ $clean ] || { [ $clean ] || {
@@ -132,6 +148,7 @@ cd sfx
} }
ver= ver=
[ -z "$repack" ] &&
git describe --tags >/dev/null 2>/dev/null && { git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0 git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')"; ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
@@ -163,7 +180,7 @@ git describe --tags >/dev/null 2>/dev/null && {
[ -z "$ver" ] && [ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ { ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)" gsub(/[^0-9,a-g-]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s) ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx) hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
@@ -172,12 +189,12 @@ mkdir -p ../dist
sfx_out=../dist/copyparty-sfx sfx_out=../dist/copyparty-sfx
echo cleanup echo cleanup
find .. -name '*.pyc' -delete find -name '*.pyc' -delete
find .. -name __pycache__ -delete find -name __pycache__ -delete
# especially prevent osx from leaking your lan ip (wtf apple) # especially prevent osx from leaking your lan ip (wtf apple)
find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete find -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done find -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps echo use smol web deps
rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile
@@ -189,6 +206,15 @@ while IFS= read -r x; do
tmv "$x" tmv "$x"
done done
find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
cat have | while IFS= read -r x; do
grep -qF -- "$x" ../scripts/sfx.ls || {
echo "unexpected file: $x"
exit 1
}
done
rm have
[ $no_ogv ] && [ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}* rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
@@ -196,19 +222,24 @@ done
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde* rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html echo h > copyparty/web/mde.html
f=copyparty/web/md.html f=copyparty/web/md.html
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f" sed -r '/edit2">edit \(fancy/d' <$f >t
tmv "$f"
} }
[ $no_fnt ] && { [ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2 rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/md.css f=copyparty/web/ui.css
sed -r '/scp\.woff2/d' <$f >t && tmv "$f" gzip -d "$f.gz" || true
sed -r "s/src:.*scp.*\)/src:local('Consolas')/" <$f >t
tmv "$f"
} }
[ $no_dd ] && { [ $no_dd ] && {
rm -rf copyparty/web/dd rm -rf copyparty/web/dd
f=copyparty/web/browser.css f=copyparty/web/browser.css
sed -r 's/(cursor: )url\([^)]+\), (pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: cursor/d' <$f >t && tmv "$f" gzip -d "$f.gz" || true
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
tmv "$f"
} }
[ $repack ] || [ $repack ] ||
@@ -221,8 +252,15 @@ f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f" tmv "$f"
grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 |
while IFS= read -r f; do
(echo "# coding: utf-8"; cat "$f") >t
tmv "$f"
done
# up2k goes from 28k to 22k laff # up2k goes from 28k to 22k laff
echo entabbening awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
echo entabbening &&
find | grep -E '\.css$' | while IFS= read -r f; do find | grep -E '\.css$' | while IFS= read -r f; do
awk '{ awk '{
sub(/^[ \t]+/,""); sub(/^[ \t]+/,"");
@@ -236,25 +274,61 @@ find | grep -E '\.css$' | while IFS= read -r f; do
' <$f | sed 's/;\}$/}/' >t ' <$f | sed 's/;\}$/}/' >t
tmv "$f" tmv "$f"
done done
unexpand -h 2>/dev/null &&
find | grep -E '\.(js|html)$' | while IFS= read -r f; do find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t unexpand -t 4 --first-only <"$f" >t
tmv "$f" tmv "$f"
done done
gzres() { gzres() {
command -v pigz && command -v pigz &&
pk='pigz -11 -J 34 -I 100' || pk="pigz -11 -I $zopf" ||
pk='gzip' pk='gzip'
echo "$pk" np=$(nproc)
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do echo "$pk #$np"
while IFS=' ' read -r _ f; do
while true; do
na=$(ps auxwww | grep -F "$pk" | wc -l)
[ $na -le $np ] && break
sleep 0.2
done
echo -n . echo -n .
$pk "$f" $pk "$f" &
done done < <(
echo find -printf '%s %p\n' |
grep -E '\.(js|css)$' |
grep -vF /deps/ |
sort -nr
)
wait
echo
}
zdir="$tmpdir/cpp-mksfx"
[ -e "$zdir/$stamp" ] || rm -rf "$zdir"
mkdir -p "$zdir"
echo a > "$zdir/$stamp"
nf=$(ls -1 "$zdir"/arc.* | wc -l)
[ $nf -ge 2 ] && [ ! $repack ] && use_zdir=1 || use_zdir=
[ $use_zdir ] || {
echo "$nf alts += 1"
gzres
[ $repack ] ||
tar -cf "$zdir/arc.$(date +%s)" copyparty/web/*.gz
}
[ $use_zdir ] && {
arcs=("$zdir"/arc.*)
arc="${arcs[$RANDOM % ${#arcs[@]} ] }"
echo "using $arc"
tar -xf "$arc"
for f in copyparty/web/*.gz; do
rm "${f%.*}"
done
} }
gzres
echo gen tarlist echo gen tarlist
@@ -262,7 +336,7 @@ for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort | sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true (grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
echo creating tar echo creating tar
args=(--owner=1000 --group=1000) args=(--owner=1000 --group=1000)
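
The caching added above keys pre-compressed web assets on a 16-character stamp of the newest mtime under copyparty/ and scripts/, and reuses a random arc.* snapshot when at least two exist instead of re-running the zopfli pass. Roughly the stamp calculation in Python (timezone and sub-second precision differ from the shell pipeline; illustrative only):

import hashlib, os, time

def newest_stamp(dirs=("copyparty", "scripts")):
    # newest "mtime path" line across both trees, sha1'd and cut to 16 chars, mirroring
    # find -printf '%TY-%Tm-%Td %TH:%TM:%TS %p\n' | sort | tail -n 1 | sha1sum | cut -c-16
    lines = []
    for d in dirs:
        for root, _, files in os.walk(d):
            for fn in files:
                path = os.path.join(root, fn)
                ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(os.path.getmtime(path)))
                lines.append("%s %s" % (ts, path))
    if not lines:
        return "0" * 16
    return hashlib.sha1((max(lines) + "\n").encode()).hexdigest()[:16]
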


@@ -61,7 +61,7 @@ rls_dir="$tmp/copyparty-$ver"
mkdir "$rls_dir" mkdir "$rls_dir"
echo ">>> export from git" echo ">>> export from git"
git archive master | tar -xC "$rls_dir" git archive hovudstraum | tar -xC "$rls_dir"
echo ">>> export untracked deps" echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir" tar -c copyparty/web/deps | tar -xC "$rls_dir"
@@ -122,5 +122,5 @@ echo " $zip_path"
echo " $tgz_path" echo " $tgz_path"
echo echo
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done # function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in hovudstraum rls src ; do alr $x; done

scripts/sfx.ls (new file, 77 lines)

@@ -0,0 +1,77 @@
copyparty,
copyparty/__init__.py,
copyparty/__main__.py,
copyparty/__version__.py,
copyparty/authsrv.py,
copyparty/bos,
copyparty/bos/__init__.py,
copyparty/bos/bos.py,
copyparty/bos/path.py,
copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mtag.py,
copyparty/res,
copyparty/res/insecure.pem,
copyparty/star.py,
copyparty/stolen,
copyparty/stolen/__init__.py,
copyparty/stolen/surrogateescape.py,
copyparty/sutil.py,
copyparty/svchub.py,
copyparty/szip.py,
copyparty/tcpsrv.py,
copyparty/th_cli.py,
copyparty/th_srv.py,
copyparty/u2idx.py,
copyparty/up2k.py,
copyparty/util.py,
copyparty/web,
copyparty/web/baguettebox.js,
copyparty/web/browser.css,
copyparty/web/browser.html,
copyparty/web/browser.js,
copyparty/web/browser2.html,
copyparty/web/copyparty.gif,
copyparty/web/dd,
copyparty/web/dd/2.png,
copyparty/web/dd/3.png,
copyparty/web/dd/4.png,
copyparty/web/dd/5.png,
copyparty/web/deps,
copyparty/web/deps/easymde.css,
copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js,
copyparty/web/md.css,
copyparty/web/md.html,
copyparty/web/md.js,
copyparty/web/md2.css,
copyparty/web/md2.js,
copyparty/web/mde.css,
copyparty/web/mde.html,
copyparty/web/mde.js,
copyparty/web/msg.css,
copyparty/web/msg.html,
copyparty/web/splash.css,
copyparty/web/splash.html,
copyparty/web/ui.css,
copyparty/web/up2k.js,
copyparty/web/util.js,
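
This listing is the allow-list the build greps its collected tree against: any path not present in scripts/sfx.ls aborts the build with "unexpected file". A rough Python equivalent of that check (paths relative to the sfx staging dir, as in the script):

import os, sys

def check_manifest(root="copyparty", manifest="../scripts/sfx.ls"):
    # one "path," entry per file and directory, with .gz suffixes stripped
    with open(manifest) as f:
        allowed = set(ln.strip() for ln in f if ln.strip())
    for dirpath, _, files in os.walk(root):
        for p in [dirpath] + [os.path.join(dirpath, fn) for fn in files]:
            p = p.replace("\\", "/")
            if p.endswith(".gz"):
                p = p[:-3]
            if p + "," not in allowed:
                sys.exit("unexpected file: " + p)
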


@@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
# coding: latin-1 # coding: latin-1
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
@@ -364,7 +364,7 @@ def confirm(rv):
except: except:
pass pass
sys.exit(rv) sys.exit(rv or 1)
def run(tmp, j2): def run(tmp, j2):
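
The confirm() tweak above makes the sfx stub exit nonzero even when called with rv=0 or nothing, so an aborted prompt still registers as a failure to whatever launched it. The idiom on its own:

import sys

def bail(rv=0):
    # "rv or 1" maps falsy codes (0, None) to 1, so this error path never exits with success
    sys.exit(rv or 1)
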


@@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
import os import os
import sys import sys


@@ -108,6 +108,7 @@ def tc1():
pdirs = [x.replace("\\", "/") for x in pdirs] pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs] udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs] perms = [x.rstrip("j/")[-1] for x in pdirs]
perms = ["rw" if x == "a" else x for x in perms]
for pd, ud, p in zip(pdirs, udirs, perms): for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j": if ud[-1] == "j":
continue continue
@@ -124,7 +125,7 @@ def tc1():
arg = "{}:{}:{}".format(pd, ud, p, hp) arg = "{}:{}:{}".format(pd, ud, p, hp)
if hp: if hp:
arg += ":chist=" + hp arg += ":c,hist=" + hp
args += ["-v", arg] args += ["-v", arg]
@@ -147,14 +148,14 @@ def tc1():
u = "{}{}/a.h264".format(ub, d) u = "{}{}/a.h264".format(ub, d)
r = requests.get(u) r = requests.get(u)
ok = bool(r) ok = bool(r)
if ok != (p in ["a"]): if ok != (p in ["rw"]):
raise Exception("get {} with perm {} at {}".format(ok, p, u)) raise Exception("get {} with perm {} at {}".format(ok, p, u))
# stat filesystem # stat filesystem
for d, p in zip(pdirs, perms): for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d) u = "{}/a.h264".format(d)
ok = os.path.exists(u) ok = os.path.exists(u)
if ok != (p in ["a", "w"]): if ok != (p in ["rw", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u)) raise Exception("stat {} with perm {} at {}".format(ok, p, u))
# GET thumbnail, verify contents # GET thumbnail, verify contents
@@ -162,7 +163,7 @@ def tc1():
u = "{}{}/a.h264?th=j".format(ub, d) u = "{}{}/a.h264?th=j".format(ub, d)
r = requests.get(u) r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff") ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["a"]): if ok != (p in ["rw"]):
raise Exception("thumb {} with perm {} at {}".format(ok, p, u)) raise Exception("thumb {} with perm {} at {}".format(ok, p, u))
# check tags # check tags
@@ -179,10 +180,10 @@ def tc1():
r_ok = bool(j) r_ok = bool(j)
w_ok = bool(r_ok and j.get("files")) w_ok = bool(r_ok and j.get("files"))
if not r_ok or w_ok != (p in ["a"]): if not r_ok or w_ok != (p in ["rw"]):
raise Exception("ls {} with perm {} at {}".format(ok, p, u)) raise Exception("ls {} with perm {} at {}".format(ok, p, u))
if (tag and p != "a") or (not tag and p == "a"): if (tag and p != "rw") or (not tag and p == "rw"):
raise Exception("tag {} with perm {} at {}".format(tag, p, u)) raise Exception("tag {} with perm {} at {}".format(tag, p, u))
if tag is not None and tag != "48x32": if tag is not None and tag != "48x32":
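
The smoketest now spells full access as rw and uses the :c,hist= volflag form when pointing a volume at its history dir. A hedged sketch of how one -v argument is assembled (the example values are placeholders):

def volume_arg(src, dst, perm, hist=None):
    # SRC:DST:PERM[:c,hist=PATH], as built by the test above
    perm = "rw" if perm == "a" else perm  # legacy "a" (all) becomes read+write
    arg = "{}:{}:{}".format(src, dst, perm)
    if hist:
        arg += ":c,hist=" + hist
    return arg

# e.g. args += ["-v", volume_arg("/tmp/td/vol", "vol", "a", "/tmp/td/hist")]
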


@@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
@@ -8,7 +8,7 @@ import tokenize
def uncomment(fpath): def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """ """modified https://stackoverflow.com/a/62074206"""
with open(fpath, "rb") as f: with open(fpath, "rb") as f:
orig = f.read().decode("utf-8") orig = f.read().decode("utf-8")
@@ -65,9 +65,9 @@ def uncomment(fpath):
def main(): def main():
print("uncommenting", end="") print("uncommenting", end="", flush=True)
for f in sys.argv[1:]: for f in sys.argv[1:]:
print(".", end="") print(".", end="", flush=True)
uncomment(f) uncomment(f)
print("k") print("k")


@@ -61,7 +61,7 @@ class clean2(Command):
pass pass
nuke = [] nuke = []
for (dirpath, dirnames, filenames) in os.walk("."): for (dirpath, _, filenames) in os.walk("."):
for fn in filenames: for fn in filenames:
if ( if (
fn.startswith("MANIFEST") fn.startswith("MANIFEST")
@@ -86,7 +86,7 @@ args = {
"url": "https://github.com/9001/copyparty", "url": "https://github.com/9001/copyparty",
"license": "MIT", "license": "MIT",
"classifiers": [ "classifiers": [
"Development Status :: 4 - Beta", "Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License", "License :: OSI Approved :: MIT License",
"Programming Language :: Python", "Programming Language :: Python",
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2",
@@ -99,7 +99,9 @@ args = {
"Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: Jython",
"Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console", "Environment :: Console",
"Environment :: No Input/Output (Daemon)", "Environment :: No Input/Output (Daemon)",


@@ -1,11 +1,17 @@
### hello world ### hello world
* qwe * qwe
* asd * rty
* uio
* asd
* fgh
* jkl
* zxc * zxc
* vbn
* 573 * 573
* one * one
* two * two
* three
* ||| * |||
|--|--| |--|--|
@@ -134,12 +140,12 @@ a newline toplevel
| a table | on the right | | a table | on the right |
| second row | foo bar | | second row | foo bar |
|| a||a
--|:-:|-: --|:-:|-:
a table | big text in this | aaakbfddd a table | big text in this | aaakbfddd
second row | centred | bbb second row | centred | bbb
|| ||||
--|--|-- --|--|--
foo foo


@@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
@@ -31,6 +31,7 @@ class Cfg(Namespace):
rproxy=0, rproxy=0,
ed=False, ed=False,
nw=False, nw=False,
unpost=600,
no_mv=False, no_mv=False,
no_del=False, no_del=False,
no_zip=False, no_zip=False,
@@ -38,10 +39,14 @@ class Cfg(Namespace):
no_scandir=False, no_scandir=False,
no_sendfile=True, no_sendfile=True,
no_rescan=True, no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False, ihead=False,
nih=True, nih=True,
mtp=[], mtp=[],
mte="a", mte="a",
mth="",
hist=None, hist=None,
no_hash=False, no_hash=False,
css_browser=None, css_browser=None,
@@ -93,7 +98,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top): if not vol.startswith(top):
continue continue
mode = vol[-2].replace("a", "rwmd") mode = vol[-2].replace("a", "rw")
usr = vol[-1] usr = vol[-1]
if usr == "a": if usr == "a":
usr = "" usr = ""
@@ -148,6 +153,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames() tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except: except:
tar = [] tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar] tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar] tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]] tar_ok = [x[0] for x in tar if x[1]]
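
The tar assertion above first strips the top/ prefix that archive members now carry, so the names line up with the paths the test expects. The normalization in isolation:

def strip_prefix(names, prefix="top/"):
    # drop the leading archive folder from each member name
    return [n[len(prefix):] if n.startswith(prefix) else n for n in names]

assert strip_prefix(["top/a.h264", "top/j/b.h264"]) == ["a.h264", "j/b.h264"]
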


@@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
@@ -21,10 +21,14 @@ class Cfg(Namespace):
ex2 = { ex2 = {
"mtp": [], "mtp": [],
"mte": "a", "mte": "a",
"mth": "",
"hist": None, "hist": None,
"no_hash": False, "no_hash": False,
"css_browser": None, "css_browser": None,
"no_voldump": True, "no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0, "rproxy": 0,
} }
ex.update(ex2) ex.update(ex2)
@@ -193,10 +197,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.realpath, os.path.join(td, "a")) self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertAxs(n.axs.uread, ["*"]) self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, []) self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False]) self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False]) self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False]) self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False]) self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])
# breadth-first construction # breadth-first construction
vfs = AuthSrv( vfs = AuthSrv(