Compare commits

...

131 Commits

Author SHA1 Message Date
ed
c9b385db4b v1.0.14 2021-10-30 00:37:46 +02:00
ed
c951b66ae0 less messy startup messages 2021-10-29 23:43:09 +02:00
ed
de735f3a45 list successful binds only 2021-10-29 23:03:36 +02:00
ed
19161425f3 if no args, try to bind 80 and 443 as well 2021-10-29 23:01:07 +02:00
ed
c69e8d5bf4 filesearch donut accuracy 2021-10-29 21:07:46 +02:00
ed
3d3bce2788 less fancy but better 2021-10-29 11:02:20 +02:00
ed
1cb0dc7f8e colorcoded favicon donut 2021-10-29 02:40:17 +02:00
ed
cd5c56e601 u2cli: orz 2021-10-29 01:49:40 +02:00
ed
8c979905e4 mention fedora things 2021-10-29 01:07:58 +02:00
ed
4d69f15f48 fix empty files blocking successive uploads 2021-10-29 01:04:38 +02:00
ed
083f6572f7 ie11 support 2021-10-29 01:04:09 +02:00
ed
4e7dd75266 add upload donut 2021-10-29 01:01:32 +02:00
ed
3eb83f449b truncate ridiculous extensions 2021-10-27 23:42:28 +02:00
ed
d31f69117b better plaintext and vt100 folder listings 2021-10-27 23:04:59 +02:00
ed
f5f9e3ac97 reduce rescan/lifetime wakeups 2021-10-27 22:23:03 +02:00
ed
598d6c598c reduce wakeups in httpsrv 2021-10-27 22:20:21 +02:00
ed
744727087a better rmtree semantics 2021-10-27 09:40:20 +02:00
ed
f93212a665 add logout button to contrl panel 2021-10-27 01:27:59 +02:00
ed
6dade82d2c run tag scrapers in parallel on new uploads 2021-10-27 00:47:50 +02:00
ed
6b737bf1d7 abort tagging if the file has poofed 2021-10-27 00:11:58 +02:00
ed
94dbd70677 plaintext folder listing with ?ls=t 2021-10-27 00:00:12 +02:00
ed
527ae0348e locale-aware sorting of the navpane too 2021-10-26 23:59:21 +02:00
ed
79629c430a add refresh button on volumes listing 2021-10-26 23:58:10 +02:00
ed
908dd61be5 add cheatcode for turning links into downloads 2021-10-26 01:11:07 +02:00
ed
88f77b8cca spacebar as actionkey when ok/cancel focused 2021-10-25 21:31:27 +02:00
ed
1e846657d1 more css nitpicks 2021-10-25 21:31:12 +02:00
ed
ce70f62a88 catch shady vfs configs 2021-10-25 21:13:51 +02:00
ed
bca0cdbb62 v1.0.13 2021-10-24 21:06:14 +02:00
ed
1ee11e04e6 v1.0.12 2021-10-24 03:12:54 +02:00
ed
6eef44f212 ie 2021-10-24 02:57:19 +02:00
ed
8bd94f4a1c add readme banner 2021-10-24 01:24:54 +02:00
ed
4bc4701372 "fix" up2k layout 2021-10-24 01:19:48 +02:00
ed
dfd89b503a ajax navigation in table listing too 2021-10-24 00:54:22 +02:00
ed
060dc54832 thumbnail caching 2021-10-24 00:29:04 +02:00
ed
f7a4ea5793 add --js-browser 2021-10-24 00:26:47 +02:00
ed
71b478e6e2 persist webp test result 2021-10-24 00:23:51 +02:00
ed
ed8fff8c52 more ux 2021-10-24 00:22:46 +02:00
ed
95dc78db10 thumbnails alignment 2021-10-23 21:51:16 +02:00
ed
addeac64c7 checkbox selection hilight 2021-10-23 18:28:45 +02:00
ed
d77ec22007 more ux 2021-10-23 16:59:11 +02:00
ed
20030c91b7 looks better 2021-10-23 02:46:18 +02:00
ed
8b366e255c fix thumbnail toggle not giving instant feedback 2021-10-23 02:38:37 +02:00
ed
6da366fcb0 forgot a few 2021-10-23 02:33:51 +02:00
ed
2fa35f851e ux 2021-10-22 11:12:04 +02:00
ed
e4ca4260bb support mounting entire disks on windows 2021-10-20 00:51:00 +02:00
ed
b69aace8d8 v1.0.11 2021-10-19 01:10:16 +02:00
ed
79097bb43c optimize rmtree on windows 2021-10-19 01:04:21 +02:00
ed
806fac1742 nullwrite fixes 2021-10-19 00:58:24 +02:00
ed
4f97d7cf8d normalize collision suffix 2021-10-19 00:49:35 +02:00
ed
42acc457af allow providing target filename in PUT 2021-10-19 00:48:00 +02:00
ed
c02920607f linkable search results 2021-10-18 21:43:16 +02:00
ed
452885c271 replace the mediaplayer modal with malert 2021-10-18 21:18:46 +02:00
ed
5c242a07b6 refresh file listing on upload complete 2021-10-18 21:10:05 +02:00
ed
088899d59f fix unpost in jumpvols 2021-10-18 21:08:31 +02:00
ed
1faff2a37e u2cli: aggressive flushing on windows 2021-10-18 20:35:50 +02:00
ed
23c8d3d045 option to continue running if binds fail 2021-10-18 20:24:11 +02:00
ed
a033388d2b sort volume listing 2021-10-13 00:21:54 +02:00
ed
82fe45ac56 u2cli: add -z / yolo 2021-10-13 00:03:49 +02:00
ed
bcb7fcda6b u2cli: rsync-like source semantics 2021-10-12 22:46:33 +02:00
ed
726a98100b v1.0.10 2021-10-12 01:43:56 +02:00
ed
2f021a0c2b skip indexing files by regex 2021-10-12 01:40:19 +02:00
ed
eb05cb6c6e add optional favicon 2021-10-12 00:49:50 +02:00
ed
7530af95da css twiddling 2021-10-12 00:48:23 +02:00
ed
8399e95bda ui: fix mkdir race when navpane is closed 2021-10-12 00:46:44 +02:00
ed
3b4dfe326f support pythons with busted ffi 2021-10-12 00:44:55 +02:00
ed
2e787a254e fix mkdir on py2.7 2021-10-11 03:50:45 +02:00
ed
f888bed1a6 v1.0.9 2021-10-09 22:29:23 +02:00
ed
d865e9f35a support non-python mtp plugins 2021-10-09 22:09:35 +02:00
Daedren
fc7fe70f66 is_http now a class variable. Also checks lowercase value 2021-10-09 09:58:14 +02:00
Daedren
5aff39d2b2 Protocol of uploaded file based on X-Forwarded-Proto 2021-10-09 09:58:14 +02:00
ed
d1be37a04a nice 2021-10-09 01:33:27 +02:00
ed
b0fd8bf7d4 optimize indexer for huge filesystems 2021-10-09 01:24:19 +02:00
ed
b9cf8f3973 sfx-repack: fix no-dd killing the loader animation 2021-10-08 01:33:48 +02:00
ed
4588f11613 deflicker lightmode 2021-10-07 23:12:00 +02:00
ed
1a618c3c97 safety 2021-10-07 23:11:37 +02:00
ed
d500a51d97 golf 2021-10-07 23:11:11 +02:00
ed
734e9d3874 v1.0.8 2021-10-04 22:50:06 +02:00
ed
bd5cfc2f1b fix filedrop with fallback hashers 2021-10-04 22:37:35 +02:00
ed
89f88ee78c more obvious dropzones 2021-10-04 22:34:05 +02:00
ed
b2ae14695a show multiple filesearch hits 2021-10-04 21:53:28 +02:00
ed
19d86b44d9 less verbose debug toasts 2021-10-04 21:35:25 +02:00
ed
85be62e38b audioplayer: minute-mark text on progressbar 2021-10-04 21:26:26 +02:00
ed
80f3d90200 better focus outlines 2021-10-04 20:54:07 +02:00
ed
0249fa6e75 fix tests 2021-10-03 19:59:47 +02:00
ed
2d0696e048 allow appending mte in volflags 2021-10-03 19:35:51 +02:00
ed
ff32ec515e add mtp plugin cksum.py 2021-10-03 19:35:20 +02:00
ed
a6935b0293 allow uploading empty files 2021-10-02 23:34:12 +02:00
ed
63eb08ba9f u2cli: nobody asked for python2.6 support so here you go w 2021-10-02 00:36:41 +02:00
ed
e5b67d2b3a u2cli: add eta, errorhandling, better windows support 2021-10-01 22:31:24 +02:00
ed
9e10af6885 make the 404/403 vagueness optional 2021-10-01 19:51:51 +02:00
ed
42bc9115d2 hide logues in search results 2021-10-01 19:33:49 +02:00
ed
0a569ce413 readme: add bash client examples 2021-10-01 19:27:21 +02:00
ed
9a16639a61 u2cli: add webm 2021-10-01 02:25:22 +02:00
ed
57953c68c6 u2cli: add vt100 status panel 2021-10-01 02:10:03 +02:00
ed
088d08963f u2cli: add multithreading 2021-10-01 00:33:45 +02:00
ed
7bc8196821 u2cli: add file-search 2021-09-30 19:36:47 +02:00
ed
7715299dd3 dont show entire web pages in toasts 2021-09-30 19:35:56 +02:00
ed
b8ac9b7994 u2cli: connection reuse for lower latency 2021-09-28 00:14:45 +02:00
ed
98e7d8f728 more docstrings 2021-09-27 23:52:36 +02:00
ed
e7fd871ffe add up2k.py 2021-09-27 23:28:34 +02:00
ed
14aab62f32 fix current-directory hilight 2021-09-27 20:55:05 +02:00
ed
cb81fe962c v1.0.7 2021-09-26 20:15:21 +02:00
ed
fc970d2dea v1.0.6 2021-09-26 19:36:19 +02:00
ed
b0e203d1f9 fuse-cli: support fk volumes 2021-09-26 19:35:13 +02:00
ed
37cef05b19 move up2k flag switch to the settings tab 2021-09-26 17:17:16 +02:00
ed
5886a42901 url escaping 2021-09-26 16:59:02 +02:00
ed
2fd99f807d spa msg 2021-09-26 15:25:19 +02:00
ed
3d4cbd7d10 spa mkdir 2021-09-26 14:48:05 +02:00
ed
f10d03c238 add --no-symlink 2021-09-26 13:49:29 +02:00
ed
f9a66ffb0e up2k: fully parallelize handshakes/uploads 2021-09-26 12:57:16 +02:00
ed
777a50063d wrong key 2021-09-26 03:56:50 +02:00
ed
0bb9154747 catch more tagparser panics 2021-09-26 03:56:30 +02:00
ed
30c3f45072 fix deleting recently uploaded files without e2d 2021-09-26 03:45:16 +02:00
ed
0d5ca67f32 up2k-srv: add option to reuse file-handles 2021-09-26 03:44:22 +02:00
ed
4a8bf6aebd ff-crash: the queue can die before the rest of the browser 2021-09-25 19:26:48 +02:00
ed
b11db090d8 also hide windows-paths in exceptions 2021-09-25 18:19:17 +02:00
ed
189391fccd up2k-cli: less aggressive retries 2021-09-25 18:18:15 +02:00
ed
86d4c43909 update the up2k.sh client example 2021-09-25 18:04:18 +02:00
ed
5994f40982 mention firefox crash 2021-09-25 18:03:19 +02:00
ed
076d32dee5 up2k-srv: try all dupes for matching path 2021-09-24 19:21:19 +02:00
ed
16c8e38ecd support login/uploading from hv3 2021-09-19 17:03:01 +02:00
ed
eacbcda8e5 v1.0.5 2021-09-19 15:11:48 +02:00
ed
59be76cd44 fix basic-upload into fk-enabled folders 2021-09-19 15:00:55 +02:00
ed
5bb0e7e8b3 v1.0.4 2021-09-19 00:41:56 +02:00
ed
b78d207121 encourage statics caching 2021-09-19 00:36:48 +02:00
ed
0fcbcdd08c correctly ordered folders in initial listing 2021-09-19 00:08:29 +02:00
ed
ed6c683922 cosmetic 2021-09-19 00:07:49 +02:00
ed
9fe1edb02b support multiple volume flags in one group 2021-09-18 23:45:43 +02:00
ed
fb3811a708 bunch of filekey fixes 2021-09-18 23:44:44 +02:00
ed
18f8658eec insufficient navpane minsize 2021-09-18 18:55:19 +02:00
ed
3ead4676b0 add release script 2021-09-18 18:43:55 +02:00
48 changed files with 2728 additions and 735 deletions

.gitignore

@@ -9,6 +9,7 @@ buildenv/
 build/
 dist/
 sfx/
+py2/
 .venv/

 # ide
@@ -20,3 +21,7 @@ sfx/
 # derived
 copyparty/web/deps/
 srv/
+
+# state/logs
+up.*.txt
+.hist/

README.md

@@ -19,7 +19,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 ## readme toc

 * top
-* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
+* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
 * [on servers](#on-servers) - you may also want these, especially on servers
 * [on debian](#on-debian) - recommended additional steps on debian
 * [notes](#notes) - general notes
@@ -53,6 +53,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
 * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
 * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
+* [upload events](#upload-events) - trigger a script/program on each upload
 * [complete examples](#complete-examples)
 * [browser support](#browser-support) - TLDR: yes
 * [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -61,6 +62,9 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
 * [security](#security) - some notes on hardening
 * [gotchas](#gotchas) - behavior that might be unexpected
+* [recovering from crashes](#recovering-from-crashes)
+* [client crashes](#client-crashes)
+* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
 * [dependencies](#dependencies) - mandatory deps
 * [optional dependencies](#optional-dependencies) - install these to enable bonus features
 * [install recommended deps](#install-recommended-deps)
@@ -433,7 +437,7 @@ and then theres the tabs below it,
 * plus up to 3 entries each from `[done]` and `[que]` for context
 * `[que]` is all the files that are still queued

-note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD)
+note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)

 if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
@@ -542,6 +546,8 @@ and there are *two* editors
 * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
+* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
 * if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
 * click the bottom-left `π` to open a javascript prompt for debugging
@@ -583,21 +589,23 @@ through arguments:
 * `-e2tsr` also deletes all existing tags, doing a full reindex

 the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
-* `-v ~/music::r:c,e2dsa:c,e2tsr` does a full reindex of everything on startup
+* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
 * `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
 * `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`

 note:
-* the parser currently can't handle `c,e2dsa,e2tsr` so you have to `c,e2dsa:c,e2tsr`
+* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
 * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
 * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

-to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
+to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences:
 * initial indexing is way faster, especially when the volume is on a network disk
 * makes it impossible to [file-search](#file-search)
 * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected

-if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
+similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`
+
+if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`

 ## upload rules
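as a rough sketch of how the new `--no-hash` / `nohash=` / `noidx=` syntax from the hunk above composes on the command line (the volume names and paths here are invented for illustration, not from the repo):

```
# skip hashing disc images everywhere, and fully ignore them in one write-only inbox volume
python3 copyparty-sfx.py -e2dsa --no-hash '\.iso$' \
  -v '/srv/music:music:r:c,e2tsr' \
  -v '/srv/inc:inc:w:c,noidx=\.iso$'
```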
@@ -696,6 +704,25 @@ copyparty can invoke external programs to collect additional metadata for files
 * `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll

+
+## upload events
+
+trigger a script/program on each upload like so:
+
+```
+-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
+```
+
+so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
+
+that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
+
+note that it will only trigger on new unique files, not dupes
+
+and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
+
+if this becomes popular maybe there should be a less janky way to do it actually
+
 ## complete examples

 * read-only music server with bpm and key scanning
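any program that accepts the uploaded file's absolute path as its only argument can stand in for `notify-send` in the upload-events hunk above; a minimal hypothetical hook could be a shell script like this, its stdout presumably being stored as the `a1` tag value the same way other `-mtp` parsers report tags:

```
#!/bin/bash
# upload-hook.sh (hypothetical) -- $1 is the path of the freshly uploaded file
printf '%s %s\n' "$(date -u +%FT%TZ)" "$1" >> /var/log/copyparty-uploads.log
echo ok  # reported back as the a1 tag value
```

wired in exactly like the notify-send example: `-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/local/bin/upload-hook.sh`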
@@ -722,7 +749,7 @@ TLDR: yes
 | zip selection | - | yep | yep | yep | yep | yep | yep | yep |
 | file rename | - | yep | yep | yep | yep | yep | yep | yep |
 | file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
-| navpane | - | `*2` | yep | yep | yep | yep | yep | yep |
+| navpane | - | yep | yep | yep | yep | yep | yep | yep |
 | image viewer | - | yep | yep | yep | yep | yep | yep | yep |
 | video player | - | yep | yep | yep | yep | yep | yep | yep |
 | markdown editor | - | - | yep | yep | yep | yep | yep | yep |
@@ -734,7 +761,6 @@ TLDR: yes
 * internet explorer 6 to 8 behave the same
 * firefox 52 and chrome 49 are the final winxp versions
 * `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
-* `*2` causes a full-page refresh on each navigation
 * `*3` using a wasm decoder which consumes a bit more power

 quick summary of more eccentric web-browsers trying to view a directory index:
@@ -746,7 +772,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 | **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
 | **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
 | **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
-| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
+| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u` |
 | **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
@@ -766,6 +792,14 @@ interact with copyparty using non-browser clients
 * `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
 `chunk <movie.mkv`

+* bash: when curl and wget is not available or too boring
+* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
+* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
+
+* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+* file uploads, file-search, autoresume of aborted/broken uploads
+* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
+
 * FUSE: mount a copyparty server as a local filesystem
 * cross-platform python client available in [./bin/](bin/)
 * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
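building on the curl example above, and on the new ability to name the target file in a PUT (commit 42acc457af), a hypothetical one-liner for uploading a single file under its own name:

```
# curl appends the local filename when -T targets a URL ending in /
curl -b cppwd=wark -T movie.mkv http://127.0.0.1:3923/inc/
```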
@@ -813,14 +847,12 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x
 defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload

-you can ignore the `cannot efficiently use multiple CPU cores` message, very unlikely to be a problem
-
 below are some tweaks roughly ordered by usefulness:

 * `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
 * `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
 * `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
-* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
+* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
 * `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
 * huge amount of short-lived connections
 * really heavy traffic (downloads/uploads)
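putting a few of those tweaks together, a hypothetical throughput-oriented launch (volume and path invented) could look like:

```
# quiet, all cpu cores, http only, filenames/tags searchable but no content hashing
python3 copyparty-sfx.py -q -j 0 --http-only -e2dsa --no-hash . -v /mnt/nas:nas:r
```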
@@ -851,6 +883,26 @@ behavior that might be unexpected
 * users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example

+
+# recovering from crashes
+
+## client crashes
+
+### frefox wsod
+
+firefox 87 can crash during uploads -- the entire browser goes, including all other browser tabs, everything turns white
+
+however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads:
+
+* get a complete list of all uploads, organized by statuts (ok / no-good / busy / queued):
+`var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs`
+
+* list of filenames which failed:
+`var ng = []; for (var a of up2k.ui.tab) if (a.in != 'ok') ng.push(a.hn.split('<a href=\"').slice(-1)[0].split('\">')[0]); ng`
+
+* send the list of filenames to copyparty for safekeeping:
+`await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
+
 # dependencies

 mandatory deps:

bin/README.md

@@ -1,3 +1,11 @@
+# [`up2k.py`](up2k.py)
+* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+* file uploads, file-search, autoresume of aborted/broken uploads
+* faster than browsers
+* early beta, if something breaks just restart it
+
+
 # [`copyparty-fuse.py`](copyparty-fuse.py)
 * mount a copyparty server as a local filesystem (read-only)
 * **supports Windows!** -- expect `194 MiB/s` sequential read
@@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
 * copyparty can Popen programs like these during file indexing to collect additional metadata

 # [`dbtool.py`](dbtool.py)

 upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
@@ -63,6 +72,7 @@ cd /mnt/nas/music/.hist
 ```

 # [`prisonparty.sh`](prisonparty.sh)
 * run copyparty in a chroot, preventing any accidental file access
 * creates bindmounts for /bin, /lib, and so on, see `sysdirs=`

bin/copyparty-fuse.py

@@ -71,7 +71,7 @@ except:
 elif MACOS:
 libfuse = "install https://osxfuse.github.io/"
 else:
-libfuse = "apt install libfuse\n modprobe fuse"
+libfuse = "apt install libfuse3-3\n modprobe fuse"

 print(
 "\n could not import fuse; these may help:"
@@ -393,15 +393,16 @@ class Gateway(object):
 rsp = json.loads(rsp.decode("utf-8"))
 ret = []
-for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
+for statfun, nodes in [
+[self.stat_dir, rsp["dirs"]],
+[self.stat_file, rsp["files"]],
+]:
 for n in nodes:
-fname = unquote(n["href"]).rstrip(b"/")
-fname = fname.decode("wtf-8")
+fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
 if bad_good:
 fname = enwin(fname)
-fun = self.stat_dir if is_dir else self.stat_file
-ret.append([fname, fun(n["ts"], n["sz"]), 0])
+ret.append([fname, statfun(n["ts"], n["sz"]), 0])

 return ret

bin/mtag/README.md

@@ -10,6 +10,7 @@ some of these rely on libraries which are not MIT-compatible
 these do not have any problematic dependencies:

+* [cksum.py](./cksum.py) computes various checksums
 * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
 * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty

bin/mtag/cksum.py (new executable file, 89 lines)

@@ -0,0 +1,89 @@
#!/usr/bin/env python3

import sys
import json
import zlib
import struct
import base64
import hashlib

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


"""
calculates various checksums for uploads,
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
"""


def main():
    config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
    # b suffix = base64 encoded
    # slash = truncate to n bits

    known = {
        "md5": hashlib.md5,
        "sha1": hashlib.sha1,
        "sha256": hashlib.sha256,
        "sha512": hashlib.sha512,
    }
    config = config.split()
    hashers = {
        k: v()
        for k, v in known.items()
        if k in [x.split("/")[0].rstrip("b") for x in known]
    }
    crc32 = 0 if "crc32" in config else None

    with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
        while True:
            buf = f.read(64 * 1024)
            if not buf:
                break

            for x in hashers.values():
                x.update(buf)

            if crc32 is not None:
                crc32 = zlib.crc32(buf, crc32)

    ret = {}
    for s in config:
        alg = s.split("/")[0]
        b64 = alg.endswith("b")
        alg = alg.rstrip("b")
        if alg in hashers:
            v = hashers[alg].digest()
        elif alg == "crc32":
            v = crc32
            if v < 0:
                v &= 2 ** 32 - 1
            v = struct.pack(">L", v)
        else:
            raise Exception("what is {}".format(s))

        if "/" in s:
            v = v[: int(int(s.split("/")[1]) / 8)]

        if b64:
            v = base64.b64encode(v).decode("ascii").rstrip("=")
        else:
            try:
                v = v.hex()
            except:
                import binascii

                v = binascii.hexlify(v)

        ret[s] = v

    print(json.dumps(ret, indent=4))


if __name__ == "__main__":
    main()
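for context, a hypothetical way to enable this parser on a volume, composed from the usage line in the docstring and the `mte`/`mtp` volflag pattern shown in the upload-events example earlier on this page (volume path and name invented):

```
# index crc32 for new uploads into a write-only inbox volume
python3 copyparty-sfx.py \
  -v 'srv/inc:inc:w:c,e2ts:c,mte=+crc32:c,mtp=crc32=bin/mtag/cksum.py'
```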

bin/mtag/wget.py

@@ -6,7 +6,7 @@ application/x-www-form-urlencoded (for example using the
 message/pager function on the website)

 example copyparty config to use this:
---urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts:c,mtp=title=ebin,t300,ad,bin/mtag/wget.py
+--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py

 explained:
 for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,

bin/mtag/yt-ipr.py

@@ -17,7 +17,7 @@ it's probably best to use this through a config file; see res/yt-ipr.conf
 but if you want to use plain arguments instead then:
 -v srv/ytm:ytm:w:rw,ed
-:c,e2ts:c,e2dsa
+:c,e2ts,e2dsa
 :c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
 :c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
 :c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires

bin/up2k.py (new executable file, 799 lines)

@@ -0,0 +1,799 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
"""
up2k.py: upload to copyparty
2021-10-29, v0.10, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10
- almost zero error-handling
- but if something breaks just try again and it'll autoresume
"""
import os
import sys
import stat
import math
import time
import atexit
import signal
import base64
import hashlib
import argparse
import platform
import threading
import requests
import datetime
# from copyparty/__init__.py
PY2 = sys.version_info[0] == 2
if PY2:
from Queue import Queue
from urllib import unquote
from urllib import quote
sys.dont_write_bytecode = True
bytes = str
else:
from queue import Queue
from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote_from_bytes as quote
unicode = str
VT100 = platform.system() != "Windows"
req_ses = requests.Session()
class File(object):
"""an up2k upload task; represents a single file"""
def __init__(self, top, rel, size, lmod):
self.top = top # type: bytes
self.rel = rel.replace(b"\\", b"/") # type: bytes
self.size = size # type: int
self.lmod = lmod # type: float
self.abs = os.path.join(top, rel) # type: bytes
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
# set by get_hashlist
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
# set by handshake
self.ucids = [] # type: list[str] # chunks which need to be uploaded
self.wark = None # type: str
self.url = None # type: str
# set by upload
self.up_b = 0 # type: int
self.up_c = 0 # type: int
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
class FileSlice(object):
"""file-like object providing a fixed window into a file"""
def __init__(self, file, cid):
# type: (File, str) -> FileSlice
self.car, self.len = file.kchunks[cid]
self.cdr = self.car + self.len
self.ofs = 0 # type: int
self.f = open(file.abs, "rb", 512 * 1024)
self.f.seek(self.car)
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
# IOBase, RawIOBase, BufferedIOBase
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
try:
for fun in funs.split():
setattr(self, fun, getattr(self.f, fun))
except:
pass # py27 probably
def tell(self):
return self.ofs
def seek(self, ofs, wh=0):
if wh == 1:
ofs = self.ofs + ofs
elif wh == 2:
ofs = self.len + ofs # provided ofs is negative
if ofs < 0:
ofs = 0
elif ofs >= self.len:
ofs = self.len - 1
self.ofs = ofs
self.f.seek(self.car + ofs)
def read(self, sz):
sz = min(sz, self.len - self.ofs)
ret = self.f.read(sz)
self.ofs += len(ret)
return ret
_print = print
def eprint(*a, **ka):
ka["file"] = sys.stderr
ka["end"] = ""
if not PY2:
ka["flush"] = True
_print(*a, **ka)
if PY2 or not VT100:
sys.stderr.flush()
def flushing_print(*a, **ka):
_print(*a, **ka)
if "flush" not in ka:
sys.stdout.flush()
if not VT100:
print = flushing_print
def termsize():
import os
env = os.environ
def ioctl_GWINSZ(fd):
try:
import fcntl, termios, struct, os
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
except:
return
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
try:
cr = (env["LINES"], env["COLUMNS"])
except:
cr = (25, 80)
return int(cr[1]), int(cr[0])
class CTermsize(object):
def __init__(self):
self.ev = False
self.margin = None
self.g = None
self.w, self.h = termsize()
try:
signal.signal(signal.SIGWINCH, self.ev_sig)
except:
return
thr = threading.Thread(target=self.worker)
thr.daemon = True
thr.start()
def worker(self):
while True:
time.sleep(0.5)
if not self.ev:
continue
self.ev = False
self.w, self.h = termsize()
if self.margin is not None:
self.scroll_region(self.margin)
def ev_sig(self, *a, **ka):
self.ev = True
def scroll_region(self, margin):
self.margin = margin
if margin is None:
self.g = None
eprint("\033[s\033[r\033[u")
else:
self.g = 1 + self.h - margin
m = "{0}\033[{1}A".format("\n" * margin, margin)
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
ss = CTermsize()
def statdir(top):
"""non-recursive listing of directory contents, along with stat() info"""
if hasattr(os, "scandir"):
with os.scandir(top) as dh:
for fh in dh:
yield [os.path.join(top, fh.name), fh.stat()]
else:
for name in os.listdir(top):
abspath = os.path.join(top, name)
yield [abspath, os.stat(abspath)]
def walkdir(top):
"""recursive statdir"""
for ap, inf in sorted(statdir(top)):
if stat.S_ISDIR(inf.st_mode):
for x in walkdir(ap):
yield x
else:
yield ap, inf
def walkdirs(tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
for top in tops:
if top[-1:] == sep:
stop = top.rstrip(sep)
else:
stop = os.path.dirname(top)
if os.path.isdir(top):
for ap, inf in walkdir(top):
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
yield d, n, os.stat(top)
# mostly from copyparty/util.py
def quotep(btxt):
quot1 = quote(btxt, safe=b"/")
if not PY2:
quot1 = quot1.encode("ascii")
return quot1.replace(b" ", b"+")
# from copyparty/util.py
def humansize(sz, terse=False):
"""picks a sensible unit for the given extent"""
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
break
sz /= 1024.0
ret = " ".join([str(sz)[:4].rstrip("."), unit])
if not terse:
return ret
return ret.replace("iB", "").replace(" ", "")
# from copyparty/up2k.py
def up2k_chunksize(filesize):
"""gives The correct chunksize for up2k hashing"""
chunksize = 1024 * 1024
stepsize = 512 * 1024
while True:
for mul in [1, 2]:
nchunks = math.ceil(filesize * 1.0 / chunksize)
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
return chunksize
chunksize += stepsize
stepsize *= mul
# mostly from copyparty/up2k.py
def get_hashlist(file, pcb):
# type: (File, any) -> None
"""generates the up2k hashlist from file contents, inserts it into `file`"""
chunk_sz = up2k_chunksize(file.size)
file_rem = file.size
file_ofs = 0
ret = []
with open(file.abs, "rb", 512 * 1024) as f:
while file_rem > 0:
hashobj = hashlib.sha512()
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
while chunk_rem > 0:
buf = f.read(min(chunk_rem, 64 * 1024))
if not buf:
raise Exception("EOF at " + str(f.tell()))
hashobj.update(buf)
chunk_rem -= len(buf)
digest = hashobj.digest()[:33]
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
ret.append([digest, file_ofs, chunk_sz])
file_ofs += chunk_sz
file_rem -= chunk_sz
if pcb:
pcb(file, file_ofs)
file.cids = ret
file.kchunks = {}
for k, v1, v2 in ret:
file.kchunks[k] = [v1, v2]
def handshake(req_ses, url, file, pw, search):
# type: (requests.Session, str, File, any, bool) -> List[str]
"""
performs a handshake with the server; reply is:
if search, a list of search results
otherwise, a list of chunks to upload
"""
req = {
"hash": [x[0] for x in file.cids],
"name": file.name,
"lmod": file.lmod,
"size": file.size,
}
if search:
req["srch"] = 1
headers = {"Content-Type": "text/plain"} # wtf ed
if pw:
headers["Cookie"] = "=".join(["cppwd", pw])
if file.url:
url = file.url
elif b"/" in file.rel:
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
while True:
try:
r = req_ses.post(url, headers=headers, json=req)
break
except:
eprint("handshake failed, retry...\n")
time.sleep(1)
try:
r = r.json()
except:
raise Exception(r.text)
if search:
return r["hits"]
try:
pre, url = url.split("://")
pre += "://"
except:
pre = ""
file.url = pre + url.split("/")[0] + r["purl"]
file.name = r["name"]
file.wark = r["wark"]
return r["hash"]
def upload(req_ses, file, cid, pw):
# type: (requests.Session, File, str, any) -> None
"""upload one specific chunk, `cid` (a chunk-hash)"""
headers = {
"X-Up2k-Hash": cid,
"X-Up2k-Wark": file.wark,
"Content-Type": "application/octet-stream",
}
if pw:
headers["Cookie"] = "=".join(["cppwd", pw])
f = FileSlice(file, cid)
try:
r = req_ses.post(file.url, headers=headers, data=f)
if not r:
raise Exception(repr(r))
_ = r.content
finally:
f.f.close()
class Daemon(threading.Thread):
def __init__(self, *a, **ka):
threading.Thread.__init__(self, *a, **ka)
self.daemon = True
class Ctl(object):
"""
this will be the coordinator which runs everything in parallel
(hashing, handshakes, uploads) but right now it's p dumb
"""
def __init__(self, ar):
self.ar = ar
ar.files = [
os.path.abspath(os.path.realpath(x.encode("utf-8")))
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
for x in ar.files
]
ar.url = ar.url.rstrip("/") + "/"
if "://" not in ar.url:
ar.url = "http://" + ar.url
eprint("\nscanning {0} locations\n".format(len(ar.files)))
nfiles = 0
nbytes = 0
for _, _, inf in walkdirs(ar.files):
nfiles += 1
nbytes += inf.st_size
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles
self.nbytes = nbytes
if ar.td:
req_ses.verify = False
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs(ar.files)
if ar.safe:
self.safe()
else:
self.fancy()
def safe(self):
"""minimal basic slow boring fallback codepath"""
search = self.ar.s
for nf, (top, rel, inf) in enumerate(self.filegen):
file = File(top, rel, inf.st_size, inf.st_mtime)
upath = file.abs.decode("utf-8", "replace")
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
get_hashlist(file, None)
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
while True:
print(" hs...")
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
if search:
if hs:
for hit in hs:
print(" found: {0}{1}".format(burl, hit["rp"]))
else:
print(" NOT found")
break
file.ucids = hs
if not hs:
break
print("{0} {1}".format(self.nfiles - nf, upath))
ncs = len(hs)
for nc, cid in enumerate(hs):
print(" {0} up {1}".format(ncs - nc, cid))
upload(req_ses, file, cid, self.ar.a)
print(" ok!")
def fancy(self):
self.hash_f = 0
self.hash_c = 0
self.hash_b = 0
self.up_f = 0
self.up_c = 0
self.up_b = 0
self.up_br = 0
self.hasher_busy = 1
self.handshaker_busy = 0
self.uploader_busy = 0
self.t0 = time.time()
self.t0_up = None
self.spd = None
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
if VT100:
atexit.register(self.cleanup_vt100)
ss.scroll_region(3)
Daemon(target=self.hasher).start()
for _ in range(self.ar.j):
Daemon(target=self.handshaker).start()
Daemon(target=self.uploader).start()
idles = 0
while idles < 3:
time.sleep(0.07)
with self.mutex:
if (
self.q_handshake.empty()
and self.q_upload.empty()
and not self.hasher_busy
and not self.handshaker_busy
and not self.uploader_busy
):
idles += 1
else:
idles = 0
if VT100:
maxlen = ss.w - len(str(self.nfiles)) - 14
txt = "\033[s\033[{0}H".format(ss.g)
for y, k, st, f in [
[0, "hash", self.st_hash, self.hash_f],
[1, "send", self.st_up, self.up_f],
]:
txt += "\033[{0}H{1}:".format(ss.g + y, k)
file, arg = st
if not file:
txt += " {0}\033[K".format(arg)
else:
if y:
p = 100 * file.up_b / file.size
else:
p = 100 * arg / file.size
name = file.abs.decode("utf-8", "replace")[-maxlen:]
if "/" in name:
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
m = "{0:6.1f}% {1} {2}\033[K"
txt += m.format(p, self.nfiles - f, name)
txt += "\033[{0}H ".format(ss.g + 2)
else:
txt = " "
if not self.up_br:
spd = self.hash_b / (time.time() - self.t0)
eta = (self.nbytes - self.hash_b) / (spd + 1)
else:
spd = self.up_br / (time.time() - self.t0_up)
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
eta = (self.nbytes - self.up_b) / (spd + 1)
spd = humansize(spd)
eta = str(datetime.timedelta(seconds=int(eta)))
left = humansize(self.nbytes - self.up_b)
tail = "\033[K\033[u" if VT100 else "\r"
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
def cleanup_vt100(self):
ss.scroll_region(None)
eprint("\033[J\033]0;\033\\")
def cb_hasher(self, file, ofs):
self.st_hash = [file, ofs]
def hasher(self):
prd = None
ls = {}
for top, rel, inf in self.filegen:
if self.ar.z:
rd = os.path.dirname(rel)
if prd != rd:
prd = rd
headers = {}
if self.ar.a:
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
ls = {}
try:
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
r = req_ses.get(
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
headers=headers,
)
for f in r.json()["files"]:
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
ls[unquote(rfn)] = f
except:
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
rf = ls.get(os.path.basename(rel), None)
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
self.nfiles -= 1
self.nbytes -= inf.st_size
continue
file = File(top, rel, inf.st_size, inf.st_mtime)
while True:
with self.mutex:
if (
self.hash_b - self.up_b < 1024 * 1024 * 128
and self.hash_c - self.up_c < 64
and (
not self.ar.nh
or (
self.q_upload.empty()
and self.q_handshake.empty()
and not self.uploader_busy
)
)
):
break
time.sleep(0.05)
get_hashlist(file, self.cb_hasher)
with self.mutex:
self.hash_f += 1
self.hash_c += len(file.cids)
self.hash_b += file.size
self.q_handshake.put(file)
self.hasher_busy = 0
self.st_hash = [None, "(finished)"]
def handshaker(self):
search = self.ar.s
q = self.q_handshake
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
while True:
file = q.get()
if not file:
if q == self.q_handshake:
q = self.q_recheck
q.put(None)
continue
self.q_upload.put(None)
break
with self.mutex:
self.handshaker_busy += 1
upath = file.abs.decode("utf-8", "replace")
try:
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
except Exception as ex:
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
self.q_recheck.put(file)
hs = []
else:
raise
if search:
if hs:
for hit in hs:
m = "found: {0}\n {1}{2}\n"
print(m.format(upath, burl, hit["rp"]), end="")
else:
print("NOT found: {0}\n".format(upath), end="")
with self.mutex:
self.up_f += 1
self.up_c += len(file.cids)
self.up_b += file.size
self.handshaker_busy -= 1
continue
with self.mutex:
if not hs:
# all chunks done
self.up_f += 1
self.up_c += len(file.cids) - file.up_c
self.up_b += file.size - file.up_b
if hs and file.up_c:
# some chunks failed
self.up_c -= len(hs)
file.up_c -= len(hs)
for cid in hs:
sz = file.kchunks[cid][1]
self.up_b -= sz
file.up_b -= sz
file.ucids = hs
self.handshaker_busy -= 1
if not hs:
kw = "uploaded" if file.up_b else " found"
print("{0} {1}".format(kw, upath))
for cid in hs:
self.q_upload.put([file, cid])
def uploader(self):
while True:
task = self.q_upload.get()
if not task:
self.st_up = [None, "(finished)"]
break
with self.mutex:
self.uploader_busy += 1
self.t0_up = self.t0_up or time.time()
file, cid = task
try:
upload(req_ses, file, cid, self.ar.a)
except:
eprint("upload failed, retry...\n")
pass # handshake will fix it
with self.mutex:
sz = file.kchunks[cid][1]
file.ucids = [x for x in file.ucids if x != cid]
if not file.ucids:
self.q_handshake.put(file)
self.st_up = [file, cid]
file.up_b += sz
self.up_b += sz
self.up_br += sz
file.up_c += 1
self.up_c += 1
self.uploader_busy -= 1
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
pass
def main():
time.strptime("19970815", "%Y%m%d") # python#7980
if not VT100:
os.system("rem") # enables colors
# fmt: off
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
"foo" uploads the entire folder to URL/foo/
"foo/" uploads the CONTENTS of the folder into URL/
""")
ap.add_argument("url", type=unicode, help="server url, including destination folder")
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap = app.add_argument_group("performance tweaks")
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
ap = app.add_argument_group("tls")
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
ap.add_argument("-td", action="store_true", help="disable certificate check")
# fmt: on
Ctl(app.parse_args())
if __name__ == "__main__":
main()
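for reference, a couple of hypothetical invocations based on the argparse definitions above (server URL, password and paths are placeholders):

```
# upload a folder over 4 parallel connections; no trailing slash = the folder itself lands in /inc/Music/
python3 up2k.py -a wark http://127.0.0.1:3923/inc/ ~/Music

# file-search instead of uploading: report which of the files already exist server-side
python3 up2k.py -s -a wark http://127.0.0.1:3923/inc/ ~/Music/
```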

bin/up2k.sh (executable bit removed)

@@ -8,7 +8,7 @@ set -e
 ##
 ## config

-datalen=$((2*1024*1024*1024))
+datalen=$((128*1024*1024))
 target=127.0.0.1
 posturl=/inc
 passwd=wark
@@ -37,10 +37,10 @@ gendata() {

 # pipe a chunk, get the base64 checksum
 gethash() {
 printf $(
-sha512sum | cut -c-64 |
+sha512sum | cut -c-66 |
 sed -r 's/ .*//;s/(..)/\\x\1/g'
 ) |
-base64 -w0 | cut -c-43 |
+base64 -w0 | cut -c-44 |
 tr '+/' '-_'
 }
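the two `cut` changes track the chunk-hash format used by up2k.py above: a sha512 digest truncated to 33 bytes is 66 hex characters, and its base64 form is 44 characters. a hypothetical standalone use of the function, for a file small enough that up2k would use 1 MiB chunks:

```
# checksum of the first (and only) 1 MiB chunk of a small file
head -c $((1024*1024)) somefile.bin | gethash
```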
@@ -123,7 +123,7 @@ printf '\033[36m'
 {
 {
 cat <<EOF
-POST $posturl/handshake.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Close
 Cookie: cppwd=$passwd
 Content-Type: text/plain;charset=UTF-8
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
 ##
 ## wait for signal to continue

-w8=/dev/shm/$salt.w8
-touch $w8
+true || {
+w8=/dev/shm/$salt.w8
+touch $w8

 echo "ready; rm -f $w8"
 while [ -e $w8 ]; do
 sleep 0.2
 done
+}

 ##
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
 {
 cat <<EOF
-POST $posturl/chunkpit.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Keep-Alive
 Cookie: cppwd=$passwd
 Content-Type: application/octet-stream

copyparty.service (systemd unit)

@@ -3,10 +3,16 @@
 #
 # installation:
 # cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
+# restorecon -vr /etc/systemd/system/copyparty.service
+# firewall-cmd --permanent --add-port={80,443,3923}/tcp
+# firewall-cmd --reload
 #
 # you may want to:
-# change '/usr/bin/python' to another interpreter
+# change '/usr/bin/python3' to another interpreter
 # change '/mnt::rw' to another location or permission-set
+# remove '-p 80,443,3923' to only listen on port 3923
+# add '-i 127.0.0.1' to only allow local connections
+# add '--use-fpool' if uploading into nfs locations
 #
 # with `Type=notify`, copyparty will signal systemd when it is ready to
 # accept connections; correctly delaying units depending on copyparty.
@@ -27,7 +33,7 @@ Description=copyparty file server
 [Service]
 Type=notify
 SyslogIdentifier=copyparty
-ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
+ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
 ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

 [Install]
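as a sketch of the customizations the unit's comments suggest (account, volume and paths invented), the `ExecStart=` line could be adapted like this before installing the unit:

```
# listen on 3923 only, read-only /srv/media for everyone, read-write for the account ed (password wark)
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -a ed:wark -v /srv/media:media:r:rw,ed
```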

copyparty/__main__.py

@@ -20,7 +20,7 @@ import threading
import traceback import traceback
from textwrap import dedent from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2, unicode from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
@@ -208,6 +208,8 @@ def run_argparse(argv, formatter):
except: except:
fk_salt = "hunter2" fk_salt = "hunter2"
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
sects = [ sects = [
[ [
"accounts", "accounts",
@@ -276,7 +278,8 @@ def run_argparse(argv, formatter):
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags) \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2t\033[35m disables metadata collection, overrides -e2t* \033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2* \033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash \033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location \033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage \033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
@@ -332,7 +335,7 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('general options') ap2 = ap.add_argument_group('general options')
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file") ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients") ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores") ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark") ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed") ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap2.add_argument("-ed", action="store_true", help="enable ?dots") ap2.add_argument("-ed", action="store_true", help="enable ?dots")
@@ -344,6 +347,9 @@ def run_argparse(argv, formatter):
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)") ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled") ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
ap2 = ap.add_argument_group('network options') ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)") ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -375,6 +381,11 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile") ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings") ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings") ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
ap2 = ap.add_argument_group('yolo options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
ap2 = ap.add_argument_group('logging options') ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet") ap2.add_argument("-q", action="store_true", help="quiet")
@@ -393,7 +404,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails") ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails") ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res") ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all") ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
@@ -408,8 +419,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds") ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)") ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans") ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval") ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag") ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline") ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
@@ -418,8 +429,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t") ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts") ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead") ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader") ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping") ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)", ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash") default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
@@ -427,7 +438,8 @@ def run_argparse(argv, formatter):
default=".vq,.aq,vc,ac,res,.fps") default=".vq,.aq,vc,ac,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin") ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
-ap2 = ap.add_argument_group('appearance options')
+ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2 = ap.add_argument_group('debug options') ap2 = ap.add_argument_group('debug options')
@@ -483,6 +495,12 @@ def main(argv=None):
argv[idx] = nk argv[idx] = nk
time.sleep(2) time.sleep(2)
try:
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
argv.extend(["-p80,443,3923", "--ign-ebind"])
except:
pass
try: try:
al = run_argparse(argv, RiceFormatter) al = run_argparse(argv, RiceFormatter)
except AssertionError: except AssertionError:

View File

@@ -1,8 +1,8 @@
 # coding: utf-8
-VERSION = (1, 0, 3)
+VERSION = (1, 0, 14)
 CODENAME = "sufficient"
-BUILD_DT = (2021, 9, 18)
+BUILD_DT = (2021, 10, 30)
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -356,7 +356,7 @@ class VFS(object):
if not dbv: if not dbv:
return self, vrem return self, vrem
-vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
+vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
vrem = "/".join([x for x in vrem if x]) vrem = "/".join([x for x in vrem if x])
return dbv, vrem return dbv, vrem
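Why the slice changed, as a toy illustration (values made up, not copyparty API); the old form assumed a "/" always separates the db-owning volume's vpath from the remainder, which goes wrong when that volume sits at the root:

    vpath = "music/cd1"   # path of the request
    dbv_vpath = ""        # db-owning volume mounted at the root

    print(vpath[len(dbv_vpath) + 1 :])          # "usic/cd1" -- eats one char too many
    print(vpath[len(dbv_vpath) :].lstrip("/"))  # "music/cd1"

    dbv_vpath = "music"
    print(vpath[len(dbv_vpath) + 1 :])          # "cd1" -- happened to work
    print(vpath[len(dbv_vpath) :].lstrip("/"))  # "cd1"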
@@ -526,6 +526,24 @@ class AuthSrv(object):
yield prev, True yield prev, True
def _map_volume(self, src, dst, mount, daxs, mflags):
if dst in mount:
m = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
self.log(m.format(dst, mount[dst], src), c=1)
raise Exception("invalid config")
if src in mount.values():
m = "warning: filesystem-path [{}] mounted in multiple locations:"
m = m.format(src)
for v in [k for k, v in mount.items() if v == src] + [dst]:
m += "\n /{}".format(v)
self.log(m, c=3)
mount[dst] = src
daxs[dst] = AXS()
mflags[dst] = {}
def _parse_config_file(self, fd, acct, daxs, mflags, mount): def _parse_config_file(self, fd, acct, daxs, mflags, mount):
# type: (any, str, dict[str, AXS], any, str) -> None # type: (any, str, dict[str, AXS], any, str) -> None
vol_src = None vol_src = None
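The new _map_volume helper centralizes the "shady vfs config" checks: two different filesystem paths mounted at the same URL is a hard error, the same path mounted at several URLs is only a warning. A simplified re-implementation of that idea (toy dict, hypothetical paths):

    # toy version of the duplicate-mount checks; real copyparty threads
    # mount/daxs/mflags through AuthSrv instead
    mount = {"music": "/mnt/nas/music"}

    def map_volume(src, dst, mount):
        if dst in mount:
            # two different folders claiming the same URL -> refuse to start
            raise Exception("/{} already serves {}".format(dst, mount[dst]))
        if src in mount.values():
            print("warning: {} is reachable at multiple URLs".format(src))
        mount[dst] = src

    map_volume("/mnt/nas/music", "tunes", mount)   # warning, but allowed
    map_volume("/mnt/other", "music", mount)       # raises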
@@ -556,9 +574,7 @@ class AuthSrv(object):
# cfg files override arguments and previous files # cfg files override arguments and previous files
vol_src = bos.path.abspath(vol_src) vol_src = bos.path.abspath(vol_src)
vol_dst = vol_dst.strip("/") vol_dst = vol_dst.strip("/")
-mount[vol_dst] = vol_src
-daxs[vol_dst] = AXS()
-mflags[vol_dst] = {}
+self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
continue continue
try: try:
@@ -579,9 +595,17 @@ class AuthSrv(object):
raise Exception("invalid volume flag: {},{}".format(lvl, uname)) raise Exception("invalid volume flag: {},{}".format(lvl, uname))
if lvl == "c": if lvl == "c":
cval = True try:
if "=" in uname: # volume flag with arguments, possibly with a preceding list of bools
uname, cval = uname.split("=", 1) uname, cval = uname.split("=", 1)
except:
# just one or more bools
cval = True
while "," in uname:
# one or more bools before the final flag; eat them
n1, uname = uname.split(",", 1)
self._read_volflag(flags, n1, True, False)
self._read_volflag(flags, uname, cval, False) self._read_volflag(flags, uname, cval, False)
return return
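A toy trace of the reworked "c" volflag parsing, showing how a list of bools can now precede the final key=value flag (the flag names are only examples):

    uname = "nodupe,scan=60"

    try:
        uname, cval = uname.split("=", 1)   # -> "nodupe,scan", "60"
    except ValueError:
        cval = True                         # no "=", just bools

    bools = []
    while "," in uname:
        n1, uname = uname.split(",", 1)
        bools.append(n1)                    # eat the leading bools

    print(bools, uname, cval)               # ['nodupe'] scan 60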
@@ -655,9 +679,7 @@ class AuthSrv(object):
# print("\n".join([src, dst, perms])) # print("\n".join([src, dst, perms]))
src = bos.path.abspath(src) src = bos.path.abspath(src)
dst = dst.strip("/") dst = dst.strip("/")
-mount[dst] = src
-daxs[dst] = AXS()
-mflags[dst] = {}
+self._map_volume(src, dst, mount, daxs, mflags)
for x in perms.split(":"): for x in perms.split(":"):
lvl, uname = x.split(",", 1) if "," in x else [x, ""] lvl, uname = x.split(",", 1) if "," in x else [x, ""]
@@ -718,6 +740,7 @@ class AuthSrv(object):
axs = getattr(vol.axs, axs_key) axs = getattr(vol.axs, axs_key)
if usr in axs or "*" in axs: if usr in axs or "*" in axs:
umap[usr].append(mp) umap[usr].append(mp)
umap[usr].sort()
setattr(vfs, "a" + perm, umap) setattr(vfs, "a" + perm, umap)
all_users = {} all_users = {}
@@ -857,9 +880,14 @@ class AuthSrv(object):
if self.args.e2d or "e2ds" in vol.flags: if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True vol.flags["e2d"] = True
-if self.args.no_hash:
-if "ehash" not in vol.flags:
-vol.flags["dhash"] = True
+for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
+if vf in vol.flags:
+ptn = vol.flags.pop(vf)
+else:
+ptn = getattr(self.args, ga)
+if ptn:
+vol.flags[vf] = re.compile(ptn)
for k in ["e2t", "e2ts", "e2tsr"]: for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k): if getattr(self.args, k):
@@ -872,6 +900,10 @@ class AuthSrv(object):
# default tag cfgs if unset # default tag cfgs if unset
if "mte" not in vol.flags: if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte vol.flags["mte"] = self.args.mte
elif vol.flags["mte"].startswith("+"):
vol.flags["mte"] = ",".join(
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
)
if "mth" not in vol.flags: if "mth" not in vol.flags:
vol.flags["mth"] = self.args.mth vol.flags["mth"] = self.args.mth
@@ -968,7 +1000,7 @@ class AuthSrv(object):
v, _ = vfs.get("/", "*", False, True) v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath: if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False self.warn_anonwrite = False
msg = "anyone can read/write the current directory: {}" msg = "anyone can read/write the current directory: {}\n"
self.log(msg.format(v.realpath), c=1) self.log(msg.format(v.realpath), c=1)
except Pebkac: except Pebkac:
self.warn_anonwrite = True self.warn_anonwrite = True

View File

@@ -25,14 +25,14 @@ def lstat(p):
def makedirs(name, mode=0o755, exist_ok=True): def makedirs(name, mode=0o755, exist_ok=True):
bname = fsenc(name) bname = fsenc(name)
try: try:
os.makedirs(bname, mode=mode) os.makedirs(bname, mode)
except: except:
if not exist_ok or not os.path.isdir(bname): if not exist_ok or not os.path.isdir(bname):
raise raise
def mkdir(p, mode=0o755): def mkdir(p, mode=0o755):
return os.mkdir(fsenc(p), mode=mode) return os.mkdir(fsenc(p), mode)
def rename(src, dst): def rename(src, dst):

View File

@@ -21,6 +21,10 @@ def getsize(p):
return os.path.getsize(fsenc(p)) return os.path.getsize(fsenc(p))
def isfile(p):
return os.path.isfile(fsenc(p))
def isdir(p): def isdir(p):
return os.path.isdir(fsenc(p)) return os.path.isdir(fsenc(p))

View File

@@ -10,8 +10,8 @@ import json
import base64 import base64
import string import string
import socket import socket
-import ctypes
 from datetime import datetime
+from operator import itemgetter
import calendar import calendar
try: try:
@@ -19,6 +19,11 @@ try:
except: except:
pass pass
try:
import ctypes
except:
pass
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import from .util import * # noqa # pylint: disable=unused-wildcard-import
from .bos import bos from .bos import bos
@@ -38,6 +43,7 @@ class HttpCli(object):
def __init__(self, conn): def __init__(self, conn):
self.t0 = time.time() self.t0 = time.time()
self.conn = conn self.conn = conn
self.mutex = conn.mutex
self.s = conn.s # type: socket self.s = conn.s # type: socket
self.sr = conn.sr # type: Unrecv self.sr = conn.sr # type: Unrecv
self.ip = conn.addr[0] self.ip = conn.addr[0]
@@ -46,13 +52,14 @@ class HttpCli(object):
self.asrv = conn.asrv # type: AuthSrv self.asrv = conn.asrv # type: AuthSrv
self.ico = conn.ico self.ico = conn.ico
self.thumbcli = conn.thumbcli self.thumbcli = conn.thumbcli
self.u2fh = conn.u2fh
self.log_func = conn.log_func self.log_func = conn.log_func
self.log_src = conn.log_src self.log_src = conn.log_src
self.tls = hasattr(self.s, "cipher") self.tls = hasattr(self.s, "cipher")
self.bufsz = 1024 * 32 self.bufsz = 1024 * 32
self.hint = None self.hint = None
-self.absolute_urls = False
+self.trailing_slash = True
self.out_headers = { self.out_headers = {
"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Origin": "*",
"Cache-Control": "no-store; max-age=0", "Cache-Control": "no-store; max-age=0",
@@ -91,6 +98,7 @@ class HttpCli(object):
def run(self): def run(self):
"""returns true if connection can be reused""" """returns true if connection can be reused"""
self.keepalive = False self.keepalive = False
self.is_https = False
self.headers = {} self.headers = {}
self.hint = None self.hint = None
try: try:
@@ -128,6 +136,7 @@ class HttpCli(object):
v = self.headers.get("connection", "").lower() v = self.headers.get("connection", "").lower()
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0" self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
self.is_https = (self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls)
n = self.args.rproxy n = self.args.rproxy
if n: if n:
@@ -145,6 +154,8 @@ class HttpCli(object):
self.log_src = self.conn.set_rproxy(self.ip) self.log_src = self.conn.set_rproxy(self.ip)
self.dip = self.ip.replace(":", ".")
if self.args.ihead: if self.args.ihead:
keys = self.args.ihead keys = self.args.ihead
if "*" in keys: if "*" in keys:
@@ -161,15 +172,11 @@ class HttpCli(object):
# split req into vpath + uparam # split req into vpath + uparam
uparam = {} uparam = {}
if "?" not in self.req: if "?" not in self.req:
if not self.req.endswith("/"): self.trailing_slash = self.req.endswith("/")
self.absolute_urls = True
vpath = undot(self.req) vpath = undot(self.req)
else: else:
vpath, arglist = self.req.split("?", 1) vpath, arglist = self.req.split("?", 1)
if not vpath.endswith("/"): self.trailing_slash = vpath.endswith("/")
self.absolute_urls = True
vpath = undot(vpath) vpath = undot(vpath)
for k in arglist.split("&"): for k in arglist.split("&"):
if "=" in k: if "=" in k:
@@ -267,6 +274,15 @@ class HttpCli(object):
except Pebkac: except Pebkac:
return False return False
def permit_caching(self):
cache = self.uparam.get("cache")
if cache is None:
self.out_headers.update(NO_CACHE)
return
n = "604800" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n
def send_headers(self, length, status=200, mime=None, headers=None): def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])] response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
@@ -386,7 +402,7 @@ class HttpCli(object):
if not self.can_read and not self.can_write and not self.can_get: if not self.can_read and not self.can_write and not self.can_get:
if self.vpath: if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath)) self.log("inaccessible: [{}]".format(self.vpath))
return self.tx_404() return self.tx_404(True)
self.uparam["h"] = False self.uparam["h"] = False
@@ -463,13 +479,13 @@ class HttpCli(object):
except: except:
raise Pebkac(400, "client d/c before 100 continue") raise Pebkac(400, "client d/c before 100 continue")
if "raw" in self.uparam:
return self.handle_stash()
ctype = self.headers.get("content-type", "").lower() ctype = self.headers.get("content-type", "").lower()
if not ctype: if not ctype:
raise Pebkac(400, "you can't post without a content-type header") raise Pebkac(400, "you can't post without a content-type header")
if "raw" in self.uparam:
return self.handle_stash()
if "multipart/form-data" in ctype: if "multipart/form-data" in ctype:
return self.handle_post_multipart() return self.handle_post_multipart()
@@ -530,17 +546,16 @@ class HttpCli(object):
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
if lim: if lim:
fdir, rem = lim.all(self.ip, rem, remains, fdir) fdir, rem = lim.all(self.ip, rem, remains, fdir)
-bos.makedirs(fdir)
-addr = self.ip.replace(":", ".")
-fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
-path = os.path.join(fdir, fn)
-if self.args.nw:
-path = os.devnull
-open_f = open
-open_a = [fsenc(path), "wb", 512 * 1024]
-open_ka = {}
+fn = None
+if rem and not self.trailing_slash and not bos.path.isdir(fdir):
+fdir, fn = os.path.split(fdir)
+rem, _ = vsplit(rem)
+bos.makedirs(fdir)
+open_ka = {"fun": open}
+open_a = ["wb", 512 * 1024]
# user-request || config-force # user-request || config-force
if ("gz" in vfs.flags or "xz" in vfs.flags) and ( if ("gz" in vfs.flags or "xz" in vfs.flags) and (
@@ -581,16 +596,28 @@ class HttpCli(object):
self.log("compressing with {} level {}".format(alg, lv.get(alg))) self.log("compressing with {} level {}".format(alg, lv.get(alg)))
if alg == "gz": if alg == "gz":
-open_f = gzip.GzipFile
-open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
+open_ka["fun"] = gzip.GzipFile
+open_a = ["wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
 elif alg == "xz":
-open_f = lzma.open
-open_a = [fsenc(path), "wb"]
-open_ka = {"preset": lv[alg]}
+open_ka = {"fun": lzma.open, "preset": lv[alg]}
+open_a = ["wb"]
 else:
 self.log("fallthrough? thats a bug", 1)
-with open_f(*open_a, **open_ka) as f:
+suffix = "-{:.6f}-{}".format(time.time(), self.dip)
+params = {"suffix": suffix, "fdir": fdir}
+if self.args.nw:
+params = {}
+fn = os.devnull
+params.update(open_ka)
+if not fn:
+fn = "put" + suffix
+with ren_open(fn, *open_a, **params) as f:
+f, fn = f["orz"]
+path = os.path.join(fdir, fn)
post_sz, _, sha_b64 = hashcopy(reader, f) post_sz, _, sha_b64 = hashcopy(reader, f)
if lim: if lim:
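Together with the trailing_slash tracking above, this lets a PUT (or ?raw POST) choose its own filename; a hypothetical client, where the /inc/ volume is made up and 3923 is the default port from the diff above:

    import urllib.request

    data = b"hello world\n"
    for url in (
        "http://127.0.0.1:3923/inc/",           # trailing slash: server generates a put-<ts>... name
        "http://127.0.0.1:3923/inc/notes.txt",  # no slash: last segment becomes the filename
    ):
        req = urllib.request.Request(url, data=data, method="PUT")
        with urllib.request.urlopen(req) as r:
            print(url, "->", r.status)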
@@ -834,7 +861,18 @@ class HttpCli(object):
reader = read_socket(self.sr, remains) reader = read_socket(self.sr, remains)
with open(fsenc(path), "rb+", 512 * 1024) as f: f = None
fpool = not self.args.no_fpool
if fpool:
with self.mutex:
try:
f = self.u2fh.pop(path)
except:
pass
f = f or open(fsenc(path), "rb+", 512 * 1024)
try:
f.seek(cstart[0]) f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(reader, f) post_sz, _, sha_b64 = hashcopy(reader, f)
@@ -864,22 +902,36 @@ class HttpCli(object):
ofs += len(buf) ofs += len(buf)
self.log("clone {} done".format(cstart[0])) self.log("clone {} done".format(cstart[0]))
finally:
if not fpool:
f.close()
else:
with self.mutex:
self.u2fh.put(path, f)
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash) x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
x = x.get() x = x.get()
try: try:
-num_left, path = x
+num_left, fin_path = x
 except:
 self.loud_reply(x, status=500)
 return False
-if not ANYWIN and num_left == 0:
+if not num_left and fpool:
+with self.mutex:
+self.u2fh.close(path)
+# windows cant rename open files
+if ANYWIN and path != fin_path and not self.args.nw:
+self.conn.hsrv.broker.put(True, "up2k.finish_upload", ptop, wark).get()
+if not ANYWIN and not num_left:
 times = (int(time.time()), int(lastmod))
 self.log("no more chunks, setting times {}".format(times))
 try:
-bos.utime(path, times)
+bos.utime(fin_path, times)
 except:
-self.log("failed to utime ({}, {})".format(path, times))
+self.log("failed to utime ({}, {})".format(fin_path, times))
spd = self._spd(post_sz) spd = self._spd(post_sz)
self.log("{} thank".format(spd)) self.log("{} thank".format(spd))
@@ -1002,7 +1054,7 @@ class HttpCli(object):
if not bos.path.isdir(fdir): if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist") raise Pebkac(404, "that folder does not exist")
suffix = ".{:.6f}-{}".format(time.time(), self.ip) suffix = "-{:.6f}-{}".format(time.time(), self.dip)
open_args = {"fdir": fdir, "suffix": suffix} open_args = {"fdir": fdir, "suffix": suffix}
else: else:
open_args = {} open_args = {}
@@ -1031,7 +1083,7 @@ class HttpCli(object):
bos.unlink(abspath) bos.unlink(abspath)
raise raise
files.append([sz, sha512_hex, p_file, fname]) files.append([sz, sha512_hex, p_file, fname, abspath])
dbv, vrem = vfs.get_dbv(rem) dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put( self.conn.hsrv.broker.put(
False, False,
@@ -1083,14 +1135,14 @@ class HttpCli(object):
jmsg["error"] = errmsg jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files: for sz, sha512, ofn, lfn, ap in files:
vsuf = "" vsuf = ""
if self.can_read and "fk" in vfs.flags: if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey( vsuf = "?k=" + gen_filekey(
self.args.fk_salt, self.args.fk_salt,
abspath, abspath,
sz, sz,
0 if ANYWIN else bos.stat(os.path.join(vfs.realpath, lfn)).st_ino, 0 if ANYWIN or not ap else bos.stat(ap).st_ino,
)[: vfs.flags["fk"]] )[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/") vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
@@ -1101,7 +1153,7 @@ class HttpCli(object):
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
jpart = { jpart = {
"url": "{}://{}/{}".format( "url": "{}://{}/{}".format(
"https" if self.tls else "http", "https" if self.is_https else "http",
self.headers.get("host", "copyparty"), self.headers.get("host", "copyparty"),
vpath + vsuf, vpath + vsuf,
), ),
@@ -1404,10 +1456,10 @@ class HttpCli(object):
# #
# send reply # send reply
if not is_compressed and "cache" not in self.uparam: if is_compressed:
self.out_headers.update(NO_CACHE) self.out_headers["Cache-Control"] = "max-age=573"
else: else:
self.out_headers.pop("Cache-Control") self.permit_caching()
self.out_headers["Accept-Ranges"] = "bytes" self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers( self.send_headers(
@@ -1503,6 +1555,7 @@ class HttpCli(object):
return True return True
def tx_ico(self, ext, exact=False): def tx_ico(self, ext, exact=False):
self.permit_caching()
if ext.endswith("/"): if ext.endswith("/"):
ext = "folder" ext = "folder"
exact = True exact = True
@@ -1533,6 +1586,10 @@ class HttpCli(object):
def tx_md(self, fs_path): def tx_md(self, fs_path):
logmsg = "{:4} {} ".format("", self.req) logmsg = "{:4} {} ".format("", self.req)
if not self.can_write:
if "edit" in self.uparam or "edit2" in self.uparam:
return self.tx_404(True)
tpl = "mde" if "edit2" in self.uparam else "md" tpl = "mde" if "edit2" in self.uparam else "md"
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl)) html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
template = self.j2(tpl) template = self.j2(tpl)
@@ -1555,6 +1612,10 @@ class HttpCli(object):
self.out_headers.update(NO_CACHE) self.out_headers.update(NO_CACHE)
status = 200 if do_send else 304 status = 200 if do_send else 304
arg_base = "?"
if "k" in self.uparam:
arg_base = "?k={}&".format(self.uparam["k"])
boundary = "\roll\tide" boundary = "\roll\tide"
targs = { targs = {
"edit": "edit" in self.uparam, "edit": "edit" in self.uparam,
@@ -1564,6 +1625,7 @@ class HttpCli(object):
"md_chk_rate": self.args.mcr, "md_chk_rate": self.args.mcr,
"md": boundary, "md": boundary,
"ts": self.conn.hsrv.cachebuster(), "ts": self.conn.hsrv.cachebuster(),
"arg_base": arg_base,
} }
html = template.render(**targs).encode("utf-8", "replace") html = template.render(**targs).encode("utf-8", "replace")
html = html.split(boundary.encode("utf-8")) html = html.split(boundary.encode("utf-8"))
@@ -1628,8 +1690,14 @@ class HttpCli(object):
self.reply(html.encode("utf-8")) self.reply(html.encode("utf-8"))
return True return True
-def tx_404(self):
-m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
+def tx_404(self, is_403=False):
+if self.args.vague_403:
+m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
+elif is_403:
+m = '<h1>403 forbiddena &nbsp;~┻━┻</h1><p>you\'ll have to log in or <a href="/?h">go home</a></p>'
+else:
+m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p><a href="/?h">go home</a></p>'
html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m) html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
self.reply(html.encode("utf-8"), status=404) self.reply(html.encode("utf-8"), status=404)
return True return True
@@ -1743,7 +1811,7 @@ class HttpCli(object):
if filt and filt not in vp: if filt and filt not in vp:
continue continue
ret.append({"vp": vp, "sz": sz, "at": at}) ret.append({"vp": quotep(vp), "sz": sz, "at": at})
if len(ret) > 3000: if len(ret) > 3000:
ret.sort(key=lambda x: x["at"], reverse=True) ret.sort(key=lambda x: x["at"], reverse=True)
ret = ret[:2000] ret = ret[:2000]
@@ -1788,6 +1856,64 @@ class HttpCli(object):
) )
self.loud_reply(x.get()) self.loud_reply(x.get())
def tx_ls(self, ls):
dirs = ls["dirs"]
files = ls["files"]
arg = self.uparam["ls"]
if arg in ["v", "t", "txt"]:
try:
biggest = max(ls["files"], key=itemgetter("sz"))["sz"]
except:
biggest = 0
if arg == "v":
fmt = "\033[0;7;36m{{}} {{:>{}}}\033[0m {{}}"
nfmt = "{}"
biggest = 0
f2 = "".join(
"{}{{}}".format(x)
for x in [
"\033[7m",
"\033[27m",
"",
"\033[0;1m",
"\033[0;36m",
"\033[0m",
]
)
ctab = {"B": 6, "K": 5, "M": 1, "G": 3}
for lst in [dirs, files]:
for x in lst:
a = x["dt"].replace("-", " ").replace(":", " ").split(" ")
x["dt"] = f2.format(*list(a))
sz = humansize(x["sz"], True)
x["sz"] = "\033[0;3{}m{:>5}".format(ctab.get(sz[-1:], 0), sz)
else:
fmt = "{{}} {{:{},}} {{}}"
nfmt = "{:,}"
fmt = fmt.format(len(nfmt.format(biggest)))
ret = [
"# {}: {}".format(x, ls[x])
for x in ["acct", "perms", "srvinf"]
if x in ls
]
ret += [
fmt.format(x["dt"], x["sz"], x["name"])
for y in [dirs, files]
for x in y
]
ret = "\n".join(ret)
mime = "text/plain; encoding=utf-8"
else:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = json.dumps(ls)
mime = "application/json"
self.reply(ret.encode("utf-8", "replace"), mime=mime)
return True
def tx_browser(self): def tx_browser(self):
vpath = "" vpath = ""
vpnodes = [["", "/"]] vpnodes = [["", "/"]]
@@ -1856,7 +1982,7 @@ class HttpCli(object):
return self.tx_file(abspath) return self.tx_file(abspath)
elif is_dir and not self.can_read and not self.can_write: elif is_dir and not self.can_read and not self.can_write:
return self.tx_404() return self.tx_404(True)
srv_info = [] srv_info = []
@@ -1870,11 +1996,14 @@ class HttpCli(object):
# some fuses misbehave # some fuses misbehave
if not self.args.nid: if not self.args.nid:
if WINDOWS: if WINDOWS:
bfree = ctypes.c_ulonglong(0) try:
ctypes.windll.kernel32.GetDiskFreeSpaceExW( bfree = ctypes.c_ulonglong(0)
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree) ctypes.windll.kernel32.GetDiskFreeSpaceExW(
) ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
srv_info.append(humansize(bfree.value) + " free") )
srv_info.append(humansize(bfree.value) + " free")
except:
pass
else: else:
sv = os.statvfs(fsenc(abspath)) sv = os.statvfs(fsenc(abspath))
free = humansize(sv.f_frsize * sv.f_bfree, True) free = humansize(sv.f_frsize * sv.f_bfree, True)
@@ -1956,12 +2085,10 @@ class HttpCli(object):
} }
if not self.can_read: if not self.can_read:
if is_ls: if is_ls:
-ret = json.dumps(ls_ret)
-self.reply(ret.encode("utf-8", "replace"), mime="application/json")
-return True
+return self.tx_ls(ls_ret)
 if not stat.S_ISDIR(st.st_mode):
-return self.tx_404()
+return self.tx_404(True)
if "zip" in self.uparam or "tar" in self.uparam: if "zip" in self.uparam or "tar" in self.uparam:
raise Pebkac(403) raise Pebkac(403)
@@ -2018,7 +2145,7 @@ class HttpCli(object):
for fn in vfs_ls: for fn in vfs_ls:
base = "" base = ""
href = fn href = fn
if not is_ls and self.absolute_urls and vpath: if not is_ls and not self.trailing_slash and vpath:
base = "/" + vpath + "/" base = "/" + vpath + "/"
href = base + fn href = base + fn
@@ -2055,6 +2182,8 @@ class HttpCli(object):
try: try:
ext = "---" if is_dir else fn.rsplit(".", 1)[1] ext = "---" if is_dir else fn.rsplit(".", 1)[1]
if len(ext) > 16:
ext = ext[:16]
except: except:
ext = "%" ext = "%"
@@ -2133,13 +2262,15 @@ class HttpCli(object):
f["tags"] = {} f["tags"] = {}
if is_ls: if is_ls:
+[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
 ls_ret["dirs"] = dirs
 ls_ret["files"] = files
 ls_ret["taglist"] = taglist
-ret = json.dumps(ls_ret)
-self.reply(ret.encode("utf-8", "replace"), mime="application/json")
-return True
+return self.tx_ls(ls_ret)
+for d in dirs:
+d["name"] += "/"
+dirs.sort(key=itemgetter("name"))
j2a["files"] = dirs + files j2a["files"] = dirs + files
j2a["logues"] = logues j2a["logues"] = logues
@@ -2148,6 +2279,9 @@ class HttpCli(object):
if "mth" in vn.flags: if "mth" in vn.flags:
j2a["def_hcols"] = vn.flags["mth"].split(",") j2a["def_hcols"] = vn.flags["mth"].split(",")
if self.args.js_browser:
j2a["js"] = self.args.js_browser
if self.args.css_browser: if self.args.css_browser:
j2a["css"] = self.args.css_browser j2a["css"] = self.args.css_browser

View File

@@ -32,9 +32,11 @@ class HttpConn(object):
self.addr = addr self.addr = addr
self.hsrv = hsrv self.hsrv = hsrv
self.mutex = hsrv.mutex
self.args = hsrv.args self.args = hsrv.args
self.asrv = hsrv.asrv self.asrv = hsrv.asrv
self.cert_path = hsrv.cert_path self.cert_path = hsrv.cert_path
self.u2fh = hsrv.u2fh
enth = HAVE_PIL and not self.args.no_thumb enth = HAVE_PIL and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv.broker) if enth else None self.thumbcli = ThumbCli(hsrv.broker) if enth else None

View File

@@ -27,7 +27,7 @@ except ImportError:
sys.exit(1) sys.exit(1)
from .__init__ import E, PY2, MACOS from .__init__ import E, PY2, MACOS
from .util import spack, min_ex, start_stackmon, start_log_thrs from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
from .bos import bos from .bos import bos
from .httpconn import HttpConn from .httpconn import HttpConn
@@ -50,7 +50,9 @@ class HttpSrv(object):
self.log = broker.log self.log = broker.log
self.asrv = broker.asrv self.asrv = broker.asrv
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "") nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.name = "hsrv" + nsuf
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.stopping = False self.stopping = False
@@ -58,7 +60,9 @@ class HttpSrv(object):
self.tp_ncli = 0 # fading self.tp_ncli = 0 # fading
self.tp_time = None # latest worker collect self.tp_time = None # latest worker collect
self.tp_q = None if self.args.no_htp else queue.LifoQueue() self.tp_q = None if self.args.no_htp else queue.LifoQueue()
self.t_periodic = None
self.u2fh = FHC()
self.srvs = [] self.srvs = []
self.ncli = 0 # exact self.ncli = 0 # exact
self.clients = {} # laggy self.clients = {} # laggy
@@ -82,11 +86,6 @@ class HttpSrv(object):
if self.tp_q: if self.tp_q:
self.start_threads(4) self.start_threads(4)
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
t = threading.Thread(target=self.thr_scaler, name=name)
t.daemon = True
t.start()
if nid: if nid:
if self.args.stackmon: if self.args.stackmon:
start_stackmon(self.args.stackmon, nid) start_stackmon(self.args.stackmon, nid)
@@ -115,13 +114,19 @@ class HttpSrv(object):
for _ in range(n): for _ in range(n):
self.tp_q.put(None) self.tp_q.put(None)
-def thr_scaler(self):
+def periodic(self):
 while True:
-time.sleep(2 if self.tp_ncli else 30)
+time.sleep(2 if self.tp_ncli or self.ncli else 10)
 with self.mutex:
-self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
-if self.tp_nthr > self.tp_ncli + 8:
-self.stop_threads(4)
+self.u2fh.clean()
+if self.tp_q:
+self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
+if self.tp_nthr > self.tp_ncli + 8:
+self.stop_threads(4)
+if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
+self.t_periodic = None
+return
def listen(self, sck, nlisteners): def listen(self, sck, nlisteners):
ip, port = sck.getsockname() ip, port = sck.getsockname()
@@ -141,7 +146,12 @@ class HttpSrv(object):
fno = srv_sck.fileno() fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno) msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg) self.log(self.name, msg)
self.broker.put(False, "cb_httpsrv_up")
def fun():
self.broker.put(False, "cb_httpsrv_up")
threading.Thread(target=fun).start()
while not self.stopping: while not self.stopping:
if self.args.log_conn: if self.args.log_conn:
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30") self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
@@ -181,6 +191,16 @@ class HttpSrv(object):
with self.mutex: with self.mutex:
self.ncli += 1 self.ncli += 1
if not self.t_periodic:
name = "hsrv-pt"
if self.nid:
name += "-{}".format(self.nid)
t = threading.Thread(target=self.periodic, name=name)
self.t_periodic = t
t.daemon = True
t.start()
if self.tp_q: if self.tp_q:
self.tp_time = self.tp_time or now self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli) self.tp_ncli = max(self.tp_ncli, self.ncli)

View File

@@ -413,6 +413,9 @@ class MTag(object):
return r1 return r1
def get_mutagen(self, abspath): def get_mutagen(self, abspath):
if not bos.path.isfile(abspath):
return {}
import mutagen import mutagen
try: try:
@@ -458,10 +461,16 @@ class MTag(object):
return self.normalize_tags(ret, md) return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath): def get_ffprobe(self, abspath):
if not bos.path.isfile(abspath):
return {}
ret, md = ffprobe(abspath) ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md) return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath): def get_bin(self, parsers, abspath):
if not bos.path.isfile(abspath):
return {}
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
pypath = [str(pypath)] + [str(x) for x in sys.path if x] pypath = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(pypath)) pypath = str(os.pathsep.join(pypath))
@@ -471,7 +480,10 @@ class MTag(object):
ret = {} ret = {}
for tagname, mp in parsers.items(): for tagname, mp in parsers.items():
try: try:
cmd = [sys.executable, mp.bin, abspath] cmd = [mp.bin, abspath]
if mp.bin.endswith(".py"):
cmd = [sys.executable] + cmd
args = {"env": env, "timeout": mp.timeout} args = {"env": env, "timeout": mp.timeout}
if WINDOWS: if WINDOWS:

View File

@@ -1,7 +1,6 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re
import os import os
import sys import sys
import time import time
@@ -39,6 +38,7 @@ class SvcHub(object):
self.stop_req = False self.stop_req = False
self.stopping = False self.stopping = False
self.stop_cond = threading.Condition() self.stop_cond = threading.Condition()
self.retcode = 0
self.httpsrv_up = 0 self.httpsrv_up = 0
self.log_mutex = threading.Lock() self.log_mutex = threading.Lock()
@@ -54,6 +54,17 @@ class SvcHub(object):
if args.log_thrs: if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0) start_log_thrs(self.log, args.log_thrs, 0)
if not ANYWIN and not args.use_fpool:
args.no_fpool = True
if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
if ANYWIN:
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
args.no_fpool = True
self.log("root", m, c=3)
# initiate all services to manage # initiate all services to manage
self.asrv = AuthSrv(self.args, self.log) self.asrv = AuthSrv(self.args, self.log)
if args.ls: if args.ls:
@@ -81,27 +92,36 @@ class SvcHub(object):
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
else: else:
self.log("root", "cannot efficiently use multiple CPU cores")
from .broker_thr import BrokerThr as Broker from .broker_thr import BrokerThr as Broker
self.broker = Broker(self) self.broker = Broker(self)
def thr_httpsrv_up(self): def thr_httpsrv_up(self):
time.sleep(5) time.sleep(5)
-failed = self.broker.num_workers - self.httpsrv_up
+expected = self.broker.num_workers * self.tcpsrv.nsrv
+failed = expected - self.httpsrv_up
 if not failed:
 return
+if self.args.ign_ebind_all:
+return
+if self.args.ign_ebind and self.tcpsrv.srv:
+return
 m = "{}/{} workers failed to start"
-m = m.format(failed, self.broker.num_workers)
+m = m.format(failed, expected)
 self.log("root", m, 1)
-os._exit(1)
+self.retcode = 1
+os.kill(os.getpid(), signal.SIGTERM)
def cb_httpsrv_up(self): def cb_httpsrv_up(self):
self.httpsrv_up += 1 self.httpsrv_up += 1
if self.httpsrv_up != self.broker.num_workers: if self.httpsrv_up != self.broker.num_workers:
return return
time.sleep(0.1) # purely cosmetic dw
self.log("root", "workers OK\n") self.log("root", "workers OK\n")
self.up2k.init_vols() self.up2k.init_vols()
@@ -205,6 +225,8 @@ class SvcHub(object):
if self.stopping: if self.stopping:
return return
# start_log_thrs(print, 0.1, 1)
self.stopping = True self.stopping = True
self.stop_req = True self.stop_req = True
with self.stop_cond: with self.stop_cond:
@@ -230,7 +252,7 @@ class SvcHub(object):
print("waiting for thumbsrv (10sec)...") print("waiting for thumbsrv (10sec)...")
print("nailed it", end="") print("nailed it", end="")
ret = 0 ret = self.retcode
finally: finally:
print("\033[0m") print("\033[0m")
if self.logf: if self.logf:
@@ -327,10 +349,11 @@ class SvcHub(object):
def check_mp_enable(self): def check_mp_enable(self):
if self.args.j == 1: if self.args.j == 1:
self.log("root", "multiprocessing disabled by argument -j 1;") self.log("svchub", "multiprocessing disabled by argument -j 1")
return False return False
if mp.cpu_count() <= 1: if mp.cpu_count() <= 1:
self.log("svchub", "only one CPU detected; multiprocessing disabled")
return False return False
try: try:
@@ -345,6 +368,7 @@ class SvcHub(object):
return True return True
else: else:
self.log("svchub", err) self.log("svchub", err)
self.log("svchub", "cannot efficiently use multiple CPU cores")
return False return False
def sd_notify(self): def sd_notify(self):

View File

@@ -21,6 +21,29 @@ class TcpSrv(object):
self.stopping = False self.stopping = False
self.srv = []
self.nsrv = 0
ok = {}
for ip in self.args.i:
ok[ip] = []
for port in self.args.p:
self.nsrv += 1
try:
self._listen(ip, port)
ok[ip].append(port)
except Exception as ex:
if self.args.ign_ebind or self.args.ign_ebind_all:
m = "could not listen on {}:{}: {}"
self.log("tcpsrv", m.format(ip, port, ex), c=3)
else:
raise
if not self.srv and not self.args.ign_ebind_all:
raise Exception("could not listen on any of the given interfaces")
if self.nsrv != len(self.srv):
self.log("tcpsrv", "")
ip = "127.0.0.1" ip = "127.0.0.1"
eps = {ip: "local only"} eps = {ip: "local only"}
nonlocals = [x for x in self.args.i if x != ip] nonlocals = [x for x in self.args.i if x != ip]
@@ -34,6 +57,9 @@ class TcpSrv(object):
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)" m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]): for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p): for port in sorted(self.args.p):
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
continue
msgs.append(m.format(ip, port, desc)) msgs.append(m.format(ip, port, desc))
if msgs: if msgs:
@@ -41,18 +67,13 @@ class TcpSrv(object):
for m in msgs: for m in msgs:
self.log("tcpsrv", m) self.log("tcpsrv", m)
self.srv = []
for ip in self.args.i:
for port in self.args.p:
self.srv.append(self._listen(ip, port))
def _listen(self, ip, port): def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try: try:
srv.bind((ip, port)) srv.bind((ip, port))
return srv self.srv.append(srv)
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
if ex.errno in [98, 48]: if ex.errno in [98, 48]:
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip) e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)

View File

@@ -105,9 +105,7 @@ class ThumbSrv(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.busy = {} self.busy = {}
self.stopping = False self.stopping = False
self.nthr = self.args.th_mt self.nthr = max(1, self.args.th_mt)
if not self.nthr:
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4) self.q = Queue(self.nthr * 4)
for n in range(self.nthr): for n in range(self.nthr):
@@ -130,7 +128,7 @@ class ThumbSrv(object):
self.log(msg, c=3) self.log(msg, c=3)
if self.args.th_clean: if self.args.th_clean:
t = threading.Thread(target=self.cleaner, name="thumb-cleaner") t = threading.Thread(target=self.cleaner, name="thumb.cln")
t.daemon = True t.daemon = True
t.start() t.start()

View File

@@ -6,9 +6,10 @@ import os
import time import time
import threading import threading
from datetime import datetime from datetime import datetime
from operator import itemgetter
from .__init__ import ANYWIN, unicode from .__init__ import ANYWIN, unicode
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
from .bos import bos from .bos import bos
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
@@ -253,21 +254,23 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn) rd, fn = s3dec(rd, fn)
if fk: if not fk:
suf = ""
else:
try: try:
ap = absreal(os.path.join(ptop, rd, fn)) ap = absreal(os.path.join(ptop, rd, fn))
inf = bos.stat(ap) inf = bos.stat(ap)
except: except:
continue continue
fn += ( suf = (
"?k=" "?k="
+ gen_filekey( + gen_filekey(
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
)[:fk] )[:fk]
) )
rp = "/".join([x for x in [vtop, rd, fn] if x]) rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]}) sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret: for hit in sret:
@@ -290,9 +293,13 @@ class U2idx(object):
# undupe hits from multiple metadata keys # undupe hits from multiple metadata keys
if len(ret) > 1: if len(ret) > 1:
ret = [ret[0]] + [ ret = [ret[0]] + [
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"] y
for x, y in zip(ret[:-1], ret[1:])
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
] ]
ret.sort(key=itemgetter("rp"))
return ret, list(taglist.keys()) return ret, list(taglist.keys())
def terminator(self, identifier, done_flag): def terminator(self, identifier, done_flag):

View File

@@ -27,7 +27,10 @@ from .util import (
sanitize_fn, sanitize_fn,
ren_open, ren_open,
atomic_move, atomic_move,
quotep,
vsplit, vsplit,
w8b64enc,
w8b64dec,
s3enc, s3enc,
s3dec, s3dec,
rmdirs, rmdirs,
@@ -60,12 +63,14 @@ class Up2k(object):
# state # state
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.rescan_cond = threading.Condition()
self.hashq = Queue() self.hashq = Queue()
self.tagq = Queue() self.tagq = Queue()
self.n_hashq = 0 self.n_hashq = 0
self.n_tagq = 0 self.n_tagq = 0
self.volstate = {} self.volstate = {}
self.need_rescan = {} self.need_rescan = {}
self.dupesched = {}
self.registry = {} self.registry = {}
self.entags = {} self.entags = {}
self.flags = {} self.flags = {}
@@ -127,9 +132,11 @@ class Up2k(object):
thr.start() thr.start()
if self.mtag: if self.mtag:
thr = threading.Thread(target=self._tagger, name="up2k-tagger") for n in range(max(1, self.args.mtag_mt)):
thr.daemon = True name = "tagger-{}".format(n)
thr.start() thr = threading.Thread(target=self._tagger, name=name)
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init") thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True thr.daemon = True
@@ -177,9 +184,19 @@ class Up2k(object):
def _sched_rescan(self): def _sched_rescan(self):
volage = {} volage = {}
cooldown = 0
timeout = time.time() + 3
while True: while True:
time.sleep(self.args.re_int) timeout = max(timeout, cooldown)
wait = max(0.1, timeout + 0.1 - time.time())
with self.rescan_cond:
self.rescan_cond.wait(wait)
now = time.time() now = time.time()
if now < cooldown:
continue
timeout = now + 9001
with self.mutex: with self.mutex:
for vp, vol in sorted(self.asrv.vfs.all_vols.items()): for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
maxage = vol.flags.get("scan") maxage = vol.flags.get("scan")
@@ -189,13 +206,17 @@ class Up2k(object):
if vp not in volage: if vp not in volage:
volage[vp] = now volage[vp] = now
if now - volage[vp] >= maxage: deadline = volage[vp] + maxage
if deadline <= now:
self.need_rescan[vp] = 1 self.need_rescan[vp] = 1
timeout = min(timeout, deadline)
vols = list(sorted(self.need_rescan.keys())) vols = list(sorted(self.need_rescan.keys()))
self.need_rescan = {} self.need_rescan = {}
if vols: if vols:
cooldown = now + 10
err = self.rescan(self.asrv.vfs.all_vols, vols) err = self.rescan(self.asrv.vfs.all_vols, vols)
if err: if err:
for v in vols: for v in vols:
@@ -218,8 +239,11 @@ class Up2k(object):
if not cur: if not cur:
continue continue
lifetime = int(lifetime)
timeout = min(timeout, now + lifetime)
nrm = 0 nrm = 0
deadline = time.time() - int(lifetime) deadline = time.time() - lifetime
q = "select rd, fn from up where at > 0 and at < ? limit 100" q = "select rd, fn from up where at > 0 and at < ? limit 100"
while True: while True:
with self.mutex: with self.mutex:
@@ -236,12 +260,22 @@ class Up2k(object):
if vp: if vp:
fvp = "{}/{}".format(vp, fvp) fvp = "{}/{}".format(vp, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp, True) self._handle_rm(LEELOO_DALLAS, None, fvp)
nrm += 1 nrm += 1
if nrm: if nrm:
self.log("{} files graduated in {}".format(nrm, vp)) self.log("{} files graduated in {}".format(nrm, vp))
if timeout < 10:
continue
q = "select at from up where at > 0 order by at limit 1"
with self.mutex:
hits = cur.execute(q).fetchone()
if hits:
timeout = min(timeout, now + lifetime - (now - hits[0]))
def _vis_job_progress(self, job): def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"])) perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"]) path = os.path.join(job["ptop"], job["prel"], job["name"])
@@ -462,7 +496,8 @@ class Up2k(object):
def _build_file_index(self, vol, all_vols): def _build_file_index(self, vol, all_vols):
do_vac = False do_vac = False
 top = vol.realpath
-nohash = "dhash" in vol.flags
+rei = vol.flags.get("noidx")
+reh = vol.flags.get("nohash")
with self.mutex: with self.mutex:
cur, _ = self.register_vpath(top, vol.flags) cur, _ = self.register_vpath(top, vol.flags)
@@ -477,38 +512,55 @@ class Up2k(object):
if WINDOWS: if WINDOWS:
excl = [x.replace("/", "\\") for x in excl] excl = [x.replace("/", "\\") for x in excl]
n_add = self._build_dir(dbw, top, set(excl), top, nohash, []) n_add = n_rm = 0
n_rm = self._drop_lost(dbw[0], top) try:
n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
n_rm = self._drop_lost(dbw[0], top)
except:
m = "failed to index volume [{}]:\n{}"
self.log(m.format(top, min_ex()), c=1)
if dbw[1]: if dbw[1]:
self.log("commit {} new files".format(dbw[1])) self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
dbw[0].connection.commit()
return True, n_add or n_rm or do_vac return True, n_add or n_rm or do_vac
-def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
+def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
rcdir = absreal(cdir) # a bit expensive but worth rcdir = absreal(cdir) # a bit expensive but worth
if rcdir in seen: if rcdir in seen:
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}" m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
self.log(m.format(seen[-1], rcdir, cdir), 3) self.log(m.format(seen[-1], rcdir, cdir), 3)
return 0 return 0
seen = seen + [cdir] seen = seen + [rcdir]
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histpath = self.asrv.vfs.histtab[top] histpath = self.asrv.vfs.histtab[top]
ret = 0 ret = 0
seen_files = {}
g = statdir(self.log_func, not self.args.no_scandir, False, cdir) g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g): for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname) abspath = os.path.join(cdir, iname)
if rei and rei.search(abspath):
continue
nohash = reh.search(abspath) if reh else False
lmod = int(inf.st_mtime) lmod = int(inf.st_mtime)
sz = inf.st_size sz = inf.st_size
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histpath: if abspath in excl or abspath == histpath:
continue continue
# self.log(" dir: {}".format(abspath)) # self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen) try:
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
except:
m = "failed to index subdir [{}]:\n{}"
self.log(m.format(abspath, min_ex()), c=1)
else: else:
# self.log("file: {}".format(abspath)) # self.log("file: {}".format(abspath))
rp = abspath[len(top) + 1 :] seen_files[iname] = 1
rp = abspath[len(top) :].lstrip("/")
if WINDOWS: if WINDOWS:
rp = rp.replace("\\", "/").strip("/") rp = rp.replace("\\", "/").strip("/")
@@ -566,34 +618,65 @@ class Up2k(object):
dbw[0].connection.commit() dbw[0].connection.commit()
dbw[1] = 0 dbw[1] = 0
dbw[2] = time.time() dbw[2] = time.time()
# drop missing files
rd = cdir[len(top) + 1 :].strip("/")
if WINDOWS:
rd = rd.replace("\\", "/").strip("/")
q = "select fn from up where rd = ?"
try:
c = dbw[0].execute(q, (rd,))
except:
c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
rm_files = [x for x in hits if x not in seen_files]
n_rm = len(rm_files)
for fn in rm_files:
self.db_rm(dbw[0], rd, fn)
if n_rm:
self.log("forgot {} deleted files".format(n_rm))
return ret return ret
def _drop_lost(self, cur, top): def _drop_lost(self, cur, top):
rm = [] rm = []
n_rm = 0
nchecked = 0 nchecked = 0
nfiles = next(cur.execute("select count(w) from up"))[0] # `_build_dir` did all the files, now do dirs
c = cur.execute("select rd, fn from up") ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
for drd, dfn in c: c = cur.execute("select distinct rd from up order by rd desc")
for (drd,) in c:
nchecked += 1 nchecked += 1
if drd.startswith("//") or dfn.startswith("//"): if drd.startswith("//"):
drd, dfn = s3dec(drd, dfn) rd = w8b64dec(drd[2:])
else:
rd = drd
abspath = os.path.join(top, drd, dfn) abspath = os.path.join(top, rd)
# almost zero overhead dw self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
try: try:
if not bos.path.exists(abspath): if os.path.isdir(abspath):
rm.append([drd, dfn]) continue
except Exception as ex: except:
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath)) pass
if rm: rm.append(drd)
self.log("forgetting {} deleted files".format(len(rm)))
for rd, fn in rm:
# self.log("{} / {}".format(rd, fn))
self.db_rm(cur, rd, fn)
return len(rm) if not rm:
return 0
q = "select count(w) from up where rd = ?"
for rd in rm:
n_rm += next(cur.execute(q, (rd,)))[0]
self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
for rd in rm:
cur.execute("delete from up where rd = ?", (rd,))
return n_rm
def _build_tags_index(self, vol): def _build_tags_index(self, vol):
ptop = vol.realpath ptop = vol.realpath
@@ -647,7 +730,7 @@ class Up2k(object):
return n_add, n_rm, False return n_add, n_rm, False
mpool = False mpool = False
if self.mtag.prefer_mt and not self.args.no_mtag_mt: if self.mtag.prefer_mt and self.args.mtag_mt > 1:
mpool = self._start_mpool() mpool = self._start_mpool()
conn = sqlite3.connect(db_path, timeout=15) conn = sqlite3.connect(db_path, timeout=15)
@@ -880,9 +963,7 @@ class Up2k(object):
def _start_mpool(self): def _start_mpool(self):
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor # mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
# both do crazy runahead so lets reinvent another wheel # both do crazy runahead so lets reinvent another wheel
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4 nw = max(1, self.args.mtag_mt)
if self.args.no_mtag_mt:
nw = 1
if self.pending_tags is None: if self.pending_tags is None:
self.log("using {}x {}".format(nw, self.mtag.backend)) self.log("using {}x {}".format(nw, self.mtag.backend))
@@ -940,7 +1021,15 @@ class Up2k(object):
def _tag_file(self, write_cur, entags, wark, abspath, tags=None): def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
if tags is None: if tags is None:
tags = self.mtag.get(abspath) try:
tags = self.mtag.get(abspath)
except Exception as ex:
msg = "failed to read tags from {}:\n{}"
self.log(msg.format(abspath, ex), c=3)
return 0
if not bos.path.isfile(abspath):
return 0
if entags: if entags:
tags = {k: v for k, v in tags.items() if k in entags} tags = {k: v for k, v in tags.items() if k in entags}
@@ -1112,9 +1201,18 @@ class Up2k(object):
if dp_dir.startswith("//") or dp_fn.startswith("//"): if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = s3dec(dp_dir, dp_fn) dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
continue
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn]) dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
# relying on path.exists to return false on broken symlinks # relying on this to fail on broken symlinks
if bos.path.exists(dp_abs): try:
sz = bos.path.getsize(dp_abs)
except:
sz = 0
if sz:
# self.log("--- " + wark + " " + dp_abs + " found file", 4)
job = { job = {
"name": dp_fn, "name": dp_fn,
"prel": dp_dir, "prel": dp_dir,
@@ -1127,9 +1225,9 @@ class Up2k(object):
                         "hash": [],
                         "need": [],
                     }
-                    break

         if job and wark in reg:
+            # self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
             del reg[wark]

         if job or wark in reg:
@@ -1157,11 +1255,20 @@ class Up2k(object):
             if job["need"]:
                 self.log("unfinished:\n  {0}\n  {1}".format(src, dst))
                 err = "partial upload exists at a different location; please resume uploading here instead:\n"
-                err += "/" + vsrc + " "
+                err += "/" + quotep(vsrc) + " "
+
+                dupe = [cj["prel"], cj["name"]]
+                try:
+                    self.dupesched[src].append(dupe)
+                except:
+                    self.dupesched[src] = [dupe]
+
                 raise Pebkac(400, err)

             elif "nodupe" in self.flags[job["ptop"]]:
                 self.log("dupe-reject:\n  {0}\n  {1}".format(src, dst))
-                err = "upload rejected, file already exists:\n/" + vsrc + " "
+                err = "upload rejected, file already exists:\n"
+                err += "/" + quotep(vsrc) + " "
                 raise Pebkac(400, err)
             else:
                 # symlink to the client-provided name,
@@ -1242,7 +1349,7 @@ class Up2k(object):
                 # TODO broker which avoid this race and
                 # provides a new filename if taken (same as bup)
-                suffix = ".{:.6f}-{}".format(ts, ip)
+                suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
                 with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
                     return f["orz"][1]
@@ -1254,6 +1361,9 @@ class Up2k(object):
             return

         try:
+            if self.args.no_symlink:
+                raise Exception("disabled in config")
+
             lsrc = src
             ldst = dst
             fs1 = bos.stat(os.path.dirname(src)).st_dev
@@ -1334,20 +1444,57 @@ class Up2k(object):
                 # del self.registry[ptop][wark]
                 return ret, dst

-        atomic_move(src, dst)
-
-        if ANYWIN:
-            a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
-            self.lastmod_q.put(a)
-
-        a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
-        a += [job.get("at") or time.time()]
-        if self.idx_wark(*a):
-            del self.registry[ptop][wark]
-            # in-memory registry is reserved for unfinished uploads
+        # windows cant rename open files
+        if not ANYWIN or src == dst:
+            self._finish_upload(ptop, wark)

         return ret, dst

+    def finish_upload(self, ptop, wark):
+        with self.mutex:
+            self._finish_upload(ptop, wark)
+
+    def _finish_upload(self, ptop, wark):
+        try:
+            job = self.registry[ptop][wark]
+            pdir = os.path.join(job["ptop"], job["prel"])
+            src = os.path.join(pdir, job["tnam"])
+            dst = os.path.join(pdir, job["name"])
+        except Exception as ex:
+            return "finish_upload, wark, " + repr(ex)
+
+        # self.log("--- " + wark + "  " + dst + "  finish_upload atomic " + dst, 4)
+        atomic_move(src, dst)
+
+        if ANYWIN:
+            a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
+            self.lastmod_q.put(a)
+
+        a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
+        a += [job.get("at") or time.time()]
+        if self.idx_wark(*a):
+            # self.log("pop " + wark + "  " + dst + "  finish_upload idx_wark", 4)
+            del self.registry[ptop][wark]
+            # in-memory registry is reserved for unfinished uploads
+
+        dupes = self.dupesched.pop(dst, [])
+        if not dupes:
+            return
+
+        cur = self.cur.get(ptop)
+        for rd, fn in dupes:
+            d2 = os.path.join(ptop, rd, fn)
+            if os.path.exists(d2):
+                continue
+
+            self._symlink(dst, d2)
+            if cur:
+                self.db_rm(cur, rd, fn)
+                self.db_add(cur, wark, rd, fn, *a[-4:])
+
+        if cur:
+            cur.connection.commit()
+
     def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
         cur = self.cur.get(ptop)
         if not cur:
@@ -1385,7 +1532,7 @@ class Up2k(object):
         ok = {}
         ng = {}
         for vp in vpaths:
-            a, b, c = self._handle_rm(uname, ip, vp, False)
+            a, b, c = self._handle_rm(uname, ip, vp)
             n_files += a
             for k in b:
                 ok[k] = 1
@@ -1398,10 +1545,11 @@ class Up2k(object):
         return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)

-    def _handle_rm(self, uname, ip, vpath, rm_topdir):
+    def _handle_rm(self, uname, ip, vpath):
         try:
             permsets = [[True, False, False, True]]
             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
+            vn, rem = vn.get_dbv(rem)
             unpost = False
         except:
             # unpost with missing permissions? try read+write and verify with db
@@ -1411,6 +1559,7 @@ class Up2k(object):
             unpost = True
             permsets = [[True, True]]
             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
+            vn, rem = vn.get_dbv(rem)

         _, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
         m = "you cannot delete this: "
@@ -1467,7 +1616,7 @@ class Up2k(object):
                 bos.unlink(abspath)

-        rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0)
+        rm = rmdirs(self.log_func, scandir, True, atop, 1)
         return n_files, rm[0], rm[1]

     def handle_mv(self, uname, svp, dvp):
@@ -1546,6 +1695,9 @@ class Up2k(object):
                 # folders are too scary, schedule rescan of both vols
                 self.need_rescan[svn.vpath] = 1
                 self.need_rescan[dvn.vpath] = 1
+                with self.rescan_cond:
+                    self.rescan_cond.notify_all()
+
                 return "k"

         c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
@@ -1623,7 +1775,7 @@ class Up2k(object):
             wark = [
                 x
                 for x, y in reg.items()
-                if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
+                if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
             ]

             if wark and wark in reg:
@@ -1706,7 +1858,13 @@ class Up2k(object):
         except:
             cj["lmod"] = int(time.time())

-        wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        if cj["hash"]:
+            wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        else:
+            wark = up2k_wark_from_metadata(
+                self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
+            )
+
         return wark

     def _hashlist_from_file(self, path):
@@ -1749,9 +1907,12 @@ class Up2k(object):
         if self.args.nw:
             job["tnam"] = tnam
+            if not job["hash"]:
+                del self.registry[job["ptop"]][job["wark"]]
             return

-        suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
+        dip = job["addr"].replace(":", ".")
+        suffix = "-{:.6f}-{}".format(job["t0"], dip)
         with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
             f, job["tnam"] = f["orz"]
             if (
@@ -1765,8 +1926,12 @@ class Up2k(object):
             except:
                 self.log("could not sparse [{}]".format(fp), 3)

-            f.seek(job["size"] - 1)
-            f.write(b"e")
+            if job["hash"]:
+                f.seek(job["size"] - 1)
+                f.write(b"e")
+
+        if not job["hash"]:
+            self._finish_upload(job["ptop"], job["wark"])

     def _lastmodder(self):
         while True:
@@ -1864,11 +2029,16 @@ class Up2k(object):
                 # self.log("\n  " + repr([ptop, rd, fn]))
                 abspath = os.path.join(ptop, rd, fn)
-                tags = self.mtag.get(abspath)
-                ntags1 = len(tags)
-                parsers = self._get_parsers(ptop, tags, abspath)
-                if parsers:
-                    tags.update(self.mtag.get_bin(parsers, abspath))
+                try:
+                    tags = self.mtag.get(abspath)
+                    ntags1 = len(tags)
+                    parsers = self._get_parsers(ptop, tags, abspath)
+                    if parsers:
+                        tags.update(self.mtag.get_bin(parsers, abspath))
+                except Exception as ex:
+                    msg = "failed to read tags from {}:\n{}"
+                    self.log(msg.format(abspath, ex), c=3)
+                    continue

                 with self.mutex:
                     cur = self.cur[ptop]


@@ -251,6 +251,55 @@ class _LUnrecv(object):
 Unrecv = _Unrecv


+class FHC(object):
+    class CE(object):
+        def __init__(self, fh):
+            self.ts = 0
+            self.fhs = [fh]
+
+    def __init__(self):
+        self.cache = {}
+
+    def close(self, path):
+        try:
+            ce = self.cache[path]
+        except:
+            return
+
+        for fh in ce.fhs:
+            fh.close()
+
+        del self.cache[path]
+
+    def clean(self):
+        if not self.cache:
+            return
+
+        keep = {}
+        now = time.time()
+        for path, ce in self.cache.items():
+            if now < ce.ts + 5:
+                keep[path] = ce
+            else:
+                for fh in ce.fhs:
+                    fh.close()
+
+        self.cache = keep
+
+    def pop(self, path):
+        return self.cache[path].fhs.pop()
+
+    def put(self, path, fh):
+        try:
+            ce = self.cache[path]
+            ce.fhs.append(fh)
+        except:
+            ce = self.CE(fh)
+            self.cache[path] = ce
+
+        ce.ts = time.time()
+
+
 class ProgressPrinter(threading.Thread):
     """
     periodically print progress info without linefeeds
@@ -375,7 +424,7 @@ def stackmon(fp, ival, suffix):

 def start_log_thrs(logger, ival, nid):
-    ival = int(ival)
+    ival = float(ival)
     tname = lname = "log-thrs"
     if nid:
         tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
@@ -396,7 +445,7 @@ def log_thrs(log, ival, name):
         tv = [x.name for x in threading.enumerate()]
         tv = [
             x.split("-")[0]
-            if x.startswith("httpconn-") or x.startswith("thumb-")
+            if x.split("-")[0] in ["httpconn", "thumb", "tagger"]
             else "listen"
             if "-listen-" in x
             else x
@@ -410,6 +459,10 @@ def log_thrs(log, ival, name):

 def vol_san(vols, txt):
     for vol in vols:
         txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
+        txt = txt.replace(
+            vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
+            vol.vpath.encode("utf-8"),
+        )

     return txt
@@ -425,11 +478,12 @@ def min_ex():

 @contextlib.contextmanager
 def ren_open(fname, *args, **kwargs):
+    fun = kwargs.pop("fun", open)
     fdir = kwargs.pop("fdir", None)
     suffix = kwargs.pop("suffix", None)

     if fname == os.devnull:
-        with open(fname, *args, **kwargs) as f:
+        with fun(fname, *args, **kwargs) as f:
             yield {"orz": [f, fname]}
             return
@@ -463,7 +517,7 @@ def ren_open(fname, *args, **kwargs):
                 fname += suffix
                 ext += suffix

-            with open(fsenc(fpath), *args, **kwargs) as f:
+            with fun(fsenc(fpath), *args, **kwargs) as f:
                 if b64:
                     fp2 = "fn-trunc.{}.txt".format(b64)
                     fp2 = os.path.join(fdir, fp2)
@@ -508,8 +562,8 @@ class MultipartParser(object):
         self.log = log_func
         self.headers = http_headers

-        self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE)
-        self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE)
+        self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE)
+        self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE)
         self.re_cdisp_field = re.compile(
             r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
         )
@@ -708,7 +762,7 @@ class MultipartParser(object):
 def get_boundary(headers):
     # boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
     # (whitespace allowed except as the last char)
-    ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
+    ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
     ct = headers["content-type"]
     m = re.match(ptn, ct, re.IGNORECASE)
     if not m:
@@ -1137,6 +1191,9 @@ def sendfile_kern(lower, upper, f, s):

 def statdir(logger, scandir, lstat, top):
+    if lstat and ANYWIN:
+        lstat = False
+
     if lstat and not os.supports_follow_symlinks:
         scandir = False
@@ -1167,6 +1224,7 @@ def statdir(logger, scandir, lstat, top):

 def rmdirs(logger, scandir, lstat, top, depth):
     if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
         top = os.path.dirname(top)
+        depth -= 1

     dirs = statdir(logger, scandir, lstat, top)
     dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]


@@ -16,7 +16,6 @@ html,body,tr,th,td,#files,a {
} }
html { html {
color: #ccc; color: #ccc;
background: #333;
font-family: sans-serif; font-family: sans-serif;
text-shadow: 1px 1px 0px #000; text-shadow: 1px 1px 0px #000;
} }
@@ -36,11 +35,9 @@ pre, code, tt {
text-shadow: 1px 1px 0 #000; text-shadow: 1px 1px 0 #000;
font-variant: small-caps; font-variant: small-caps;
font-weight: normal; font-weight: normal;
background: #4c4c4c;
display: inline-block; display: inline-block;
padding: .35em .5em .2em .5em; padding: .35em .5em .2em .5em;
border-radius: 0 .3em .3em 0; border-radius: 0 .3em .3em 0;
box-shadow: .1em .1em .4em #222;
margin: 1.3em 0 0 0; margin: 1.3em 0 0 0;
font-size: 1.4em; font-size: 1.4em;
} }
@@ -71,7 +68,7 @@ a, #files tbody div a:last-child {
} }
#files a:hover { #files a:hover {
color: #fff; color: #fff;
background: #161616; background: #111;
text-decoration: underline; text-decoration: underline;
} }
#files thead { #files thead {
@@ -82,38 +79,23 @@ a, #files tbody div a:last-child {
color: #999; color: #999;
font-weight: normal; font-weight: normal;
} }
#files tr:hover td { #files tbody tr:hover td {
background: #1c1c1c; background: #1c1c1c;
} }
#files thead th { #files thead th {
padding: .5em .3em .3em .3em; padding: 0 .3em .3em .3em;
border-right: 2px solid #3c3c3c; border-bottom: 1px solid #444;
border-bottom: 2px solid #444;
background: #333;
cursor: pointer; cursor: pointer;
} }
#files thead th+th {
border-left: 2px solid #2a2a2a;
}
#files thead th:last-child {
border-right: none;
}
#files tbody {
background: #222;
}
#files td { #files td {
margin: 0; margin: 0;
padding: 0 .5em; padding: .1em .5em;
border-bottom: 1px solid #111; border-left: 1px solid #3c3c3c;
border-left: 1px solid #2c2c2c;
} }
#files td+td+td { #files td+td+td {
max-width: 30em; max-width: 30em;
overflow: hidden; overflow: hidden;
} }
#files tr+tr td {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) { #files tbody td:nth-child(3) {
font-family: 'scp', monospace, monospace; font-family: 'scp', monospace, monospace;
text-align: right; text-align: right;
@@ -121,18 +103,15 @@ a, #files tbody div a:last-child {
white-space: nowrap; white-space: nowrap;
} }
#files tbody td:first-child { #files tbody td:first-child {
padding-left: 1.5em;
color: #888; color: #888;
} text-align: center;
#files tbody tr:first-child td {
padding-top: .9em;
} }
#files tbody tr:last-child td { #files tbody tr:last-child td {
padding-bottom: 1.3em; border-bottom: 1px solid #444;
border-bottom: .5em solid #444;
} }
#files tbody tr td:last-child { #files tbody tr td:last-child {
white-space: nowrap; white-space: nowrap;
border-right: 1px solid #3c3c3c;
} }
#files thead th[style] { #files thead th[style] {
width: auto !important; width: auto !important;
@@ -163,8 +142,9 @@ a, #files tbody div a:last-child {
background: linear-gradient(90deg, rgba(0,0,0,0), rgba(0,0,0,0.2), rgba(0,0,0,0)); background: linear-gradient(90deg, rgba(0,0,0,0), rgba(0,0,0,0.2), rgba(0,0,0,0));
} }
.logue { .logue {
padding: .2em 1.5em; padding: .2em 0;
} }
.logue.hidden,
.logue:empty { .logue:empty {
display: none; display: none;
} }
@@ -174,6 +154,21 @@ a, #files tbody div a:last-child {
#epi.logue { #epi.logue {
margin: .8em 0; margin: .8em 0;
} }
#epi.logue.mdo:before {
content: 'README.md';
text-align: center;
display: block;
margin-top: -1.5em;
}
#epi.logue.mdo {
border-top: 1px solid #555;
margin-top: 2.5em;
}
.mdo>h1:first-child,
.mdo>h2:first-child,
.mdo>h3:first-child {
margin-top: 1.5rem;
}
.mdo { .mdo {
max-width: 52em; max-width: 52em;
} }
@@ -183,7 +178,6 @@ a, #files tbody div a:last-child {
} }
#srv_info { #srv_info {
color: #a73; color: #a73;
background: #333;
position: absolute; position: absolute;
font-size: .8em; font-size: .8em;
top: .5em; top: .5em;
@@ -285,43 +279,6 @@ html.light #ggrid>a.sel {
#files tr:focus td:first-child { #files tr:focus td:first-child {
box-shadow: -.2em .2em 0 #fc0, -.2em -.2em 0 #fc0; box-shadow: -.2em .2em 0 #fc0, -.2em -.2em 0 #fc0;
} }
#files tr:focus+tr td {
border-top: 1px solid transparent;
}
#blocked {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: #333;
font-size: 2.5em;
z-index: 99;
}
#blk_play,
#blk_abrt {
position: fixed;
display: table;
width: 80%;
}
#blk_play {
height: 60%;
left: 10%;
top: 5%;
}
#blk_abrt {
height: 25%;
left: 10%;
bottom: 5%;
}
#blk_play a,
#blk_abrt a {
display: table-cell;
vertical-align: middle;
text-align: center;
background: #444;
border-radius: 2em;
}
#widget { #widget {
position: fixed; position: fixed;
font-size: 1.4em; font-size: 1.4em;
@@ -343,7 +300,6 @@ html.light #ggrid>a.sel {
z-index: 10; z-index: 10;
width: 100%; width: 100%;
height: 100%; height: 100%;
background: #3c3c3c;
} }
#wtgrid, #wtgrid,
#wtico { #wtico {
@@ -384,7 +340,6 @@ html.light #ggrid>a.sel {
line-height: 1em; line-height: 1em;
text-align: center; text-align: center;
text-shadow: none; text-shadow: none;
background: #3c3c3c;
box-shadow: 0 0 .5em #222; box-shadow: 0 0 .5em #222;
border-radius: .3em 0 0 0; border-radius: .3em 0 0 0;
padding: 0 0 0 .1em; padding: 0 0 0 .1em;
@@ -396,7 +351,7 @@ html.light #ggrid>a.sel {
#wzip, #wnp { #wzip, #wnp {
margin-right: .2em; margin-right: .2em;
padding-right: .2em; padding-right: .2em;
border: 1px solid #555; border: 1px solid #444;
border-width: 0 .1em 0 0; border-width: 0 .1em 0 0;
} }
#wfm.act+#wzip, #wfm.act+#wzip,
@@ -552,36 +507,33 @@ html.light #wfm a:not(.en) {
box-shadow: 0 -.15em .2em #000 inset; box-shadow: 0 -.15em .2em #000 inset;
padding-bottom: .3em; padding-bottom: .3em;
} }
-#ops,
-.opbox,
-#u2etas {
-    border: 1px solid #3a3a3a;
-    box-shadow: 0 0 1em #222 inset;
+#ops a svg {
+    width: 1.75em;
+    height: 1.75em;
+    margin: -.5em -.3em;
 }
#ops { #ops {
background: #333;
margin: 1.7em 1.5em 0 1.5em; margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em; padding: .3em .6em;
border-radius: .3em; border-radius: .3em;
border-width: .15em 0; border-width: 1px 0;
white-space: nowrap; white-space: nowrap;
} }
.opbox { .opbox {
background: #2d2d2d;
margin: 1.5em 0 0 0; margin: 1.5em 0 0 0;
padding: .5em; padding: .5em;
border-radius: 0 1em 1em 0; border-radius: 0 .3em .3em 0;
border-width: .15em .3em .3em 0; border-width: 1px 1px 1px 0;
max-width: 41em; max-width: 41em;
max-width: min(41em, calc(100% - 2.6em));
} }
.opbox input { .opbox input {
margin: .5em; margin: .5em;
} }
.opview input[type=text] { .opview input[type=text] {
background: #383838;
color: #fff; color: #fff;
border: none; border: none;
box-shadow: 0 0 .3em #222; box-shadow: 0 0 .3em #181818;
border-bottom: 1px solid #fc5; border-bottom: 1px solid #fc5;
border-radius: .2em; border-radius: .2em;
padding: .2em .3em; padding: .2em .3em;
@@ -598,14 +550,12 @@ html.light .opview input[type="text"].err {
input[type="checkbox"]+label { input[type="checkbox"]+label {
color: #f5a; color: #f5a;
} }
+input[type="radio"]:checked+label,
 input[type="checkbox"]:checked+label {
     color: #fc5;
 }
-input[type="radio"]:checked+label {
-    color: #fc0;
-}
-html.light input[type="radio"]:checked+label {
-    color: #07c;
+.opview input.i {
+    width: calc(100% - 16.2em);
 }
input.eq_gain { input.eq_gain {
width: 3em; width: 3em;
@@ -628,15 +578,13 @@ input.eq_gain {
margin-top: .5em; margin-top: .5em;
padding: 1.3em .3em; padding: 1.3em .3em;
} }
+#ico1 {
+    cursor: pointer;
+}
 #srch_form {
-    border: 1px solid #3a3a3a;
-    box-shadow: 0 0 1em #222 inset;
-    background: #2d2d2d;
-    border-radius: .4em;
-    margin: 1.4em;
     margin-bottom: 0;
     padding: 0 .5em .5em 0;
 }
@@ -693,8 +641,8 @@ input.eq_gain {
width: 100%; width: 100%;
} }
#wrap { #wrap {
margin-top: 2em; margin: 1.8em 1.5em 0 1.5em;
min-height: 90vh; min-height: 70vh;
padding-bottom: 5em; padding-bottom: 5em;
} }
#tree { #tree {
@@ -703,25 +651,28 @@ input.eq_gain {
left: 0; left: 0;
bottom: 0; bottom: 0;
top: 7em; top: 7em;
width: var(--nav-sz);
overflow-x: hidden; overflow-x: hidden;
overflow-y: auto; overflow-y: auto;
-ms-scroll-chaining: none; -ms-scroll-chaining: none;
overscroll-behavior-y: none; overscroll-behavior-y: none;
scrollbar-color: #eb0 #333; scrollbar-color: #eb0 #333;
border: 1px solid #333;
box-shadow: 0 0 1em #181818;
} }
#treeh { #treeh {
background: #333;
position: sticky; position: sticky;
z-index: 1; z-index: 1;
top: 0; top: 0;
height: 2.2em; height: 2.2em;
line-height: 2.2em; line-height: 2.2em;
border-bottom: 1px solid #555; border-bottom: 1px solid #111;
overflow: hidden; overflow: hidden;
} }
-#thx_ff {
-    padding: 5em 0;
+#tree, #treeh {
+    border-radius: 0 .3em 0 0;
+}
+.np_open #thx_ff {
+    padding: 4.5em 0;
     /* widget */
 }
#tree::-webkit-scrollbar-track, #tree::-webkit-scrollbar-track,
@@ -742,8 +693,6 @@ input.eq_gain {
.btn { .btn {
padding: .2em .4em; padding: .2em .4em;
font-size: 1.2em; font-size: 1.2em;
background: #2a2a2a;
box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em; border-radius: .3em;
margin: .2em; margin: .2em;
white-space: pre; white-space: pre;
@@ -772,13 +721,13 @@ input.eq_gain {
margin: 0; margin: 0;
} }
#tree ul { #tree ul {
border-left: .2em solid #555; border-left: .2em solid #444;
} }
#tree li { #tree li {
margin-left: 1em; margin-left: 1em;
list-style: none; list-style: none;
border-top: 1px solid #4c4c4c; border-top: 1px solid #444;
border-bottom: 1px solid #222; border-bottom: 1px solid #111;
} }
#tree li:last-child { #tree li:last-child {
border-bottom: none; border-bottom: none;
@@ -801,7 +750,7 @@ input.eq_gain {
white-space: nowrap; white-space: nowrap;
} }
#tree.nowrap #treeul a+a:hover { #tree.nowrap #treeul a+a:hover {
background: rgba(34, 34, 34, 0.67); background: rgba(16, 16, 16, 0.67);
min-width: calc(var(--nav-sz) - 2em); min-width: calc(var(--nav-sz) - 2em);
width: auto; width: auto;
} }
@@ -810,7 +759,7 @@ html.light #tree.nowrap #treeul a+a:hover {
color: #000; color: #000;
} }
#treeul a+a:hover { #treeul a+a:hover {
background: #222; background: #181818;
color: #fff; color: #fff;
} }
#treeul a:first-child { #treeul a:first-child {
@@ -849,30 +798,31 @@ html.light #tree.nowrap #treeul a+a:hover {
#files td:nth-child(2n) { #files td:nth-child(2n) {
color: #f5a; color: #f5a;
} }
#files tr.play td, #files tbody tr.play td,
#files tr.play div a { #files tbody tr.play div a {
background: #fc4; background: #fc4;
border-color: transparent; border-color: transparent;
color: #400; color: #400;
text-shadow: none; text-shadow: none;
} }
#files tr.play a { #files tbody tr.play a {
color: inherit; color: inherit;
} }
#files tr.play a:hover { #files tbody tr.play a:hover {
color: #300; color: #300;
background: #fea; background: #fea;
} }
.opwide, .opwide,
#op_unpost { #op_unpost,
#srch_form {
max-width: none; max-width: none;
margin-right: 1.5em; margin-right: 1.5em;
} }
.opwide>div { .opwide>div {
display: inline-block; display: inline-block;
vertical-align: top; vertical-align: top;
border-left: .2em solid #4c4c4c; border-left: .4em solid #4c4c4c;
margin-left: .5em; margin: .7em 0 .7em .5em;
padding-left: .5em; padding-left: .5em;
} }
.opwide>div.fill { .opwide>div.fill {
@@ -881,6 +831,10 @@ html.light #tree.nowrap #treeul a+a:hover {
.opwide>div>div>a { .opwide>div>div>a {
line-height: 2em; line-height: 2em;
} }
.opwide>div>h3 {
margin: 0 .4em;
padding: 0;
}
#op_cfg>div>div>span { #op_cfg>div>div>span {
display: inline-block; display: inline-block;
padding: .2em .4em; padding: .2em .4em;
@@ -904,12 +858,10 @@ html.light #tree.nowrap #treeul a+a:hover {
display: none; display: none;
} }
#ghead { #ghead {
background: #3c3c3c;
border: 1px solid #444;
border-radius: .3em; border-radius: .3em;
padding: .2em .5em; padding: .2em .5em;
line-height: 2.3em; line-height: 2.3em;
margin: 0 1.5em 1em .4em; margin-bottom: 1em;
position: sticky; position: sticky;
top: -.3em; top: -.3em;
z-index: 1; z-index: 1;
@@ -928,6 +880,7 @@ html.light #ghead {
} }
#ggrid { #ggrid {
padding-top: .5em; padding-top: .5em;
margin: 0 -.5em;
} }
#ggrid>a>span { #ggrid>a>span {
overflow: hidden; overflow: hidden;
@@ -943,17 +896,10 @@ html.light #ghead {
width: var(--grid-sz); width: var(--grid-sz);
vertical-align: top; vertical-align: top;
overflow-wrap: break-word; overflow-wrap: break-word;
background: #383838;
border: 1px solid #444;
border-top: 1px solid #555;
box-shadow: 0 .1em .2em #222;
border-radius: .3em; border-radius: .3em;
padding: .3em; padding: .3em;
margin: .5em; margin: .5em;
} }
#ggrid>a[tt] {
background: linear-gradient(135deg, #383838 95%, #555 95%);
}
#ggrid>a img { #ggrid>a img {
border-radius: .2em; border-radius: .2em;
max-width: 10em; max-width: 10em;
@@ -976,25 +922,6 @@ html.light #ghead {
border-radius: .3em; border-radius: .3em;
font-size: 2em; font-size: 2em;
} }
#ggrid>a:hover {
background: #444;
border-color: #555;
color: #fd9;
}
html.light #ggrid>a {
background: #f7f7f7;
border-color: #ddd;
box-shadow: 0 .1em .2em #ddd;
}
html.light #ggrid>a[tt] {
background: linear-gradient(135deg, #f7f7f7 95%, #ccc 95%);
}
html.light #ggrid>a:hover {
background: #fff;
border-color: #ccc;
color: #015;
box-shadow: 0 .1em .5em #aaa;
}
#op_unpost { #op_unpost {
padding: 1em; padding: 1em;
} }
@@ -1015,7 +942,6 @@ html.light #ggrid>a:hover {
max-height: calc(100% - 2em); max-height: calc(100% - 2em);
border-bottom: .5em solid #999; border-bottom: .5em solid #999;
box-shadow: 0 0 5em rgba(0,0,0,0.8); box-shadow: 0 0 5em rgba(0,0,0,0.8);
background: #333;
padding: 1em; padding: 1em;
z-index: 765; z-index: 765;
} }
@@ -1072,7 +998,8 @@ a.btn,
#rui label, #rui label,
#modal-ok, #modal-ok,
#modal-ng, #modal-ng,
#ops { #ops,
#ico1 {
-webkit-user-select: none; -webkit-user-select: none;
-moz-user-select: none; -moz-user-select: none;
-ms-user-select: none; -ms-user-select: none;
@@ -1098,6 +1025,77 @@ a.btn,
html,
#rui,
#files td,
#files thead th,
#bbox-halp,
#u2notbtn,
#srv_info {
background: #222;
}
#ops,
.opbox,
#path,
#srch_form,
#ghead {
background: #2b2b2b;
border: 1px solid #333;
box-shadow: 0 0 .3em #111;
}
#files tr:nth-child(2n+1) td {
background: #282828;
}
#tree,
#treeh {
background: #2b2b2b;
}
#wtoggle,
#widgeti {
background: #333;
}
.btn,
.opview input[type=text] {
background: #383838;
}
#ggrid>a {
background: #2c2c2c;
border: 1px solid #383838;
border-top: 1px solid #444;
box-shadow: 0 .1em .2em #181818;
}
#ggrid>a[tt] {
background: linear-gradient(135deg, #2c2c2c 95%, #444 95%);
}
#ggrid>a:hover {
background: #383838;
border-color: #555;
color: #fd9;
}
html.light #ggrid>a {
background: #f7f7f7;
border-color: #ddd;
box-shadow: 0 .1em .2em #ddd;
}
html.light #ggrid>a[tt] {
background: linear-gradient(135deg, #f7f7f7 95%, #ccc 95%);
}
html.light #ggrid>a:hover {
background: #fff;
border-color: #ccc;
color: #015;
box-shadow: 0 .1em .5em #aaa;
}
@@ -1105,15 +1103,17 @@ a.btn,
html.light { html.light {
color: #333; color: #333;
background: #eee; background: #eaeaea;
text-shadow: none; text-shadow: none;
} }
html.light #ops, html.light #ops,
html.light .opbox, html.light .opbox,
html.light #path,
html.light #srch_form, html.light #srch_form,
html.light #ghead,
html.light #u2etas { html.light #u2etas {
background: #f7f7f7; background: #f7f7f7;
box-shadow: 0 0 .3em #ddd; box-shadow: 0 0 .3em #ccc;
border-color: #f7f7f7; border-color: #f7f7f7;
} }
html.light #ops a.act { html.light #ops a.act {
@@ -1122,6 +1122,9 @@ html.light #ops a.act {
border-color: #07a; border-color: #07a;
padding-top: .4em; padding-top: .4em;
} }
html.light #ops svg circle {
stroke: black;
}
html.light #op_cfg h3 { html.light #op_cfg h3 {
border-color: #ccc; border-color: #ccc;
} }
@@ -1176,25 +1179,19 @@ html.light #ops a,
html.light #files tbody div a:last-child { html.light #files tbody div a:last-child {
color: #06a; color: #06a;
} }
-html.light #files tbody {
-    background: #eaeaea;
-    border-color: #ccc;
-}
-html.light #files tbody td {
-    background: #eee;
-    border-color: #ccc;
-}
-html.light #files tr:nth-child(2n+1) td {
+html.light #files thead th {
     background: #f7f7f7;
 }
+html.light #files {
+    box-shadow: 0 0 .3em #ccc;
+}
-html.light #files thead th {
-    background: #eee;
-    border: 1px solid #ccc;
-    border-top: none;
-}
-html.light #files thead th+th {
-    border-left: 1px solid #f7f7f7;
-}
-html.light #files td {
-    border-color: #fff #fff #ddd #ddd;
-}
 html.light #files tbody tr:last-child td {
-    border-bottom: .2em solid #ccc;
+    border-bottom: 1px solid #ccc;
 }
html.light #files tr:focus td { html.light #files tr:focus td {
background: #fff; background: #fff;
@@ -1232,14 +1229,6 @@ html.light tr.play a {
html.light #files th:hover .cfg { html.light #files th:hover .cfg {
background: #ccc; background: #ccc;
} }
html.light #blocked {
background: #eee;
}
html.light #blk_play a,
html.light #blk_abrt a {
background: #fff;
box-shadow: 0 .2em .4em #ddd;
}
html.light #widget a { html.light #widget a {
color: #06a; color: #06a;
} }
@@ -1276,6 +1265,10 @@ html.light #files tr.sel a.play.act {
html.light input[type="checkbox"] + label { html.light input[type="checkbox"] + label {
color: #333; color: #333;
} }
html.light input[type="radio"]:checked + label,
html.light input[type="checkbox"]:checked + label {
color: #07c;
}
html.light .opwide>div { html.light .opwide>div {
border-color: #ccc; border-color: #ccc;
} }
@@ -1311,20 +1304,24 @@ html.light #files a:hover,
html.light #files tr.sel a:hover { html.light #files tr.sel a:hover {
color: #000; color: #000;
background: #fff; background: #fff;
text-decoration: underline;
} }
html.light #treeh { html.light #treeh {
background: #eee; background: #f7f7f7;
border-color: #ddd; border-color: #ddd;
} }
html.light #tree { html.light #tree {
scrollbar-color: #a70 #ddd; border-color: #ddd;
box-shadow: 0 0 1em #ddd;
background: #f7f7f7;
scrollbar-color: #490 #ddd;
} }
html.light #tree::-webkit-scrollbar-track, html.light #tree::-webkit-scrollbar-track,
html.light #tree::-webkit-scrollbar { html.light #tree::-webkit-scrollbar {
background: #ddd; background: #ddd;
} }
#tree::-webkit-scrollbar-thumb { html.light #tree::-webkit-scrollbar-thumb {
background: #da0; background: #490;
} }
@@ -1403,7 +1400,7 @@ html.light #tree::-webkit-scrollbar {
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6); box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
} }
.full-image video { .full-image video {
background: #333; background: #222;
} }
.full-image figcaption { .full-image figcaption {
display: block; display: block;
@@ -1499,7 +1496,6 @@ html.light #bbox-overlay figcaption a {
} }
#bbox-halp { #bbox-halp {
color: #fff; color: #fff;
background: #333;
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
@@ -1603,7 +1599,7 @@ html.light #bbox-overlay figcaption a {
border-radius: .5em; border-radius: .5em;
border-width: 1vw; border-width: 1vw;
color: #fff; color: #fff;
transition: all 0.2s; transition: all 0.12s;
} }
#drops .dropdesc.hl.ok { #drops .dropdesc.hl.ok {
border-color: #fff; border-color: #fff;
@@ -1624,6 +1620,16 @@ html.light #bbox-overlay figcaption a {
vertical-align: middle; vertical-align: middle;
text-align: center; text-align: center;
} }
#drops .dropdesc>div>div {
position: absolute;
top: 40%;
top: calc(50% - .5em);
left: -.8em;
}
#drops .dropdesc>div>div+div {
left: auto;
right: -.8em;
}
#drops .dropzone { #drops .dropzone {
z-index: 80386; z-index: 80386;
height: 50%; height: 50%;
@@ -1697,14 +1703,20 @@ html.light #u2err.err {
cursor: pointer; cursor: pointer;
box-shadow: .4em .4em 0 #111; box-shadow: .4em .4em 0 #111;
} }
#u2conf.ww #u2btn {
line-height: 1em;
padding: .5em 0;
margin: -1.5em .5em -3em 0;
}
#op_up2k.srch #u2btn { #op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%); background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
text-shadow: 1px 1px 1px #fc6; text-shadow: 1px 1px 1px #fc6;
color: #333; color: #333;
} }
#u2conf #u2btn { #u2conf #u2btn {
margin: -2.4em 0; padding: .6em 0;
padding: .8em 0; margin: -2em 0;
font-size: 1.25em;
width: 100%; width: 100%;
max-width: 12em; max-width: 12em;
display: inline-block; display: inline-block;
@@ -1715,7 +1727,6 @@ html.light #u2err.err {
#u2notbtn { #u2notbtn {
display: none; display: none;
text-align: center; text-align: center;
background: #333;
padding-top: 1em; padding-top: 1em;
} }
#u2notbtn * { #u2notbtn * {
@@ -1748,10 +1759,12 @@ html.light #u2err.err {
width: auto; width: auto;
} }
#u2tab tbody tr:hover td { #u2tab tbody tr:hover td {
background: #222; background: #333;
} }
#u2etas { #u2etas {
background: #333; background: #1c1c1c;
border: 1px solid #282828;
border-width: .1em 0;
padding: .2em .5em; padding: .2em .5em;
border-radius: .5em; border-radius: .5em;
border-width: .25em 0; border-width: .25em 0;
@@ -1768,6 +1781,7 @@ html.light #u2err.err {
display: none; display: none;
} }
#u2etas.o .o { #u2etas.o .o {
display: inherit;
display: unset; display: unset;
} }
#u2etaw { #u2etaw {
@@ -1790,16 +1804,22 @@ html.light #u2err.err {
width: 44em; width: 44em;
text-align: left; text-align: left;
} }
#u2cards.ww {
display: inline-block;
}
#u2etaw.w { #u2etaw.w {
width: 52em; width: 52em;
text-align: right; text-align: right;
margin: 3em auto -2.7em auto; margin: 3em auto -2.7em auto;
} }
#u2etaw.ww {
margin: 0 2em 1em 2em;
}
#u2cards a { #u2cards a {
padding: .2em 1em; padding: .2em 1em;
border: 1px solid #777; border: 1px solid #777;
border-width: 0 0 1px 0; border-width: 0 0 1px 0;
background: linear-gradient(to bottom, #333, #222); background: linear-gradient(to bottom, #222, #2b2b2b);
} }
#u2cards a:first-child { #u2cards a:first-child {
border-radius: .4em 0 0 0; border-radius: .4em 0 0 0;
@@ -1812,23 +1832,35 @@ html.light #u2err.err {
border-width: 1px 1px .1em 1px; border-width: 1px 1px .1em 1px;
border-radius: .3em .3em 0 0; border-radius: .3em .3em 0 0;
margin-left: -1px; margin-left: -1px;
background: linear-gradient(to bottom, #464, #333 80%); background: linear-gradient(to bottom, #353, #222 80%);
box-shadow: 0 -.17em .67em #280; box-shadow: 0 -.17em .67em #280;
border-color: #7c5 #583 #333 #583; border-color: #7c5 #583 #222 #583;
position: relative; position: relative;
color: #fd7; color: #fd7;
} }
#u2cards span { #u2cards span {
color: #fff; color: #fff;
} }
#u2cards > a:nth-child(4) > span {
display: inline-block;
text-align: center;
min-width: 1.3em;
}
 #u2conf {
     margin: 1em auto;
     width: 30em;
 }
-#u2conf.has_btn {
+#u2conf.w {
     width: 48em;
 }
-#u2conf * {
+#u2conf.ww {
+    width: 74em;
+}
+#u2conf.ww #u2c3w {
+    width: 29em;
+}
+#u2conf .c,
+#u2conf .c * {
     text-align: center;
line-height: 1em; line-height: 1em;
margin: 0; margin: 0;
@@ -1848,7 +1880,7 @@ html.light #u2err.err {
#u2conf .txtbox.err { #u2conf .txtbox.err {
background: #922; background: #922;
} }
#u2conf a { #u2conf a.b {
color: #fff; color: #fff;
background: #c38; background: #c38;
text-decoration: none; text-decoration: none;
@@ -1862,10 +1894,10 @@ html.light #u2err.err {
position: relative; position: relative;
bottom: -0.08em; bottom: -0.08em;
} }
#u2conf input+a { #u2conf input+a.b {
background: #d80; background: #d80;
} }
#u2conf label { #u2conf .c label {
font-size: 1.6em; font-size: 1.6em;
width: 2em; width: 2em;
height: 1em; height: 1em;
@@ -1965,7 +1997,8 @@ html.light #u2foot .warn span {
background: #900; background: #900;
border-color: #d06; border-color: #d06;
} }
#u2tab a>span { #u2tab a>span,
#unpost a>span {
font-weight: bold; font-weight: bold;
font-style: italic; font-style: italic;
color: #fff; color: #fff;


@@ -18,9 +18,9 @@
     <div id="op_search" class="opview">
 {%- if have_tags_idx %}
-        <div id="srch_form" class="tags"></div>
+        <div id="srch_form" class="tags opbox"></div>
 {%- else %}
-        <div id="srch_form"></div>
+        <div id="srch_form" class="opbox"></div>
 {%- endif %}
         <div id="srch_q"></div>
     </div>
@@ -31,7 +31,7 @@
         <div id="u2err"></div>
         <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
             <input type="hidden" name="act" value="bput" />
-            <input type="file" name="f" multiple><br />
+            <input type="file" name="f" multiple /><br />
             <input type="submit" value="start upload">
         </form>
     </div>
@@ -39,7 +39,7 @@
     <div id="op_mkdir" class="opview opbox act">
         <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
             <input type="hidden" name="act" value="mkdir" />
-            📂<input type="text" name="name" size="30">
+            📂<input type="text" name="name" class="i">
             <input type="submit" value="make directory">
         </form>
     </div>
@@ -47,15 +47,15 @@
     <div id="op_new_md" class="opview opbox">
         <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
             <input type="hidden" name="act" value="new_md" />
-            📝<input type="text" name="name" size="30">
+            📝<input type="text" name="name" class="i">
             <input type="submit" value="new markdown doc">
         </form>
     </div>

     <div id="op_msg" class="opview opbox act">
         <form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
-            📟<input type="text" name="msg" size="30">
-            <input type="submit" value="send msg to server log">
+            📟<input type="text" name="msg" class="i">
+            <input type="submit" value="send msg to srv log">
         </form>
     </div>
@@ -135,10 +135,15 @@
         have_unpost = {{ have_unpost|tojson }},
         have_zip = {{ have_zip|tojson }},
         readme = {{ readme|tojson }};
+    document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
 </script>
 <script src="/.cpr/util.js?_={{ ts }}"></script>
 <script src="/.cpr/browser.js?_={{ ts }}"></script>
 <script src="/.cpr/up2k.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}?_={{ ts }}"></script>
+{%- endif %}
 </body>
 </html>


@@ -61,32 +61,29 @@ ebi('op_up2k').innerHTML = (
 '<table id="u2conf">\n' +
 ' <tr>\n' +
-' <td><br />parallel uploads:</td>\n' +
-' <td rowspan="2">\n' +
+' <td class="c"><br />parallel uploads:</td>\n' +
+' <td class="c" rowspan="2">\n' +
 ' <input type="checkbox" id="multitask" />\n' +
 ' <label for="multitask" tt="continue hashing other files while uploading">🏃</label>\n' +
 ' </td>\n' +
-' <td rowspan="2">\n' +
+' <td class="c" rowspan="2">\n' +
 ' <input type="checkbox" id="ask_up" />\n' +
 ' <label for="ask_up" tt="ask for confirmation before upload starts">💭</label>\n' +
 ' </td>\n' +
-' <td rowspan="2">\n' +
-' <input type="checkbox" id="flag_en" />\n' +
-' <label for="flag_en" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>\n' +
-' </td>\n' +
 (have_up2k_idx ? (
-' <td data-perm="read" rowspan="2">\n' +
+' <td class="c" data-perm="read" rowspan="2">\n' +
 ' <input type="checkbox" id="fsearch" />\n' +
 ' <label for="fsearch" tt="don\'t actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>\n' +
 ' </td>\n'
 ) : '') +
 ' <td data-perm="read" rowspan="2" id="u2btn_cw"></td>\n' +
+' <td data-perm="read" rowspan="2" id="u2c3w"></td>\n' +
 ' </tr>\n' +
 ' <tr>\n' +
-' <td>\n' +
-' <a href="#" id="nthread_sub">&ndash;</a><input\n' +
+' <td class="c">\n' +
+' <a href="#" class="b" id="nthread_sub">&ndash;</a><input\n' +
 ' class="txtbox" id="nthread" value="2" tt="pause uploads by setting it to 0"/><a\n' +
-' href="#" id="nthread_add">+</a><br />&nbsp;\n' +
+' href="#" class="b" id="nthread_add">+</a><br />&nbsp;\n' +
 ' </td>\n' +
 ' </tr>\n' +
 '</table>\n' +
@@ -102,6 +99,8 @@ ebi('op_up2k').innerHTML = (
' </div>\n' + ' </div>\n' +
'</div>\n' + '</div>\n' +
'<div id="u2c3t">\n' +
'<div id="u2etaw"><div id="u2etas"><div class="o">\n' + '<div id="u2etaw"><div id="u2etas"><div class="o">\n' +
' hash: <span id="u2etah" tt="average &lt;em&gt;hashing&lt;/em&gt; speed, and estimated time until finish">(no uploads are queued yet)</span><br />\n' + ' hash: <span id="u2etah" tt="average &lt;em&gt;hashing&lt;/em&gt; speed, and estimated time until finish">(no uploads are queued yet)</span><br />\n' +
' send: <span id="u2etau" tt="average &lt;em&gt;upload&lt;/em&gt; speed and estimated time until finish">(no uploads are queued yet)</span><br />\n' + ' send: <span id="u2etau" tt="average &lt;em&gt;upload&lt;/em&gt; speed and estimated time until finish">(no uploads are queued yet)</span><br />\n' +
@@ -116,6 +115,8 @@ ebi('op_up2k').innerHTML = (
' href="#" act="q" tt="idle, pending">que <span>0</span></a>\n' + ' href="#" act="q" tt="idle, pending">que <span>0</span></a>\n' +
'</div>\n' + '</div>\n' +
'</div>\n' +
'<table id="u2tab">\n' + '<table id="u2tab">\n' +
' <thead>\n' + ' <thead>\n' +
' <tr>\n' + ' <tr>\n' +
@@ -137,8 +138,8 @@ ebi('op_up2k').innerHTML = (
var o = mknod('div'); var o = mknod('div');
o.innerHTML = ( o.innerHTML = (
'<div id="drops">\n' + '<div id="drops">\n' +
' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span></div></div>\n' + ' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span><div>🚀</div><div>🚀</div></div></div>\n' +
' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span></div></div>\n' + ' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span><div>🔎</div><div>🔎</div></div></div>\n' +
' <div class="dropzone" id="up_dz" v="up_zd"></div>\n' + ' <div class="dropzone" id="up_dz" v="up_zd"></div>\n' +
' <div class="dropzone" id="srch_dz" v="srch_zd"></div>\n' + ' <div class="dropzone" id="srch_dz" v="srch_zd"></div>\n' +
'</div>' '</div>'
@@ -168,6 +169,17 @@ ebi('op_cfg').innerHTML = (
' <div>\n' + ' <div>\n' +
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>&quot;does this have the same filesize on the server?&quot;</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then &quot;upload&quot; the same files again to let the client verify them">turbo</a>\n' + ' <a id="u2turbo" class="tgl btn ttb" href="#" tt="the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>&quot;does this have the same filesize on the server?&quot;</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then &quot;upload&quot; the same files again to let the client verify them">turbo</a>\n' +
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="has no effect unless the turbo button is enabled$N$Nreduces the yolo factor by a tiny amount; checks whether the file timestamps on the server matches yours$N$Nshould <em>theoretically</em> catch most unfinished/corrupted uploads, but is not a substitute for doing a verification pass with turbo disabled afterwards">date-chk</a>\n' + ' <a id="u2tdate" class="tgl btn ttb" href="#" tt="has no effect unless the turbo button is enabled$N$Nreduces the yolo factor by a tiny amount; checks whether the file timestamps on the server matches yours$N$Nshould <em>theoretically</em> catch most unfinished/corrupted uploads, but is not a substitute for doing a verification pass with turbo disabled afterwards">date-chk</a>\n' +
' <a id="flag_en" class="tgl btn" href="#" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</a>\n' +
' </td>\n' +
' </div>\n' +
'</div>\n' +
'<div>\n' +
' <h3>favicon <span id="ico1">🎉</span></h3>\n' +
' <div>\n' +
' <input type="text" id="icot" style="width:1.3em" value="" tt="favicon text (blank and refresh to disable)" />' +
' <input type="text" id="icof" style="width:2em" value="" tt="foreground color" />' +
' <input type="text" id="icob" style="width:2em" value="" tt="background color" />' +
' </td>\n' +
' </div>\n' + ' </div>\n' +
'</div>\n' + '</div>\n' +
'<div><h3>key notation</h3><div id="key_notation"></div></div>\n' + '<div><h3>key notation</h3><div id="key_notation"></div></div>\n' +
@@ -256,19 +268,42 @@ function goto(dest) {
 }


-var have_webp = null;
+var have_webp = sread('have_webp');
 (function () {
+    if (have_webp !== null)
+        return;
+
     var img = new Image();
     img.onload = function () {
         have_webp = img.width > 0 && img.height > 0;
+        swrite('have_webp', 'ya');
     };
     img.onerror = function () {
         have_webp = false;
+        swrite('have_webp', '');
     };
     img.src = "data:image/webp;base64,UklGRhoAAABXRUJQVlA4TA0AAAAvAAAAEAcQERGIiP4HAA==";
 })();


+function set_files_html(html) {
+    var files = ebi('files');
+    try {
+        files.innerHTML = html;
+        return files;
+    }
+    catch (e) {
+        var par = files.parentNode;
+        par.removeChild(files);
+
+        files = mknod('div');
+        files.innerHTML = '<table id="files">' + html + '</table>';
+
+        par.insertBefore(files.childNodes[0], ebi('epi'));
+        files = ebi('files');
+        return files;
+    }
+}
+
+
 var mpl = (function () {
     var have_mctl = 'mediaSession' in navigator && window.MediaMetadata;
@@ -368,7 +403,7 @@ var mpl = (function () {
for (var a = 0, aa = files.length; a < aa; a++) { for (var a = 0, aa = files.length; a < aa; a++) {
if (/^(cover|folder)\.(jpe?g|png|gif)$/.test(files[a].textContent)) { if (/^(cover|folder)\.(jpe?g|png|gif)$/.test(files[a].textContent)) {
cover = files[a].getAttribute('href'); cover = noq_href(files[a]);
break; break;
} }
} }
@@ -427,7 +462,7 @@ function MPlayer() {
link = tds[1].getElementsByTagName('a'); link = tds[1].getElementsByTagName('a');
link = link[link.length - 1]; link = link[link.length - 1];
var url = link.getAttribute('href'), var url = noq_href(link),
m = re_audio.exec(url); m = re_audio.exec(url);
if (m) { if (m) {
@@ -586,6 +621,7 @@ var widget = (function () {
if (r.is_open) if (r.is_open)
return false; return false;
clmod(document.documentElement, 'np_open', 1);
widget.className = 'open'; widget.className = 'open';
r.is_open = true; r.is_open = true;
return true; return true;
@@ -594,6 +630,7 @@ var widget = (function () {
if (!r.is_open) if (!r.is_open)
return false; return false;
clmod(document.documentElement, 'np_open');
widget.className = ''; widget.className = '';
r.is_open = false; r.is_open = false;
return true; return true;
@@ -733,6 +770,12 @@ var pbar = (function () {
for (var p = 1, mins = adur / 60; p <= mins; p++) for (var p = 1, mins = adur / 60; p <= mins; p++)
pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h); pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h);
pctx.font = '.5em sans-serif';
pctx.fillStyle = light ? 'rgba(0,64,0,0.9)' : 'rgba(192,255,96,1)';
for (var p = 1, mins = adur / 60; p <= mins; p++) {
pctx.fillText(p, Math.floor(sm * p * 60 + 3), pc.h / 3);
}
pctx.fillStyle = light ? 'rgba(0,0,0,1)' : 'rgba(255,255,255,1)'; pctx.fillStyle = light ? 'rgba(0,0,0,1)' : 'rgba(255,255,255,1)';
for (var p = 1, mins = adur / 600; p <= mins; p++) for (var p = 1, mins = adur / 600; p <= mins; p++)
pctx.fillRect(Math.floor(sm * p * 600), 0, 2, pc.h); pctx.fillRect(Math.floor(sm * p * 600), 0, 2, pc.h);
@@ -1028,8 +1071,8 @@ var need_ogv = true;
 try {
     need_ogv = new Audio().canPlayType('audio/ogg; codecs=opus') !== 'probably';

-    if (/ Edge\//.exec(navigator.userAgent + ''))
-        need_ogv = true;
+    if (document.documentMode)
+        need_ogv = false; // ie8-11
 }
 catch (ex) { }
@@ -1349,7 +1392,7 @@ function play(tid, is_ev, seek, call_depth) {
mp.au = mp.au_ogvjs = new OGVPlayer(); mp.au = mp.au_ogvjs = new OGVPlayer();
} }
catch (ex) { catch (ex) {
return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + ex + return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + basenames(ex) +
'\n\n<a href="#" onclick="new OGVPlayer();">click here</a> for a full crash report'); '\n\n<a href="#" onclick="new OGVPlayer();">click here</a> for a full crash report');
} }
attempt_play = is_ev; attempt_play = is_ev;
@@ -1426,12 +1469,7 @@ function play(tid, is_ev, seek, call_depth) {
         if (!seek) {
             var o = ebi(oid);
             o.setAttribute('id', 'thx_js');
-            if (window.history && history.replaceState) {
-                hist_replace(document.location.pathname + '#' + oid);
-            }
-            else {
-                document.location.hash = oid;
-            }
+            sethash(oid);
             o.setAttribute('id', oid);
@@ -1441,7 +1479,7 @@ function play(tid, is_ev, seek, call_depth) {
return true; return true;
} }
catch (ex) { catch (ex) {
toast.err(0, esc('playback failed: ' + ex)); toast.err(0, esc('playback failed: ' + basenames(ex)));
} }
setclass(oid, 'play'); setclass(oid, 'play');
setTimeout(next_song, 500); setTimeout(next_song, 500);
@@ -1475,48 +1513,18 @@ function evau_error(e) {
     err += '\n\nFile: «' + uricom_dec(eplaya.src.split('/').slice(-1)[0])[0] + '»';
-    toast.warn(15, esc(err + ''));
-}
-
-
-// show a fullscreen message
-function show_modal(html) {
-    var body = document.body || document.getElementsByTagName('body')[0],
-        div = mknod('div');
-
-    div.setAttribute('id', 'blocked');
-    div.innerHTML = html;
-    unblocked();
-    body.appendChild(div);
-}
-
-
-// hide fullscreen message
-function unblocked(e) {
-    ev(e);
-    var dom = ebi('blocked');
-    if (dom)
-        dom.parentNode.removeChild(dom);
+    toast.warn(15, esc(basenames(err)));
 }


 // show ui to manually start playback of a linked song
 function autoplay_blocked(seek) {
-    show_modal(
-        '<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
-        '<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
-
-    var go = ebi('blk_go'),
-        na = ebi('blk_na'),
-        tid = mp.au.tid,
+    var tid = mp.au.tid,
         fn = mp.tracks[tid].split(/\//).pop();

     fn = uricom_dec(fn.replace(/\+/g, ' '))[0];
-    go.textContent = 'Play "' + fn + '"';
-    go.onclick = function (e) {
-        unblocked(e);
-        toast.hide();

+    modal.confirm('<h6>play this audio file?</h6>\n«' + esc(fn) + '»', function () {
         if (mp.au !== mp.au_ogvjs)
             // chrome 91 may permanently taint on a failed play()
             // depending on win10 settings or something? idk
@@ -1529,14 +1537,16 @@ function autoplay_blocked(seek) {
         play(tid, true, seek);
         mp.fade_in();
-    };
-
-    na.onclick = unblocked;
+    }, null);
 }
-function play_linked() {
+function eval_hash() {
     var v = location.hash;
-    if (v && v.indexOf('#af-') === 0) {
+    if (!v)
+        return;
+
+    if (v.indexOf('#af-') === 0) {
         var id = v.slice(2).split('&');
         if (id[0].length != 10)
             return;
@@ -1550,6 +1560,13 @@ function play_linked() {
return play(id[0], false, parseInt(m[1] || 0) * 60 + parseInt(m[2] || 0)); return play(id[0], false, parseInt(m[1] || 0) * 60 + parseInt(m[2] || 0));
} }
if (v.indexOf('#q=') === 0) {
goto('search');
var i = ebi('q_raw');
i.value = uricom_dec(v.slice(3))[0];
return i.oninput();
}
}; };
@@ -1561,6 +1578,9 @@ function play_linked() {
function sortfiles(nodes) { function sortfiles(nodes) {
if (!nodes.length)
return nodes;
var sopts = jread('fsort', [["href", 1, ""]]); var sopts = jread('fsort', [["href", 1, ""]]);
try { try {
@@ -2157,7 +2177,7 @@ var fileman = (function () {
links = QSA('#files tbody td:nth-child(2) a'); links = QSA('#files tbody td:nth-child(2) a');
for (var a = 0, aa = links.length; a < aa; a++) for (var a = 0, aa = links.length; a < aa; a++)
indir.push(vsplit(links[a].getAttribute('href'))[1]); indir.push(vsplit(noq_href(links[a]))[1]);
for (var a = 0; a < r.clip.length; a++) { for (var a = 0; a < r.clip.length; a++) {
var found = false; var found = false;
@@ -2297,15 +2317,6 @@ var thegrid = (function () {
for (var a = 0; a < links.length; a++) for (var a = 0; a < links.length; a++)
links[a].onclick = btnclick; links[a].onclick = btnclick;
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
bcfg_bind(r, 'en', 'griden', false, function (v) {
v ? loadgrid() : ungrid();
pbar.onresize();
vbar.onresize();
});
ebi('wtgrid').onclick = ebi('griden').onclick;
r.setvis = function (vis) { r.setvis = function (vis) {
(r.en ? gfiles : lfiles).style.display = vis ? '' : 'none'; (r.en ? gfiles : lfiles).style.display = vis ? '' : 'none';
}; };
@@ -2350,7 +2361,7 @@ var thegrid = (function () {
             return true;

         var oth = ebi(this.getAttribute('ref')),
-            href = this.getAttribute('href'),
+            href = noq_href(this),
             aplay = ebi('a' + oth.getAttribute('id')),
             is_img = /\.(gif|jpe?g|png|webp|webm|mp4)(\?|$)/i.test(href),
             in_tree = null,
@@ -2358,21 +2369,12 @@ var thegrid = (function () {
             td = oth.closest('td').nextSibling,
             tr = td.parentNode;

-        if (/\/(\?|$)/.test(href)) {
-            var ta = QSA('#treeul a.hl+ul>li>a+a'),
-                txt = oth.textContent.slice(0, -1);
-
-            for (var a = 0, aa = ta.length; a < aa; a++) {
-                if (ta[a].textContent == txt) {
-                    in_tree = ta[a];
-                    break;
-                }
-            }
-        }
+        if (href.endsWith('/'))
+            in_tree = treectl.find(oth.textContent.slice(0, -1));

         if (r.sel) {
             td.click();
-            this.setAttribute('class', tr.getAttribute('class'));
+            clmod(this, 'sel', clgot(tr, 'sel'));
         }
         else if (widget.is_open && aplay)
             aplay.click();
@@ -2397,7 +2399,7 @@ var thegrid = (function () {
var tr = ebi(ths[a].getAttribute('ref')).closest('tr'), var tr = ebi(ths[a].getAttribute('ref')).closest('tr'),
cl = tr.getAttribute('class') || ''; cl = tr.getAttribute('class') || '';
if (ths[a].getAttribute('href').endsWith('/')) if (noq_href(ths[a]).endsWith('/'))
cl += ' dir'; cl += ' dir';
ths[a].setAttribute('class', cl); ths[a].setAttribute('class', cl);
@@ -2461,15 +2463,16 @@ var thegrid = (function () {
    var files = QSA('#files>tbody>tr>td:nth-child(2) a[id]');
    for (var a = 0, aa = files.length; a < aa; a++) {
        var ao = files[a],
-            href = esc(ao.getAttribute('href')),
+            ohref = esc(ao.getAttribute('href')),
+            href = ohref.split('?')[0],
            name = uricom_dec(vsplit(href)[1])[0],
            ref = ao.getAttribute('id'),
-            isdir = href.split('?')[0].slice(-1)[0] == '/',
+            isdir = href.endsWith('/'),
            ac = isdir ? ' class="dir"' : '',
            ihref = href;
        if (r.thumbs) {
-            ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
+            ihref += '?th=' + (have_webp ? 'w' : 'j');
            if (href == "#")
                ihref = '/.cpr/ico/⏏️';
        }
@@ -2477,7 +2480,7 @@ var thegrid = (function () {
            ihref = '/.cpr/ico/folder';
        }
        else {
-            var ar = href.split('?')[0].split('.');
+            var ar = href.split('.');
            if (ar.length > 1)
                ar = ar.slice(1);
@@ -2494,9 +2497,11 @@ var thegrid = (function () {
            }
            ihref = '/.cpr/ico/' + ihref.slice(0, -1);
        }
+        ihref += (ihref.indexOf('?') > 0 ? '&' : '?') + 'cache=i';
-        html.push('<a href="' + href + '" ref="' + ref +
-            '"' + ac + ' ttt="' + esc(name) + '"><img src="' +
+        html.push('<a href="' + ohref + '" ref="' + ref +
+            '"' + ac + ' ttt="' + esc(name) + '"><img style="height:' +
+            (r.sz / 1.25) + 'em" onload="th_onload(this)" src="' +
            ihref + '" /><span' + ac + '>' + ao.innerHTML + '</span></a>');
    }
    ebi('ggrid').innerHTML = html.join('\n');
@@ -2534,6 +2539,15 @@ var thegrid = (function () {
})[0]; })[0];
}; };
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
bcfg_bind(r, 'en', 'griden', false, function (v) {
v ? loadgrid() : ungrid();
pbar.onresize();
vbar.onresize();
});
ebi('wtgrid').onclick = ebi('griden').onclick;
setTimeout(function () { setTimeout(function () {
import_js('/.cpr/baguettebox.js', r.bagit); import_js('/.cpr/baguettebox.js', r.bagit);
}, 1); }, 1);
@@ -2546,6 +2560,11 @@ var thegrid = (function () {
})(); })();
function th_onload(el) {
el.style.height = '';
}
function tree_scrollto(e) { function tree_scrollto(e) {
ev(e); ev(e);
var act = QS('#treeul a.hl'), var act = QS('#treeul a.hl'),
@@ -2739,28 +2758,28 @@ document.onkeydown = function (e) {
(function () {
    var sconf = [
        ["size",
-            ["szl", "sz_min", "minimum MiB", ""],
-            ["szu", "sz_max", "maximum MiB", ""]
+            ["szl", "sz_min", "minimum MiB", "16"],
+            ["szu", "sz_max", "maximum MiB", "16"]
        ],
        ["date",
-            ["dtl", "dt_min", "min. iso8601", ""],
-            ["dtu", "dt_max", "max. iso8601", ""]
+            ["dtl", "dt_min", "min. iso8601", "16"],
+            ["dtu", "dt_max", "max. iso8601", "16"]
        ],
        ["path",
-            ["path", "path", "path contains &nbsp; (space-separated)", "46"]
+            ["path", "path", "path contains &nbsp; (space-separated)", "34"]
        ],
        ["name",
-            ["name", "name", "name contains &nbsp; (negate with -nope)", "46"]
+            ["name", "name", "name contains &nbsp; (negate with -nope)", "34"]
        ]
    ];
    var oldcfg = [];
    if (QS('#srch_form.tags')) {
        sconf.push(["tags",
-            ["tags", "tags", "tags contains &nbsp; (^=start, end=$)", "46"]
+            ["tags", "tags", "tags contains &nbsp; (^=start, end=$)", "34"]
        ]);
        sconf.push(["adv.",
-            ["adv", "adv", "key>=1A&nbsp; key<=2B&nbsp; .bpm>165", "46"]
+            ["adv", "adv", "key>=1A&nbsp; key<=2B&nbsp; .bpm>165", "34"]
        ]);
    }
@@ -2777,8 +2796,8 @@ document.onkeydown = function (e) {
html.push( html.push(
'<td colspan="' + csp + '"><input id="' + hn + 'c" type="checkbox">\n' + '<td colspan="' + csp + '"><input id="' + hn + 'c" type="checkbox">\n' +
'<label for="' + hn + 'c">' + sconf[a][b][2] + '</label>\n' + '<label for="' + hn + 'c">' + sconf[a][b][2] + '</label>\n' +
'<br /><input id="' + hn + 'v" type="text" size="' + sconf[a][b][3] + '<br /><input id="' + hn + 'v" type="text" style="width:' + sconf[a][b][3] +
'" name="' + sconf[a][b][1] + '" /></td>'); 'em" name="' + sconf[a][b][1] + '" /></td>');
if (csp == 2) if (csp == 2)
break; break;
} }
@@ -2945,12 +2964,12 @@ document.onkeydown = function (e) {
var html = mk_files_header(tagord); var html = mk_files_header(tagord);
html.push('<tbody>'); html.push('<tbody>');
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">! close search results</a></td></tr>'); html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch"><big style="font-weight:bold">[❌] close search results</big></a></td></tr>');
for (var a = 0; a < res.hits.length; a++) { for (var a = 0; a < res.hits.length; a++) {
var r = res.hits[a], var r = res.hits[a],
ts = parseInt(r.ts), ts = parseInt(r.ts),
sz = esc(r.sz + ''), sz = esc(r.sz + ''),
rp = esc(r.rp + ''), rp = esc(uricom_dec(r.rp + '')[0]),
ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%', ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
links = linksplit(r.rp + ''); links = linksplit(r.rp + '');
@@ -2982,7 +3001,7 @@ document.onkeydown = function (e) {
orig_url = get_evpath(); orig_url = get_evpath();
} }
ofiles.innerHTML = html.join('\n'); ofiles = set_files_html(html.join('\n'));
ofiles.setAttribute("ts", this.ts); ofiles.setAttribute("ts", this.ts);
ofiles.setAttribute("q_raw", this.q_raw); ofiles.setAttribute("q_raw", this.q_raw);
set_vq(); set_vq();
@@ -2990,15 +3009,17 @@ document.onkeydown = function (e) {
reload_browser(); reload_browser();
filecols.set_style(['File Name']); filecols.set_style(['File Name']);
sethash('q=' + uricom_enc(this.q_raw));
ebi('unsearch').onclick = unsearch; ebi('unsearch').onclick = unsearch;
} }
function unsearch(e) { function unsearch(e) {
ev(e); ev(e);
treectl.show(); treectl.show();
ebi('files').innerHTML = orig_html; set_files_html(orig_html);
ebi('files').removeAttribute('q_raw'); ebi('files').removeAttribute('q_raw');
orig_html = null; orig_html = null;
sethash('');
reload_browser(); reload_browser();
} }
})(); })();
@@ -3015,7 +3036,7 @@ var treectl = (function () {
prev_atop = null, prev_atop = null,
prev_winh = null, prev_winh = null,
mentered = null, mentered = null,
treesz = clamp(icfg_get('treesz', 16), 4, 50); treesz = clamp(icfg_get('treesz', 16), 10, 50);
bcfg_bind(treectl, 'ireadme', 'ireadme', true); bcfg_bind(treectl, 'ireadme', 'ireadme', true);
bcfg_bind(treectl, 'dyn', 'dyntree', true, onresize); bcfg_bind(treectl, 'dyn', 'dyntree', true, onresize);
@@ -3058,14 +3079,14 @@ var treectl = (function () {
swrite('entreed', 'na'); swrite('entreed', 'na');
treectl.hide(); treectl.hide();
ebi('path').style.display = 'inline-block'; ebi('path').style.display = '';
} }
treectl.hide = function () { treectl.hide = function () {
treectl.hidden = true; treectl.hidden = true;
ebi('path').style.display = 'none'; ebi('path').style.display = 'none';
ebi('tree').style.display = 'none'; ebi('tree').style.display = 'none';
ebi('wrap').style.marginLeft = '0'; ebi('wrap').style.marginLeft = '';
window.removeEventListener('resize', onresize); window.removeEventListener('resize', onresize);
window.removeEventListener('scroll', onscroll); window.removeEventListener('scroll', onscroll);
} }
@@ -3116,7 +3137,7 @@ var treectl = (function () {
treeh = winh - atop; treeh = winh - atop;
tree.style.top = top + 'px'; tree.style.top = top + 'px';
tree.style.height = treeh < 10 ? '' : treeh + 'px'; tree.style.height = treeh < 10 ? '' : Math.floor(treeh - 2) + 'px';
} }
} }
timer.add(onscroll2, true); timer.add(onscroll2, true);
@@ -3126,7 +3147,7 @@ var treectl = (function () {
return; return;
var q = '#tree', var q = '#tree',
nq = 0; nq = -3;
while (treectl.dyn) { while (treectl.dyn) {
nq++; nq++;
@@ -3134,21 +3155,30 @@ var treectl = (function () {
if (!QS(q)) if (!QS(q))
break; break;
} }
var w = (treesz + nq) + 'em'; var iw = (treesz + Math.max(0, nq)),
w = iw + 'em',
w2 = (iw + 2) + 'em';
try { try {
document.documentElement.style.setProperty('--nav-sz', w); document.documentElement.style.setProperty('--nav-sz', w);
} }
catch (ex) { catch (ex) { }
ebi('tree').style.width = w; ebi('tree').style.width = w;
} ebi('wrap').style.marginLeft = w2;
ebi('wrap').style.marginLeft = w;
onscroll(); onscroll();
} }
treectl.find = function (txt) {
var ta = QSA('#treeul a.hl+ul>li>a+a');
for (var a = 0, aa = ta.length; a < aa; a++)
if (ta[a].textContent == txt)
return ta[a];
};
treectl.goto = function (url, push) { treectl.goto = function (url, push) {
get_tree("", url, true); get_tree("", url, true);
reqls(url, push); reqls(url, push, true);
} };
function get_tree(top, dst, rst) { function get_tree(top, dst, rst) {
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
@@ -3227,12 +3257,12 @@ var treectl = (function () {
} }
function reload_tree() { function reload_tree() {
var cdir = get_evpath(), var cdir = get_vpath(),
links = QSA('#treeul a+a'), links = QSA('#treeul a+a'),
nowrap = QS('#tree.nowrap') && QS('#hovertree.on'); nowrap = QS('#tree.nowrap') && QS('#hovertree.on');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
var href = links[a].getAttribute('href'); var href = uricom_dec(links[a].getAttribute('href'))[0];
links[a].setAttribute('class', href == cdir ? 'hl' : ''); links[a].setAttribute('class', href == cdir ? 'hl' : '');
links[a].onclick = treego; links[a].onclick = treego;
links[a].onmouseenter = nowrap ? menter : null; links[a].onmouseenter = nowrap ? menter : null;
@@ -3244,6 +3274,7 @@ var treectl = (function () {
links[a].onclick = treegrow; links[a].onclick = treegrow;
} }
ebi('tree').onscroll = nowrap ? unmenter : null; ebi('tree').onscroll = nowrap ? unmenter : null;
tree_scrollto();
} }
function menter(e) { function menter(e) {
@@ -3275,7 +3306,7 @@ var treectl = (function () {
reqls(this.getAttribute('href'), true); reqls(this.getAttribute('href'), true);
} }
function reqls(url, hpush) { function reqls(url, hpush, no_tree) {
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.top = url; xhr.top = url;
xhr.hpush = hpush; xhr.hpush = hpush;
@@ -3283,7 +3314,7 @@ var treectl = (function () {
xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true); xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true);
xhr.onreadystatechange = recvls; xhr.onreadystatechange = recvls;
xhr.send(); xhr.send();
if (hpush) if (hpush && !no_tree)
get_tree('.', xhr.top); get_tree('.', xhr.top);
enspin(thegrid.en ? '#gfiles' : '#files'); enspin(thegrid.en ? '#gfiles' : '#files');
@@ -3360,13 +3391,7 @@ var treectl = (function () {
} }
html.push('</tbody>'); html.push('</tbody>');
html = html.join('\n'); html = html.join('\n');
try { set_files_html(html);
ebi('files').innerHTML = html;
}
catch (ex) { //ie9
window.location.href = this.top;
return;
}
if (this.hpush) if (this.hpush)
hist_push(this.top); hist_push(this.top);
@@ -3405,7 +3430,7 @@ var treectl = (function () {
} }
delete res['a']; delete res['a'];
var keys = Object.keys(res); var keys = Object.keys(res);
keys.sort(); keys.sort(function (a, b) { return a.localeCompare(b); });
for (var a = 0; a < keys.length; a++) { for (var a = 0; a < keys.length; a++) {
var kk = keys[a], var kk = keys[a],
ks = kk.slice(1), ks = kk.slice(1),
@@ -3434,6 +3459,7 @@ var treectl = (function () {
if (isNaN(treesz)) if (isNaN(treesz))
treesz = 16; treesz = 16;
treesz = clamp(treesz, 2, 120);
swrite('treesz', treesz); swrite('treesz', treesz);
onresize(); onresize();
} }
@@ -3455,10 +3481,7 @@ var treectl = (function () {
treectl.goto(url.pathname); treectl.goto(url.pathname);
}; };
if (window.history && history.pushState) { hist_replace(get_evpath() + window.location.hash);
hist_replace(get_evpath() + window.location.hash);
}
treectl.onscroll = onscroll; treectl.onscroll = onscroll;
return treectl; return treectl;
})(); })();
@@ -3611,7 +3634,7 @@ var filecols = (function () {
"pixfmt": "subsampling / pixel structure", "pixfmt": "subsampling / pixel structure",
"resw": "horizontal resolution", "resw": "horizontal resolution",
"resh": "veritcal resolution", "resh": "veritcal resolution",
"acs": "audio channels", "chs": "audio channels",
"hz": "sample rate" "hz": "sample rate"
}; };
@@ -3982,7 +4005,7 @@ var msel = (function () {
vbase = get_evpath(); vbase = get_evpath();
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
var href = links[a].getAttribute('href').replace(/\/$/, ""), var href = noq_href(links[a]).replace(/\/$/, ""),
item = {}; item = {};
item.id = links[a].getAttribute('id'); item.id = links[a].getAttribute('id');
@@ -4077,6 +4100,106 @@ var msel = (function () {
})(); })();
(function () {
if (!window.FormData)
return;
var form = QS('#op_mkdir>form'),
tb = QS('#op_mkdir input[name="name"]'),
sf = mknod('div');
clmod(sf, 'msg', 1);
form.parentNode.appendChild(sf);
form.onsubmit = function (e) {
ev(e);
clmod(sf, 'vis', 1);
sf.textContent = 'creating "' + tb.value + '"...';
var fd = new FormData();
fd.append("act", "mkdir");
fd.append("name", tb.value);
var xhr = new XMLHttpRequest();
xhr.vp = get_evpath();
xhr.dn = tb.value;
xhr.open('POST', xhr.vp, true);
xhr.onreadystatechange = cb;
xhr.responseType = 'text';
xhr.send(fd);
return false;
};
function cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.vp !== get_evpath()) {
sf.textContent = 'aborted due to location change';
return;
}
if (this.status !== 200) {
sf.textContent = 'error: ' + this.responseText;
return;
}
tb.value = '';
clmod(sf, 'vis');
sf.textContent = '';
treectl.goto(this.vp + uricom_enc(this.dn) + '/', true);
}
})();
(function () {
var form = QS('#op_msg>form'),
tb = QS('#op_msg input[name="msg"]'),
sf = mknod('div');
clmod(sf, 'msg', 1);
form.parentNode.appendChild(sf);
form.onsubmit = function (e) {
ev(e);
clmod(sf, 'vis', 1);
sf.textContent = 'sending...';
var xhr = new XMLHttpRequest(),
ct = 'application/x-www-form-urlencoded;charset=UTF-8';
xhr.msg = tb.value;
xhr.open('POST', get_evpath(), true);
xhr.responseType = 'text';
xhr.onreadystatechange = cb;
xhr.setRequestHeader('Content-Type', ct);
if (xhr.overrideMimeType)
xhr.overrideMimeType('Content-Type', ct);
xhr.send('msg=' + uricom_enc(xhr.msg));
return false;
};
function cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
sf.textContent = 'error: ' + this.responseText;
return;
}
tb.value = '';
clmod(sf, 'vis');
sf.textContent = 'sent: "' + this.msg + '"';
setTimeout(function () {
treectl.goto(get_evpath());
}, 100);
}
})();
function show_readme(md, url, depth) { function show_readme(md, url, depth) {
if (!treectl.ireadme) if (!treectl.ireadme)
return; return;
@@ -4128,8 +4251,8 @@ if (readme)
for (var a = 0; a < tr.length; a++) { for (var a = 0; a < tr.length; a++) {
var td = tr[a].cells[1], var td = tr[a].cells[1],
ao = td.firstChild, ao = td.firstChild,
href = ao.getAttribute('href'), href = noq_href(ao),
isdir = href.split('?')[0].slice(-1)[0] == '/', isdir = href.endsWith('/'),
txt = ao.textContent; txt = ao.textContent;
td.setAttribute('sortv', (isdir ? '\t' : '') + txt); td.setAttribute('sortv', (isdir ? '\t' : '') + txt);
@@ -4244,7 +4367,6 @@ var unpost = (function () {
} }
ct.onclick = function (e) { ct.onclick = function (e) {
ev(e);
var tgt = e.target.closest('a[me]'); var tgt = e.target.closest('a[me]');
if (!tgt) if (!tgt)
return; return;
@@ -4252,6 +4374,7 @@ var unpost = (function () {
if (!tgt.getAttribute('href')) if (!tgt.getAttribute('href'))
return; return;
ev(e);
var ame = tgt.getAttribute('me'); var ame = tgt.getAttribute('me');
if (ame != r.me) if (ame != r.me)
return toast.err(0, 'something broke, please try a refresh'); return toast.err(0, 'something broke, please try a refresh');
@@ -4262,7 +4385,7 @@ var unpost = (function () {
for (var a = n; a < n2; a++) for (var a = n; a < n2; a++)
if (QS('#op_unpost a.n' + a)) if (QS('#op_unpost a.n' + a))
req.push(r.files[a].vp); req.push(uricom_dec(r.files[a].vp)[0]);
var links = QSA('#op_unpost a.n' + n); var links = QSA('#op_unpost a.n' + n);
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
@@ -4306,6 +4429,20 @@ function goto_unpost(e) {
} }
ebi('files').onclick = function (e) {
var tgt = e.target.closest('a[id]');
if (!tgt || tgt.getAttribute('id').indexOf('f-') !== 0 || !tgt.textContent.endsWith('/'))
return;
var el = treectl.find(tgt.textContent.slice(0, -1));
if (!el)
return;
ev(e);
el.click();
}
function reload_mp() { function reload_mp() {
if (mp && mp.au) { if (mp && mp.au) {
mp.au.pause(); mp.au.pause();
@@ -4350,6 +4487,9 @@ function reload_browser(not_mp) {
makeSortable(ebi('files'), mp.read_order.bind(mp)); makeSortable(ebi('files'), mp.read_order.bind(mp));
} }
for (var a = 0; a < 2; a++)
clmod(ebi(a ? 'pro' : 'epi'), 'hidden', ebi('unsearch'));
if (window['up2k']) if (window['up2k'])
up2k.set_fsearch(); up2k.set_fsearch();
@@ -4358,4 +4498,4 @@ function reload_browser(not_mp) {
} }
reload_browser(true); reload_browser(true);
mukey.render(); mukey.render();
play_linked(); setTimeout(eval_hash, 1);


@@ -15,7 +15,7 @@
<a id="lightswitch" href="#">go dark</a> <a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a> <a id="navtoggle" href="#">hide nav</a>
{%- if edit %} {%- if edit %}
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a> <a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a> <a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a> <a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
<div id="toolsbox"> <div id="toolsbox">
@@ -28,9 +28,9 @@
</div> </div>
<span id="lno">L#</span> <span id="lno">L#</span>
{%- else %} {%- else %}
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a> <a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a> <a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
<a href="?raw">view raw</a> <a href="{{ arg_base }}raw">view raw</a>
{%- endif %} {%- endif %}
</div> </div>
<div id="toc"></div> <div id="toc"></div>
@@ -135,13 +135,13 @@ var md_opt = {
(function () { (function () {
var l = localStorage, var l = localStorage,
drk = l.getItem('lightmode') != 1, drk = l.lightmode != 1,
btn = document.getElementById("lightswitch"), btn = document.getElementById("lightswitch"),
f = function (e) { f = function (e) {
if (e) { e.preventDefault(); drk = !drk; } if (e) { e.preventDefault(); drk = !drk; }
document.documentElement.setAttribute("class", drk? "dark":"light"); document.documentElement.setAttribute("class", drk? "dark":"light");
btn.innerHTML = "go " + (drk ? "light":"dark"); btn.innerHTML = "go " + (drk ? "light":"dark");
l.setItem('lightmode', drk? 0:1); l.lightmode = drk? 0:1;
}; };
btn.onclick = f; btn.onclick = f;


@@ -33,11 +33,11 @@ var md_opt = {
var lightswitch = (function () { var lightswitch = (function () {
var l = localStorage, var l = localStorage,
drk = l.getItem('lightmode') != 1, drk = l.lightmode != 1,
f = function (e) { f = function (e) {
if (e) drk = !drk; if (e) drk = !drk;
document.documentElement.setAttribute("class", drk? "dark":"light"); document.documentElement.setAttribute("class", drk? "dark":"light");
l.setItem('lightmode', drk? 0:1); l.lightmode = drk? 0:1;
}; };
f(); f();
return f; return f;


@@ -25,10 +25,20 @@ a {
color: #047; color: #047;
background: #fff; background: #fff;
text-decoration: none; text-decoration: none;
border-bottom: 1px solid #aaa; border-bottom: 1px solid #8ab;
border-radius: .2em; border-radius: .2em;
padding: .2em .8em; padding: .2em .8em;
} }
.refresh,
.logout {
float: right;
margin-top: -.2em;
}
.logout {
color: #c04;
border-color: #c7a;
margin-right: .5em;
}
#repl { #repl {
border: none; border: none;
background: none; background: none;
@@ -81,10 +91,14 @@ html.dark a {
background: #057; background: #057;
border-color: #37a; border-color: #37a;
} }
html.dark .logout {
background: #804;
border-color: #c28;
}
html.dark input { html.dark input {
color: #fff; color: #fff;
background: #624; background: #626;
border: 1px solid #c27; border: 1px solid #c2c;
border-width: 1px 0 0 0; border-width: 1px 0 0 0;
border-radius: .5em; border-radius: .5em;
padding: .5em .7em; padding: .5em .7em;


@@ -12,9 +12,12 @@
<body> <body>
<div id="wrap"> <div id="wrap">
<a href="/?h" class="refresh">refresh</a>
{%- if this.uname == '*' %} {%- if this.uname == '*' %}
<p>howdy stranger &nbsp; <small>(you're not logged in)</small></p> <p>howdy stranger &nbsp; <small>(you're not logged in)</small></p>
{%- else %} {%- else %}
<a href="/?pw=x" class="logout">logout</a>
<p>welcome back, <strong>{{ this.uname }}</strong></p> <p>welcome back, <strong>{{ this.uname }}</strong></p>
{%- endif %} {%- endif %}
@@ -80,7 +83,7 @@
<a href="#" id="repl">π</a> <a href="#" id="repl">π</a>
<script> <script>
if (localStorage.getItem('lightmode') != 1) if (localStorage.lightmode != 1)
document.documentElement.setAttribute("class", "dark"); document.documentElement.setAttribute("class", "dark");
</script> </script>


@@ -1,5 +1,6 @@
@font-face { @font-face {
font-family: 'scp'; font-family: 'scp';
font-display: swap;
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2'); src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
} }
html { html {
@@ -10,9 +11,9 @@ html {
max-width: 34em; max-width: 34em;
max-width: min(34em, 90%); max-width: min(34em, 90%);
max-width: min(34em, calc(100% - 7em)); max-width: min(34em, calc(100% - 7em));
background: #222; background: #333;
border: 0 solid #777; border: 0 solid #777;
box-shadow: 0 .2em .5em #222; box-shadow: 0 .2em .5em #111;
border-radius: .4em; border-radius: .4em;
z-index: 9001; z-index: 9001;
} }
@@ -78,7 +79,8 @@ html {
} }
#toast.vis { #toast.vis {
right: 1.3em; right: 1.3em;
transform: unset; transform: inherit;
transform: initial;
} }
#toast.vis #toastc { #toast.vis #toastc {
left: -2em; left: -2em;
@@ -257,6 +259,16 @@ html.light #pctl *:focus,
html.light .btn:focus { html.light .btn:focus {
box-shadow: 0 .1em .2em #037 inset; box-shadow: 0 .1em .2em #037 inset;
} }
input[type="text"]:focus,
input:not([type]):focus,
textarea:focus {
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
}
html.light input[type="text"]:focus,
html.light input:not([type]):focus,
html.light textarea:focus {
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
}


@@ -30,7 +30,10 @@ catch (ex) {
try { try {
up2k = up2k_init(false); up2k = up2k_init(false);
} }
catch (ex) { } catch (ex) {
console.log('up2k init failed:', ex);
toast.err(10, 'could not initialze up2k\n\n' + basenames(ex));
}
} }
treectl.onscroll(); treectl.onscroll();
@@ -210,14 +213,14 @@ function U2pvis(act, btns) {
}; };
r.setat = function (nfile, blocktab) { r.setat = function (nfile, blocktab) {
r.tab[nfile].cb = blocktab; var fo = r.tab[nfile], bd = 0;
var bd = 0;
for (var a = 0; a < blocktab.length; a++) for (var a = 0; a < blocktab.length; a++)
bd += blocktab[a]; bd += blocktab[a];
r.tab[nfile].bd = bd; fo.bd = bd;
r.tab[nfile].bd0 = bd; fo.bd0 = bd;
fo.cb = blocktab;
}; };
r.perc = function (bd, bd0, sz, t0) { r.perc = function (bd, bd0, sz, t0) {
@@ -246,7 +249,7 @@ function U2pvis(act, btns) {
obj.innerHTML = fo.hp; obj.innerHTML = fo.hp;
obj.style.color = '#fff'; obj.style.color = '#fff';
obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)'; obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
}; };
r.prog = function (fobj, nchunk, cbd) { r.prog = function (fobj, nchunk, cbd) {
@@ -303,7 +306,7 @@ function U2pvis(act, btns) {
obj.innerHTML = fo.hp; obj.innerHTML = fo.hp;
obj.style.color = '#fff'; obj.style.color = '#fff';
obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)'; obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
}; };
r.move = function (nfile, newcat) { r.move = function (nfile, newcat) {
@@ -477,6 +480,86 @@ function U2pvis(act, btns) {
} }
function Donut(uc, st) {
var r = this,
el = null,
psvg = null,
o = 20 * 2 * Math.PI,
optab = QS('#ops a[data-dest="up2k"]');
optab.setAttribute('ico', optab.textContent);
function svg(v) {
var ico = v !== undefined,
bg = ico ? '#333' : 'transparent',
fg = '#fff',
fsz = 52,
rc = 32;
if (r.eta && (r.eta > 99 || (uc.fsearch ? st.time.hashing : st.time.uploading) < 20))
r.eta = null;
if (r.eta) {
if (r.eta < 10) {
fg = '#fa0';
fsz = 72;
}
rc = 8;
}
return (
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
(ico ? '<rect width="100%" height="100%" rx="' + rc + '" fill="#333" />\n' :
'<circle stroke="white" stroke-width="6" r="3" cx="32" cy="32" />\n') +
(r.eta ? (
'<text x="55%" y="58%" dominant-baseline="middle" text-anchor="middle"' +
' font-family="sans-serif" font-weight="bold" font-size="' + fsz + 'px"' +
' fill="' + fg + '">' + r.eta + '</text></svg>'
) : (
'<circle class="donut" stroke="white" fill="' + bg +
'" stroke-dashoffset="' + (ico ? v : o) + '" stroke-dasharray="' + o + ' ' + o +
'" transform="rotate(270 32 32)" stroke-width="12" r="20" cx="32" cy="32" /></svg>'
))
);
}
function pos() {
return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
}
r.on = function (ya) {
r.fc = 99;
r.eta = null;
r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
el = QS('#ops a .donut');
if (!ya)
favico.upd();
};
r.do = function () {
if (!el)
return;
var t = st.bytes.total - r.base,
v = pos() - r.base,
ofs = el.style.strokeDashoffset = o - o * v / t;
if (favico.txt) {
if (++r.fc < 10 && r.eta && r.eta > 99)
return;
var s = svg(ofs);
if (s == psvg || (r.eta === null && r.fc < 10))
return;
favico.upd('', s);
psvg = s;
r.fc = 0;
}
};
}
function fsearch_explain(n) { function fsearch_explain(n) {
if (n) if (n)
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"')); return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));
@@ -512,9 +595,13 @@ function up2k_init(subtle) {
// chrome<37 firefox<34 edge<12 opera<24 safari<7 // chrome<37 firefox<34 edge<12 opera<24 safari<7
shame = 'your browser is impressively ancient'; shame = 'your browser is impressively ancient';
var got_deps = false; function got_deps() {
return subtle || window.asmCrypto || window.hashwasm;
}
var loading_deps = false;
function init_deps() { function init_deps() {
if (!got_deps && !subtle && !window.asmCrypto) { if (!loading_deps && !got_deps()) {
var fn = 'sha512.' + sha_js + '.js'; var fn = 'sha512.' + sha_js + '.js';
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>'); showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/' + fn, unmodal); import_js('/.cpr/deps/' + fn, unmodal);
@@ -525,7 +612,7 @@ function up2k_init(subtle) {
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' + ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>'; (sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
} }
got_deps = true; loading_deps = true;
} }
if (perms.length && !has(perms, 'read') && has(perms, 'write')) if (perms.length && !has(perms, 'read') && has(perms, 'write'))
@@ -578,7 +665,7 @@ function up2k_init(subtle) {
bcfg_bind(uc, 'multitask', 'multitask', true, null, false); bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false); bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg, false); bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false); bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false); bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false);
bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false); bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false);
@@ -619,7 +706,8 @@ function up2k_init(subtle) {
}); });
} }
var pvis = new U2pvis("bz", '#u2cards'); var pvis = new U2pvis("bz", '#u2cards'),
donut = new Donut(uc, st);
var bobslice = null; var bobslice = null;
if (window.File) if (window.File)
@@ -744,11 +832,14 @@ function up2k_init(subtle) {
more_one_file(); more_one_file();
var bad_files = [], var bad_files = [],
nil_files = [],
good_files = [], good_files = [],
dirs = []; dirs = [];
for (var a = 0; a < files.length; a++) { for (var a = 0; a < files.length; a++) {
var fobj = files[a]; var fobj = files[a],
dst = good_files;
if (is_itemlist) { if (is_itemlist) {
if (fobj.kind !== 'file') if (fobj.kind !== 'file')
continue; continue;
@@ -765,16 +856,15 @@ function up2k_init(subtle) {
} }
try { try {
if (fobj.size < 1) if (fobj.size < 1)
throw 1; dst = nil_files;
} }
catch (ex) { catch (ex) {
bad_files.push(fobj.name); dst = bad_files;
continue;
} }
good_files.push([fobj, fobj.name]); dst.push([fobj, fobj.name]);
} }
if (dirs) { if (dirs) {
return read_dirs(null, [], dirs, good_files, bad_files); return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
} }
} }
@@ -788,7 +878,7 @@ function up2k_init(subtle) {
} }
var rd_missing_ref = []; var rd_missing_ref = [];
function read_dirs(rd, pf, dirs, good, bad, spins) { function read_dirs(rd, pf, dirs, good, nil, bad, spins) {
spins = spins || 0; spins = spins || 0;
if (++spins == 5) if (++spins == 5)
rd_missing_ref = rd_flatten(pf, dirs); rd_missing_ref = rd_flatten(pf, dirs);
@@ -809,7 +899,7 @@ function up2k_init(subtle) {
msg.push('<li>' + esc(missing[a]) + '</li>'); msg.push('<li>' + esc(missing[a]) + '</li>');
return modal.alert(msg.join('') + '</ul>', function () { return modal.alert(msg.join('') + '</ul>', function () {
read_dirs(rd, [], [], good, bad, spins); read_dirs(rd, [], [], good, nil, bad, spins);
}); });
} }
spins = 0; spins = 0;
@@ -817,11 +907,11 @@ function up2k_init(subtle) {
if (!dirs.length) { if (!dirs.length) {
if (!pf.length) if (!pf.length)
return gotallfiles(good, bad); return gotallfiles(good, nil, bad);
console.log("retry pf, " + pf.length); console.log("retry pf, " + pf.length);
setTimeout(function () { setTimeout(function () {
read_dirs(rd, pf, dirs, good, bad, spins); read_dirs(rd, pf, dirs, good, nil, bad, spins);
}, 50); }, 50);
return; return;
} }
@@ -843,14 +933,15 @@ function up2k_init(subtle) {
pf.push(name); pf.push(name);
dn.file(function (fobj) { dn.file(function (fobj) {
apop(pf, name); apop(pf, name);
var dst = good;
try { try {
if (fobj.size > 0) { if (fobj.size < 1)
good.push([fobj, name]); dst = nil;
return;
}
} }
catch (ex) { } catch (ex) {
bad.push(name); dst = bad;
}
dst.push([fobj, name]);
}); });
} }
ngot += 1; ngot += 1;
@@ -859,23 +950,33 @@ function up2k_init(subtle) {
dirs.shift(); dirs.shift();
rd = null; rd = null;
} }
return read_dirs(rd, pf, dirs, good, bad, spins); return read_dirs(rd, pf, dirs, good, nil, bad, spins);
}); });
} }
function gotallfiles(good_files, bad_files) { function gotallfiles(good_files, nil_files, bad_files) {
var ntot = good_files.concat(nil_files, bad_files).length;
if (bad_files.length) { if (bad_files.length) {
var ntot = bad_files.length + good_files.length, var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot);
msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++) for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
msg += '-- ' + bad_files[a] + '\n'; msg += '-- ' + bad_files[a][1] + '\n';
if (good_files.length - bad_files.length <= 1 && ANDROID)
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
msg += '\nMaybe it works better if you select just one file';
return modal.alert(msg, function () { return modal.alert(msg, function () {
gotallfiles(good_files, []); gotallfiles(good_files, nil_files, []);
});
}
if (nil_files.length) {
var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot);
for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
msg += '-- ' + nil_files[a][1] + '\n';
msg += '\nMaybe it works better if you select just one file';
return modal.confirm(msg, function () {
gotallfiles(good_files.concat(nil_files), [], []);
}, function () {
gotallfiles(good_files, [], []);
}); });
} }
@@ -921,10 +1022,11 @@ function up2k_init(subtle) {
"t0": now, "t0": now,
"fobj": fobj, "fobj": fobj,
"name": name, "name": name,
"size": fobj.size, "size": fobj.size || 0,
"lmod": lmod / 1000, "lmod": lmod / 1000,
"purl": fdir, "purl": fdir,
"done": false, "done": false,
"bytes_uploaded": 0,
"hash": [] "hash": []
}, },
key = entry.name + '\n' + entry.size; key = entry.name + '\n' + entry.size;
@@ -939,14 +1041,16 @@ function up2k_init(subtle) {
pvis.addfile([ pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit( uc.fsearch ? esc(entry.name) : linksplit(
uricom_dec(entry.purl)[0] + entry.name).join(' '), entry.purl + uricom_enc(entry.name)).join(' '),
'📐 hash', '📐 hash',
'' ''
], fobj.size, draw_each); ], fobj.size, draw_each);
st.bytes.total += fobj.size; st.bytes.total += fobj.size;
st.files.push(entry); st.files.push(entry);
if (uc.turbo) if (!entry.size)
push_t(st.todo.handshake, entry);
else if (uc.turbo)
push_t(st.todo.head, entry); push_t(st.todo.head, entry);
else else
push_t(st.todo.hash, entry); push_t(st.todo.hash, entry);
@@ -1047,6 +1151,7 @@ function up2k_init(subtle) {
continue; continue;
} }
donut.eta = eta;
if (etaskip) if (etaskip)
continue; continue;
@@ -1081,11 +1186,6 @@ function up2k_init(subtle) {
st.busy.handshake.length) st.busy.handshake.length)
return false; return false;
if (st.busy.handshake.length)
for (var n = t.n - 1; n >= t.n - parallel_uploads && n >= 0; n--)
if (st.files[n].t_uploading)
return false;
if ((uc.multitask ? 1 : 0) < if ((uc.multitask ? 1 : 0) <
st.todo.upload.length + st.todo.upload.length +
st.busy.upload.length) st.busy.upload.length)
@@ -1122,7 +1222,7 @@ function up2k_init(subtle) {
if (running) if (running)
return; return;
if (crashed) if (crashed || !got_deps())
return defer(); return defer();
running = true; running = true;
@@ -1138,12 +1238,31 @@ function up2k_init(subtle) {
st.busy.handshake.length + st.busy.handshake.length +
st.busy.upload.length; st.busy.upload.length;
if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.want_recheck) {
t.rechecks++;
t.want_recheck = false;
push_t(st.todo.handshake, t);
}
}
is_busy = st.todo.handshake.length;
try {
if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
treectl.goto(get_evpath());
}
catch (ex) { }
}
if (was_busy != is_busy) { if (was_busy != is_busy) {
was_busy = is_busy; was_busy = is_busy;
window[(is_busy ? "add" : "remove") + window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy); "EventListener"]("beforeunload", warn_uploader_busy);
donut.on(is_busy);
if (!is_busy) { if (!is_busy) {
var k = uc.fsearch ? 'searches' : 'uploads', var k = uc.fsearch ? 'searches' : 'uploads',
ks = uc.fsearch ? 'Search' : 'Upload', ks = uc.fsearch ? 'Search' : 'Upload',
@@ -1165,13 +1284,17 @@ function up2k_init(subtle) {
toast.err(t, '{0} {1}'.format(ks, tng)); toast.err(t, '{0} {1}'.format(ks, tng));
timer.rm(etafun); timer.rm(etafun);
timer.rm(donut.do);
op_minh = 0; op_minh = 0;
} }
else { else {
timer.add(donut.do);
timer.add(etafun, false); timer.add(etafun, false);
ebi('u2etas').style.textAlign = 'left'; ebi('u2etas').style.textAlign = 'left';
} }
etafun(); etafun();
if (pvis.act == 'bz')
pvis.changecard('bz');
} }
if (flag) { if (flag) {
@@ -1313,7 +1436,6 @@ function up2k_init(subtle) {
function exec_hash() { function exec_hash() {
var t = st.todo.hash.shift(); var t = st.todo.hash.shift();
st.busy.hash.push(t); st.busy.hash.push(t);
t.bytes_uploaded = 0;
var bpend = 0, var bpend = 0,
nchunk = 0, nchunk = 0,
@@ -1370,7 +1492,7 @@ function up2k_init(subtle) {
pvis.move(t.n, 'ng'); pvis.move(t.n, 'ng');
apop(st.busy.hash, t); apop(st.busy.hash, t);
st.bytes.finished += t.size; st.bytes.finished += t.size;
return tasker(); return;
} }
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err); toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
@@ -1446,7 +1568,6 @@ function up2k_init(subtle) {
console.log('head onerror, retrying', t); console.log('head onerror, retrying', t);
apop(st.busy.head, t); apop(st.busy.head, t);
st.todo.head.unshift(t); st.todo.head.unshift(t);
tasker();
}; };
function orz(e) { function orz(e) {
var ok = false; var ok = false;
@@ -1468,6 +1589,7 @@ function up2k_init(subtle) {
} }
t.done = true; t.done = true;
t.fobj = null;
st.bytes.hashed += t.size; st.bytes.hashed += t.size;
st.bytes.finished += t.size; st.bytes.finished += t.size;
pvis.move(t.n, 'bz'); pvis.move(t.n, 'bz');
@@ -1511,7 +1633,6 @@ function up2k_init(subtle) {
apop(st.busy.handshake, t); apop(st.busy.handshake, t);
st.todo.handshake.unshift(t); st.todo.handshake.unshift(t);
t.keepalive = keepalive; t.keepalive = keepalive;
tasker();
}; };
function orz(e) { function orz(e) {
if (t.t_busied != me) { if (t.t_busied != me) {
@@ -1537,15 +1658,18 @@ function up2k_init(subtle) {
            }
            else {
                smsg = 'found';
-                var hit = response.hits[0],
-                    msg = linksplit(hit.rp).join(''),
-                    tr = unix2iso(hit.ts),
-                    tu = unix2iso(t.lmod),
-                    diff = parseInt(t.lmod) - parseInt(hit.ts),
-                    cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
-                    sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
-                msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>';
+                var msg = [];
+                for (var a = 0, aa = Math.min(20, response.hits.length); a < aa; a++) {
+                    var hit = response.hits[a],
+                        tr = unix2iso(hit.ts),
+                        tu = unix2iso(t.lmod),
+                        diff = parseInt(t.lmod) - parseInt(hit.ts),
+                        cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
+                        sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
+                    msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
+                }
+                msg = msg.join('<br />\n');
            }
pvis.seth(t.n, 2, msg); pvis.seth(t.n, 2, msg);
pvis.seth(t.n, 1, smsg); pvis.seth(t.n, 1, smsg);
@@ -1553,6 +1677,7 @@ function up2k_init(subtle) {
apop(st.busy.handshake, t); apop(st.busy.handshake, t);
st.bytes.finished += t.size; st.bytes.finished += t.size;
t.done = true; t.done = true;
t.fobj = null;
tasker(); tasker();
return; return;
} }
@@ -1563,7 +1688,7 @@ function up2k_init(subtle) {
console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]"); console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
t.purl = rsp_purl; t.purl = rsp_purl;
t.name = response.name; t.name = response.name;
pvis.seth(t.n, 0, linksplit(uricom_dec(t.purl)[0] + t.name).join(' ')); pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
} }
var chunksize = get_chunksize(t.size), var chunksize = get_chunksize(t.size),
@@ -1619,6 +1744,7 @@ function up2k_init(subtle) {
if (done) { if (done) {
t.done = true; t.done = true;
t.fobj = null;
st.bytes.finished += t.size - t.bytes_uploaded; st.bytes.finished += t.size - t.bytes_uploaded;
var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.), var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.),
spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.); spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
@@ -1653,13 +1779,19 @@ function up2k_init(subtle) {
} }
st.bytes.finished += t.size; st.bytes.finished += t.size;
if (rsp.indexOf('partial upload exists') !== -1 || var err_pend = rsp.indexOf('partial upload exists') + 1,
rsp.indexOf('file already exists') !== -1) { err_dupe = rsp.indexOf('file already exists') + 1;
if (err_pend || err_dupe) {
err = rsp; err = rsp;
ofs = err.indexOf('\n/'); ofs = err.indexOf('\n/');
if (ofs !== -1) { if (ofs !== -1) {
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' '); err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
} }
if (!t.rechecks && err_pend) {
t.rechecks = 0;
t.want_recheck = true;
}
} }
if (err != "") { if (err != "") {
pvis.seth(t.n, 1, "ERROR"); pvis.seth(t.n, 1, "ERROR");
@@ -1705,7 +1837,8 @@ function up2k_init(subtle) {
st.busy.upload.push(upt); st.busy.upload.push(upt);
var npart = upt.npart, var npart = upt.npart,
t = st.files[upt.nfile]; t = st.files[upt.nfile],
tries = 0;
if (!t.t_uploading) if (!t.t_uploading)
t.t_uploading = Date.now(); t.t_uploading = Date.now();
@@ -1756,8 +1889,9 @@ function up2k_init(subtle) {
if (crashed) if (crashed)
return; return;
console.log('chunkpit onerror, retrying', t); toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
do_send(); console.log('chunkpit onerror,', ++tries, t);
setTimeout(do_send, 10 * 1000);
}; };
xhr.open('POST', t.purl, true); xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]); xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
@@ -1782,16 +1916,28 @@ function up2k_init(subtle) {
wpx = window.innerWidth, wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']), fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx, wem = wpx * 1.0 / fpx,
wide = wem > 54, wide = wem > 54 ? 'w' : '',
parent = ebi(wide && has(perms, 'write') ? 'u2btn_cw' : 'u2btn_ct'), write = has(perms, 'write'),
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn'); btn = ebi('u2btn');
//console.log([wpx, fpx, wem]); //console.log([wpx, fpx, wem]);
if (btn.parentNode !== parent) { if (btn.parentNode !== parent) {
parent.appendChild(btn); parent.appendChild(btn);
ebi('u2conf').setAttribute('class', wide ? 'has_btn' : ''); ebi('u2conf').setAttribute('class', wide);
ebi('u2cards').setAttribute('class', wide ? 'w' : ''); ebi('u2cards').setAttribute('class', wide);
ebi('u2etaw').setAttribute('class', wide ? 'w' : ''); ebi('u2etaw').setAttribute('class', wide);
}
wide = wem > 78 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) {
ebi('u2conf').setAttribute('class', wide);
for (var a = 0; a < 2; a++) {
parent.appendChild(its[a]);
its[a].setAttribute('class', wide);
}
} }
} }
window.addEventListener('resize', onresize); window.addEventListener('resize', onresize);
@@ -1804,7 +1950,7 @@ function up2k_init(subtle) {
setTimeout(onresize, 500); setTimeout(onresize, 500);
} }
var o = QSA('#u2conf *[tt]'); var o = QSA('#u2conf .c *[tt]');
for (var a = o.length - 1; a >= 0; a--) { for (var a = o.length - 1; a >= 0; a--) {
o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt')); o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt'));
} }
@@ -1920,8 +2066,8 @@ function up2k_init(subtle) {
flag = up2k_flagbus(); flag = up2k_flagbus();
} }
catch (ex) { catch (ex) {
toast.err(5, "not supported on your browser:\n" + ex); toast.err(5, "not supported on your browser:\n" + esc(basenames(ex)));
tgl_flag_en(); bcfg_set('flag_en', false);
} }
} }
else if (!uc.flag_en && flag) { else if (!uc.flag_en && flag) {
@@ -1972,6 +2118,15 @@ function warn_uploader_busy(e) {
tt.init(); tt.init();
favico.init();
ebi('ico1').onclick = function () {
var a = favico.txt == this.textContent;
swrite('icot', a ? 'c' : this.textContent);
swrite('icof', a ? null : '000');
swrite('icob', a ? null : '');
favico.init();
};
if (QS('#op_up2k.act')) if (QS('#op_up2k.act'))
goto_up2k(); goto_up2k();


@@ -29,18 +29,24 @@ function esc(txt) {
        }[c];
    });
}
-window.onunhandledrejection = function (e) {
-    var err = e.reason;
-    try {
-        err += '\n' + e.reason.stack;
-    }
-    catch (e) { }
-    console.log("REJ: " + err);
-    try {
-        toast.warn(30, err);
-    }
-    catch (e) { }
-};
+function basenames(txt) {
+    return (txt + '').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js');
+}
+if ((document.location + '').indexOf(',rej,') + 1)
+    window.onunhandledrejection = function (e) {
+        var err = e.reason;
+        try {
+            err += '\n' + e.reason.stack;
+        }
+        catch (e) { }
+        err = basenames(err);
+        console.log("REJ: " + err);
+        try {
+            toast.warn(30, err);
+        }
+        catch (e) { }
+    };
try { try {
console.hist = []; console.hist = [];
var hook = function (t) { var hook = function (t) {
@@ -65,7 +71,7 @@ try {
catch (ex) { catch (ex) {
if (console.stdlog) if (console.stdlog)
console.log = console.stdlog; console.log = console.stdlog;
console.log(ex); console.log('console capture failed', ex);
} }
var crashed = false, ignexd = {}; var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) { function vis_exh(msg, url, lineNo, columnNo, error) {
@@ -140,7 +146,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
var s = mknod('style'); var s = mknod('style');
s.innerHTML = ( s.innerHTML = (
'#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' + '#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' + '#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' + '#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
'#exbox a{text-decoration:underline;color:#fc0} ' + '#exbox a{text-decoration:underline;color:#fc0} ' +
@@ -151,7 +157,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
); );
document.head.appendChild(s); document.head.appendChild(s);
} }
exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md'); exbox.innerHTML = basenames(html.join('\n')).replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
exbox.style.display = 'block'; exbox.style.display = 'block';
} }
catch (e) { catch (e) {
@@ -241,7 +247,9 @@ function import_js(url, cb) {
script.src = url; script.src = url;
script.onload = cb; script.onload = cb;
script.onerror = function () { script.onerror = function () {
toast.err(0, 'Failed to load module:\n' + url); var m = 'Failed to load module:\n' + url;
console.log(m);
toast.err(0, m);
}; };
head.appendChild(script); head.appendChild(script);
} }
@@ -400,19 +408,17 @@ function linksplit(rp) {
            link = rp.slice(0, ofs + 1);
            rp = rp.slice(ofs + 1);
        }
-        var vlink = esc(link),
-            elink = uricom_enc(link);
+        var vlink = esc(uricom_dec(link)[0]);
        if (link.indexOf('/') !== -1) {
            vlink = vlink.slice(0, -1) + '<span>/</span>';
-            elink = elink.slice(0, -3) + '/';
        }
        if (!rp && q)
-            elink += q;
-        ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
-        apath += elink;
+            link += q;
+        ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
+        apath += link;
    }
    return ret;
}
@@ -494,6 +500,11 @@ function get_vpath() {
} }
function noq_href(el) {
return el.getAttribute('href').split('?')[0];
}
function get_pwd() { function get_pwd() {
var pwd = ('; ' + document.cookie).split('; cppwd='); var pwd = ('; ' + document.cookie).split('; cppwd=');
if (pwd.length < 2) if (pwd.length < 2)
@@ -572,14 +583,22 @@ function jcp(obj) {
function sread(key) {
-    return localStorage.getItem(key);
+    try {
+        return localStorage.getItem(key);
+    }
+    catch (e) {
+        return null;
+    }
}
function swrite(key, val) {
-    if (val === undefined || val === null)
-        localStorage.removeItem(key);
-    else
-        localStorage.setItem(key, val);
+    try {
+        if (val === undefined || val === null)
+            localStorage.removeItem(key);
+        else
+            localStorage.setItem(key, val);
+    }
+    catch (e) { }
}
function jread(key, fb) { function jread(key, fb) {
@@ -602,9 +621,9 @@ function icfg_get(name, defval) {
} }
function fcfg_get(name, defval) { function fcfg_get(name, defval) {
var o = ebi(name); var o = ebi(name),
val = parseFloat(sread(name));
var val = parseFloat(sread(name));
if (isNaN(val)) if (isNaN(val))
return parseFloat(o ? o.value : defval); return parseFloat(o ? o.value : defval);
@@ -614,6 +633,19 @@ function fcfg_get(name, defval) {
return val; return val;
} }
function scfg_get(name, defval) {
var o = ebi(name),
val = sread(name);
if (val === null)
val = defval;
if (o)
o.value = val;
return val;
}
function bcfg_get(name, defval) { function bcfg_get(name, defval) {
var o = ebi(name); var o = ebi(name);
if (!o) if (!o)
@@ -665,15 +697,41 @@ function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
return v; return v;
} }
function scfg_bind(obj, oname, cname, defval, cb) {
var v = scfg_get(cname, defval),
el = ebi(cname);
obj[oname] = v;
if (el)
el.oninput = function (e) {
swrite(cname, obj[oname] = this.value);
if (cb)
cb(obj[oname]);
};
return v;
}
function hist_push(url) { function hist_push(url) {
console.log("h-push " + url); console.log("h-push " + url);
history.pushState(url, url, url); if (window.history && history.pushState)
history.pushState(url, url, url);
} }
function hist_replace(url) { function hist_replace(url) {
console.log("h-repl " + url); console.log("h-repl " + url);
history.replaceState(url, url, url); if (window.history && history.replaceState)
history.replaceState(url, url, url);
}
function sethash(hv) {
if (window.history && history.replaceState) {
hist_replace(document.location.pathname + '#' + hv);
}
else {
document.location.hash = hv;
}
} }
@@ -830,16 +888,7 @@ var tt = (function () {
} }
r.init = function () { r.init = function () {
var ttb = ebi('tooltips'); bcfg_bind(r, 'en', 'tooltips', r.en, r.init);
if (ttb) {
ttb.onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('tooltips', r.en);
r.init();
};
r.en = bcfg_get('tooltips', true)
}
r.att(document); r.att(document);
}; };
@@ -902,6 +951,9 @@ var toast = (function () {
if (sec) if (sec)
te = setTimeout(r.hide, sec * 1000); te = setTimeout(r.hide, sec * 1000);
if (txt.indexOf('<body>') + 1)
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>'; obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
obj.className = cl; obj.className = cl;
sec += obj.offsetWidth; sec += obj.offsetWidth;
@@ -1005,15 +1057,22 @@ var modal = (function () {
    }
    function onkey(e) {
-        if (e.code == 'Enter') {
-            var a = ebi('modal-ng');
-            if (a && document.activeElement == a)
+        var k = e.code,
+            eok = ebi('modal-ok'),
+            eng = ebi('modal-ng'),
+            ae = document.activeElement;
+        if (k == 'Space' && ae && (ae === eok || ae === eng))
+            k = 'Enter';
+        if (k == 'Enter') {
+            if (ae && ae == eng)
                return ng();
            return ok();
        }
-        if (e.code == 'Escape')
+        if (k == 'Escape')
            return ng();
    }
@@ -1043,7 +1102,7 @@ var modal = (function () {
} }
function _confirm(html, cok, cng, fun) { function _confirm(html, cok, cng, fun) {
cb_ok = cok; cb_ok = cok;
cb_ng = cng === undefined ? cok : null; cb_ng = cng === undefined ? cok : cng;
cb_up = fun; cb_up = fun;
html += '<div id="modalb">' + ok_cancel + '</div>'; html += '<div id="modalb">' + ok_cancel + '</div>';
r.show(html); r.show(html);
@@ -1090,6 +1149,7 @@ function repl_load() {
if (!ret.length) if (!ret.length)
ret = [ ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)', 'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
'console.hist.slice(-10).join("\\n")' 'console.hist.slice(-10).join("\\n")'
]; ];
@@ -1159,3 +1219,57 @@ function repl(e) {
} }
if (ebi('repl')) if (ebi('repl'))
ebi('repl').onclick = repl; ebi('repl').onclick = repl;
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
var favico = (function () {
var r = {};
r.en = true;
r.tag = null;
function gx(txt) {
return (svg_decl +
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
'<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' +
' font-family="sans-serif" font-weight="bold" font-size="64px"' +
' fill="#' + r.fg + '">' + txt + '</text></svg>'
);
}
r.upd = function (txt, svg) {
if (!r.txt)
return;
var b64;
try {
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
}
catch (ex) {
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
function x(m, v) { return String.fromCharCode('0x' + v); });
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
}
if (!r.tag) {
r.tag = mknod('link');
r.tag.rel = 'icon';
document.head.appendChild(r.tag);
}
r.tag.href = 'data:image/svg+xml;base64,' + b64;
};
r.init = function () {
clearTimeout(r.to);
scfg_bind(r, 'txt', 'icot', '', r.upd);
scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
scfg_bind(r, 'bg', 'icob', '222', r.upd);
r.upd();
};
r.to = setTimeout(r.init, 100);
return r;
})();


@@ -1,11 +1,11 @@
html { html {
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed; background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
} }
#files th { #files th {
background: rgba(32, 32, 32, 0.9) !important; background: rgba(32, 32, 32, 0.9) !important;
} }
#ops, #ops,
#treeul, #tree,
#files td { #files td {
background: rgba(32, 32, 32, 0.3) !important; background: rgba(32, 32, 32, 0.3) !important;
} }
@@ -19,7 +19,7 @@ html.light #files th {
}
html.light .logue,
html.light #ops,
-html.light #treeul,
+html.light #tree,
html.light #files td {
    background: rgba(248, 248, 248, 0.8) !important;
}

View File

@@ -47,5 +47,5 @@ c e2d
c nodupe

# this entire config file can be replaced with these arguments:
-# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
+# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
# but note that the config file always wins in case of conflicts

View File

@@ -27,7 +27,7 @@
#u2conf #u2btn, #u2btn {padding:1.5em 0}
/* adjust the button area a bit */
-#u2conf.has_btn {width: 35em !important; margin: 5em auto}
+#u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto}
/* a */
#op_up2k {min-height: 0}

View File

@@ -162,7 +162,7 @@ brew install python@2
pip install virtualenv

# readme toc
-cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
+cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)

View File

@@ -238,7 +238,7 @@ rm have
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
gzip -d "$f.gz" || true
-sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
+sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
tmv "$f"
}
@@ -271,7 +271,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
}
!/\}$/ {printf "%s",$0;next}
1
-' <$f | sed 's/;\}$/}/' >t
+' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
tmv "$f"
done
unexpand -h 2>/dev/null &&

scripts/rls.sh Executable file
View File

@@ -0,0 +1,36 @@
#!/bin/bash
set -e
cd ~/dev/copyparty/scripts
v=$1
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
git tag v$v
git push origin --tags
rm -rf ../dist
./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)
./make-tgz-release.sh $v
rm -f ../dist/copyparty-sfx.*
./make-sfx.sh no-sh
../dist/copyparty-sfx.py -h
ar=
while true; do
for ((a=0; a<100; a++)); do
for f in ../dist/copyparty-sfx.{py,sh}; do
[ -e $f ] || continue;
mv $f $f.$(wc -c <$f | awk '{print$1}')
done
./make-sfx.sh re $ar
done
ar=no-sh
done
# git tag -d v$v; git push --delete origin v$v

View File

@@ -9,7 +9,7 @@ import subprocess as sp
to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)
-run me with any version of python, i will unpack and run copyparty
+run me with python 2.7 or 3.3+ to unpack and run copyparty
there's zero binaries! just plaintext python scripts all the way down
so you can easily unpack the archive and inspect it for shady stuff

View File

@@ -60,7 +60,7 @@ class Cpp(object):
        pass


-def tc1():
+def tc1(vflags):
    ub = "http://127.0.0.1:4321/"
    td = os.path.join("srv", "smoketest")
    try:
@@ -100,17 +100,17 @@ def tc1():
    for d1 in ["r", "w", "a"]:
        pdirs.append("{}/{}".format(td, d1))
        pdirs.append("{}/{}/j".format(td, d1))
-        for d2 in ["r", "w", "a"]:
+        for d2 in ["r", "w", "a", "c"]:
            d = os.path.join(td, d1, "j", d2)
            pdirs.append(d)
            os.makedirs(d)

    pdirs = [x.replace("\\", "/") for x in pdirs]
    udirs = [x.split("/", 2)[2] for x in pdirs]
-    perms = [x.rstrip("j/")[-1] for x in pdirs]
+    perms = [x.rstrip("cj/")[-1] for x in pdirs]
    perms = ["rw" if x == "a" else x for x in perms]
    for pd, ud, p in zip(pdirs, udirs, perms):
-        if ud[-1] == "j":
+        if ud[-1] == "j" or ud[-1] == "c":
            continue

        hp = None
@@ -123,29 +123,37 @@ def tc1():
hp = "-" hp = "-"
hpaths[ud] = os.path.join(pd, ".hist") hpaths[ud] = os.path.join(pd, ".hist")
arg = "{}:{}:{}".format(pd, ud, p, hp) arg = "{}:{}:{}".format(pd, ud, p)
if hp: if hp:
arg += ":c,hist=" + hp arg += ":c,hist=" + hp
args += ["-v", arg] args += ["-v", arg + vflags]
# return # return
cpp = Cpp(args) cpp = Cpp(args)
CPP.append(cpp) CPP.append(cpp)
cpp.await_idle(ub, 3) cpp.await_idle(ub, 3)
for d in udirs: for d, p in zip(udirs, perms):
vid = ovid + "\n{}".format(d).encode("utf-8") vid = ovid + "\n{}".format(d).encode("utf-8")
try: r = requests.post(
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)}) ub + d,
except: data={"act": "bput"},
pass files={"f": (d.replace("/", "") + ".h264", vid)},
)
c = r.status_code
if c == 200 and p not in ["w", "rw"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c == 403 and p not in ["r"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c not in [200, 403]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
cpp.clean() cpp.clean()
# GET permission # GET permission
for d, p in zip(udirs, perms): for d, p in zip(udirs, perms):
u = "{}{}/a.h264".format(ub, d) u = "{}{}/{}.h264".format(ub, d, d.replace("/", ""))
r = requests.get(u) r = requests.get(u)
ok = bool(r) ok = bool(r)
if ok != (p in ["rw"]): if ok != (p in ["rw"]):
@@ -153,14 +161,14 @@ def tc1():
    # stat filesystem
    for d, p in zip(pdirs, perms):
-        u = "{}/a.h264".format(d)
+        u = "{}/{}.h264".format(d, d.split("test/")[-1].replace("/", ""))
        ok = os.path.exists(u)
        if ok != (p in ["rw", "w"]):
            raise Exception("stat {} with perm {} at {}".format(ok, p, u))

    # GET thumbnail, vreify contents
    for d, p in zip(udirs, perms):
-        u = "{}{}/a.h264?th=j".format(ub, d)
+        u = "{}{}/{}.h264?th=j".format(ub, d, d.replace("/", ""))
        r = requests.get(u)
        ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
        if ok != (p in ["rw"]):
@@ -192,9 +200,9 @@ def tc1():
    cpp.stop(True)


-def run(tc):
+def run(tc, *a):
    try:
-        tc()
+        tc(*a)
    finally:
        try:
            CPP[0].stop(False)
@@ -203,7 +211,8 @@ def run(tc):
def main():
-    run(tc1)
+    run(tc1, "")
+    run(tc1, ":c,fk")


if __name__ == "__main__":

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# coding: utf-8

from __future__ import print_function
@@ -114,7 +114,7 @@ args = {
"install_requires": ["jinja2"], "install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]}, "extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]}, "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py"], "scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
"cmdclass": {"clean2": clean2}, "cmdclass": {"clean2": clean2},
} }

View File

@@ -48,7 +48,9 @@ class Cfg(Namespace):
mte="a", mte="a",
mth="", mth="",
hist=None, hist=None,
no_hash=False, no_idx=None,
no_hash=None,
js_browser=None,
css_browser=None, css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
) )

View File

@@ -23,7 +23,9 @@ class Cfg(Namespace):
"mte": "a", "mte": "a",
"mth": "", "mth": "",
"hist": None, "hist": None,
"no_hash": False, "no_idx": None,
"no_hash": None,
"js_browser": None,
"css_browser": None, "css_browser": None,
"no_voldump": True, "no_voldump": True,
"no_logues": False, "no_logues": False,

View File

@@ -3,6 +3,7 @@ import sys
import time
import shutil
import jinja2
+import threading
import tempfile
import platform
import subprocess as sp
@@ -28,7 +29,7 @@ if MACOS:
    # 25% faster; until any tests do symlink stuff


-from copyparty.util import Unrecv
+from copyparty.util import Unrecv, FHC


def runcmd(argv):
@@ -132,8 +133,10 @@ class VHttpConn(object):
self.log_src = "a" self.log_src = "a"
self.lf_url = None self.lf_url = None
self.hsrv = VHttpSrv() self.hsrv = VHttpSrv()
self.u2fh = FHC()
self.mutex = threading.Lock()
self.nreq = 0 self.nreq = 0
self.nbyte = 0 self.nbyte = 0
self.ico = None self.ico = None
self.thumbcli = None self.thumbcli = None
self.t0 = time.time() self.t0 = time.time()