Compare commits

...

72 Commits

Author SHA1 Message Date
ed
cade1990ce v1.3.5 2022-07-06 02:29:11 +02:00
ed
59b6e61816 build fstab from relabels when mtab is unreadable 2022-07-06 02:28:34 +02:00
ed
daff7ff158 v1.3.4 2022-07-06 00:12:10 +02:00
ed
0862860961 misc cleanup 2022-07-06 00:00:56 +02:00
ed
1cb24045a0 dont thumb empty files 2022-07-05 23:45:47 +02:00
ed
622358b172 flag to control mtp timeout kill behavior 2022-07-05 23:38:49 +02:00
ed
7998884a9d adopt the osd hider 2022-07-05 23:36:44 +02:00
ed
51ddecd101 improve readme 2022-07-05 23:27:48 +02:00
ed
7a35ab1d1e bbox: video seek / loop url params 2022-07-05 20:37:05 +02:00
ed
48564ba52a bbox: add A-B video loop 2022-07-05 19:53:43 +02:00
ed
49efffd740 bbox: tap left/right side of image for prev/next 2022-07-05 19:33:09 +02:00
ed
d6ac224c8f bbox: tap to show/hide buttons 2022-07-05 19:18:21 +02:00
ed
a772b8c3f2 bbox: add fullscreen for images too 2022-07-05 19:06:02 +02:00
ed
b580953dcd bbox: fix crash on swipe during close 2022-07-05 18:49:52 +02:00
ed
d86653c763 ux 2022-07-05 00:13:08 +02:00
ed
dded4fca76 option to specify favicon + default-enable it 2022-07-05 00:06:22 +02:00
ed
36365ffa6b explain the donut 2022-07-04 22:17:37 +02:00
ed
0f9aeeaa27 bump codemirror to 5.65.6 2022-07-04 22:15:52 +02:00
ed
d8ebcd0ef7 lol dpi 2022-07-04 22:13:28 +02:00
ed
6e445487b1 satisfy cloudflare DDoS protection 2022-07-03 16:04:28 +02:00
ed
6605e461c7 improve mtp section 2022-07-03 14:23:56 +02:00
ed
40ce4e2275 cleanup 2022-07-03 13:55:48 +02:00
ed
8fef9e363e recursive kill mtp on timeout 2022-07-03 04:57:15 +02:00
ed
4792c2770d fix a spin 2022-07-03 02:39:15 +02:00
ed
87bb49da36 new mtp: video integrity checker 2022-07-03 01:50:38 +02:00
ed
1c0071d9ce perf 2022-07-03 01:40:30 +02:00
ed
efded35c2e ffmpeg saying the fps is 1/0 yeah okay 2022-07-02 00:39:46 +02:00
ed
1d74240b9a ux: hide uploads table until something happens 2022-07-01 09:16:23 +02:00
ed
098184ff7b add write-only up2k ui simplifier 2022-07-01 00:55:36 +02:00
ed
4083533916 vt100 listing: reset color at eof 2022-06-29 22:41:51 +02:00
ed
feb1acd43a v1.3.3 2022-06-27 22:57:05 +02:00
ed
a9591db734 cleanup 2022-06-27 22:56:29 +02:00
ed
9ebf148cbe support android9 sdcardfs on sdcard 2022-06-27 22:15:35 +02:00
ed
a473e5e19a always include custom css/js 2022-06-27 17:24:30 +02:00
ed
5d3034c231 detect sparse support from st_blocks 2022-06-23 18:23:42 +02:00
ed
c3a895af64 android sdcardfs can be fat32 2022-06-23 16:27:30 +02:00
ed
cea5aecbf2 v1.3.2 2022-06-20 01:31:29 +02:00
ed
0e61e70670 audioplayer continues to next folder by default 2022-06-20 00:20:13 +02:00
ed
1e333c0939 fix doc traversal 2022-06-19 23:32:36 +02:00
ed
917b6ec03c naming 2022-06-19 22:58:20 +02:00
ed
fe67c52ead configurable list of sparse-supporting filesystems +
close nonsparse files after each write to force flush
2022-06-19 22:38:52 +02:00
ed
909c7bee3e ignore md plugin errors 2022-06-19 20:28:45 +02:00
ed
27ca54d138 md: ol appeared as ul 2022-06-19 19:05:41 +02:00
ed
2147c3a646 run markdown plugins in directory listings 2022-06-19 18:17:22 +02:00
ed
a99120116f ux: breadcrumb ctrl-click 2022-06-19 17:51:03 +02:00
ed
802efeaff2 dont let tags imply subdirectories when renaming 2022-06-19 16:06:39 +02:00
ed
9ad3af1ef6 misc tweaks 2022-06-19 16:05:48 +02:00
ed
715727b811 add changelog 2022-06-17 15:33:57 +02:00
ed
c6eaa7b836 aight good to know 2022-06-17 00:37:56 +02:00
ed
c2fceea2a5 v1.3.1 2022-06-16 21:56:12 +02:00
ed
190e11f7ea update deps + misc 2022-06-16 21:43:40 +02:00
ed
ad7413a5ff add .PARTIAL suffix to bup uploads too +
aggressive limits checking
2022-06-16 21:00:41 +02:00
ed
903b9e627a ux snappiness + keepalive on http-1.0 2022-06-16 20:33:09 +02:00
ed
c5c1e96cf8 ux: button to reset hidden columns 2022-06-16 19:06:28 +02:00
ed
62fbb04c9d allow moving files between filesystems 2022-06-16 18:46:50 +02:00
ed
728dc62d0b optimize nonsparse uploads (fat32, exfat, hpfs) 2022-06-16 17:51:42 +02:00
ed
2dfe1b1c6b add themes: hacker, hi-con 2022-06-16 12:21:21 +02:00
ed
35d4a1a6af ux: delay loading animation + focus outlines + explain ng 2022-06-16 11:02:05 +02:00
ed
eb3fa5aa6b add safety profiles + improve helptext + speed 2022-06-16 10:21:44 +02:00
ed
438384425a add types, isort, errorhandling 2022-06-16 01:07:15 +02:00
ed
0b6f102436 fix multiprocessing ftpd 2022-06-12 16:37:56 +02:00
ed
c9b7ec72d8 add hotkey Y to download current song / vid / pic 2022-06-09 17:23:11 +02:00
ed
256c7f1789 add option to see errors from mtp parsers 2022-06-09 14:46:35 +02:00
ed
4e5a323c62 more cleanup 2022-06-08 01:05:35 +02:00
ed
f4a3bbd237 fix ansify prepending bracket to all logfiles 2022-06-07 23:45:54 +02:00
ed
fe73f2d579 cleanup 2022-06-07 23:08:43 +02:00
ed
f79fcc7073 discover local ip under termux 2022-06-07 23:03:16 +02:00
ed
4c4b3790c7 fix read-spin on d/c during json post + errorhandling 2022-06-07 19:02:52 +02:00
ed
bd60b464bb fix misleading log-msg 2022-06-07 14:12:55 +02:00
ed
6bce852765 ux: treepar positioning 2022-06-06 22:05:13 +02:00
ed
3b19a5a59d improve a11y jumpers 2022-05-25 20:31:12 +02:00
ed
f024583011 add a11y jumpers 2022-05-24 09:09:54 +02:00
72 changed files with 7232 additions and 2188 deletions

25
.vscode/settings.json vendored
View File

@@ -23,7 +23,6 @@
"terminal.ansiBrightWhite": "#ffffff",
},
"python.testing.pytestEnabled": false,
"python.testing.nosetestsEnabled": false,
"python.testing.unittestEnabled": true,
"python.testing.unittestArgs": [
"-v",
@@ -35,18 +34,40 @@
"python.linting.pylintEnabled": true,
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true,
"python.linting.mypyArgs": [
"--ignore-missing-imports",
"--follow-imports=silent",
"--show-column-numbers",
"--strict"
],
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
],
"python.linting.banditArgs": [
"--ignore=B104"
],
"python.linting.pylintArgs": [
"--disable=missing-module-docstring",
"--disable=missing-class-docstring",
"--disable=missing-function-docstring",
"--disable=wrong-import-position",
"--disable=raise-missing-from",
"--disable=bare-except",
"--disable=invalid-name",
"--disable=line-too-long",
"--disable=consider-using-f-string"
],
// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "black",
"editor.formatOnSave": true,
"[html]": {
"editor.formatOnSave": false,
},
"[css]": {
"editor.formatOnSave": false,
},
"files.associations": {
"*.makefile": "makefile"
},

124
README.md
View File

@@ -9,11 +9,12 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
* server only needs `py2.7` or `py3.3+`, all dependencies optional
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+`
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
## get the app
@@ -43,7 +44,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [tabs](#tabs) - the main tabs in the ui
* [hotkeys](#hotkeys) - the browser has the following hotkeys
* [navpane](#navpane) - switching between breadcrumbs or navpane
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
* [thumbnails](#thumbnails) - press `g` or `田` to toggle grid-view instead of the file listing
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
* [uploading](#uploading) - drag files/folders into the web-browser to upload
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
@@ -101,7 +102,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; see `-h` for help if you want [accounts and volumes](#accounts-and-volumes) etc
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
some recommended options:
* `-e2dsa` enables general [file indexing](#file-indexing)
@@ -109,7 +110,7 @@ some recommended options:
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access (see `--help-ls`)
### on servers
@@ -167,7 +168,7 @@ feature summary
* download
* ☑ single files in browser
* ☑ [folders as zip / tar files](#zip-downloads)
* ☑ FUSE client (read-only)
*[FUSE client](https://github.com/9001/copyparty/tree/hovudstraum/bin#copyparty-fusepy) (read-only)
* browser
* ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
@@ -203,6 +204,7 @@ project goals / philosophy
* inverse linux philosophy -- do all the things, and do an *okay* job
* quick drop-in service to get a lot of features in a pinch
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
* but the resumable multithreaded uploads are p slick ngl
* run anywhere, support everything
* as many web-browsers and python versions as possible
* every browser should at least be able to browse, download, upload files
@@ -241,7 +243,7 @@ some improvement ideas
## general bugs
* Windows: if the up2k db is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* probably more, pls let me know
@@ -273,7 +275,7 @@ some improvement ideas
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
* not really, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
# accounts and volumes
@@ -281,6 +283,8 @@ some improvement ideas
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
a quick summary can be seen using `--help-accounts`
configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
@@ -337,7 +341,7 @@ the browser has the following hotkeys (always qwerty)
* `I/K` prev/next folder
* `M` parent folder (or unexpand current)
* `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails)
* `G` toggle list / [grid view](#thumbnails) -- same as `田` bottom-right
* `T` toggle thumbnails / icons
* `ESC` close various things
* `ctrl-X` cut selected files/folders
@@ -358,19 +362,23 @@ the browser has the following hotkeys (always qwerty)
* `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P` play/pause (also starts playing the folder)
* `Y` download file
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `F` toggle fullscreen
* `S` toggle selection
* `R` rotate clockwise (shift=ccw)
* `Y` download file
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `V` loop
* `M` mute
* `C` continue playing next video
* `V` loop entire file
* `[` loop range (start)
* `]` loop range (end)
* when the navpane is open:
* `A/D` adjust tree width
* in the [grid view](#thumbnails):
@@ -402,7 +410,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (
## thumbnails
press `g` to toggle grid-view instead of the file listing, and `t` toggles icons / thumbnails
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)
@@ -444,13 +452,13 @@ you can also zip a selection of files or folders by clicking them in the browser
## uploading
drag files/folders into the web-browser to upload
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy))
this initiates an upload using `up2k`; there are two uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
* `[🚀] up2k`, the fancy one
* `[🚀] up2k`, the good / fancy one
you can also undo/delete uploads by using `[🧯]` [unpost](#unpost)
NB: you can undo/delete your own uploads with `[🧯]` [unpost](#unpost)
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
@@ -462,7 +470,7 @@ up2k has several advantages:
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved
see [up2k](#up2k) for details on how it works
see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
@@ -474,7 +482,6 @@ the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[💭]` ask for confirmation before files are added to the queue
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser
@@ -486,7 +493,7 @@ and then theres the tabs below it,
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued
note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
note that since up2k has to read each file twice, `[🎈] bup` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
@@ -597,7 +604,7 @@ and there are *two* editors
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider [./contrib/media-osd-bgone.ps1](contrib/#media-osd-bgoneps1)
* click the bottom-left `π` to open a javascript prompt for debugging
@@ -620,7 +627,9 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
add the argument `-e2ts` to also scan/index tags from music files, which brings us over to:
the `raw` field allows for more complex stuff such as `( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )` which finds all songs by either nhato or taishi, excluding collabs (terrible example, why would you do that)
for the above example to work, add the commandline argument `-e2ts` to also scan/index tags from music files, which brings us over to:
# server config
@@ -767,27 +776,32 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec, and only files which contain audio get analyzed by default (see ay/an/ad below)
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:c,mtp=.bpm=~/bin/audio-bpm.py:c,mtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files (default), `an` only do non-audio files, or `ad` do all files (d as in dontcare)
* "audio file" also means videos btw, as long as there is an audio stream
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
you can control how the parser is killed if it times out with option `kt` killing the entire process tree (default), `km` just the main process, or `kn` let it continue running until copyparty is terminated
if something doesn't work, try `--mtag-v` for verbose error messages
## upload events
trigger a script/program on each upload like so:
```
-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
-v /mnt/inc:inc:w:c,mte=+x1:c,mtp=x1=ad,kn,/usr/bin/notify-send
```
so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `x1` to the list of tags to index (`mte`), and using `/usr/bin/notify-send` to "provide" tag `x1` for any filetype (`ad`) with kill-on-timeout disabled (`kn`)
that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
@@ -834,8 +848,17 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
## complete examples
* read-only music server with bpm and key scanning
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`
* read-only music server
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
* ...with bpm and key scanning
`-mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`
* ...with a read-write folder for `kevin` whose password is `okgo`
`-a kevin:okgo -v /mnt/nas/inc:/inc:rw,kevin`
* ...with logging to disk
`-lo log/cpp-%Y-%m%d-%H%M%S.txt.xz`
# browser support
@@ -882,6 +905,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
| **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
@@ -945,12 +969,14 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught
a single sha512 would be better, right?
this is due to `crypto.subtle` not providing a streaming api (or the option to seed the sha512 hasher with a starting hash)
this is due to `crypto.subtle` [not yet](https://github.com/w3c/webcrypto/issues/73) providing a streaming api (or the option to seed the sha512 hasher with a starting hash)
as a result, the hashes are much less useful than they could have been (search the server by sha512, provide the sha512 in the response http headers, ...)
hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported
* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids
# performance
@@ -988,13 +1014,25 @@ when uploading files,
some notes on hardening
on public copyparty instances with anonymous upload enabled:
* option `-s` is a shortcut to set the following options:
* `--no-thumb` disables thumbnails and audio transcoding to stop copyparty from running `FFmpeg`/`Pillow`/`VIPS` on uploaded files, which is a [good idea](https://www.cvedetails.com/vulnerability-list.php?vendor_id=3611) if anonymous upload is enabled
* `--no-mtag-ff` uses `mutagen` to grab music tags instead of `FFmpeg`, which is safer and faster but less accurate
* `--dotpart` hides uploads from directory listings while they're still incoming
* `--no-robots` and `--force-js` makes life harder for crawlers, see [hiding from google](#hiding-from-google)
* users can upload html/css/js which will evaluate for other visitors in a few ways,
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
* option `-ss` is a shortcut for the above plus:
* `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary `<script>` tags
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
* however note if you edit one file it will also affect the other copies
* `--vague-403` returns a "404 not found" instead of "403 forbidden" which is a common enterprise meme
* `--nih` removes the server hostname from directory listings
other misc:
* option `-sss` is a shortcut for the above plus:
* `-lo cpp-%Y-%m%d-%H%M%S.txt.xz` enables logging to disk
* `-ls **,*,ln,p,r` does a scan on startup for any dangerous symlinks
other misc notes:
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
@@ -1200,15 +1238,18 @@ journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
## dev env setup
mostly optional; if you need a working env for vscode or similar
you need python 3.9 or newer due to type hints
the rest is mostly optional; if you need a working env for vscode or similar
```sh
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 # mandatory
pip install jinja2 strip_hints # MANDATORY
pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install black==21.12b0 bandit pylint flake8 # vscode tooling
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
```
@@ -1239,10 +1280,7 @@ also builds the sfx so skip the sfx section above
in the `scripts` folder:
* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create github release with `make-tgz-release.sh`
* create sfx with `make-sfx.sh`
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx
# todo
@@ -1269,7 +1307,7 @@ roughly sorted by priority
* up2k partials ui
* feels like there isn't much point
* cache sha512 chunks on client
* too dangerous
* too dangerous -- overtaken by turbo mode
* comment field
* nah
* look into android thumbnail cache file format

View File

@@ -17,7 +17,7 @@ except:
"""
calculates various checksums for uploads,
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
usage: -mtp crc32,md5,sha1,sha256b=ad,bin/mtag/cksum.py
"""

View File

@@ -43,7 +43,6 @@ PS: this requires e2ts to be functional,
import os
import sys
import time
import filecmp
import subprocess as sp

View File

@@ -16,7 +16,7 @@ goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin
@@ -63,8 +63,8 @@ set -e
EOF
chmod 755 /usr/local/bin/chromium-browser
# start the server (note: replace `-v.::rw:` with `-v.::r:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
"""

65
bin/mtag/vidchk.py Executable file
View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3

import sys
import subprocess as sp

from copyparty.util import fsenc
from copyparty.mtag import ffprobe

"""
inspects video files for errors and such
usage: -mtp vidchk=t600,ay,bin/mtag/vidchk.py
"""

FAST = True  # parse entire file at container level
# FAST = False  # fully decode audio and video streams


def main():
    """inspect the video given as argv[1]; returns an error/warning string, or None if it looks fine"""
    video_path = sys.argv[1]
    tags, _ = ffprobe(video_path)

    # reject files whose resolution is unknown or too small;
    # ffprobe tags may be absent or non-numeric, hence the blanket except
    try:
        width = int(tags[".resw"][1])
        height = int(tags[".resh"][1])
        if not width + height:
            raise Exception()
    except:
        return "could not determine resolution"

    if min(width, height) < 720:
        return "resolution too small"

    # base ffmpeg invocation with aggressive error detection enabled
    zs = (
        "ffmpeg -y -hide_banner -nostdin -v warning"
        + " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
        " -xerror -i"
    )
    cmd = zs.encode("ascii").split(b" ")
    cmd.append(fsenc(video_path))

    # FAST only remuxes (container-level check); otherwise fully decode both streams
    zs = "-c copy -f null -" if FAST else "-vcodec rawvideo -acodec pcm_s16le -f null -"
    cmd.extend(zs.encode("ascii").split(b" "))

    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    so, se = proc.communicate()
    rc = proc.returncode

    # nonzero exit => hard error; first line of combined output as the reason
    if rc:
        err = (so + se).decode("utf-8", "replace").split("\n", 1)[0]
        return f"ERROR {rc}: {err}"

    # clean exit but something on stderr => downgrade to a warning
    if se:
        err = se.decode("utf-8", "replace").split("\n", 1)[0]
        return f"Warning: {err}"

    return None


if __name__ == "__main__":
    print(main() or "ok")

View File

@@ -3,11 +3,11 @@ from __future__ import print_function, unicode_literals
"""
up2k.py: upload to copyparty
2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed
2022-06-16, v0.15, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10
- supports python 2.6, 2.7, and 3.3 through 3.11
- almost zero error-handling
- but if something breaks just try again and it'll autoresume
@@ -25,9 +25,10 @@ import hashlib
import argparse
import platform
import threading
import requests
import datetime
import requests
# from copyparty/__init__.py
PY2 = sys.version_info[0] == 2
@@ -76,15 +77,15 @@ class File(object):
self.up_b = 0 # type: int
self.up_c = 0 # type: int
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
class FileSlice(object):
"""file-like object providing a fixed window into a file"""
def __init__(self, file, cid):
# type: (File, str) -> FileSlice
# type: (File, str) -> None
self.car, self.len = file.kchunks[cid]
self.cdr = self.car + self.len
@@ -150,13 +151,11 @@ if not VT100:
def termsize():
import os
env = os.environ
def ioctl_GWINSZ(fd):
try:
import fcntl, termios, struct, os
import fcntl, termios, struct
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
except:
@@ -217,8 +216,8 @@ class CTermsize(object):
eprint("\033[s\033[r\033[u")
else:
self.g = 1 + self.h - margin
m = "{0}\033[{1}A".format("\n" * margin, margin)
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
t = "{0}\033[{1}A".format("\n" * margin, margin)
eprint("{0}\033[s\033[1;{1}r\033[u".format(t, self.g - 1))
ss = CTermsize()
@@ -360,7 +359,7 @@ def get_hashlist(file, pcb):
def handshake(req_ses, url, file, pw, search):
# type: (requests.Session, str, File, any, bool) -> List[str]
# type: (requests.Session, str, File, any, bool) -> list[str]
"""
performs a handshake with the server; reply is:
if search, a list of search results
@@ -411,7 +410,7 @@ def handshake(req_ses, url, file, pw, search):
file.name = r["name"]
file.wark = r["wark"]
return r["hash"]
return r["hash"], r["sprs"]
def upload(req_ses, file, cid, pw):
@@ -491,11 +490,35 @@ class Ctl(object):
self.filegen = walkdirs([], ar.files)
if ar.safe:
self.safe()
self._safe()
else:
self.fancy()
self.hash_f = 0
self.hash_c = 0
self.hash_b = 0
self.up_f = 0
self.up_c = 0
self.up_b = 0
self.up_br = 0
self.hasher_busy = 1
self.handshaker_busy = 0
self.uploader_busy = 0
self.serialized = False
def safe(self):
self.t0 = time.time()
self.t0_up = None
self.spd = None
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
self._fancy()
def _safe(self):
"""minimal basic slow boring fallback codepath"""
search = self.ar.s
for nf, (top, rel, inf) in enumerate(self.filegen):
@@ -508,7 +531,7 @@ class Ctl(object):
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
while True:
print(" hs...")
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
hs, _ = handshake(req_ses, self.ar.url, file, self.ar.a, search)
if search:
if hs:
for hit in hs:
@@ -529,29 +552,7 @@ class Ctl(object):
print(" ok!")
def fancy(self):
self.hash_f = 0
self.hash_c = 0
self.hash_b = 0
self.up_f = 0
self.up_c = 0
self.up_b = 0
self.up_br = 0
self.hasher_busy = 1
self.handshaker_busy = 0
self.uploader_busy = 0
self.t0 = time.time()
self.t0_up = None
self.spd = None
self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File]
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
def _fancy(self):
if VT100:
atexit.register(self.cleanup_vt100)
ss.scroll_region(3)
@@ -597,8 +598,8 @@ class Ctl(object):
if "/" in name:
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
m = "{0:6.1f}% {1} {2}\033[K"
txt += m.format(p, self.nfiles - f, name)
t = "{0:6.1f}% {1} {2}\033[K"
txt += t.format(p, self.nfiles - f, name)
txt += "\033[{0}H ".format(ss.g + 2)
else:
@@ -614,11 +615,12 @@ class Ctl(object):
spd = humansize(spd)
eta = str(datetime.timedelta(seconds=int(eta)))
left = humansize(self.nbytes - self.up_b)
sleft = humansize(self.nbytes - self.up_b)
nleft = self.nfiles - self.up_f
tail = "\033[K\033[u" if VT100 else "\r"
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
def cleanup_vt100(self):
ss.scroll_region(None)
@@ -709,7 +711,7 @@ class Ctl(object):
upath = file.abs.decode("utf-8", "replace")
try:
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
hs, sprs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
except Exception as ex:
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
self.q_recheck.put(file)
@@ -720,8 +722,8 @@ class Ctl(object):
if search:
if hs:
for hit in hs:
m = "found: {0}\n {1}{2}\n"
print(m.format(upath, burl, hit["rp"]), end="")
t = "found: {0}\n {1}{2}\n"
print(t.format(upath, burl, hit["rp"]), end="")
else:
print("NOT found: {0}\n".format(upath), end="")
@@ -734,6 +736,12 @@ class Ctl(object):
continue
with self.mutex:
if not sprs and not self.serialized:
t = "server filesystem does not support sparse files; serializing uploads\n"
eprint(t)
self.serialized = True
for _ in range(self.ar.j - 1):
self.q_upload.put(None)
if not hs:
# all chunks done
self.up_f += 1

View File

@@ -22,6 +22,9 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)
### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

104
contrib/media-osd-bgone.ps1 Normal file
View File

@@ -0,0 +1,104 @@
# media-osd-bgone.ps1: disable media-control OSD on win10do
# v1.1, 2021-06-25, ed <irc.rizon.net>, MIT-licensed
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/media-osd-bgone.ps1
#
# locates the first window that looks like the media OSD and minimizes it;
# doing this once after each reboot should do the trick
# (adjust the width/height filter if it doesn't work)
#
# ---------------------------------------------------------------------
#
# tip: save the following as "media-osd-bgone.bat" next to this script:
# start cmd /c "powershell -command ""set-executionpolicy -scope process bypass; .\media-osd-bgone.ps1"" & ping -n 2 127.1 >nul"
#
# then create a shortcut to that bat-file and move the shortcut here:
# %appdata%\Microsoft\Windows\Start Menu\Programs\Startup
#
# and now this will autorun on bootup
Add-Type -TypeDefinition @"
using System;
using System.IO;
using System.Threading;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Windows.Forms;
namespace A {
public class B : Control {
[DllImport("user32.dll")]
static extern void keybd_event(byte bVk, byte bScan, uint dwFlags, int dwExtraInfo);
[DllImport("user32.dll", SetLastError = true)]
static extern IntPtr FindWindowEx(IntPtr hwndParent, IntPtr hwndChildAfter, string lpszClass, string lpszWindow);
[DllImport("user32.dll", SetLastError=true)]
static extern bool GetWindowRect(IntPtr hwnd, out RECT lpRect);
[DllImport("user32.dll")]
static extern bool ShowWindow(IntPtr hWnd, int nCmdShow);
[StructLayout(LayoutKind.Sequential)]
public struct RECT {
public int x;
public int y;
public int x2;
public int y2;
}
bool fa() {
RECT r;
IntPtr it = IntPtr.Zero;
while ((it = FindWindowEx(IntPtr.Zero, it, "NativeHWNDHost", "")) != IntPtr.Zero) {
if (FindWindowEx(it, IntPtr.Zero, "DirectUIHWND", "") == IntPtr.Zero)
continue;
if (!GetWindowRect(it, out r))
continue;
int w = r.x2 - r.x + 1;
int h = r.y2 - r.y + 1;
Console.WriteLine("[*] hwnd {0:x} @ {1}x{2} sz {3}x{4}", it, r.x, r.y, w, h);
if (h != 141)
continue;
ShowWindow(it, 6);
Console.WriteLine("[+] poof");
return true;
}
return false;
}
void fb() {
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
Thread.Sleep(500);
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
while (true) {
if (fa()) {
break;
}
Console.WriteLine("[!] not found");
Thread.Sleep(1000);
}
this.Invoke((MethodInvoker)delegate {
Application.Exit();
});
}
public void Run() {
Console.WriteLine("[+] hi");
new Thread(new ThreadStart(fb)).Start();
Application.Run();
Console.WriteLine("[+] bye");
}
}
}
"@ -ReferencedAssemblies System.Windows.Forms
(New-Object -TypeName A.B).Run()

View File

@@ -11,6 +11,13 @@ save one of these as `.epilogue.html` inside a folder to customize it:
## example browser-js
point `--js-browser` to one of these by URL:
* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
## example browser-css
point `--css-browser` to one of these by URL:

View File

@@ -7,7 +7,7 @@
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */

View File

@@ -0,0 +1,59 @@
/*
makes the up2k ui REALLY minimal by hiding a bunch of stuff
almost the same as minimal-up2k.html except this one...:
-- applies to every write-only folder when used with --js-browser
-- only applies if javascript is enabled
-- doesn't hide the total upload ETA display
-- looks slightly better
*/
var u2min = `
<style>
#ops, #path, #tree, #files, #epi+div+h2,
#u2conf td.c+.c, #u2cards, #u2foot, #srch_dz, #srch_zd {
display: none !important;
}
#u2conf {margin:5em auto 0 auto !important}
#u2conf.ww {width:70em}
#u2conf.w {width:50em}
#u2conf.w .c,
#u2conf.w #u2btn_cw {text-align:left}
#u2conf.w #u2btn_cw {width:70%}
#u2etaw {margin:3em auto}
#u2etaw.w {
text-align: center;
margin: -3.5em auto 5em auto;
}
#u2etaw.w #u2etas {margin-right:-37em}
#u2etaw.w #u2etas.o {margin-top:-2.2em}
#u2etaw.ww {margin:-1em auto}
#u2etaw.ww #u2etas {padding-left:4em}
#u2etas {
background: none !important;
border: none !important;
}
#wrap {margin-left:2em !important}
.logue {
border: none !important;
margin: 2em auto !important;
}
.logue:before {content:'' !important}
</style>
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
`;
if (!has(perms, 'read')) {
var e2 = mknod('div');
e2.innerHTML = u2min;
ebi('wrap').insertBefore(e2, QS('#epi+h2'));
}

View File

@@ -4,7 +4,7 @@
# installation:
# cp -pv copyparty.service /etc/systemd/system
# restorecon -vr /etc/systemd/system/copyparty.service
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# firewall-cmd --reload
# systemctl daemon-reload && systemctl enable --now copyparty
#

View File

@@ -1,32 +1,41 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import platform
import time
import sys
import os
import platform
import sys
import time
try:
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
except:
TYPE_CHECKING = False
PY2 = sys.version_info[0] == 2
if PY2:
sys.dont_write_bytecode = True
unicode = unicode
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
else:
unicode = str
WINDOWS = False
if platform.system() == "Windows":
WINDOWS = [int(x) for x in platform.version().split(".")]
WINDOWS: Any = (
[int(x) for x in platform.version().split(".")]
if platform.system() == "Windows"
else False
)
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys"]
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
MACOS = platform.system() == "Darwin"
def get_unixdir():
paths = [
def get_unixdir() -> str:
paths: list[tuple[Callable[..., str], str]] = [
(os.environ.get, "XDG_CONFIG_HOME"),
(os.path.expanduser, "~/.config"),
(os.environ.get, "TMPDIR"),
@@ -43,7 +52,7 @@ def get_unixdir():
continue
p = os.path.normpath(p)
chk(p)
chk(p) # type: ignore
p = os.path.join(p, "copyparty")
if not os.path.isdir(p):
os.mkdir(p)
@@ -56,7 +65,7 @@ def get_unixdir():
class EnvParams(object):
def __init__(self):
def __init__(self) -> None:
self.t0 = time.time()
self.mod = os.path.dirname(os.path.realpath(__file__))
if self.mod.endswith("__init__"):

View File

@@ -8,35 +8,48 @@ __copyright__ = 2019
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
import re
import os
import sys
import time
import shutil
import argparse
import filecmp
import locale
import argparse
import os
import re
import shutil
import sys
import threading
import time
import traceback
from textwrap import dedent
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re, min_ex
from .__init__ import ANYWIN, PY2, VT100, WINDOWS, E, unicode
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .authsrv import re_vol
from .svchub import SvcHub
from .util import IMPLICATIONS, align_tab, ansi_re, min_ex, py_desc, termsize, wrap
HAVE_SSL = True
try:
from types import FrameType
from typing import Any, Optional
except:
pass
try:
HAVE_SSL = True
import ssl
except:
HAVE_SSL = False
printed = ""
printed: list[str] = []
class RiceFormatter(argparse.HelpFormatter):
def _get_help_string(self, action):
def __init__(self, *args: Any, **kwargs: Any) -> None:
if PY2:
kwargs["width"] = termsize()[0]
super(RiceFormatter, self).__init__(*args, **kwargs)
def _get_help_string(self, action: argparse.Action) -> str:
"""
same as ArgumentDefaultsHelpFormatter(HelpFormatter)
except the help += [...] line now has colors
@@ -45,41 +58,68 @@ class RiceFormatter(argparse.HelpFormatter):
if not VT100:
fmt = " (default: %(default)s)"
help = action.help
if "%(default)" not in action.help:
ret = unicode(action.help)
if "%(default)" not in ret:
if action.default is not argparse.SUPPRESS:
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs:
help += fmt
return help
ret += fmt
return ret
def _fill_text(self, text, width, indent):
def _fill_text(self, text: str, width: int, indent: str) -> str:
"""same as RawDescriptionHelpFormatter(HelpFormatter)"""
return "".join(indent + line + "\n" for line in text.splitlines())
def __add_whitespace(self, idx: int, iWSpace: int, text: str) -> str:
return (" " * iWSpace) + text if idx else text
def _split_lines(self, text: str, width: int) -> list[str]:
# https://stackoverflow.com/a/35925919
textRows = text.splitlines()
ptn = re.compile(r"\s*[0-9\-]{0,}\.?\s*")
for idx, line in enumerate(textRows):
search = ptn.search(line)
if not line.strip():
textRows[idx] = " "
elif search:
lWSpace = search.end()
lines = [
self.__add_whitespace(i, lWSpace, x)
for i, x in enumerate(wrap(line, width, width - 1))
]
textRows[idx] = lines
return [item for sublist in textRows for item in sublist]
class Dodge11874(RiceFormatter):
def __init__(self, *args, **kwargs):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs["width"] = 9003
super(Dodge11874, self).__init__(*args, **kwargs)
def lprint(*a, **ka):
global printed
class BasicDodge11874(
argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs["width"] = 9003
super(BasicDodge11874, self).__init__(*args, **kwargs)
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed += txt
def lprint(*a: Any, **ka: Any) -> None:
txt: str = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed.append(txt)
if not VT100:
txt = ansi_re.sub("", txt)
print(txt, **ka)
def warn(msg):
def warn(msg: str) -> None:
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def ensure_locale():
def ensure_locale() -> None:
for x in [
"en_US.UTF-8",
"English_United States.UTF8",
@@ -93,7 +133,7 @@ def ensure_locale():
continue
def ensure_cert():
def ensure_cert() -> None:
"""
the default cert (and the entire TLS support) is only here to enable the
crypto.subtle javascript API, which is necessary due to the webkit guys
@@ -119,8 +159,8 @@ def ensure_cert():
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def configure_ssl_ver(al):
def terse_sslver(txt):
def configure_ssl_ver(al: argparse.Namespace) -> None:
def terse_sslver(txt: str) -> str:
txt = txt.lower()
for c in ["_", "v", "."]:
txt = txt.replace(c, "")
@@ -135,8 +175,8 @@ def configure_ssl_ver(al):
flags = [k for k in ssl.__dict__ if ptn.match(k)]
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"])
avail1 = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail1) + ["all"])
lprint("\navailable ssl/tls versions:\n " + avail)
sys.exit(0)
@@ -157,12 +197,12 @@ def configure_ssl_ver(al):
for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k)
lprint("{}: {:8x} ({})".format(k, num, num))
lprint("{0}: {1:8x} ({1})".format(k, num))
# think i need that beer now
def configure_ssl_ciphers(al):
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en
@@ -186,8 +226,8 @@ def configure_ssl_ciphers(al):
sys.exit(0)
def args_from_cfg(cfg_path):
ret = []
def args_from_cfg(cfg_path: str) -> list[str]:
ret: list[str] = []
skip = False
with open(cfg_path, "rb") as f:
for ln in [x.decode("utf-8").strip() for x in f]:
@@ -212,29 +252,30 @@ def args_from_cfg(cfg_path):
return ret
def sighandler(sig=None, frame=None):
def sighandler(sig: Optional[int] = None, frame: Optional[FrameType] = None) -> None:
msg = [""] * 5
for th in threading.enumerate():
stk = sys._current_frames()[th.ident] # type: ignore
msg.append(str(th))
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
msg.extend(traceback.format_stack(stk))
msg.append("\n")
print("\n".join(msg))
def disable_quickedit():
import ctypes
def disable_quickedit() -> None:
import atexit
import ctypes
from ctypes import wintypes
def ecb(ok, fun, args):
def ecb(ok: bool, fun: Any, args: list[Any]) -> list[Any]:
if not ok:
err = ctypes.get_last_error()
err: int = ctypes.get_last_error() # type: ignore
if err:
raise ctypes.WinError(err)
raise ctypes.WinError(err) # type: ignore
return args
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
k32 = ctypes.WinDLL("kernel32", use_last_error=True) # type: ignore
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
@@ -244,14 +285,14 @@ def disable_quickedit():
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def cmode(out, mode=None):
def cmode(out: bool, mode: Optional[int] = None) -> int:
h = k32.GetStdHandle(-11 if out else -10)
if mode:
return k32.SetConsoleMode(h, mode)
return k32.SetConsoleMode(h, mode) # type: ignore
mode = wintypes.DWORD()
k32.GetConsoleMode(h, ctypes.byref(mode))
return mode.value
cmode = wintypes.DWORD()
k32.GetConsoleMode(h, ctypes.byref(cmode))
return cmode.value
# disable quickedit
mode = orig_in = cmode(False)
@@ -270,7 +311,7 @@ def disable_quickedit():
cmode(True, mode | 4)
def run_argparse(argv, formatter):
def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Namespace:
ap = argparse.ArgumentParser(
formatter_class=formatter,
prog="copyparty",
@@ -294,7 +335,7 @@ def run_argparse(argv, formatter):
-v takes src:dst:\033[33mperm\033[0m1:\033[33mperm\033[0m2:\033[33mperm\033[0mN:\033[32mvolflag\033[0m1:\033[32mvolflag\033[0m2:\033[32mvolflag\033[0mN:...
* "\033[33mperm\033[0m" is "permissions,username1,username2,..."
* "\033[32mvolflag\033[0m" is config flags to set on this volume
list of permissions:
"r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads
@@ -313,7 +354,7 @@ def run_argparse(argv, formatter):
* w (write-only) for everyone
* rw (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured,
current folder will be read/write for everyone
@@ -336,18 +377,18 @@ def run_argparse(argv, formatter):
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
\033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
@@ -358,24 +399,24 @@ def run_argparse(argv, formatter):
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0mthumbnails:
\033[36mdthumb\033[35m disables all thumbnails
\033[36mdvthumb\033[35m disables video thumbnails
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
\033[36mdithumb\033[35m disables image thumbnails
\033[0mclient and ux:
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
\033[36mrobots\033[35m allows indexing by search engines (default)
\033[36mnorobots\033[35m kindly asks search engines to leave
\033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission
@@ -433,14 +474,14 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without -- probably useful on nfs and cow filesystems (zfs, btrfs)")
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
ap2 = ap.add_argument_group('network options')
@@ -477,6 +518,9 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
@@ -547,7 +591,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
@@ -555,6 +599,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
@@ -565,7 +610,8 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=6, help="number of themes installed")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="favicon text [ foreground [ background ] ], set blank to disable")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
@@ -597,7 +643,7 @@ def run_argparse(argv, formatter):
return ret
def main(argv=None):
def main(argv: Optional[list[str]] = None) -> None:
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("rem") # enables colors
@@ -619,7 +665,7 @@ def main(argv=None):
supp = args_from_cfg(v)
argv.extend(supp)
deprecated = []
deprecated: list[tuple[str, str]] = []
for dk, nk in deprecated:
try:
idx = argv.index(dk)
@@ -637,21 +683,28 @@ def main(argv=None):
except:
pass
try:
al = run_argparse(argv, RiceFormatter)
except AssertionError:
al = run_argparse(argv, Dodge11874)
retry = False
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
try:
al = run_argparse(argv, fmtr, retry)
except SystemExit:
raise
except:
retry = True
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
assert al
if WINDOWS and not al.keep_qem:
try:
disable_quickedit()
except:
print("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100:
al.wintitle = ""
nstrs = []
nstrs: list[str] = []
anymod = False
for ostr in al.v or []:
m = re_vol.match(ostr)
@@ -722,7 +775,7 @@ def main(argv=None):
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al, argv, printed).run()
SvcHub(al, argv, "".join(printed)).run()
if __name__ == "__main__":

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 3, 0)
VERSION = (1, 3, 5)
CODENAME = "god dag"
BUILD_DT = (2022, 5, 22)
BUILD_DT = (2022, 7, 6)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

File diff suppressed because it is too large Load Diff

View File

@@ -2,23 +2,30 @@
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec, SYMTIME
from ..util import SYMTIME, fsdec, fsenc
from . import path
try:
from typing import Optional
except:
pass
_ = (path,)
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
def chmod(p, mode):
def chmod(p: str, mode: int) -> None:
return os.chmod(fsenc(p), mode)
def listdir(p="."):
def listdir(p: str = ".") -> list[str]:
return [fsdec(x) for x in os.listdir(fsenc(p))]
def makedirs(name, mode=0o755, exist_ok=True):
def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
bname = fsenc(name)
try:
os.makedirs(bname, mode)
@@ -27,31 +34,33 @@ def makedirs(name, mode=0o755, exist_ok=True):
raise
def mkdir(p, mode=0o755):
def mkdir(p: str, mode: int = 0o755) -> None:
return os.mkdir(fsenc(p), mode)
def rename(src, dst):
def rename(src: str, dst: str) -> None:
return os.rename(fsenc(src), fsenc(dst))
def replace(src, dst):
def replace(src: str, dst: str) -> None:
return os.replace(fsenc(src), fsenc(dst))
def rmdir(p):
def rmdir(p: str) -> None:
return os.rmdir(fsenc(p))
def stat(p):
def stat(p: str) -> os.stat_result:
return os.stat(fsenc(p))
def unlink(p):
def unlink(p: str) -> None:
return os.unlink(fsenc(p))
def utime(p, times=None, follow_symlinks=True):
def utime(
p: str, times: Optional[tuple[float, float]] = None, follow_symlinks: bool = True
) -> None:
if SYMTIME:
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
else:
@@ -60,7 +69,7 @@ def utime(p, times=None, follow_symlinks=True):
if hasattr(os, "lstat"):
def lstat(p):
def lstat(p: str) -> os.stat_result:
return os.lstat(fsenc(p))
else:

View File

@@ -2,43 +2,44 @@
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec, SYMTIME
from ..util import SYMTIME, fsdec, fsenc
def abspath(p):
def abspath(p: str) -> str:
return fsdec(os.path.abspath(fsenc(p)))
def exists(p):
def exists(p: str) -> bool:
return os.path.exists(fsenc(p))
def getmtime(p, follow_symlinks=True):
def getmtime(p: str, follow_symlinks: bool = True) -> float:
if not follow_symlinks and SYMTIME:
return os.lstat(fsenc(p)).st_mtime
else:
return os.path.getmtime(fsenc(p))
def getsize(p):
def getsize(p: str) -> int:
return os.path.getsize(fsenc(p))
def isfile(p):
def isfile(p: str) -> bool:
return os.path.isfile(fsenc(p))
def isdir(p):
def isdir(p: str) -> bool:
return os.path.isdir(fsenc(p))
def islink(p):
def islink(p: str) -> bool:
return os.path.islink(fsenc(p))
def lexists(p):
def lexists(p: str) -> bool:
return os.path.lexists(fsenc(p))
def realpath(p):
def realpath(p: str) -> str:
return fsdec(os.path.realpath(fsenc(p)))

View File

@@ -1,37 +1,56 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import time
import threading
import time
from .broker_util import try_exec
import queue
from .__init__ import TYPE_CHECKING
from .broker_mpw import MpWorker
from .broker_util import try_exec
from .util import mp
if TYPE_CHECKING:
from .svchub import SvcHub
try:
from typing import Any
except:
pass
class MProcess(mp.Process):
def __init__(
self,
q_pend: queue.Queue[tuple[int, str, list[Any]]],
q_yield: queue.Queue[tuple[int, str, list[Any]]],
target: Any,
args: Any,
) -> None:
super(MProcess, self).__init__(target=target, args=args)
self.q_pend = q_pend
self.q_yield = q_yield
class BrokerMp(object):
"""external api; manages MpWorkers"""
def __init__(self, hub):
def __init__(self, hub: "SvcHub") -> None:
self.hub = hub
self.log = hub.log
self.args = hub.args
self.procs = []
self.retpend = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
self.num_workers = self.args.j or mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1):
q_pend = mp.Queue(1)
q_yield = mp.Queue(64)
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
proc.q_pend = q_pend
proc.q_yield = q_yield
proc.clients = {}
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
@@ -42,11 +61,11 @@ class BrokerMp(object):
self.procs.append(proc)
proc.start()
def shutdown(self):
def shutdown(self) -> None:
self.log("broker", "shutting down")
for n, proc in enumerate(self.procs):
thr = threading.Thread(
target=proc.q_pend.put([0, "shutdown", []]),
target=proc.q_pend.put((0, "shutdown", [])),
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
)
thr.start()
@@ -62,12 +81,12 @@ class BrokerMp(object):
procs.pop()
def reload(self):
def reload(self) -> None:
self.log("broker", "reloading")
for _, proc in enumerate(self.procs):
proc.q_pend.put([0, "reload", []])
proc.q_pend.put((0, "reload", []))
def collector(self, proc):
def collector(self, proc: MProcess) -> None:
"""receive message from hub in other process"""
while True:
msg = proc.q_yield.get()
@@ -78,10 +97,7 @@ class BrokerMp(object):
elif dest == "retq":
# response from previous ipc call
with self.retpend_mutex:
retq = self.retpend.pop(retq_id)
retq.put(args)
raise Exception("invalid broker_mp usage")
else:
# new ipc invoking managed service in hub
@@ -93,9 +109,9 @@ class BrokerMp(object):
rv = try_exec(retq_id, obj, *args)
if retq_id:
proc.q_pend.put([retq_id, "retq", rv])
proc.q_pend.put((retq_id, "retq", rv))
def put(self, want_retval, dest, *args):
def say(self, dest: str, *args: Any) -> None:
"""
send message to non-hub component in other process,
returns a Queue object which eventually contains the response if want_retval
@@ -103,7 +119,7 @@ class BrokerMp(object):
"""
if dest == "listen":
for p in self.procs:
p.q_pend.put([0, dest, [args[0], len(self.procs)]])
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
elif dest == "cb_httpsrv_up":
self.hub.cb_httpsrv_up()

View File

@@ -1,20 +1,38 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import sys
import argparse
import signal
import sys
import threading
from .broker_util import ExceptionalQueue
import queue
from .authsrv import AuthSrv
from .broker_util import BrokerCli, ExceptionalQueue
from .httpsrv import HttpSrv
from .util import FAKE_MP
from copyparty.authsrv import AuthSrv
try:
from types import FrameType
from typing import Any, Optional, Union
except:
pass
class MpWorker(object):
class MpWorker(BrokerCli):
"""one single mp instance"""
def __init__(self, q_pend, q_yield, args, n):
def __init__(
self,
q_pend: queue.Queue[tuple[int, str, list[Any]]],
q_yield: queue.Queue[tuple[int, str, list[Any]]],
args: argparse.Namespace,
n: int,
) -> None:
super(MpWorker, self).__init__()
self.q_pend = q_pend
self.q_yield = q_yield
self.args = args
@@ -22,7 +40,7 @@ class MpWorker(object):
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
self.retpend = {}
self.retpend: dict[int, Any] = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
@@ -45,20 +63,20 @@ class MpWorker(object):
thr.start()
thr.join()
def signal_handler(self, sig, frame):
def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None:
# print('k')
pass
def _log_enabled(self, src, msg, c=0):
self.q_yield.put([0, "log", [src, msg, c]])
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
self.q_yield.put((0, "log", [src, msg, c]))
def _log_disabled(self, src, msg, c=0):
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
pass
def logw(self, msg, c=0):
def logw(self, msg: str, c: Union[int, str] = 0) -> None:
self.log("mp{}".format(self.n), msg, c)
def main(self):
def main(self) -> None:
while True:
retq_id, dest, args = self.q_pend.get()
@@ -87,15 +105,14 @@ class MpWorker(object):
else:
raise Exception("what is " + str(dest))
def put(self, want_retval, dest, *args):
if want_retval:
retq = ExceptionalQueue(1)
retq_id = id(retq)
with self.retpend_mutex:
self.retpend[retq_id] = retq
else:
retq = None
retq_id = 0
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
retq = ExceptionalQueue(1)
retq_id = id(retq)
with self.retpend_mutex:
self.retpend[retq_id] = retq
self.q_yield.put([retq_id, dest, args])
self.q_yield.put((retq_id, dest, list(args)))
return retq
def say(self, dest: str, *args: Any) -> None:
self.q_yield.put((0, dest, list(args)))

View File

@@ -3,14 +3,25 @@ from __future__ import print_function, unicode_literals
import threading
from .__init__ import TYPE_CHECKING
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
from .httpsrv import HttpSrv
from .broker_util import ExceptionalQueue, try_exec
if TYPE_CHECKING:
from .svchub import SvcHub
try:
from typing import Any
except:
pass
class BrokerThr(object):
class BrokerThr(BrokerCli):
"""external api; behaves like BrokerMP but using plain threads"""
def __init__(self, hub):
def __init__(self, hub: "SvcHub") -> None:
super(BrokerThr, self).__init__()
self.hub = hub
self.log = hub.log
self.args = hub.args
@@ -23,29 +34,35 @@ class BrokerThr(object):
self.httpsrv = HttpSrv(self, None)
self.reload = self.noop
def shutdown(self):
def shutdown(self) -> None:
# self.log("broker", "shutting down")
self.httpsrv.shutdown()
def noop(self):
def noop(self) -> None:
pass
def put(self, want_retval, dest, *args):
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
# new ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)
rv = try_exec(True, obj, *args)
# pretend we're broker_mp
retq = ExceptionalQueue(1)
retq.put(rv)
return retq
def say(self, dest: str, *args: Any) -> None:
if dest == "listen":
self.httpsrv.listen(args[0], 1)
return
else:
# new ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)
# new ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)
# TODO will deadlock if dest performs another ipc
rv = try_exec(want_retval, obj, *args)
if not want_retval:
return
# pretend we're broker_mp
retq = ExceptionalQueue(1)
retq.put(rv)
return retq
try_exec(False, obj, *args)

View File

@@ -1,17 +1,30 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import traceback
from .util import Pebkac, Queue
from queue import Queue
from .__init__ import TYPE_CHECKING
from .authsrv import AuthSrv
from .util import Pebkac
try:
from typing import Any, Optional, Union
from .util import RootLogger
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class ExceptionalQueue(Queue, object):
def get(self, block=True, timeout=None):
def get(self, block: bool = True, timeout: Optional[float] = None) -> Any:
rv = super(ExceptionalQueue, self).get(block, timeout)
# TODO: how expensive is this?
if isinstance(rv, list):
if rv[0] == "exception":
if rv[1] == "pebkac":
@@ -22,7 +35,26 @@ class ExceptionalQueue(Queue, object):
return rv
def try_exec(want_retval, func, *args):
class BrokerCli(object):
"""
helps mypy understand httpsrv.broker but still fails a few levels deeper,
for example resolving httpconn.* in httpcli -- see lines tagged #mypy404
"""
def __init__(self) -> None:
self.log: RootLogger = None
self.args: argparse.Namespace = None
self.asrv: AuthSrv = None
self.httpsrv: "HttpSrv" = None
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
return ExceptionalQueue(1)
def say(self, dest: str, *args: Any) -> None:
pass
def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:
try:
return func(*args)

177
copyparty/fsutil.py Normal file
View File

@@ -0,0 +1,177 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import ctypes
import re
import os
import time
from .__init__ import ANYWIN, MACOS
from .authsrv import AXS, VFS
from .util import chkcmd, min_ex
try:
from typing import Any, Optional, Union
from .util import RootLogger
except:
pass
class Fstab(object):
def __init__(self, log: RootLogger):
self.log_func = log
self.trusted = False
self.tab: Optional[VFS] = None
self.cache: dict[str, str] = {}
self.age = 0.0
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("fstab", msg + "\033[K", c)
def get(self, path: str) -> str:
if len(self.cache) > 9000:
self.age = time.time()
self.tab = None
self.cache = {}
fs = "ext4"
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
if ANYWIN:
fs = "vfat" # can smb do sparse files? gonna guess no
try:
# good enough
disk = path.split(":", 1)[0]
disk = "{}:\\".format(disk).lower()
assert len(disk) == 3
path = disk
except:
self.log(msg.format(path, fs, min_ex()), 3)
return fs
path = path.lstrip("/")
try:
return self.cache[path]
except:
pass
try:
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
except:
self.log(msg.format(path, fs, min_ex()), 3)
fs = fs.lower()
self.cache[path] = fs
self.log("found {} at {}".format(fs, path))
return fs
def build_tab(self) -> None:
self.log("building tab")
sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
if MACOS:
sptn = r"^.*? on (.*) \(([^ ]+), .*"
ptn = re.compile(sptn)
so, _ = chkcmd(["mount"])
tab1: list[tuple[str, str]] = []
for ln in so.split("\n"):
m = ptn.match(ln)
if not m:
continue
zs1, zs2 = m.groups()
tab1.append((str(zs1), str(zs2)))
tab1.sort(key=lambda x: (len(x[0]), x[0]))
path1, fs1 = tab1[0]
tab = VFS(self.log_func, fs1, path1, AXS(), {})
for path, fs in tab1[1:]:
tab.add(fs, path.lstrip("/"))
self.tab = tab
def relabel(self, path: str, nval: str) -> None:
assert self.tab
self.cache = {}
path = path.lstrip("/")
ptn = re.compile(r"^[^\\/]*")
vn, rem = self.tab._find(path)
if not self.trusted:
# no mtab access; have to build as we go
if "/" in rem:
self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0]))
if rem:
self.tab.add(nval, path)
else:
vn.realpath = nval
return
visit = [vn]
while visit:
vn = visit.pop()
vn.realpath = ptn.sub(nval, vn.realpath)
visit.extend(list(vn.nodes.values()))
def get_unix(self, path: str) -> str:
if not self.tab:
try:
self.build_tab()
self.trusted = True
except:
# prisonparty or other restrictive environment
self.log("failed to build tab:\n{}".format(min_ex()), 3)
self.tab = VFS(self.log_func, "idk", "/", AXS(), {})
self.trusted = False
assert self.tab
ret = self.tab._find(path)[0]
if self.trusted or path == ret.vpath:
return ret.realpath.split("/")[0]
else:
return "idk"
def get_w32(self, path: str) -> str:
# list mountpoints: fsutil fsinfo drives
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPDWORD, LPWSTR, MAX_PATH
def echk(rc: int, fun: Any, args: Any) -> None:
if not rc:
raise ctypes.WinError(ctypes.get_last_error())
return None
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
k32.GetVolumeInformationW.errcheck = echk
k32.GetVolumeInformationW.restype = BOOL
k32.GetVolumeInformationW.argtypes = (
LPCWSTR,
LPWSTR,
DWORD,
LPDWORD,
LPDWORD,
LPDWORD,
LPWSTR,
DWORD,
)
bvolname = ctypes.create_unicode_buffer(MAX_PATH + 1)
bfstype = ctypes.create_unicode_buffer(MAX_PATH + 1)
serial = DWORD()
max_name_len = DWORD()
fs_flags = DWORD()
k32.GetVolumeInformationW(
path,
bvolname,
ctypes.sizeof(bvolname),
ctypes.byref(serial),
ctypes.byref(max_name_len),
ctypes.byref(fs_flags),
bfstype,
ctypes.sizeof(bfstype),
)
return bfstype.value

View File

@@ -1,16 +1,23 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import stat
import time
import argparse
import logging
import os
import stat
import sys
import threading
import time
from .__init__ import E, PY2
from .util import Pebkac, fsenc, exclude_dotfiles
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.log import config_logging
from pyftpdlib.servers import FTPServer
from .__init__ import PY2, TYPE_CHECKING, E
from .bos import bos
from .util import Pebkac, exclude_dotfiles, fsenc
try:
from pyftpdlib.ioloop import IOLoop
@@ -20,65 +27,64 @@ except ImportError:
sys.path.append(p)
from pyftpdlib.ioloop import IOLoop
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.log import config_logging
if TYPE_CHECKING:
from .svchub import SvcHub
try:
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .svchub import SvcHub
except ImportError:
import typing
from typing import Any, Optional
except:
pass
class FtpAuth(DummyAuthorizer):
def __init__(self):
def __init__(self, hub: "SvcHub") -> None:
super(FtpAuth, self).__init__()
self.hub = None # type: SvcHub
self.hub = hub
def validate_authentication(self, username, password, handler):
def validate_authentication(
self, username: str, password: str, handler: Any
) -> None:
asrv = self.hub.asrv
if username == "anonymous":
password = ""
uname = "*"
if password:
uname = asrv.iacct.get(password, None)
uname = asrv.iacct.get(password, "")
handler.username = uname
if password and not uname:
raise AuthenticationFailed("Authentication failed.")
def get_home_dir(self, username):
def get_home_dir(self, username: str) -> str:
return "/"
def has_user(self, username):
def has_user(self, username: str) -> bool:
asrv = self.hub.asrv
return username in asrv.acct
def has_perm(self, username, perm, path=None):
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
return True # handled at filesystem layer
def get_perms(self, username):
def get_perms(self, username: str) -> str:
return "elradfmwMT"
def get_msg_login(self, username):
def get_msg_login(self, username: str) -> str:
return "sup {}".format(username)
def get_msg_quit(self, username):
def get_msg_quit(self, username: str) -> str:
return "cya"
class FtpFs(AbstractedFS):
def __init__(self, root, cmd_channel):
def __init__(
self, root: str, cmd_channel: Any
) -> None: # pylint: disable=super-init-not-called
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
self.hub = cmd_channel.hub # type: SvcHub
self.hub: "SvcHub" = cmd_channel.hub
self.args = cmd_channel.args
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
@@ -89,7 +95,14 @@ class FtpFs(AbstractedFS):
self.listdirinfo = self.listdir
self.chdir(".")
def v2a(self, vpath, r=False, w=False, m=False, d=False):
def v2a(
self,
vpath: str,
r: bool = False,
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
try:
vpath = vpath.replace("\\", "/").lstrip("/")
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
@@ -100,25 +113,32 @@ class FtpFs(AbstractedFS):
except Pebkac as ex:
raise FilesystemError(str(ex))
def rv2a(self, vpath, r=False, w=False, m=False, d=False):
def rv2a(
self,
vpath: str,
r: bool = False,
w: bool = False,
m: bool = False,
d: bool = False,
) -> str:
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
def ftp2fs(self, ftppath):
def ftp2fs(self, ftppath: str) -> str:
# return self.v2a(ftppath)
return ftppath # self.cwd must be vpath
def fs2ftp(self, fspath):
def fs2ftp(self, fspath: str) -> str:
# raise NotImplementedError()
return fspath
def validpath(self, path):
def validpath(self, path: str) -> bool:
if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"):
raise FilesystemError("access to this file is forbidden")
return True
def open(self, filename, mode):
def open(self, filename: str, mode: str) -> typing.IO[Any]:
r = "r" in mode
w = "w" in mode or "a" in mode or "+" in mode
@@ -129,24 +149,24 @@ class FtpFs(AbstractedFS):
self.validpath(ap)
return open(fsenc(ap), mode)
def chdir(self, path):
def chdir(self, path: str) -> None:
self.cwd = join(self.cwd, path)
x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
def mkdir(self, path):
def mkdir(self, path: str) -> None:
ap = self.rv2a(path, w=True)
bos.mkdir(ap)
def listdir(self, path):
def listdir(self, path: str) -> list[str]:
vpath = join(self.cwd, path).lstrip("/")
try:
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vfs.ls(
fsroot, vfs_ls1, vfs_virt = vfs.ls(
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls = [x[0] for x in vfs_ls1]
vfs_ls.extend(vfs_virt.keys())
if not self.args.ed:
@@ -154,7 +174,7 @@ class FtpFs(AbstractedFS):
vfs_ls.sort()
return vfs_ls
except Exception as ex:
except:
if vpath:
# display write-only folders as empty
return []
@@ -163,44 +183,39 @@ class FtpFs(AbstractedFS):
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
return list(sorted(list(r.keys())))
def rmdir(self, path):
def rmdir(self, path: str) -> None:
ap = self.rv2a(path, d=True)
bos.rmdir(ap)
def remove(self, path):
def remove(self, path: str) -> None:
if self.args.no_del:
raise FilesystemError("the delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/")
x = self.hub.broker.put(
True, "up2k.handle_rm", self.uname, self.h.remote_ip, [vp]
)
try:
x.get()
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp])
except Exception as ex:
raise FilesystemError(str(ex))
def rename(self, src, dst):
def rename(self, src: str, dst: str) -> None:
if not self.can_move:
raise FilesystemError("not allowed for user " + self.h.username)
if self.args.no_mv:
m = "the rename/move feature is disabled in server config"
raise FilesystemError(m)
t = "the rename/move feature is disabled in server config"
raise FilesystemError(t)
svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/")
x = self.hub.broker.put(True, "up2k.handle_mv", self.uname, svp, dvp)
try:
x.get()
self.hub.up2k.handle_mv(self.uname, svp, dvp)
except Exception as ex:
raise FilesystemError(str(ex))
def chmod(self, path, mode):
def chmod(self, path: str, mode: str) -> None:
pass
def stat(self, path):
def stat(self, path: str) -> os.stat_result:
try:
ap = self.rv2a(path, r=True)
return bos.stat(ap)
@@ -212,64 +227,70 @@ class FtpFs(AbstractedFS):
return st
def utime(self, path, timeval):
def utime(self, path: str, timeval: float) -> None:
ap = self.rv2a(path, w=True)
return bos.utime(ap, (timeval, timeval))
def lstat(self, path):
def lstat(self, path: str) -> os.stat_result:
ap = self.rv2a(path)
return bos.lstat(ap)
def isfile(self, path):
def isfile(self, path: str) -> bool:
st = self.stat(path)
return stat.S_ISREG(st.st_mode)
def islink(self, path):
def islink(self, path: str) -> bool:
ap = self.rv2a(path)
return bos.path.islink(ap)
def isdir(self, path):
def isdir(self, path: str) -> bool:
try:
st = self.stat(path)
return stat.S_ISDIR(st.st_mode)
except:
return True
def getsize(self, path):
def getsize(self, path: str) -> int:
ap = self.rv2a(path)
return bos.path.getsize(ap)
def getmtime(self, path):
def getmtime(self, path: str) -> float:
ap = self.rv2a(path)
return bos.path.getmtime(ap)
def realpath(self, path):
def realpath(self, path: str) -> str:
return path
def lexists(self, path):
def lexists(self, path: str) -> bool:
ap = self.rv2a(path)
return bos.path.lexists(ap)
def get_user_by_uid(self, uid):
def get_user_by_uid(self, uid: int) -> str:
return "root"
def get_group_by_uid(self, gid):
def get_group_by_uid(self, gid: int) -> str:
return "root"
class FtpHandler(FTPHandler):
abstracted_fs = FtpFs
hub: "SvcHub" = None
args: argparse.Namespace = None
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
self.hub: "SvcHub" = FtpHandler.hub
self.args: argparse.Namespace = FtpHandler.args
def __init__(self, conn, server, ioloop=None):
if PY2:
FTPHandler.__init__(self, conn, server, ioloop)
else:
super(FtpHandler, self).__init__(conn, server, ioloop)
# abspath->vpath mapping to resolve log_transfer paths
self.vfs_map = {}
self.vfs_map: dict[str, str] = {}
def ftp_STOR(self, file, mode="w"):
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
# Optional[str]
vp = join(self.fs.cwd, file).lstrip("/")
ap = self.fs.v2a(vp)
self.vfs_map[ap] = vp
@@ -278,7 +299,16 @@ class FtpHandler(FTPHandler):
# print("ftp_STOR: {} {} OK".format(vp, mode))
return ret
def log_transfer(self, cmd, filename, receive, completed, elapsed, bytes):
def log_transfer(
self,
cmd: str,
filename: bytes,
receive: bool,
completed: bool,
elapsed: float,
bytes: int,
) -> Any:
# None
ap = filename.decode("utf-8", "replace")
vp = self.vfs_map.pop(ap, None)
# print("xfer_end: {} => {}".format(ap, vp))
@@ -286,9 +316,7 @@ class FtpHandler(FTPHandler):
vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
vfs, rem = vfs.get_dbv(rem)
self.hub.broker.put(
False,
"up2k.hash_file",
self.hub.up2k.hash_file(
vfs.realpath,
vfs.flags,
rem,
@@ -313,7 +341,7 @@ except:
class Ftpd(object):
def __init__(self, hub):
def __init__(self, hub: "SvcHub") -> None:
self.hub = hub
self.args = hub.args
@@ -322,24 +350,23 @@ class Ftpd(object):
hs.append([FtpHandler, self.args.ftp])
if self.args.ftps:
try:
h = SftpHandler
h1 = SftpHandler
except:
m = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
print(m.format(sys.executable))
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
print(t.format(sys.executable))
sys.exit(1)
h.certfile = os.path.join(E.cfg, "cert.pem")
h.tls_control_required = True
h.tls_data_required = True
h1.certfile = os.path.join(E.cfg, "cert.pem")
h1.tls_control_required = True
h1.tls_data_required = True
hs.append([h, self.args.ftps])
hs.append([h1, self.args.ftps])
for h in hs:
h, lp = h
h.hub = hub
h.args = hub.args
h.authorizer = FtpAuth()
h.authorizer.hub = hub
for h_lp in hs:
h2, lp = h_lp
h2.hub = hub
h2.args = hub.args
h2.authorizer = FtpAuth(hub)
if self.args.ftp_pr:
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
@@ -351,10 +378,10 @@ class Ftpd(object):
else:
p1 += d + 1
h.passive_ports = list(range(p1, p2 + 1))
h2.passive_ports = list(range(p1, p2 + 1))
if self.args.ftp_nat:
h.masquerade_address = self.args.ftp_nat
h2.masquerade_address = self.args.ftp_nat
if self.args.ftp_dbg:
config_logging(level=logging.DEBUG)
@@ -364,11 +391,11 @@ class Ftpd(object):
for h, lp in hs:
FTPServer((ip, int(lp)), h, ioloop)
t = threading.Thread(target=ioloop.loop)
t.daemon = True
t.start()
thr = threading.Thread(target=ioloop.loop)
thr.daemon = True
thr.start()
def join(p1, p2):
def join(p1: str, p2: str) -> str:
w = os.path.join(p1, p2.replace("\\", "/"))
return os.path.normpath(w).replace("\\", "/")

File diff suppressed because it is too large Load Diff

View File

@@ -1,25 +1,36 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import argparse # typechk
import os
import time
import re
import socket
import threading # typechk
import time
HAVE_SSL = True
try:
HAVE_SSL = True
import ssl
except:
HAVE_SSL = False
from .__init__ import E
from .util import Unrecv
from . import util as Util
from .__init__ import TYPE_CHECKING, E
from .authsrv import AuthSrv # typechk
from .httpcli import HttpCli
from .u2idx import U2idx
from .ico import Ico
from .mtag import HAVE_FFMPEG
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL, HAVE_VIPS
from .mtag import HAVE_FFMPEG
from .ico import Ico
from .u2idx import U2idx
try:
from typing import Optional, Pattern, Union
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class HttpConn(object):
@@ -28,31 +39,37 @@ class HttpConn(object):
creates an HttpCli for each request (Connection: Keep-Alive)
"""
def __init__(self, sck, addr, hsrv):
def __init__(
self, sck: socket.socket, addr: tuple[str, int], hsrv: "HttpSrv"
) -> None:
self.s = sck
self.sr: Optional[Util._Unrecv] = None
self.addr = addr
self.hsrv = hsrv
self.mutex = hsrv.mutex
self.args = hsrv.args
self.asrv = hsrv.asrv
self.mutex: threading.Lock = hsrv.mutex # mypy404
self.args: argparse.Namespace = hsrv.args # mypy404
self.asrv: AuthSrv = hsrv.asrv # mypy404
self.cert_path = hsrv.cert_path
self.u2fh = hsrv.u2fh
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv) if enth else None
self.ico = Ico(self.args)
self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None # mypy404
self.ico: Ico = Ico(self.args) # mypy404
self.t0 = time.time()
self.t0: float = time.time() # mypy404
self.stopping = False
self.nreq = 0
self.nbyte = 0
self.u2idx = None
self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
self.nreq: int = 0 # mypy404
self.nbyte: int = 0 # mypy404
self.u2idx: Optional[U2idx] = None
self.log_func: Util.RootLogger = hsrv.log # mypy404
self.log_src: str = "httpconn" # mypy404
self.lf_url: Optional[Pattern[str]] = (
re.compile(self.args.lf_url) if self.args.lf_url else None
) # mypy404
self.set_rproxy()
def shutdown(self):
def shutdown(self) -> None:
self.stopping = True
try:
self.s.shutdown(socket.SHUT_RDWR)
@@ -60,7 +77,7 @@ class HttpConn(object):
except:
pass
def set_rproxy(self, ip=None):
def set_rproxy(self, ip: Optional[str] = None) -> str:
if ip is None:
color = 36
ip = self.addr[0]
@@ -73,35 +90,37 @@ class HttpConn(object):
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src
def respath(self, res_name):
def respath(self, res_name: str) -> str:
return os.path.join(E.mod, "web", res_name)
def log(self, msg, c=0):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func(self.log_src, msg, c)
def get_u2idx(self):
def get_u2idx(self) -> U2idx:
# one u2idx per tcp connection;
# sqlite3 fully parallelizes under python threads
if not self.u2idx:
self.u2idx = U2idx(self)
return self.u2idx
def _detect_https(self):
def _detect_https(self) -> bool:
method = None
if self.cert_path:
try:
method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout:
return
return False
except AttributeError:
# jython does not support msg_peek; forget about https
method = self.s.recv(4)
self.sr = Unrecv(self.s)
self.sr = Util.Unrecv(self.s, self.log)
self.sr.buf = method
# jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now
if not getattr(self.s, "sendall", None):
self.s.sendall = self.s.send
self.s.sendall = self.s.send # type: ignore
if len(method) != 4:
err = "need at least 4 bytes in the first packet; got {}".format(
@@ -111,17 +130,18 @@ class HttpConn(object):
self.log(err)
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return
return False
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
def run(self):
def run(self) -> None:
self.sr = None
if self.args.https_only:
is_https = True
elif self.args.http_only or not HAVE_SSL:
is_https = False
else:
# raise Exception("asdf")
is_https = self._detect_https()
if is_https:
@@ -150,14 +170,15 @@ class HttpConn(object):
self.s = ctx.wrap_socket(self.s, server_side=True)
msg = [
"\033[1;3{:d}m{}".format(c, s)
for c, s in zip([0, 5, 0], self.s.cipher())
for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
]
self.log(" ".join(msg) + "\033[0m")
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
overlap = [y[::-1] for y in self.s.shared_ciphers()]
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
self.log("\n".join(lines))
ciphers = self.s.shared_ciphers()
assert ciphers
overlap = [str(y[::-1]) for y in ciphers]
self.log("TLS cipher overlap:" + "\n".join(overlap))
for k, v in [
["compression", self.s.compression()],
["ALPN proto", self.s.selected_alpn_protocol()],
@@ -182,7 +203,7 @@ class HttpConn(object):
return
if not self.sr:
self.sr = Unrecv(self.s)
self.sr = Util.Unrecv(self.s, self.log)
while not self.stopping:
self.nreq += 1

View File

@@ -1,13 +1,15 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import time
import math
import base64
import math
import os
import socket
import sys
import threading
import time
import queue
try:
import jinja2
@@ -26,15 +28,18 @@ except ImportError:
)
sys.exit(1)
from .__init__ import E, PY2, MACOS
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
from .__init__ import MACOS, TYPE_CHECKING, E
from .bos import bos
from .httpconn import HttpConn
from .util import FHC, min_ex, spack, start_log_thrs, start_stackmon
if PY2:
import Queue as queue
else:
import queue
if TYPE_CHECKING:
from .broker_util import BrokerCli
try:
from typing import Any, Optional
except:
pass
class HttpSrv(object):
@@ -43,7 +48,7 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads)
"""
def __init__(self, broker, nid):
def __init__(self, broker: "BrokerCli", nid: Optional[int]) -> None:
self.broker = broker
self.nid = nid
self.args = broker.args
@@ -58,29 +63,25 @@ class HttpSrv(object):
self.tp_nthr = 0 # actual
self.tp_ncli = 0 # fading
self.tp_time = None # latest worker collect
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
self.t_periodic = None
self.tp_time = 0.0 # latest worker collect
self.tp_q: Optional[queue.LifoQueue[Any]] = (
None if self.args.no_htp else queue.LifoQueue()
)
self.t_periodic: Optional[threading.Thread] = None
self.u2fh = FHC()
self.srvs = []
self.srvs: list[socket.socket] = []
self.ncli = 0 # exact
self.clients = {} # laggy
self.clients: set[HttpConn] = set() # laggy
self.nclimax = 0
self.cb_ts = 0
self.cb_v = 0
try:
x = self.broker.put(True, "thumbsrv.getcfg")
self.th_cfg = x.get()
except:
pass
self.cb_ts = 0.0
self.cb_v = ""
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.j2 = {
x: env.get_template(x + ".html")
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
for x in ["splash", "browser", "browser2", "msg", "md", "mde", "cf"]
}
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
@@ -88,7 +89,7 @@ class HttpSrv(object):
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
self.cert_path = None
self.cert_path = ""
if self.tp_q:
self.start_threads(4)
@@ -100,7 +101,19 @@ class HttpSrv(object):
if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid)
def start_threads(self, n):
self.th_cfg: dict[str, Any] = {}
t = threading.Thread(target=self.post_init)
t.daemon = True
t.start()
def post_init(self) -> None:
try:
x = self.broker.ask("thumbsrv.getcfg")
self.th_cfg = x.get()
except:
pass
def start_threads(self, n: int) -> None:
self.tp_nthr += n
if self.args.log_htp:
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
@@ -113,15 +126,16 @@ class HttpSrv(object):
thr.daemon = True
thr.start()
def stop_threads(self, n):
def stop_threads(self, n: int) -> None:
self.tp_nthr -= n
if self.args.log_htp:
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
assert self.tp_q
for _ in range(n):
self.tp_q.put(None)
def periodic(self):
def periodic(self) -> None:
while True:
time.sleep(2 if self.tp_ncli or self.ncli else 10)
with self.mutex:
@@ -135,7 +149,7 @@ class HttpSrv(object):
self.t_periodic = None
return
def listen(self, sck, nlisteners):
def listen(self, sck: socket.socket, nlisteners: int) -> None:
ip, port = sck.getsockname()
self.srvs.append(sck)
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
@@ -147,15 +161,15 @@ class HttpSrv(object):
t.daemon = True
t.start()
def thr_listen(self, srv_sck):
def thr_listen(self, srv_sck: socket.socket) -> None:
"""listens on a shared tcp server"""
ip, port = srv_sck.getsockname()
fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg)
def fun():
self.broker.put(False, "cb_httpsrv_up")
def fun() -> None:
self.broker.say("cb_httpsrv_up")
threading.Thread(target=fun).start()
@@ -179,21 +193,21 @@ class HttpSrv(object):
continue
if self.args.log_conn:
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, ip, port % 8, port
)
self.log("%s %s" % addr, m, c="1;30")
self.log("%s %s" % addr, t, c="1;30")
self.accept(sck, addr)
def accept(self, sck, addr):
def accept(self, sck: socket.socket, addr: tuple[str, int]) -> None:
"""takes an incoming tcp connection and creates a thread to handle it"""
now = time.time()
if now - (self.tp_time or now) > 300:
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
self.tp_time = None
t = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
self.log(self.name, t.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
self.tp_time = 0
self.tp_q = None
with self.mutex:
@@ -203,10 +217,10 @@ class HttpSrv(object):
if self.nid:
name += "-{}".format(self.nid)
t = threading.Thread(target=self.periodic, name=name)
self.t_periodic = t
t.daemon = True
t.start()
thr = threading.Thread(target=self.periodic, name=name)
self.t_periodic = thr
thr.daemon = True
thr.start()
if self.tp_q:
self.tp_time = self.tp_time or now
@@ -218,8 +232,8 @@ class HttpSrv(object):
return
if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1)
t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, t, 1)
thr = threading.Thread(
target=self.thr_client,
@@ -229,14 +243,15 @@ class HttpSrv(object):
thr.daemon = True
thr.start()
def thr_poolw(self):
def thr_poolw(self) -> None:
assert self.tp_q
while True:
task = self.tp_q.get()
if not task:
break
with self.mutex:
self.tp_time = None
self.tp_time = 0
try:
sck, addr = task
@@ -249,7 +264,7 @@ class HttpSrv(object):
except:
self.log(self.name, "thr_client: " + min_ex(), 3)
def shutdown(self):
def shutdown(self) -> None:
self.stopping = True
for srv in self.srvs:
try:
@@ -257,7 +272,7 @@ class HttpSrv(object):
except:
pass
clients = list(self.clients.keys())
clients = list(self.clients)
for cli in clients:
try:
cli.shutdown()
@@ -273,13 +288,13 @@ class HttpSrv(object):
self.log(self.name, "ok bye")
def thr_client(self, sck, addr):
def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None:
"""thread managing one tcp client"""
sck.settimeout(120)
cli = HttpConn(sck, addr, self)
with self.mutex:
self.clients[cli] = 0
self.clients.add(cli)
fno = sck.fileno()
try:
@@ -322,10 +337,10 @@ class HttpSrv(object):
raise
finally:
with self.mutex:
del self.clients[cli]
self.clients.remove(cli)
self.ncli -= 1
def cachebuster(self):
def cachebuster(self) -> str:
if time.time() - self.cb_ts < 1:
return self.cb_v

View File

@@ -1,28 +1,28 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import hashlib
import argparse # typechk
import colorsys
import hashlib
from .__init__ import PY2
class Ico(object):
def __init__(self, args):
def __init__(self, args: argparse.Namespace) -> None:
self.args = args
def get(self, ext, as_thumb):
def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break"""
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
zb = hashlib.md5(ext.encode("utf-8")).digest()[:2]
if PY2:
h = [ord(x) for x in h]
zb = [ord(x) for x in zb]
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
c = list(c1) + list(c2)
c = [int(x * 255) for x in c]
c = "".join(["{:02x}".format(x) for x in c])
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
ci = [int(x * 255) for x in list(c1) + list(c2)]
c = "".join(["{:02x}".format(x) for x in ci])
h = 30
if not self.args.th_no_crop and as_thumb:
@@ -37,6 +37,6 @@ class Ico(object):
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg>
"""
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
svg = svg.format(h, c[:6], c[6:], ext)
return ["image/svg+xml", svg]
return "image/svg+xml", svg.encode("utf-8")

View File

@@ -1,18 +1,26 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import argparse
import json
import os
import shutil
import subprocess as sp
import sys
from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, runcmd, retchk, REKOBO_LKEY
from .bos import bos
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
try:
from typing import Any, Union
from .util import RootLogger
except:
pass
def have_ff(cmd):
def have_ff(cmd: str) -> bool:
if PY2:
print("# checking {}".format(cmd))
cmd = (cmd + " -version").encode("ascii").split(b" ")
@@ -30,12 +38,13 @@ HAVE_FFPROBE = have_ff("ffprobe")
class MParser(object):
def __init__(self, cmdline):
def __init__(self, cmdline: str) -> None:
self.tag, args = cmdline.split("=", 1)
self.tags = self.tag.split(",")
self.timeout = 30
self.force = False
self.kill = "t" # tree; all children recursively
self.audio = "y"
self.ext = []
@@ -58,6 +67,10 @@ class MParser(object):
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
continue
if arg.startswith("k"):
self.kill = arg[1:] # [t]ree [m]ain [n]one
continue
if arg == "f":
self.force = True
continue
@@ -73,7 +86,9 @@ class MParser(object):
raise Exception()
def ffprobe(abspath, timeout=10):
def ffprobe(
abspath: str, timeout: int = 10
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
cmd = [
b"ffprobe",
b"-hide_banner",
@@ -87,15 +102,15 @@ def ffprobe(abspath, timeout=10):
return parse_ffprobe(so)
def parse_ffprobe(txt):
def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
"""ffprobe -show_format -show_streams"""
streams = []
fmt = {}
g = None
g = {}
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
try:
k, v = ln.split("=", 1)
g[k] = v
sk, sv = ln.split("=", 1)
g[sk] = sv
continue
except:
pass
@@ -109,8 +124,8 @@ def parse_ffprobe(txt):
fmt = g
streams = [fmt] + streams
ret = {} # processed
md = {} # raw tags
ret: dict[str, Any] = {} # processed
md: dict[str, list[Any]] = {} # raw tags
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
@@ -161,49 +176,52 @@ def parse_ffprobe(txt):
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
for sk, rk in kvm:
v = strm.get(sk)
if v is None:
v1 = strm.get(sk)
if v1 is None:
continue
if rk.startswith("."):
try:
v = float(v)
zf = float(v1)
v2 = ret.get(rk)
if v2 is None or v > v2:
ret[rk] = v
if v2 is None or zf > v2:
ret[rk] = zf
except:
# sqlite doesnt care but the code below does
if v not in ["N/A"]:
ret[rk] = v
if v1 not in ["N/A"]:
ret[rk] = v1
else:
ret[rk] = v
ret[rk] = v1
if ret.get("vc") == "ansi": # shellscript
return {}, {}
for strm in streams:
for k, v in strm.items():
if not k.startswith("TAG:"):
for sk, sv in strm.items():
if not sk.startswith("TAG:"):
continue
k = k[4:].strip()
v = v.strip()
if k and v and k not in md:
md[k] = [v]
sk = sk[4:].strip()
sv = sv.strip()
if sk and sv and sk not in md:
md[sk] = [sv]
for k in [".q", ".vq", ".aq"]:
if k in ret:
ret[k] /= 1000 # bit_rate=320000
for sk in [".q", ".vq", ".aq"]:
if sk in ret:
ret[sk] /= 1000 # bit_rate=320000
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
if k in ret:
ret[k] = int(ret[k])
for sk in [".q", ".vq", ".aq", ".resw", ".resh"]:
if sk in ret:
ret[sk] = int(ret[sk])
if ".fps" in ret:
fps = ret[".fps"]
if "/" in fps:
fa, fb = fps.split("/")
fps = int(fa) * 1.0 / int(fb)
try:
fps = int(fa) * 1.0 / int(fb)
except:
fps = 9001
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
ret[".fps"] = round(fps, 3)
@@ -219,13 +237,13 @@ def parse_ffprobe(txt):
if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
ret = {k: [0, v] for k, v in ret.items()}
zd = {k: (0, v) for k, v in ret.items()}
return ret, md
return zd, md
class MTag(object):
def __init__(self, log_func, args):
def __init__(self, log_func: RootLogger, args: argparse.Namespace) -> None:
self.log_func = log_func
self.args = args
self.usable = True
@@ -242,7 +260,7 @@ class MTag(object):
if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
import mutagen # noqa: F401 # pylint: disable=unused-import,import-outside-toplevel
except:
self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe"
@@ -339,31 +357,33 @@ class MTag(object):
}
# self.get = self.compare
def log(self, msg, c=0):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("mtag", msg, c)
def normalize_tags(self, ret, md):
for k, v in dict(md).items():
if not v:
def normalize_tags(
self, parser_output: dict[str, tuple[int, Any]], md: dict[str, list[Any]]
) -> dict[str, Union[str, float]]:
for sk, tv in dict(md).items():
if not tv:
continue
k = k.lower().split("::")[0].strip()
mk = self.rmap.get(k)
if not mk:
sk = sk.lower().split("::")[0].strip()
key_mapping = self.rmap.get(sk)
if not key_mapping:
continue
pref, mk = mk
if mk not in ret or ret[mk][0] > pref:
ret[mk] = [pref, v[0]]
priority, alias = key_mapping
if alias not in parser_output or parser_output[alias][0] > priority:
parser_output[alias] = (priority, tv[0])
# take first value
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
# take first value (lowest priority / most preferred)
ret = {sk: unicode(tv[1]).strip() for sk, tv in parser_output.items()}
# track 3/7 => track 3
for k, v in ret.items():
if k[0] == ".":
v = v.split("/")[0].strip().lstrip("0")
ret[k] = v or 0
for sk, tv in ret.items():
if sk[0] == ".":
sv = str(tv).split("/")[0].strip().lstrip("0")
ret[sk] = sv or 0
# normalize key notation to rkeobo
okey = ret.get("key")
@@ -373,7 +393,7 @@ class MTag(object):
return ret
def compare(self, abspath):
def compare(self, abspath: str) -> dict[str, Union[str, float]]:
if abspath.endswith(".au"):
return {}
@@ -411,7 +431,7 @@ class MTag(object):
return r1
def get_mutagen(self, abspath):
def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
if not bos.path.isfile(abspath):
return {}
@@ -421,11 +441,11 @@ class MTag(object):
md = mutagen.File(fsenc(abspath), easy=True)
if not md.info.length and not md.info.codec:
raise Exception()
except Exception as ex:
except:
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = bos.path.getsize(abspath)
ret = {".q": [0, int((sz / md.info.length) / 128)]}
ret = {".q": (0, int((sz / md.info.length) / 128))}
for attr, k, norm in [
["codec", "ac", unicode],
@@ -456,24 +476,24 @@ class MTag(object):
if k == "ac" and v.startswith("mp4a.40."):
v = "aac"
ret[k] = [0, norm(v)]
ret[k] = (0, norm(v))
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
def get_ffprobe(self, abspath: str) -> dict[str, Union[str, float]]:
if not bos.path.isfile(abspath):
return {}
ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath):
def get_bin(self, parsers: dict[str, MParser], abspath: str) -> dict[str, Any]:
if not bos.path.isfile(abspath):
return {}
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(pypath))
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(zsl))
env = os.environ.copy()
env["PYTHONPATH"] = pypath
@@ -484,16 +504,16 @@ class MTag(object):
if parser.bin.endswith(".py"):
cmd = [sys.executable] + cmd
args = {"env": env, "timeout": parser.timeout}
args = {"env": env, "timeout": parser.timeout, "kill": parser.kill}
if WINDOWS:
args["creationflags"] = 0x4000
else:
cmd = ["nice"] + cmd
cmd = [fsenc(x) for x in cmd]
rc, v, err = runcmd(cmd, **args)
retchk(rc, cmd, err, self.log, 5)
bcmd = [fsenc(x) for x in cmd]
rc, v, err = runcmd(bcmd, **args) # type: ignore
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
v = v.strip()
if not v:
continue
@@ -501,11 +521,13 @@ class MTag(object):
if "," not in tagname:
ret[tagname] = v
else:
v = json.loads(v)
zj = json.loads(v)
for tag in tagname.split(","):
if tag and tag in v:
ret[tag] = v[tag]
if tag and tag in zj:
ret[tag] = zj[tag]
except:
pass
if self.args.mtag_v:
t = "mtag error: tagname {}, parser {}, file {} => {}"
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
return ret

View File

@@ -4,20 +4,29 @@ from __future__ import print_function, unicode_literals
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc, min_ex
from queue import Queue
from .bos import bos
from .sutil import StreamArc, errdesc
from .util import fsenc, min_ex
try:
from typing import Any, Generator, Optional
from .util import NamedLogger
except:
pass
class QFile(object):
class QFile(object): # inherit io.StringIO for painful typing
"""file-like object which buffers writes into a queue"""
def __init__(self):
self.q = Queue(64)
self.bq = []
def __init__(self) -> None:
self.q: Queue[Optional[bytes]] = Queue(64)
self.bq: list[bytes] = []
self.nq = 0
def write(self, buf):
def write(self, buf: Optional[bytes]) -> None:
if buf is None or self.nq >= 240 * 1024:
self.q.put(b"".join(self.bq))
self.bq = []
@@ -30,27 +39,32 @@ class QFile(object):
self.nq += len(buf)
class StreamTar(object):
class StreamTar(StreamArc):
"""construct in-memory tar file from the given path"""
def __init__(self, log, fgen, **kwargs):
def __init__(
self,
log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):
super(StreamTar, self).__init__(log, fgen)
self.ci = 0
self.co = 0
self.qfile = QFile()
self.log = log
self.fgen = fgen
self.errf = None
self.errf: dict[str, Any] = {}
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True
w.start()
def gen(self):
def gen(self) -> Generator[Optional[bytes], None, None]:
while True:
buf = self.qfile.q.get()
if not buf:
@@ -63,7 +77,7 @@ class StreamTar(object):
if self.errf:
bos.unlink(self.errf["ap"])
def ser(self, f):
def ser(self, f: dict[str, Any]) -> None:
name = f["vp"]
src = f["ap"]
fsi = f["st"]
@@ -76,21 +90,21 @@ class StreamTar(object):
inf.gid = 0
self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f:
self.tar.addfile(inf, f)
with open(fsenc(src), "rb", 512 * 1024) as fo:
self.tar.addfile(inf, fo)
def _gen(self):
def _gen(self) -> None:
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
errors.append((f["vp"], f["err"]))
continue
try:
self.ser(f)
except Exception:
except:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append([f["vp"], ex])
errors.append((f["vp"], ex))
if errors:
self.errf, txt = errdesc(errors)

View File

@@ -12,23 +12,28 @@ Original source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/m
# This code is released under the Python license and the BSD 2-clause license
import platform
import codecs
import platform
import sys
PY3 = sys.version_info[0] > 2
WINDOWS = platform.system() == "Windows"
FS_ERRORS = "surrogateescape"
try:
from typing import Any
except:
pass
def u(text):
def u(text: Any) -> str:
if PY3:
return text
else:
return text.decode("unicode_escape")
def b(data):
def b(data: Any) -> bytes:
if PY3:
return data.encode("latin1")
else:
@@ -43,7 +48,7 @@ else:
bytes_chr = chr
def surrogateescape_handler(exc):
def surrogateescape_handler(exc: Any) -> tuple[str, int]:
"""
Pure Python implementation of the PEP 383: the "surrogateescape" error
handler of Python 3. Undecodable bytes will be replaced by a Unicode
@@ -74,7 +79,7 @@ class NotASurrogateError(Exception):
pass
def replace_surrogate_encode(mystring):
def replace_surrogate_encode(mystring: str) -> str:
"""
Returns a (unicode) string, not the more logical bytes, because the codecs
register_error functionality expects this.
@@ -100,7 +105,7 @@ def replace_surrogate_encode(mystring):
return str().join(decoded)
def replace_surrogate_decode(mybytes):
def replace_surrogate_decode(mybytes: bytes) -> str:
"""
Returns a (unicode) string
"""
@@ -121,7 +126,7 @@ def replace_surrogate_decode(mybytes):
return str().join(decoded)
def encodefilename(fn):
def encodefilename(fn: str) -> bytes:
if FS_ENCODING == "ascii":
# ASCII encoder of Python 2 expects that the error handler returns a
# Unicode string encodable to ASCII, whereas our surrogateescape error
@@ -161,7 +166,7 @@ def encodefilename(fn):
return fn.encode(FS_ENCODING, FS_ERRORS)
def decodefilename(fn):
def decodefilename(fn: bytes) -> str:
return fn.decode(FS_ENCODING, FS_ERRORS)
@@ -181,7 +186,7 @@ if WINDOWS and not PY3:
FS_ENCODING = codecs.lookup(FS_ENCODING).name
def register_surrogateescape():
def register_surrogateescape() -> None:
"""
Registers the surrogateescape error handler on Python 2 (only)
"""

View File

@@ -1,14 +1,34 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import time
import tempfile
from datetime import datetime
from .bos import bos
try:
from typing import Any, Generator, Optional
def errdesc(errors):
from .util import NamedLogger
except:
pass
class StreamArc(object):
def __init__(
self,
log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):
self.log = log
self.fgen = fgen
def gen(self) -> Generator[Optional[bytes], None, None]:
pass
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""]
for fn, err in errors:

View File

@@ -1,41 +1,51 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import calendar
import os
import sys
import time
import shlex
import string
import signal
import socket
import string
import sys
import threading
import time
from datetime import datetime, timedelta
import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
try:
from types import FrameType
import typing
from typing import Optional, Union
except:
pass
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, E, unicode
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_VIPS, HAVE_WEBP
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k
from .util import ansi_re, min_ex, mp, start_log_thrs, start_stackmon
class SvcHub(object):
"""
Hosts all services which cannot be parallelized due to reliance on monolithic resources.
Creates a Broker which does most of the heavy stuff; hosted services can use this to perform work:
hub.broker.put(want_reply, destination, args_list).
hub.broker.<say|ask>(destination, args_list).
Either BrokerThr (plain threads) or BrokerMP (multiprocessing) is used depending on configuration.
Nothing is returned synchronously; if you want any value returned from the call,
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
"""
def __init__(self, args, argv, printed):
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
self.args = args
self.argv = argv
self.logf = None
self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = ""
self.stop_req = False
self.reload_req = False
self.stopping = False
@@ -47,6 +57,29 @@ class SvcHub(object):
self.log_mutex = threading.Lock()
self.next_day = 0
if args.sss or args.s >= 3:
args.ss = True
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
args.ls = args.ls or "**,*,ln,p,r"
if args.ss or args.s >= 2:
args.s = True
args.no_logues = True
args.no_readme = True
args.unpost = 0
args.no_del = True
args.no_mv = True
args.hardlink = True
args.vague_403 = True
args.nih = True
if args.s:
args.dotpart = True
args.no_thumb = True
args.no_mtag_ff = True
args.no_robots = True
args.force_js = True
self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
@@ -59,16 +92,16 @@ class SvcHub(object):
if not args.use_fpool and args.j != 1:
args.no_fpool = True
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", m.format(args.j))
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", t.format(args.j))
if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
if ANYWIN:
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
args.no_fpool = True
self.log("root", m, c=3)
self.log("root", t, c=3)
bri = "zy"[args.theme % 2 :][:1]
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
@@ -96,8 +129,8 @@ class SvcHub(object):
self.args.th_dec = list(decs.keys())
self.thumbsrv = None
if not args.no_thumb:
m = "decoder preference: {}".format(", ".join(self.args.th_dec))
self.log("thumb", m)
t = "decoder preference: {}".format(", ".join(self.args.th_dec))
self.log("thumb", t)
if "pil" in self.args.th_dec and not HAVE_WEBP:
msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"
@@ -131,11 +164,11 @@ class SvcHub(object):
if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker
else:
from .broker_thr import BrokerThr as Broker
from .broker_thr import BrokerThr as Broker # type: ignore
self.broker = Broker(self)
def thr_httpsrv_up(self):
def thr_httpsrv_up(self) -> None:
time.sleep(1 if self.args.ign_ebind_all else 5)
expected = self.broker.num_workers * self.tcpsrv.nsrv
failed = expected - self.httpsrv_up
@@ -145,20 +178,20 @@ class SvcHub(object):
if self.args.ign_ebind_all:
if not self.tcpsrv.srv:
for _ in range(self.broker.num_workers):
self.broker.put(False, "cb_httpsrv_up")
self.broker.say("cb_httpsrv_up")
return
if self.args.ign_ebind and self.tcpsrv.srv:
return
m = "{}/{} workers failed to start"
m = m.format(failed, expected)
self.log("root", m, 1)
t = "{}/{} workers failed to start"
t = t.format(failed, expected)
self.log("root", t, 1)
self.retcode = 1
os.kill(os.getpid(), signal.SIGTERM)
def cb_httpsrv_up(self):
def cb_httpsrv_up(self) -> None:
self.httpsrv_up += 1
if self.httpsrv_up != self.broker.num_workers:
return
@@ -171,9 +204,9 @@ class SvcHub(object):
thr.daemon = True
thr.start()
def _logname(self):
def _logname(self) -> str:
dt = datetime.utcnow()
fn = self.args.lo
fn = str(self.args.lo)
for fs in "YmdHMS":
fs = "%" + fs
if fs in fn:
@@ -181,7 +214,7 @@ class SvcHub(object):
return fn
def _setup_logfile(self, printed):
def _setup_logfile(self, printed: str) -> None:
base_fn = fn = sel_fn = self._logname()
if fn != self.args.lo:
ctr = 0
@@ -203,8 +236,6 @@ class SvcHub(object):
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
lh.base_fn = base_fn
argv = [sys.executable] + self.argv
if hasattr(shlex, "quote"):
argv = [shlex.quote(x) for x in argv]
@@ -215,9 +246,10 @@ class SvcHub(object):
printed += msg
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
self.logf = lh
self.logf_base_fn = base_fn
print(msg, end="")
def run(self):
def run(self) -> None:
self.tcpsrv.run()
thr = threading.Thread(target=self.thr_httpsrv_up)
@@ -252,7 +284,7 @@ class SvcHub(object):
else:
self.stop_thr()
def reload(self):
def reload(self) -> str:
if self.reloading:
return "cannot reload; already in progress"
@@ -262,7 +294,7 @@ class SvcHub(object):
t.start()
return "reload initiated"
def _reload(self):
def _reload(self) -> None:
self.log("root", "reload scheduled")
with self.up2k.mutex:
self.asrv.reload()
@@ -271,7 +303,7 @@ class SvcHub(object):
self.reloading = False
def stop_thr(self):
def stop_thr(self) -> None:
while not self.stop_req:
with self.stop_cond:
self.stop_cond.wait(9001)
@@ -282,7 +314,7 @@ class SvcHub(object):
self.shutdown()
def signal_handler(self, sig, frame):
def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
if self.stopping:
return
@@ -294,7 +326,7 @@ class SvcHub(object):
with self.stop_cond:
self.stop_cond.notify_all()
def shutdown(self):
def shutdown(self) -> None:
if self.stopping:
return
@@ -337,7 +369,7 @@ class SvcHub(object):
sys.exit(ret)
def _log_disabled(self, src, msg, c=0):
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
if not self.logf:
return
@@ -349,8 +381,8 @@ class SvcHub(object):
if now >= self.next_day:
self._set_next_day()
def _set_next_day(self):
if self.next_day and self.logf and self.logf.base_fn != self._logname():
def _set_next_day(self) -> None:
if self.next_day and self.logf and self.logf_base_fn != self._logname():
self.logf.close()
self._setup_logfile("")
@@ -364,7 +396,7 @@ class SvcHub(object):
dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple())
def _log_enabled(self, src, msg, c=0):
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
"""handles logging from all components"""
with self.log_mutex:
now = time.time()
@@ -401,7 +433,7 @@ class SvcHub(object):
if self.logf:
self.logf.write(msg)
def check_mp_support(self):
def check_mp_support(self) -> str:
vmin = sys.version_info[1]
if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;"
@@ -415,16 +447,16 @@ class SvcHub(object):
return msg
try:
x = mp.Queue(1)
x.put(["foo", "bar"])
x: mp.Queue[tuple[str, str]] = mp.Queue(1)
x.put(("foo", "bar"))
if x.get()[0] != "foo":
raise Exception()
except:
return "multiprocessing is not supported on your platform;"
return None
return ""
def check_mp_enable(self):
def check_mp_enable(self) -> bool:
if self.args.j == 1:
return False
@@ -447,18 +479,18 @@ class SvcHub(object):
self.log("svchub", "cannot efficiently use multiple CPU cores")
return False
def sd_notify(self):
def sd_notify(self) -> None:
try:
addr = os.getenv("NOTIFY_SOCKET")
if not addr:
zb = os.getenv("NOTIFY_SOCKET")
if not zb:
return
addr = unicode(addr)
addr = unicode(zb)
if addr.startswith("@"):
addr = "\0" + addr[1:]
m = "".join(x for x in addr if x in string.printable)
self.log("sd_notify", m)
t = "".join(x for x in addr if x in string.printable)
self.log("sd_notify", t)
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sck.connect(addr)

View File

@@ -1,16 +1,23 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import calendar
import time
import zlib
import calendar
from .sutil import errdesc
from .util import yieldfile, sanitize_fn, spack, sunpack, min_ex
from .bos import bos
from .sutil import StreamArc, errdesc
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
try:
from typing import Any, Generator, Optional
from .util import NamedLogger
except:
pass
def dostime2unix(buf):
def dostime2unix(buf: bytes) -> int:
t, d = sunpack(b"<HH", buf)
ts = (t & 0x1F) * 2
@@ -29,7 +36,7 @@ def dostime2unix(buf):
return int(calendar.timegm(dt))
def unixtime2dos(ts):
def unixtime2dos(ts: int) -> bytes:
tt = time.gmtime(ts + 1)
dy, dm, dd, th, tm, ts = list(tt)[:6]
@@ -41,14 +48,22 @@ def unixtime2dos(ts):
return b"\x00\x00\x21\x00"
def gen_fdesc(sz, crc32, z64):
def gen_fdesc(sz: int, crc32: int, z64: bool) -> bytes:
ret = b"\x50\x4b\x07\x08"
fmt = b"<LQQ" if z64 else b"<LLL"
ret += spack(fmt, crc32, sz, sz)
return ret
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
def gen_hdr(
h_pos: Optional[int],
fn: str,
sz: int,
lastmod: int,
utf8: bool,
icrc32: int,
pre_crc: bool,
) -> bytes:
"""
does regular file headers
and the central directory meme if h_pos is set
@@ -67,8 +82,8 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# confusingly this doesn't bump if h_pos
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32:
crc32 = spack(b"<L", crc32)
if icrc32:
crc32 = spack(b"<L", icrc32)
else:
crc32 = b"\x00" * 4
@@ -129,7 +144,9 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
return ret
def gen_ecdr(items, cdir_pos, cdir_end):
def gen_ecdr(
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
) -> tuple[bytes, bool]:
"""
summary of all file headers,
usually the zipfile footer unless something clamps
@@ -154,10 +171,12 @@ def gen_ecdr(items, cdir_pos, cdir_end):
# 2b comment length
ret += b"\x00\x00"
return [ret, need_64]
return ret, need_64
def gen_ecdr64(items, cdir_pos, cdir_end):
def gen_ecdr64(
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
) -> bytes:
"""
z64 end of central directory
added when numfiles or a headerptr clamps
@@ -181,7 +200,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
return ret
def gen_ecdr64_loc(ecdr64_pos):
def gen_ecdr64_loc(ecdr64_pos: int) -> bytes:
"""
z64 end of central directory locator
points to ecdr64
@@ -196,21 +215,27 @@ def gen_ecdr64_loc(ecdr64_pos):
return ret
class StreamZip(object):
def __init__(self, log, fgen, utf8=False, pre_crc=False):
self.log = log
self.fgen = fgen
class StreamZip(StreamArc):
def __init__(
self,
log: NamedLogger,
fgen: Generator[dict[str, Any], None, None],
utf8: bool = False,
pre_crc: bool = False,
) -> None:
super(StreamZip, self).__init__(log, fgen)
self.utf8 = utf8
self.pre_crc = pre_crc
self.pos = 0
self.items = []
self.items: list[tuple[str, int, int, int, int]] = []
def _ct(self, buf):
def _ct(self, buf: bytes) -> bytes:
self.pos += len(buf)
return buf
def ser(self, f):
def ser(self, f: dict[str, Any]) -> Generator[bytes, None, None]:
name = f["vp"]
src = f["ap"]
st = f["st"]
@@ -218,9 +243,8 @@ class StreamZip(object):
sz = st.st_size
ts = st.st_mtime
crc = None
crc = 0
if self.pre_crc:
crc = 0
for buf in yieldfile(src):
crc = zlib.crc32(buf, crc)
@@ -230,7 +254,6 @@ class StreamZip(object):
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
crc = crc or 0
for buf in yieldfile(src):
if not self.pre_crc:
crc = zlib.crc32(buf, crc)
@@ -239,7 +262,7 @@ class StreamZip(object):
crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos])
self.items.append((name, sz, ts, crc, h_pos))
z64 = sz >= 4 * 1024 * 1024 * 1024
@@ -247,19 +270,19 @@ class StreamZip(object):
buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf)
def gen(self):
def gen(self) -> Generator[bytes, None, None]:
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
errors.append((f["vp"], f["err"]))
continue
try:
for x in self.ser(f):
yield x
except Exception:
except:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append([f["vp"], ex])
errors.append((f["vp"], ex))
if errors:
errf, txt = errdesc(errors)

View File

@@ -2,12 +2,15 @@
from __future__ import print_function, unicode_literals
import re
import sys
import socket
import sys
from .__init__ import MACOS, ANYWIN, unicode
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, unicode
from .util import chkcmd
if TYPE_CHECKING:
from .svchub import SvcHub
class TcpSrv(object):
"""
@@ -15,16 +18,16 @@ class TcpSrv(object):
which then uses the least busy HttpSrv to handle it
"""
def __init__(self, hub):
def __init__(self, hub: "SvcHub"):
self.hub = hub
self.args = hub.args
self.log = hub.log
self.stopping = False
self.srv = []
self.srv: list[socket.socket] = []
self.nsrv = 0
ok = {}
ok: dict[str, list[int]] = {}
for ip in self.args.i:
ok[ip] = []
for port in self.args.p:
@@ -34,8 +37,8 @@ class TcpSrv(object):
ok[ip].append(port)
except Exception as ex:
if self.args.ign_ebind or self.args.ign_ebind_all:
m = "could not listen on {}:{}: {}"
self.log("tcpsrv", m.format(ip, port, ex), c=3)
t = "could not listen on {}:{}: {}"
self.log("tcpsrv", t.format(ip, port, ex), c=3)
else:
raise
@@ -55,9 +58,9 @@ class TcpSrv(object):
eps[x] = "external"
msgs = []
title_tab = {}
title_tab: dict[str, dict[str, int]] = {}
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
m = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p):
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
@@ -69,7 +72,7 @@ class TcpSrv(object):
elif self.args.https_only or port == 443:
proto = "https"
msgs.append(m.format(proto, ip, port, desc))
msgs.append(t.format(proto, ip, port, desc))
if not self.args.wintitle:
continue
@@ -98,13 +101,13 @@ class TcpSrv(object):
if msgs:
msgs[-1] += "\n"
for m in msgs:
self.log("tcpsrv", m)
for t in msgs:
self.log("tcpsrv", t)
if self.args.wintitle:
self._set_wintitle(title_tab)
def _listen(self, ip, port):
def _listen(self, ip: str, port: int) -> None:
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -120,7 +123,7 @@ class TcpSrv(object):
raise
raise Exception(e)
def run(self):
def run(self) -> None:
for srv in self.srv:
srv.listen(self.args.nc)
ip, port = srv.getsockname()
@@ -130,9 +133,9 @@ class TcpSrv(object):
if self.args.q:
print(msg)
self.hub.broker.put(False, "listen", srv)
self.hub.broker.say("listen", srv)
def shutdown(self):
def shutdown(self) -> None:
self.stopping = True
try:
for srv in self.srv:
@@ -142,30 +145,59 @@ class TcpSrv(object):
self.log("tcpsrv", "ok bye")
def ips_linux(self):
eps = {}
def ips_linux_ifconfig(self) -> dict[str, str]:
# for termux
try:
txt, _ = chkcmd(["ifconfig"])
except:
return {}
eps: dict[str, str] = {}
dev = None
ip = None
up = None
for ln in (txt + "\n").split("\n"):
if not ln.strip() and dev and ip:
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
dev = ip = up = None
continue
if ln == ln.lstrip():
dev = re.split(r"[: ]", ln)[0]
if "UP" in re.split(r"[<>, \t]", ln):
up = True
m = re.match(r"^\s+inet\s+([^ ]+)", ln)
if m:
ip = m.group(1)
return eps
def ips_linux(self) -> dict[str, str]:
try:
txt, _ = chkcmd(["ip", "addr"])
except:
return eps
return self.ips_linux_ifconfig()
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
ri = re.compile(r"^\s*[0-9]+\s*:.*")
up = False
eps: dict[str, str] = {}
for ln in txt.split("\n"):
if ri.match(ln):
up = "UP" in re.split("[>,< ]", ln)
try:
ip, dev = r.match(ln.rstrip()).groups()
ip, dev = r.match(ln.rstrip()).groups() # type: ignore
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
except:
pass
return eps
def ips_macos(self):
eps = {}
def ips_macos(self) -> dict[str, str]:
eps: dict[str, str] = {}
try:
txt, _ = chkcmd(["ifconfig"])
except:
@@ -173,7 +205,7 @@ class TcpSrv(object):
rdev = re.compile(r"^([^ ]+):")
rip = re.compile(r"^\tinet ([0-9\.]+) ")
dev = None
dev = "UNKNOWN"
for ln in txt.split("\n"):
m = rdev.match(ln)
if m:
@@ -182,17 +214,17 @@ class TcpSrv(object):
m = rip.match(ln)
if m:
eps[m.group(1)] = dev
dev = None
dev = "UNKNOWN"
return eps
def ips_windows_ipconfig(self):
eps = {}
offs = {}
def ips_windows_ipconfig(self) -> tuple[dict[str, str], set[str]]:
eps: dict[str, str] = {}
offs: set[str] = set()
try:
txt, _ = chkcmd(["ipconfig"])
except:
return eps
return eps, offs
rdev = re.compile(r"(^[^ ].*):$")
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
@@ -202,12 +234,12 @@ class TcpSrv(object):
m = rdev.match(ln)
if m:
if dev and dev not in eps.values():
offs[dev] = 1
offs.add(dev)
dev = m.group(1).split(" adapter ", 1)[-1]
if dev and roff.match(ln):
offs[dev] = 1
offs.add(dev)
dev = None
m = rip.match(ln)
@@ -216,12 +248,12 @@ class TcpSrv(object):
dev = None
if dev and dev not in eps.values():
offs[dev] = 1
offs.add(dev)
return eps, offs
def ips_windows_netsh(self):
eps = {}
def ips_windows_netsh(self) -> dict[str, str]:
eps: dict[str, str] = {}
try:
txt, _ = chkcmd("netsh interface ip show address".split())
except:
@@ -241,7 +273,7 @@ class TcpSrv(object):
return eps
def detect_interfaces(self, listen_ips):
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, str]:
if MACOS:
eps = self.ips_macos()
elif ANYWIN:
@@ -268,7 +300,6 @@ class TcpSrv(object):
]:
try:
s.connect((ip, 1))
# raise OSError(13, "a")
default_route = s.getsockname()[0]
break
except (OSError, socket.error) as ex:
@@ -289,23 +320,23 @@ class TcpSrv(object):
return eps
def _set_wintitle(self, vars):
vars["all"] = vars.get("all", {"Local-Only": 1})
vars["pub"] = vars.get("pub", vars["all"])
def _set_wintitle(self, vs: dict[str, dict[str, int]]) -> None:
vs["all"] = vs.get("all", {"Local-Only": 1})
vs["pub"] = vs.get("pub", vs["all"])
vars2 = {}
for k, eps in vars.items():
vars2[k] = {
vs2 = {}
for k, eps in vs.items():
vs2[k] = {
ep: 1
for ep in eps.keys()
if ":" not in ep or ep.split(":")[0] not in eps
}
title = ""
vars = vars2
vs = vs2
for p in self.args.wintitle.split(" "):
if p.startswith("$"):
p = " and ".join(sorted(vars.get(p[1:], {"(None)": 1}).keys()))
p = " and ".join(sorted(vs.get(p[1:], {"(None)": 1}).keys()))
title += "{} ".format(p)

View File

@@ -3,13 +3,23 @@ from __future__ import print_function, unicode_literals
import os
from .util import Cooldown
from .th_srv import thumb_path, HAVE_WEBP
from .__init__ import TYPE_CHECKING
from .authsrv import VFS
from .bos import bos
from .th_srv import HAVE_WEBP, thumb_path
from .util import Cooldown
try:
from typing import Optional, Union
except:
pass
if TYPE_CHECKING:
from .httpsrv import HttpSrv
class ThumbCli(object):
def __init__(self, hsrv):
def __init__(self, hsrv: "HttpSrv") -> None:
self.broker = hsrv.broker
self.log_func = hsrv.log
self.args = hsrv.args
@@ -34,10 +44,10 @@ class ThumbCli(object):
d = next((x for x in self.args.th_dec if x in ("vips", "pil")), None)
self.can_webp = HAVE_WEBP or d == "vips"
def log(self, msg, c=0):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("thumbcli", msg, c)
def get(self, dbv, rem, mtime, fmt):
def get(self, dbv: VFS, rem: str, mtime: float, fmt: str) -> Optional[str]:
ptop = dbv.realpath
ext = rem.rsplit(".")[-1].lower()
if ext not in self.thumbable or "dthumb" in dbv.flags:
@@ -106,17 +116,20 @@ class ThumbCli(object):
if ret:
tdir = os.path.dirname(tpath)
if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir)
self.broker.say("thumbsrv.poke", tdir)
if want_opus:
# audio files expire individually
if self.cooldown.poke(tpath):
self.broker.put(False, "thumbsrv.poke", tpath)
self.broker.say("thumbsrv.poke", tpath)
return ret
if abort:
return None
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
return x.get()
if not bos.path.getsize(os.path.join(ptop, rem)):
return None
x = self.broker.ask("thumbsrv.get", ptop, rem, mtime, fmt)
return x.get() # type: ignore

View File

@@ -1,19 +1,28 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import shutil
import base64
import hashlib
import threading
import os
import shutil
import subprocess as sp
import threading
import time
from .__init__ import PY2, unicode
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
from queue import Queue
from .__init__ import TYPE_CHECKING
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
from .util import BytesIO, Cooldown, fsenc, min_ex, runcmd, statdir, vsplit
try:
from typing import Optional, Union
except:
pass
if TYPE_CHECKING:
from .svchub import SvcHub
HAVE_PIL = False
HAVE_HEIF = False
@@ -21,7 +30,7 @@ HAVE_AVIF = False
HAVE_WEBP = False
try:
from PIL import Image, ImageOps, ExifTags
from PIL import ExifTags, Image, ImageOps
HAVE_PIL = True
try:
@@ -39,7 +48,7 @@ try:
pass
try:
import pillow_avif
import pillow_avif # noqa: F401 # pylint: disable=unused-import
HAVE_AVIF = True
except:
@@ -48,14 +57,13 @@ except:
pass
try:
import pyvips
HAVE_VIPS = True
import pyvips
except:
HAVE_VIPS = False
def thumb_path(histpath, rem, mtime, fmt):
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
@@ -81,7 +89,7 @@ def thumb_path(histpath, rem, mtime, fmt):
class ThumbSrv(object):
def __init__(self, hub):
def __init__(self, hub: "SvcHub") -> None:
self.hub = hub
self.asrv = hub.asrv
self.args = hub.args
@@ -92,17 +100,17 @@ class ThumbSrv(object):
self.poke_cd = Cooldown(self.args.th_poke)
self.mutex = threading.Lock()
self.busy = {}
self.busy: dict[str, list[threading.Condition]] = {}
self.stopping = False
self.nthr = max(1, self.args.th_mt)
self.q = Queue(self.nthr * 4)
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
for n in range(self.nthr):
t = threading.Thread(
thr = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
)
t.daemon = True
t.start()
thr.daemon = True
thr.start()
want_ff = not self.args.no_vthumb or not self.args.no_athumb
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
@@ -123,7 +131,7 @@ class ThumbSrv(object):
t.start()
self.fmt_pil, self.fmt_vips, self.fmt_ffi, self.fmt_ffv, self.fmt_ffa = [
{x: True for x in y.split(",")}
set(y.split(","))
for y in [
self.args.th_r_pil,
self.args.th_r_vips,
@@ -135,37 +143,37 @@ class ThumbSrv(object):
if not HAVE_HEIF:
for f in "heif heifs heic heics".split(" "):
self.fmt_pil.pop(f, None)
self.fmt_pil.discard(f)
if not HAVE_AVIF:
for f in "avif avifs".split(" "):
self.fmt_pil.pop(f, None)
self.fmt_pil.discard(f)
self.thumbable = {}
self.thumbable: set[str] = set()
if "pil" in self.args.th_dec:
self.thumbable.update(self.fmt_pil)
self.thumbable |= self.fmt_pil
if "vips" in self.args.th_dec:
self.thumbable.update(self.fmt_vips)
self.thumbable |= self.fmt_vips
if "ff" in self.args.th_dec:
for t in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
self.thumbable.update(t)
for zss in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
self.thumbable |= zss
def log(self, msg, c=0):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("thumb", msg, c)
def shutdown(self):
def shutdown(self) -> None:
self.stopping = True
for _ in range(self.nthr):
self.q.put(None)
def stopped(self):
def stopped(self) -> bool:
with self.mutex:
return not self.nthr
def get(self, ptop, rem, mtime, fmt):
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
@@ -192,7 +200,7 @@ class ThumbSrv(object):
do_conv = True
if do_conv:
self.q.put([abspath, tpath])
self.q.put((abspath, tpath))
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
while not self.stopping:
@@ -213,7 +221,7 @@ class ThumbSrv(object):
return None
def getcfg(self):
def getcfg(self) -> dict[str, set[str]]:
return {
"thumbable": self.thumbable,
"pil": self.fmt_pil,
@@ -223,7 +231,7 @@ class ThumbSrv(object):
"ffa": self.fmt_ffa,
}
def worker(self):
def worker(self) -> None:
while not self.stopping:
task = self.q.get()
if not task:
@@ -254,7 +262,7 @@ class ThumbSrv(object):
except:
msg = "{} could not create thumbnail of {}\n{}"
msg = msg.format(fun.__name__, abspath, min_ex())
c = 1 if "<Signals.SIG" in msg else "1;30"
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "1;30"
self.log(msg, c)
with open(tpath, "wb") as _:
pass
@@ -270,7 +278,7 @@ class ThumbSrv(object):
with self.mutex:
self.nthr -= 1
def fancy_pillow(self, im):
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
# exif_transpose is expensive (loads full image + unconditional copy)
r = max(*self.res) * 2
im.thumbnail((r, r), resample=Image.LANCZOS)
@@ -296,7 +304,7 @@ class ThumbSrv(object):
return im
def conv_pil(self, abspath, tpath):
def conv_pil(self, abspath: str, tpath: str) -> None:
with Image.open(fsenc(abspath)) as im:
try:
im = self.fancy_pillow(im)
@@ -325,7 +333,7 @@ class ThumbSrv(object):
im.save(tpath, **args)
def conv_vips(self, abspath, tpath):
def conv_vips(self, abspath: str, tpath: str) -> None:
crops = ["centre", "none"]
if self.args.th_no_crop:
crops = ["none"]
@@ -343,18 +351,17 @@ class ThumbSrv(object):
img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath, tpath):
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath)
if not ret:
return
ext = abspath.rsplit(".")[-1].lower()
if ext in ["h264", "h265"] or ext in self.fmt_ffi:
seek = []
seek: list[bytes] = []
else:
dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3)
seek = [b"-ss", seek.encode("utf-8")]
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop:
@@ -362,7 +369,7 @@ class ThumbSrv(object):
else:
scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8")
bscale = scale.format(*list(self.res)).encode("utf-8")
# fmt: off
cmd = [
b"ffmpeg",
@@ -374,7 +381,7 @@ class ThumbSrv(object):
cmd += [
b"-i", fsenc(abspath),
b"-map", b"0:v:0",
b"-vf", scale,
b"-vf", bscale,
b"-frames:v", b"1",
b"-metadata:s:v:0", b"rotate=0",
]
@@ -396,14 +403,14 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)]
self._run_ff(cmd)
def _run_ff(self, cmd):
def _run_ff(self, cmd: list[bytes]) -> None:
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt)
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
if not ret:
return
c = "1;30"
m = "FFmpeg failed (probably a corrupt video file):\n"
c: Union[str, int] = "1;30"
t = "FFmpeg failed (probably a corrupt video file):\n"
if cmd[-1].lower().endswith(b".webp") and (
"Error selecting an encoder" in serr
or "Automatic encoder selection failed" in serr
@@ -411,14 +418,14 @@ class ThumbSrv(object):
or "Please choose an encoder manually" in serr
):
self.args.th_ff_jpg = True
m = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
t = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
c = 1
if (
"Requested resampling engine is unavailable" in serr
or "output pad on Parsed_aresample_" in serr
):
m = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
t = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
c = 1
lines = serr.strip("\n").split("\n")
@@ -429,10 +436,10 @@ class ThumbSrv(object):
if len(txt) > 5000:
txt = txt[:2500] + "...\nff: [...]\nff: ..." + txt[-2500:]
self.log(m + txt, c=c)
self.log(t + txt, c=c)
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath):
def conv_spec(self, abspath: str, tpath: str) -> None:
ret, _ = ffprobe(abspath)
if "ac" not in ret:
raise Exception("not audio")
@@ -474,7 +481,7 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)]
self._run_ff(cmd)
def conv_opus(self, abspath, tpath):
def conv_opus(self, abspath: str, tpath: str) -> None:
if self.args.no_acode:
raise Exception("disabled in server config")
@@ -522,7 +529,7 @@ class ThumbSrv(object):
# fmt: on
self._run_ff(cmd)
def poke(self, tdir):
def poke(self, tdir: str) -> None:
if not self.poke_cd.poke(tdir):
return
@@ -534,7 +541,7 @@ class ThumbSrv(object):
except:
pass
def cleaner(self):
def cleaner(self) -> None:
interval = self.args.th_clean
while True:
time.sleep(interval)
@@ -549,14 +556,14 @@ class ThumbSrv(object):
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
def clean(self, histpath):
def clean(self, histpath: str) -> int:
ret = 0
for cat in ["th", "ac"]:
ret += self._clean(histpath, cat, None)
ret += self._clean(histpath, cat, "")
return ret
def _clean(self, histpath, cat, thumbpath):
def _clean(self, histpath: str, cat: str, thumbpath: str) -> int:
if not thumbpath:
thumbpath = os.path.join(histpath, cat)
@@ -565,10 +572,10 @@ class ThumbSrv(object):
maxage = getattr(self.args, cat + "_maxage")
now = time.time()
prev_b64 = None
prev_fp = None
prev_fp = ""
try:
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
ents = sorted(list(ents))
t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
ents = sorted(list(t1))
except:
return 0
@@ -583,7 +590,7 @@ class ThumbSrv(object):
if age > maxage:
with self.mutex:
safe = True
for k in self.busy.keys():
for k in self.busy:
if k.lower().replace("\\", "/").startswith(cmp):
safe = False
break

View File

@@ -1,34 +1,37 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import time
import calendar
import os
import re
import threading
import time
from operator import itemgetter
from .__init__ import ANYWIN, unicode
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
from .bos import bos
from .up2k import up2k_wark_from_hashlist
from .util import HAVE_SQLITE3, Pebkac, absreal, gen_filekey, min_ex, quotep, s3dec
try:
HAVE_SQLITE3 = True
if HAVE_SQLITE3:
import sqlite3
except:
HAVE_SQLITE3 = False
try:
from pathlib import Path
except:
pass
try:
from typing import Any, Optional, Union
except:
pass
if TYPE_CHECKING:
from .httpconn import HttpConn
class U2idx(object):
def __init__(self, conn):
def __init__(self, conn: "HttpConn") -> None:
self.log_func = conn.log_func
self.asrv = conn.asrv
self.args = conn.args
@@ -38,17 +41,21 @@ class U2idx(object):
self.log("your python does not have sqlite3; searching will be disabled")
return
self.cur = {}
self.mem_cur = sqlite3.connect(":memory:")
self.active_id = ""
self.active_cur: Optional["sqlite3.Cursor"] = None
self.cur: dict[str, "sqlite3.Cursor"] = {}
self.mem_cur = sqlite3.connect(":memory:").cursor()
self.mem_cur.execute(r"create table a (b text)")
self.p_end = None
self.p_dur = 0
self.p_end = 0.0
self.p_dur = 0.0
def log(self, msg, c=0):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("u2idx", msg, c)
def fsearch(self, vols, body):
def fsearch(
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
) -> list[dict[str, Any]]:
"""search by up2k hashlist"""
if not HAVE_SQLITE3:
return []
@@ -58,14 +65,14 @@ class U2idx(object):
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
uv: list[Union[str, int]] = [wark[:16], wark]
try:
return self.run_query(vols, uq, uv, True, False, 99999)[0]
except:
raise Pebkac(500, min_ex())
def get_cur(self, ptop):
def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
if not HAVE_SQLITE3:
return None
@@ -101,13 +108,16 @@ class U2idx(object):
self.cur[ptop] = cur
return cur
def search(self, vols, uq, lim):
def search(
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
) -> tuple[list[dict[str, Any]], list[str]]:
"""search by query params"""
if not HAVE_SQLITE3:
return []
return [], []
q = ""
va = []
v: Union[str, int] = ""
va: list[Union[str, int]] = []
have_up = False # query has up.* operands
have_mt = False
is_key = True
@@ -200,7 +210,7 @@ class U2idx(object):
"%Y",
]:
try:
v = calendar.timegm(time.strptime(v, fmt))
v = calendar.timegm(time.strptime(str(v), fmt))
break
except:
pass
@@ -228,11 +238,12 @@ class U2idx(object):
# lowercase tag searches
m = ptn_lc.search(q)
if not m or not ptn_lcv.search(unicode(v)):
zs = unicode(v)
if not m or not ptn_lcv.search(zs):
continue
va.pop()
va.append(v.lower())
va.append(zs.lower())
q = q[: m.start()]
field, oper = m.groups()
@@ -246,8 +257,16 @@ class U2idx(object):
except Exception as ex:
raise Pebkac(500, repr(ex))
def run_query(self, vols, uq, uv, have_up, have_mt, lim):
done_flag = []
def run_query(
self,
vols: list[tuple[str, str, dict[str, Any]]],
uq: str,
uv: list[Union[str, int]],
have_up: bool,
have_mt: bool,
lim: int,
) -> tuple[list[dict[str, Any]], list[str]]:
done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
)
@@ -264,13 +283,11 @@ class U2idx(object):
if not uq or not uv:
uq = "select * from up"
uv = ()
uv = []
elif have_mt:
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
uv = tuple(uv)
else:
uq = "select up.* from up where " + uq
uv = tuple(uv)
self.log("qs: {!r} {!r}".format(uq, uv))
@@ -290,11 +307,10 @@ class U2idx(object):
v = vtop + "/"
vuv.append(v)
vuv = tuple(vuv)
sret = []
fk = flags.get("fk")
c = cur.execute(uq, vuv)
c = cur.execute(uq, tuple(vuv))
for hit in c:
w, ts, sz, rd, fn, ip, at = hit[:7]
lim -= 1
@@ -338,7 +354,7 @@ class U2idx(object):
# print("[{}] {}".format(ptop, sret))
done_flag.append(True)
self.active_id = None
self.active_id = ""
# undupe hits from multiple metadata keys
if len(ret) > 1:
@@ -352,11 +368,12 @@ class U2idx(object):
return ret, list(taglist.keys())
def terminator(self, identifier, done_flag):
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout):
time.sleep(1)
if done_flag:
return
if identifier == self.active_id:
assert self.active_cur
self.active_cur.connection.interrupt()

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -21,7 +21,7 @@ window.baguetteBox = (function () {
afterHide: null,
onChange: null,
},
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
@@ -37,6 +37,9 @@ window.baguetteBox = (function () {
vmute = false,
vloop = sread('vmode') == 'L',
vnext = sread('vmode') == 'C',
loopA = null,
loopB = null,
url_ts = null,
resume_mp = false;
var onFSC = function (e) {
@@ -182,6 +185,7 @@ window.baguetteBox = (function () {
'<button id="bbox-rotl" type="button">↶</button>' +
'<button id="bbox-rotr" type="button">↷</button>' +
'<button id="bbox-tsel" type="button">sel</button>' +
'<button id="bbox-full" type="button">⛶</button>' +
'<button id="bbox-vmode" type="button" tt="a"></button>' +
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
'</div></div>'
@@ -198,9 +202,9 @@ window.baguetteBox = (function () {
btnRotL = ebi('bbox-rotl');
btnRotR = ebi('bbox-rotr');
btnSel = ebi('bbox-tsel');
btnFull = ebi('bbox-full');
btnVmode = ebi('bbox-vmode');
btnClose = ebi('bbox-close');
bindEvents();
}
function halp() {
@@ -215,6 +219,7 @@ window.baguetteBox = (function () {
['home', 'first file'],
['end', 'last file'],
['R', 'rotate (shift=ccw)'],
['F', 'toggle fullscreen'],
['S', 'toggle file selection'],
['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'],
@@ -222,7 +227,7 @@ window.baguetteBox = (function () {
['M', 'video: toggle mute'],
['V', 'video: toggle loop'],
['C', 'video: toggle auto-next'],
['F', 'video: toggle fullscreen'],
['<code>[</code>, <code>]</code>', 'video: loop start / end'],
],
d = mknod('table'),
html = ['<tbody>'];
@@ -230,6 +235,8 @@ window.baguetteBox = (function () {
for (var a = 0; a < list.length; a++)
html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');
html.push('<tr><td colspan="2">tap middle of img to hide btns</td></tr>');
html.push('<tr><td colspan="2">tap left/right sides for prev/next</td></tr>');
d.innerHTML = html.join('\n') + '</tbody>';
d.setAttribute('id', 'bbox-halp');
d.onclick = function () {
@@ -273,17 +280,17 @@ window.baguetteBox = (function () {
setVmode();
}
else if (k == "KeyF")
try {
if (isFullscreen)
document.exitFullscreen();
else
v.requestFullscreen();
}
catch (ex) { }
tglfull();
else if (k == "KeyS")
tglsel();
else if (k == "KeyR")
rotn(e.shiftKey ? -1 : 1);
else if (k == "KeyY")
dlpic();
else if (k == "BracketLeft")
setloop(1);
else if (k == "BracketRight")
setloop(2);
}
function anim() {
@@ -342,19 +349,39 @@ window.baguetteBox = (function () {
tt.show.bind(this)();
}
function tglsel() {
function findfile() {
var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getall();
for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name)
clmod(ebi(files[a].id).closest('tr'), 'sel', 't');
return [name, a, files, ebi(files[a].id)];
}
function tglfull() {
try {
if (isFullscreen)
document.exitFullscreen();
else
(vid() || ebi('bbox-overlay')).requestFullscreen();
}
catch (ex) { alert(ex); }
}
function tglsel() {
var o = findfile()[3];
clmod(o.closest('tr'), 'sel', 't');
msel.selui();
selbg();
}
function dlpic() {
var url = findfile()[3].href;
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
dl_file(url);
}
function selbg() {
var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement,
@@ -404,6 +431,9 @@ window.baguetteBox = (function () {
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
function bindEvents() {
bind(document, 'keydown', keyDownHandler);
bind(document, 'keyup', keyUpHandler);
bind(document, 'fullscreenchange', onFSC);
bind(overlay, 'click', overlayClickHandler);
bind(btnPrev, 'click', showPreviousImage);
bind(btnNext, 'click', showNextImage);
@@ -414,6 +444,7 @@ window.baguetteBox = (function () {
bind(btnRotL, 'click', rotl);
bind(btnRotR, 'click', rotr);
bind(btnSel, 'click', tglsel);
bind(btnFull, 'click', tglfull);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -422,6 +453,9 @@ window.baguetteBox = (function () {
}
function unbindEvents() {
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
unbind(overlay, 'click', overlayClickHandler);
unbind(btnPrev, 'click', showPreviousImage);
unbind(btnNext, 'click', showNextImage);
@@ -432,6 +466,7 @@ window.baguetteBox = (function () {
unbind(btnRotL, 'click', rotl);
unbind(btnRotR, 'click', rotr);
unbind(btnSel, 'click', tglsel);
unbind(btnFull, 'click', tglfull);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -496,9 +531,7 @@ window.baguetteBox = (function () {
if (overlay.style.display === 'block')
return;
bind(document, 'keydown', keyDownHandler);
bind(document, 'keyup', keyUpHandler);
bind(document, 'fullscreenchange', onFSC);
bindEvents();
currentIndex = chosenImageIndex;
touch = {
count: 0,
@@ -510,6 +543,10 @@ window.baguetteBox = (function () {
preloadPrev(currentIndex);
});
clmod(ebi('bbox-btns'), 'off');
clmod(btnPrev, 'off');
clmod(btnNext, 'off');
updateOffset();
overlay.style.display = 'block';
// Fade in overlay
@@ -522,9 +559,10 @@ window.baguetteBox = (function () {
options.afterShow();
}, 50);
if (options.onChange)
if (options.onChange && !url_ts)
options.onChange(currentIndex, imagesElements.length);
url_ts = null;
documentLastFocus = document.activeElement;
btnClose.focus();
isOverlayVisible = true;
@@ -541,9 +579,13 @@ window.baguetteBox = (function () {
return;
sethash('');
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
unbindEvents();
try {
document.exitFullscreen();
isFullscreen = false;
}
catch (ex) { }
// Fade out and hide the overlay
overlay.className = '';
setTimeout(function () {
@@ -777,8 +819,18 @@ window.baguetteBox = (function () {
}
function playvid(play) {
if (vid())
vid()[play ? 'play' : 'pause']();
if (!play) {
timer.rm(loopchk);
loopA = loopB = null;
}
var v = vid();
if (!v)
return;
v[play ? 'play' : 'pause']();
if (play && loopA !== null && v.currentTime < loopA)
v.currentTime = loopA;
}
function playpause() {
@@ -797,6 +849,38 @@ window.baguetteBox = (function () {
showNextImage();
}
function setloop(side) {
var v = vid();
if (!v)
return;
var t = v.currentTime;
if (side == 1) loopA = t;
if (side == 2) loopB = t;
if (side)
toast.inf(5, 'Loop' + (side == 1 ? 'A' : 'B') + ': ' + f2f(t, 2));
if (loopB !== null) {
timer.add(loopchk);
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
}
}
function loopchk() {
if (loopB === null)
return;
var v = vid();
if (!v || v.paused || v.currentTime < loopB)
return;
v.currentTime = loopA || 0;
}
function urltime(txt) {
url_ts = txt;
}
function mp_ctl() {
var v = vid();
if (!vmute && v && mp.au && !mp.au.paused) {
@@ -839,6 +923,15 @@ window.baguetteBox = (function () {
playvid(true);
v.muted = vmute;
v.loop = vloop;
if (url_ts) {
var seek = ('' + url_ts).split('-');
v.currentTime = seek[0];
if (seek.length > 1) {
loopA = parseFloat(seek[0]);
loopB = parseFloat(seek[1]);
setloop();
}
}
}
selbg();
mp_ctl();
@@ -850,6 +943,22 @@ window.baguetteBox = (function () {
else
timer.rm(rotn);
el.onclick = function (e) {
var rc = e.target.getBoundingClientRect(),
x = e.clientX - rc.left,
fx = x / (rc.right - rc.left);
if (fx < 0.3)
return showPreviousImage();
if (fx > 0.7)
return showNextImage();
clmod(ebi('bbox-btns'), 'off', 't');
clmod(btnPrev, 'off', 't');
clmod(btnNext, 'off', 't');
};
var prev = QS('.full-image.vis');
if (prev)
clmod(prev, 'vis');
@@ -886,8 +995,6 @@ window.baguetteBox = (function () {
function destroyPlugin() {
unbindEvents();
clearCachedData();
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
data = {};
currentGallery = [];
@@ -900,6 +1007,7 @@ window.baguetteBox = (function () {
showNext: showNextImage,
showPrevious: showPreviousImage,
relseek: relseek,
urltime: urltime,
playpause: playpause,
hide: hideOverlay,
destroy: destroyPlugin

View File

@@ -238,6 +238,7 @@ html.b {
--u2-txt-bg: transparent;
--u2-tab-1-sh: var(--bg);
--u2-b1-bg: rgba(128,128,128,0.15);
--u2-b2-bg: var(--u2-b1-bg);
--u2-o-bg: var(--btn-bg);
--u2-o-h-bg: var(--btn-h-bg);
@@ -352,9 +353,212 @@ html.cy {
--srv-1: #f00;
--op-aa-bg: #fff;
--u2-b1-bg: #f00;
--u2-b2-bg: #f00;
--u2-o-bg: #ff0;
--u2-o-1-bg: #f00;
}
html.dz {
--fg: #4d4;
--fg-max: #fff;
--fg2-max: #fff;
--fg-weak: #2a2;
--bg-u7: #020;
--bg-u6: #020;
--bg-u5: #050;
--bg-u4: #020;
--bg-u3: #020;
--bg-u2: #020;
--bg-u1: #020;
--bg: #010;
--bgg: var(--bg);
--bg-d1: #000;
--bg-d2: #020;
--bg-d3: #000;
--bg-max: #000;
--tab-alt: #6f6;
--row-alt: #030;
--scroll: #0f0;
--a: #9f9;
--a-b: #cfc;
--a-hil: #cfc;
--a-dark: #afa;
--a-gray: #2a2;
--btn-fg: var(--a);
--btn-bg: rgba(64,128,64,0.15);
--btn-h-fg: var(--a-hil);
--btn-h-bg: #050;
--btn-1-fg: #000;
--btn-1-bg: #4f4;
--btn-1h-fg: var(--btn-1-fg);
--btn-1h-bg: #3f3;
--chk-fg: var(--tab-alt);
--txt-sh: var(--bg-d2);
--txt-bg: var(--btn-bg);
--op-aa-fg: var(--a);
--op-aa-bg: var(--bg-d2);
--op-a-sh: rgba(0,0,0,0.5);
--u2-btn-b1: #999;
--u2-sbtn-b1: #999;
--u2-txt-bg: var(--bg-u5);
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
--u2-tab-1-fg: #fff;
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
--u2-tab-1-b1: #7c5;
--u2-tab-1-b2: #583;
--u2-tab-1-sh: #280;
--u2-b-fg: #fff;
--u2-b1-bg: #3a3;
--u2-b2-bg: #3a3;
--u2-inf-bg: #07a;
--u2-inf-b1: #0be;
--u2-ok-bg: #380;
--u2-ok-b1: #8e4;
--u2-err-bg: #900;
--u2-err-b1: #d06;
--ud-b1: #888;
--sort-1: #fff;
--sort-2: #3f3;
--srv-1: #3e3;
--srv-2: #1a1;
--srv-3: #0f0;
--srv-3b: #070;
--tree-bg: #010;
--g-play-bg: #750;
--g-play-b1: #c90;
--g-play-b2: #da4;
--g-play-sh: #b83;
--g-sel-fg: #fff;
--g-sel-bg: #925;
--g-sel-b1: #c37;
--g-sel-sh: #b36;
--g-fsel-bg: #d39;
--g-fsel-b1: #d48;
--g-fsel-ts: #804;
--g-fg: var(--a-hil);
--g-bg: var(--bg-u2);
--g-b1: var(--bg-u4);
--g-b2: var(--bg-u5);
--g-g1: var(--bg-u2);
--g-g2: var(--bg-u5);
--g-f-bg: var(--bg-u4);
--g-f-b1: var(--bg-u5);
--g-f-fg: var(--a-hil);
--g-sh: rgba(0,0,0,0.3);
--f-sh1: 0.33;
--f-sh2: 0.02;
--f-sh3: 0.2;
--f-h-b1: #3b3;
--f-play-bg: #fc5;
--f-play-fg: #000;
--f-sel-sh: #fc0;
--f-gray: #999;
--fm-off: #f6c;
--mp-sh: var(--bg-d3);
--mp-b-bg: rgba(0,0,0,0.2);
--err-fg: #fff;
--err-bg: #a20;
--err-b1: #f00;
--err-ts: #500;
text-shadow: none;
}
html.dy {
--fg: #000;
--fg-max: #000;
--fg-weak: #000;
--bg-d3: #fff;
--bg-d2: #fff;
--bg-d1: #fff;
--bg: #fff;
--bg-u1: #fff;
--bg-u2: #fff;
--bg-u3: #fff;
--bg-u4: #fff;
--bg-u5: #fff;
--bg-u6: #fff;
--bg-max: #fff;
--tab-alt: #000;
--row-alt: #eee;
--scroll: #000;
--a: #000;
--a-b: #000;
--a-hil: #000;
--a-gray: #000;
--a-dark: #000;
--btn-fg: #000;
--btn-h-fg: #000;
--btn-h-bg: #fff;
--btn-1-fg: #fff;
--btn-1-bg: #000;
--btn-1h-bg: #555;
--chk-fg: a;
--txt-sh: a;
--txt-bg: a;
--op-a-sh: a;
--u2-txt-bg: a;
--u2-tab-1-sh: a;
--u2-tab-1-b1: a;
--u2-tab-1-b2: a;
--u2-tab-1-fg: a;
--u2-tab-1-bg: a;
--u2-b1-bg: #000;
--u2-b2-bg: #000;
--ud-b1: a;
--sort-1: a;
--sort-2: a;
--srv-1: a;
--srv-2: a;
--srv-3: a;
--srv-3b: a;
--tree-bg: #fff;
--g-fg: a;
--g-bg: a;
--g-b1: a;
--g-b2: a;
--g-g1: a;
--g-g2: a;
--g-f-bg: a;
--g-f-b1: a;
--g-sh: a;
--f-sh1: a;
--f-sh2: a;
--f-sh3: a;
--f-sel-sh: #000;
--fm-off: a;
--mp-sh: a;
--mp-b-bg: #fff;
}
* {
line-height: 1.2em;
}
@@ -379,6 +583,27 @@ html, body {
pre, code, tt, #doc, #doc>code {
font-family: 'scp', monospace, monospace;
}
.ayjump {
position: fixed;
overflow: hidden;
width: 0;
height: 0;
}
html .ayjump:focus {
z-index: 80386;
color: #fff;
color: var(--a-hil);
background: #069;
background: var(--bg-u2);
border: .2em solid var(--a);
box-shadow: none;
outline: none;
width: auto;
height: auto;
top: .5em;
left: .5em;
padding: .5em .7em;
}
#path,
#path * {
font-size: 1em;
@@ -493,6 +718,7 @@ html.y #files thead th {
}
#files td:first-child {
border-radius: .25em 0 0 .25em;
white-space: nowrap;
}
#files td:last-child {
border-radius: 0 .25em .25em 0;
@@ -848,6 +1074,13 @@ html.y #widget.open {
@keyframes spin {
100% {transform: rotate(360deg)}
}
@media (prefers-reduced-motion) {
@keyframes spin { }
}
@keyframes fadein {
0% {opacity: 0}
100% {opacity: 1}
}
#wtoggle {
position: absolute;
white-space: nowrap;
@@ -989,7 +1222,6 @@ html.y #widget.open {
font-size: 1.5em;
padding: .25em .4em;
margin: 0;
outline: none;
}
#ops a.act {
color: #fff;
@@ -1176,7 +1408,7 @@ html {
z-index: 1;
position: fixed;
background: var(--tree-bg);
left: -.75em;
left: -.98em;
width: calc(var(--nav-sz) - 0.5em);
border-bottom: 1px solid var(--bg-u5);
overflow: hidden;
@@ -1219,10 +1451,16 @@ html.c .btn,
html.a .btn {
border-radius: .2em;
}
html.ca .btn {
html.cz .btn {
box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
border-bottom: .2em solid #709;
}
html.dz .btn {
box-shadow: 0 0 0 .1em #080 inset;
}
html.dz .tgl.btn.on {
box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
}
.btn:hover {
color: var(--btn-h-fg);
background: var(--btn-h-bg);
@@ -1234,7 +1472,7 @@ html.ca .btn {
color: var(--btn-1-fg);
text-shadow: none;
}
html.ca .tgl.btn.on {
html.cz .tgl.btn.on {
box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
border-bottom: .2em solid #e90;
}
@@ -1312,7 +1550,8 @@ html.y #tree.nowrap .ntree a+a:hover {
margin: 1em .3em 1em 1em;
padding: 0 1.2em 0 0;
font-size: 4em;
animation: spin 1s linear infinite;
opacity: 0;
animation: 1s linear .15s infinite forwards spin, .2s ease .15s 1 forwards fadein;
position: absolute;
z-index: 9;
}
@@ -1608,6 +1847,7 @@ a.btn,
.full-image img,
.full-image video {
display: inline-block;
outline: none;
width: auto;
height: auto;
max-width: 100%;
@@ -1688,6 +1928,15 @@ html.y #bbox-overlay figcaption a {
.bbox-btn {
position: fixed;
}
.bbox-btn,
#bbox-btns {
opacity: 1;
animation: opacity .2s infinite ease-in-out;
}
.bbox-btn.off,
#bbox-btns.off {
opacity: 0;
}
#bbox-overlay button {
cursor: pointer;
outline: none;
@@ -1732,7 +1981,7 @@ html.y #bbox-overlay figcaption a {
#bbox-halp td {
padding: .2em .5em;
}
#bbox-halp td:first-child {
#bbox-halp td:first-child:not([colspan]) {
text-align: right;
}
.bbox-spinner {
@@ -1912,7 +2161,6 @@ html.y #bbox-overlay figcaption a {
#u2form input {
background: var(--bg-u5);
border: 0px solid var(--bg-u5);
outline: none;
}
#u2err.err {
color: var(--a-dark);
@@ -1973,6 +2221,9 @@ html.y #bbox-overlay figcaption a {
transition: min-height .2s;
margin: 2em 0;
}
#u2tabw.na>table {
display: none;
}
#u2tab {
border-collapse: collapse;
width: calc(100% - 2em);
@@ -2109,7 +2360,6 @@ html.y #bbox-overlay figcaption a {
margin: 0;
padding: 0;
border: none;
outline: none;
}
#u2conf .txtbox {
width: 3em;
@@ -2138,6 +2388,9 @@ html.y #bbox-overlay figcaption a {
position: relative;
bottom: -0.08em;
}
#u2conf input+a.b {
background: var(--u2-b2-bg);
}
html.b #u2conf a.b:hover {
background: var(--btn-h-bg);
}
@@ -2272,9 +2525,11 @@ html.a #pctl a {
margin-right: .5em;
box-shadow: -.02em -.02em .3em rgba(0,0,0,0.2) inset;
}
html.d #pctl,
html.b #pctl {
left: .5em;
}
html.d #ops,
html.c #ops,
html.a #ops {
margin: 1.7em 1.5em 0 1.5em;
@@ -2365,9 +2620,6 @@ html.c #u2cards,
html.a #u2cards {
margin: 0 auto -1em auto;
}
html.a #u2conf input+a.b {
background: var(--u2-b2-bg);
}
html.c #u2foot:empty,
html.a #u2foot:empty {
margin-bottom: -1em;
@@ -2428,6 +2680,9 @@ html.b #acc_info {
html.b #wtoggle {
border-radius: .1em 0 0 0;
}
html.d #barpos,
html.d #barbuf,
html.d #pvol,
html.b #barpos,
html.b #barbuf,
html.b #pvol {
@@ -2471,10 +2726,14 @@ html.b #treeh,
html.b #tree li {
border: none;
}
html.b #tree li {
margin-left: .8em;
}
html.b .ntree a {
padding: .6em .2em;
}
html.b #treepar {
margin-left: .62em;
border-bottom: .2em solid var(--f-h-b1);
}
html.b #wrap {
@@ -2538,6 +2797,17 @@ html.cy #files tbody div a:last-child {
html.dz * {
border-radius: 0 !important;
}
html.d #treepar {
border-bottom: .2em solid var(--f-h-b1);
}
@media (min-width: 70em) {
#barpos,
#barbuf {
@@ -2547,7 +2817,9 @@ html.cy #files tbody div a:last-child {
height: 1.6em;
bottom: auto;
}
html.d #barpos,
html.b #barpos,
html.d #barbuf,
html.b #barbuf {
width: calc(100% - 19em);
left: 8em;
@@ -2559,12 +2831,15 @@ html.cy #files tbody div a:last-child {
#pvol {
max-width: 9em;
}
html.d #ops,
html.b #ops {
padding-left: 1.7em;
}
html.d .opview,
html.b .opview {
margin: 1em;
}
html.d #path,
html.b #path {
padding-left: 1.3em;
}

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>⇆🎉 {{ title }}</title>
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
@@ -139,6 +139,7 @@
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
lang = "{{ lang }}",
dfavico = "{{ favico }}",
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
@@ -148,6 +149,7 @@
have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }},
turbolvl = {{ turbolvl|tojson }},
have_emp = {{ have_emp|tojson }},
txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }},

View File

@@ -36,13 +36,16 @@ var Ls = {
"ot_msg": "msg: send a message to the server log",
"ot_mp": "media player options",
"ot_cfg": "configuration options",
"ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader',
"ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader',
"ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader<br /><br />during uploads, this icon becomes a progress indicator!',
"ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader<br /><br />during uploads, this icon becomes a progress indicator!',
"ab_mkdir": "make directory",
"ab_mkdoc": "new markdown doc",
"ab_msg": "send msg to srv log",
"ay_path": "skip to folders",
"ay_files": "skip to files",
"wt_ren": "rename selected items$NHotkey: F2",
"wt_del": "delete selected items$NHotkey: ctrl-K",
"wt_cut": "cut selected items &lt;small&gt;(then paste somewhere else)&lt;/small&gt;$NHotkey: ctrl-X",
@@ -75,7 +78,7 @@ var Ls = {
"ut_etat": "average &lt;em&gt;total&lt;/em&gt; speed and estimated time until finish",
"uct_ok": "completed successfully",
"uct_ng": "failed / rejected / not-found",
"uct_ng": "no-good: failed / rejected / not-found",
"uct_done": "ok and ng combined",
"uct_bz": "hashing or uploading",
"uct_q": "idle, pending",
@@ -98,6 +101,7 @@ var Ls = {
"cl_favico": "favicon",
"cl_keytype": "key notation",
"cl_hiddenc": "hidden columns",
"cl_reset": "(reset)",
"ct_thumb": "in icon view, toggle icons or thumbnails$NHotkey: T",
"ct_dots": "show hidden files (if server permits)",
@@ -244,6 +248,7 @@ var Ls = {
"md_eshow": "cannot show ",
"xhr403": "403: Access denied\n\ntry pressing F5, maybe you got logged out",
"cf_ok": "sorry about that -- DD" + wah + "oS protection kicked in\n\nthings should resume in about 30 sec\n\nif nothing happens, hit F5 to reload the page",
"tl_xe1": "could not list subfolders:\n\nerror ",
"tl_xe2": "404: Folder not found",
"fl_xe1": "could not list files in folder:\n\nerror ",
@@ -356,13 +361,16 @@ var Ls = {
"ot_msg": "msg: send en beskjed til serverloggen",
"ot_mp": "musikkspiller-instillinger",
"ot_cfg": "andre innstillinger",
"ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren bup',
"ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren "bup"',
"ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren bup<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
"ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren "bup"<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
"ab_mkdir": "lag mappe",
"ab_mkdoc": "nytt dokument",
"ab_msg": "send melding",
"ay_path": "gå videre til mapper",
"ay_files": "gå videre til filer",
"wt_ren": "gi nye navn til de valgte filene$NSnarvei: F2",
"wt_del": "slett de valgte filene$NSnarvei: ctrl-K",
"wt_cut": "klipp ut de valgte filene &lt;small&gt;(for å lime inn et annet sted)&lt;/small&gt;$NSnarvei: ctrl-X",
@@ -418,6 +426,7 @@ var Ls = {
"cl_favico": "favicon",
"cl_keytype": "notasjon for musikalsk dur",
"cl_hiddenc": "skjulte kolonner",
"cl_reset": "(nullstill)",
"ct_thumb": "vis miniatyrbilder istedenfor ikoner$NSnarvei: T",
"ct_dots": "vis skjulte filer (gitt at serveren tillater det)",
@@ -564,6 +573,7 @@ var Ls = {
"md_eshow": "kan ikke vise ",
"xhr403": "403: Tilgang nektet\n\nkanskje du ble logget ut? prøv å trykk F5",
"cf_ok": "beklager -- liten tilfeldig kontroll, alt OK\n\nting skal fortsette om ca. 30 sekunder\n\nhvis ikkeno skjer, trykk F5 for å laste siden på nytt",
"tl_xe1": "kunne ikke hente undermapper:\n\nfeil ",
"tl_xe2": "404: Mappen finnes ikke",
"fl_xe1": "kunne ikke hente filer i mappen:\n\nfeil ",
@@ -589,8 +599,8 @@ var Ls = {
"un_max": "viser de første 2000 filene (bruk filteret for å innsnevre)",
"un_avail": "{0} filer kan slettes",
"un_m2": "sortert etter opplastningstid &ndash; nyeste først:",
"un_no1": "men nei, her var det jaggu ingenting",
"un_no2": "men nei, her var det jaggu ingenting som passer overens med filteret",
"un_no1": "men nei, her var det jaggu ikkeno som slettes kan",
"un_no2": "men nei, her var det jaggu ingenting som passet overens med filteret",
"un_next": "slett de neste {0} filene nedenfor",
"un_del": "slett",
"un_m3": "henter listen med nylig opplastede filer...",
@@ -616,7 +626,7 @@ var Ls = {
"u_hashing": 'les',
"u_upping": 'sender',
"u_cuerr": "kunne ikke laste opp del {0} av {1};\nsikkert harmløst, fortsetter\n\nfil: {2}",
"u_cuerr2": "server nektet opplastningen (del {0} of {1});\n\nfile: {2}\n\nerror ",
"u_cuerr2": "server nektet opplastningen (del {0} av {1});\n\nfile: {2}\n\nerror ",
"u_ehsfin": "server nektet forespørselen om å ferdigstille filen",
"u_ehssrch": "server nektet forespørselen om å utføre søk",
"u_ehsinit": "server nektet forespørselen om å begynne en ny opplastning",
@@ -729,7 +739,7 @@ ebi('op_up2k').innerHTML = (
'<div id="u2notbtn"></div>\n' +
'<div id="u2btn_ct">\n' +
' <div id="u2btn">\n' +
' <div id="u2btn" tabindex="0">\n' +
' <span id="u2bm"></span>\n' + L.ul_btn +
' </div>\n' +
'</div>\n' +
@@ -753,7 +763,7 @@ ebi('op_up2k').innerHTML = (
'</div>\n' +
'<div id="u2tabw"><table id="u2tab">\n' +
'<div id="u2tabw" class="na"><table id="u2tab">\n' +
' <thead>\n' +
' <tr>\n' +
' <td>' + L.utl_name + '</td>\n' +
@@ -827,7 +837,7 @@ ebi('op_cfg').innerHTML = (
' </div>\n' +
'</div>\n' +
'<div><h3>' + L.cl_keytype + '</h3><div id="key_notation"></div></div>\n' +
'<div class="fill"><h3>' + L.cl_hiddenc + '</h3><div id="hcols"></div></div>'
'<div class="fill"><h3>' + L.cl_hiddenc + ' <a href="#" id="hcolsr">' + L.cl_reset + '</h3><div id="hcols"></div></div>'
);
@@ -1000,7 +1010,7 @@ var mpl = (function () {
'<div><h3>' + L.ml_eq + '</h3><div id="audio_eq"></div></div>');
var r = {
"pb_mode": (sread('pb_mode') || 'loop').split('-')[0],
"pb_mode": (sread('pb_mode') || 'next').split('-')[0],
"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
};
bcfg_bind(r, 'preload', 'au_preload', true);
@@ -1491,11 +1501,15 @@ var pbar = (function () {
return;
var sm = bc.w * 1.0 / mp.au.duration,
gk = bc.h + '' + light;
gk = bc.h + '' + light,
dz = themen == 'dz',
dy = themen == 'dy';
if (gradh != gk) {
gradh = gk;
grad = glossy_grad(bc, 85, [35, 40, 37, 35], light ? [45, 56, 50, 45] : [42, 51, 47, 42]);
grad = glossy_grad(bc, dz ? 120 : 85,
dy ? [0, 0, 0, 0] : [35, 40, 37, 35],
dy ? [20, 24, 22, 20] : light ? [45, 56, 50, 45] : [42, 51, 47, 42]);
}
bctx.fillStyle = grad;
for (var a = 0; a < mp.au.buffered.length; a++) {
@@ -1517,18 +1531,20 @@ var pbar = (function () {
if (!mp || !mp.au || isNaN(adur = mp.au.duration) || isNaN(apos = mp.au.currentTime) || apos < 0 || adur < apos)
return; // not-init || unsupp-codec
var sm = bc.w * 1.0 / adur;
var sm = bc.w * 1.0 / adur,
dz = themen == 'dz',
dy = themen == 'dy';
pctx.fillStyle = light ? 'rgba(0,64,0,0.15)' : 'rgba(204,255,128,0.15)';
pctx.fillStyle = light && !dy ? 'rgba(0,64,0,0.15)' : 'rgba(204,255,128,0.15)';
for (var p = 1, mins = adur / 10; p <= mins; p++)
pctx.fillRect(Math.floor(sm * p * 10), 0, 2, pc.h);
pctx.fillStyle = light ? 'rgba(0,64,0,0.5)' : 'rgba(192,255,96,0.5)';
pctx.fillStyle = light && !dy ? 'rgba(0,64,0,0.5)' : 'rgba(192,255,96,0.5)';
for (var p = 1, mins = adur / 60; p <= mins; p++)
pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h);
pctx.font = '.5em sans-serif';
pctx.fillStyle = light ? 'rgba(0,64,0,0.9)' : 'rgba(192,255,96,1)';
pctx.fillStyle = dz ? '#0f0' : dy ? '#999' : light ? 'rgba(0,64,0,0.9)' : 'rgba(192,255,96,1)';
for (var p = 1, mins = adur / 60; p <= mins; p++) {
pctx.fillText(p, Math.floor(sm * p * 60 + 3), pc.h / 3);
}
@@ -1591,11 +1607,18 @@ var vbar = (function () {
if (!mp)
return;
var gh = h + '' + light;
var gh = h + '' + light,
dz = themen == 'dz',
dy = themen == 'dy';
if (gradh != gh) {
gradh = gh;
grad1 = glossy_grad(r.can, 50, light ? [50, 55, 52, 48] : [45, 52, 47, 43], light ? [54, 60, 52, 47] : [42, 51, 47, 42]);
grad2 = glossy_grad(r.can, 205, [10, 15, 13, 10], [16, 20, 18, 16]);
grad1 = glossy_grad(r.can, dz ? 120 : 50,
dy ? [0, 0, 0, 0] : light ? [50, 55, 52, 48] : [45, 52, 47, 43],
dy ? [20, 24, 22, 20] : light ? [54, 60, 52, 47] : [42, 51, 47, 42]);
grad2 = glossy_grad(r.can, dz ? 120 : 205,
dz ? [100, 100, 100, 100] : dy ? [0, 0, 0, 0] : [10, 15, 13, 10],
dz ? [10, 14, 12, 10] : dy ? [90, 90, 90, 90] : [16, 20, 18, 16]);
}
ctx.fillStyle = grad2; ctx.fillRect(0, 0, w, h);
ctx.fillStyle = grad1; ctx.fillRect(0, 0, w * mp.vol, h);
@@ -1694,6 +1717,14 @@ function prev_song(e) {
return song_skip(-1);
}
function dl_song() {
if (!mp || !mp.au)
return;
var url = mp.tracks[mp.au.tid];
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
dl_file(url);
}
function playpause(e) {
@@ -2292,9 +2323,13 @@ function scan_hash(v) {
ts = null;
if (m.length > 3) {
m = /^&[Tt=0]*([0-9]+[Mm:])?0*([0-9]+)[Ss]?$/.exec(m[3]);
if (m) {
ts = parseInt(m[1] || 0) * 60 + parseInt(m[2] || 0);
var tm = /^&[Tt=0]*([0-9]+[Mm:])?0*([0-9]+)[Ss]?$/.exec(m[3]);
if (tm) {
ts = parseInt(tm[1] || 0) * 60 + parseInt(tm[2] || 0);
}
tm = /^&[Tt=0]*([0-9\.]+)-([0-9\.]+)$/.exec(m[3]);
if (tm) {
ts = '' + tm[1] + '-' + tm[2];
}
}
@@ -2330,6 +2365,7 @@ function eval_hash() {
return;
clearInterval(t);
baguetteBox.urltime(ts);
var im = QS('#ggrid a[ref="' + id + '"]');
im.click();
im.scrollIntoView();
@@ -2352,6 +2388,22 @@ function eval_hash() {
(function () {
for (var a = 0; a < 2; a++)
(function (a) {
var d = mknod('a');
d.setAttribute('href', '#');
d.setAttribute('class', 'ayjump');
d.innerHTML = a ? L.ay_path : L.ay_files;
document.body.insertBefore(d, ebi('ops'));
d.onclick = function (e) {
ev(e);
if (a)
QS(treectl.hidden ? '#path a:nth-last-child(2)' : '#treeul a.hl').focus();
else
QS(thegrid.en ? '#ggrid a' : '#files tbody a').focus();
};
})(a);
var d = mknod('div');
d.setAttribute('id', 'acc_info');
document.body.insertBefore(d, ebi('ops'));
@@ -2599,6 +2651,8 @@ var fileman = (function () {
if (!md.hasOwnProperty(k))
continue;
md[k] = (md[k] + '').replace(/[\/\\]/g, '-');
if (k.startsWith('.'))
md[k.slice(1)] = md[k];
}
@@ -3266,9 +3320,10 @@ var showfile = (function () {
for (var a = 0; a < src.length; a++) {
var m = /^([0-9;]+)m/.exec(src[a]);
if (!m) {
if (a || src[a])
out.push('\x1b[' + src[a]);
if (a)
out.push('\x1b[');
out.push(src[a]);
continue;
}
@@ -3781,6 +3836,7 @@ function tree_neigh(n) {
treectl.dir_cb = tree_scrollto;
links[act].click();
links[act].focus();
}
@@ -3920,6 +3976,9 @@ document.onkeydown = function (e) {
if (n !== 0)
return seek_au_rel(n) || true;
if (k == 'KeyY')
return dl_song();
n = k == 'KeyI' ? -1 : k == 'KeyK' ? 1 : 0;
if (n !== 0)
return tree_neigh(n);
@@ -4589,7 +4648,7 @@ var treectl = (function () {
}
function reload_tree() {
var cdir = get_vpath(),
var cdir = r.nextdir || get_vpath(),
links = QSA('#treeul a+a'),
nowrap = QS('#tree.nowrap') && QS('#hovertree.on'),
act = null;
@@ -4695,6 +4754,7 @@ var treectl = (function () {
if (hpush && !no_tree)
get_tree('.', xhr.top);
r.nextdir = xhr.top;
enspin(thegrid.en ? '#gfiles' : '#files');
}
@@ -4717,6 +4777,7 @@ var treectl = (function () {
if (!xhrchk(this, L.fl_xe1, L.fl_xe2))
return;
r.nextdir = null;
var cur = ebi('files').getAttribute('ts');
if (cur && parseInt(cur) > this.ts) {
console.log("reject ls");
@@ -5088,21 +5149,6 @@ function mk_files_header(taglist) {
var filecols = (function () {
var hidden = jread('filecols', []);
if (JSON.stringify(def_hcols) != sread('hfilecols')) {
console.log("applying default hidden-cols");
jwrite('hfilecols', def_hcols);
for (var a = 0; a < def_hcols.length; a++) {
var t = def_hcols[a];
t = t.slice(0, 1).toUpperCase() + t.slice(1);
if (t.startsWith("."))
t = t.slice(1);
if (hidden.indexOf(t) == -1)
hidden.push(t);
}
jwrite("filecols", hidden);
}
var add_btns = function () {
var ths = QSA('#files th>span');
for (var a = 0, aa = ths.length; a < aa; a++) {
@@ -5179,7 +5225,6 @@ var filecols = (function () {
tt.att(QS('#files thead'));
}
};
set_style();
var toggle = function (name) {
var ofs = hidden.indexOf(name);
@@ -5199,6 +5244,31 @@ var filecols = (function () {
set_style();
};
ebi('hcolsr').onclick = function (e) {
ev(e);
reset(true);
};
function reset(force) {
if (force || JSON.stringify(def_hcols) != sread('hfilecols')) {
console.log("applying default hidden-cols");
hidden = [];
jwrite('hfilecols', def_hcols);
for (var a = 0; a < def_hcols.length; a++) {
var t = def_hcols[a];
t = t.slice(0, 1).toUpperCase() + t.slice(1);
if (t.startsWith("."))
t = t.slice(1);
if (hidden.indexOf(t) == -1)
hidden.push(t);
}
jwrite("filecols", hidden);
}
set_style();
}
reset();
try {
var ci = find_file_col('dur'),
i = ci[0],
@@ -5216,6 +5286,7 @@ var filecols = (function () {
"add_btns": add_btns,
"set_style": set_style,
"toggle": toggle,
"reset": reset
};
})();
@@ -5330,7 +5401,7 @@ var mukey = (function () {
})();
var light, theme;
var light, theme, themen;
var settheme = (function () {
var ax = 'abcdefghijklmnopqrstuvwx';
@@ -5338,6 +5409,7 @@ var settheme = (function () {
if (!/^[a-x][yz]/.exec(theme))
theme = dtheme;
themen = theme.split(/ /)[0];
light = !!(theme.indexOf('y') + 1);
function freshen() {
@@ -5351,7 +5423,7 @@ var settheme = (function () {
showfile.setstyle();
var html = [], itheme = ax.indexOf(theme[0]) * 2 + (light ? 1 : 0),
names = ['classic dark', 'classic light', 'pm-monokai', 'flat light', 'vice', 'hotdog stand'];
names = ['classic dark', 'classic light', 'pm-monokai', 'flat light', 'vice', 'hotdog stand', 'hacker', 'hi-con'];
for (var a = 0; a < themes; a++)
html.push('<a href="#" class="btn tgl' + (a == itheme ? ' on' : '') +
@@ -5373,6 +5445,7 @@ var settheme = (function () {
var c = ax[Math.floor(i / 2)],
l = light ? 'y' : 'z';
theme = c + l + ' ' + c + ' ' + l;
themen = c + l;
swrite('theme', theme);
freshen();
}
@@ -5402,7 +5475,7 @@ var settheme = (function () {
L = Ls[this.textContent];
swrite("lang", this.textContent);
freshen();
modal.confirm(L.lang_set, location.reload.bind(location), null);
modal.confirm(Ls.eng.lang_set + "\n\n" + Ls.nor.lang_set, location.reload.bind(location), null);
};
freshen();
@@ -5727,13 +5800,34 @@ function show_md(md, name, div, url, depth) {
});
}
md_plug = {}
md = load_md_plug(md, 'pre');
md = load_md_plug(md, 'post');
var marked_opts = {
headerPrefix: 'md-',
breaks: true,
gfm: true
};
var ext = md_plug.pre;
if (ext)
Object.assign(marked_opts, ext[0]);
try {
clmod(div, 'mdo', 1);
div.innerHTML = marked.parse(md, {
headerPrefix: 'md-',
breaks: true,
gfm: true
});
div.innerHTML = marked.parse(md, marked_opts);
ext = md_plug.post;
ext = ext ? [ext[0].render, ext[0].render2] : [];
for (var a = 0; a < ext.length; a++)
if (ext[a])
try {
ext[a](div);
}
catch (ex) {
console.log(ex);
}
var els = QSA('#epi a');
for (var a = 0, aa = els.length; a < aa; a++) {
var href = els[a].getAttribute('href');
@@ -5823,7 +5917,7 @@ var unpost = (function () {
html.push("<table><thead><tr><td></td><td>time</td><td>size</td><td>file</td></tr></thead><tbody>");
}
else
html.push(filt.value ? L.un_no2 : L.un_no1);
html.push('-- <em>' + (filt.value ? L.un_no2 : L.un_no1) + '</em>');
var mods = [1000, 100, 10];
for (var a = 0; a < res.length; a++) {
@@ -5947,6 +6041,9 @@ function wintitle(txt) {
ebi('path').onclick = function (e) {
if (ctrl(e))
return true;
var a = e.target.closest('a[href]');
if (!a || !(a = a.getAttribute('href') + '') || !a.endsWith('/'))
return;
@@ -5974,9 +6071,11 @@ ebi('files').onclick = ebi('docul').onclick = function (e) {
tgt = e.target.closest('a[hl]');
if (tgt) {
var fun = function () {
showfile.show(noq_href(ebi(tgt.getAttribute('hl'))), tgt.getAttribute('lang'));
}, szs = ft2dict(tgt.closest('tr'))[0].sz,
var a = ebi(tgt.getAttribute('hl')),
fun = function () {
showfile.show(noq_href(a), tgt.getAttribute('lang'));
},
szs = ft2dict(a.closest('tr'))[0].sz,
sz = parseInt(szs.replace(/[, ]/g, ''));
if (sz < 1024 * 1024)

27
copyparty/web/cf.html Normal file
View File

@@ -0,0 +1,27 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
</head>
<body>
<div id="box" style="opacity: 0; font-family: sans-serif">
<h3>please press F5 to reload the page</h3>
<p>sorry for the inconvenience</p>
</div>
<script>
setTimeout(function() {
document.getElementById('box').style.opacity = 1;
}, 500);
parent.toast.ok(30, parent.L.cf_ok);
parent.qsr('#cf_frame');
</script>
</body>
</html>

View File

@@ -1,6 +1,6 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
<title>📝🎉 {{ title }}</title>
<title>📝 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
@@ -127,10 +127,12 @@ write markdown (most html is 🙆 too)
<script>
var last_modified = {{ lastmod }};
var last_modified = {{ lastmod }},
have_emp = {{ have_emp|tojson }},
dfavico = "{{ favico }}";
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};

View File

@@ -20,10 +20,6 @@ var dbg = function () { };
// dbg = console.log
// plugins
var md_plug = {};
// dodge browser issues
(function () {
var ua = navigator.userAgent;
@@ -160,7 +156,7 @@ function copydom(src, dst, lv) {
}
function md_plug_err(ex, js) {
md_plug_err = function (ex, js) {
qsr('#md_errbox');
if (!ex)
return;
@@ -197,50 +193,12 @@ function md_plug_err(ex, js) {
}
function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins)
return md_text;
var find = '\n```copyparty_' + plug_type + '\n';
var ofs = md_text.indexOf(find);
if (ofs === -1)
return md_text;
var ofs2 = md_text.indexOf('\n```', ofs + 1);
if (ofs2 == -1)
return md_text;
var js = md_text.slice(ofs + find.length, ofs2 + 1);
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;';
try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) {
x['ctor']();
delete x['ctor'];
}
md_plug[plug_type] = [x, js];
}
return md;
}
function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, '');
md_plug_err(null);
md_text = load_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post');
md_text = load_md_plug(md_text, 'pre');
md_text = load_md_plug(md_text, 'post');
var marked_opts = {
//headerPrefix: 'h-',
@@ -248,7 +206,7 @@ function convert_markdown(md_text, dest_dom) {
gfm: true
};
var ext = md_plug['pre'];
var ext = md_plug.pre;
if (ext)
Object.assign(marked_opts, ext[0]);
@@ -349,7 +307,7 @@ function convert_markdown(md_text, dest_dom) {
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
}
ext = md_plug['post'];
ext = md_plug.post;
if (ext && ext[0].render)
try {
ext[0].render(md_dom);

View File

@@ -1,6 +1,6 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
<title>📝🎉 {{ title }}</title>
<title>📝 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
@@ -25,10 +25,12 @@
<a href="#" id="repl">π</a>
<script>
var last_modified = {{ lastmod }};
var last_modified = {{ lastmod }},
have_emp = {{ have_emp|tojson }},
dfavico = "{{ favico }}";
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};

View File

@@ -97,7 +97,9 @@
<a href="#" id="repl">π</a>
<script>
var lang="{{ this.args.lang }}";
var lang="{{ lang }}",
dfavico="{{ favico }}";
document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
</script>

View File

@@ -190,6 +190,18 @@ html.y #tth {
color: #000;
background: #fff;
}
#cf_frame {
position: fixed;
z-index: 573;
top: 3em;
left: 50%;
width: 40em;
height: 30em;
margin-left: -20.2em;
border-radius: .4em;
border: .4em solid var(--fg);
box-shadow: 0 2em 4em 1em var(--bg-max);
}
#modal {
position: fixed;
overflow: auto;
@@ -281,15 +293,19 @@ html.y #tth {
max-width: 24em;
}
*:focus,
*:focus+label,
#pctl *:focus,
.btn:focus {
box-shadow: 0 .1em .2em #fc0 inset;
outline: #fc0 solid .1em;
border-radius: .2em;
}
html.y *:focus,
html.y *:focus+label,
html.y #pctl *:focus,
html.y .btn:focus {
box-shadow: 0 .1em .2em #037 inset;
outline: #037 solid .1em;
}
input[type="text"]:focus,
input:not([type]):focus,
@@ -376,11 +392,13 @@ html.y textarea:focus {
padding-left: 2em;
border-left: .3em solid #ddd;
}
.mdo ul>li,
.mdo ol>li {
.mdo ul>li {
margin: .7em 0;
list-style-type: disc;
}
.mdo ol>li {
margin: .7em 0 .7em 2em;
}
.mdo strong {
color: #000;
}

View File

@@ -587,8 +587,8 @@ function up2k_init(subtle) {
function unmodal() {
ebi('u2notbtn').style.display = 'none';
ebi('u2btn').style.display = 'block';
ebi('u2conf').style.opacity = '1';
ebi('u2btn').style.display = '';
ebi('u2notbtn').innerHTML = '';
}
@@ -1062,12 +1062,13 @@ function up2k_init(subtle) {
pvis.drawcard("q");
pvis.changecard(pvis.act);
}
ebi('u2tabw').className = 'ye';
}
function more_one_file() {
fdom_ctr++;
var elm = mknod('div');
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" tabindex="-1" />'.format(fdom_ctr);
ebi('u2form').appendChild(elm);
ebi('file' + fdom_ctr).onchange = gotfile;
}
@@ -1332,7 +1333,8 @@ function up2k_init(subtle) {
}
if (st.todo.upload.length &&
st.busy.upload.length < parallel_uploads) {
st.busy.upload.length < parallel_uploads &&
can_upload_next()) {
exec_upload();
mou_ikkai = true;
}
@@ -1673,6 +1675,8 @@ function up2k_init(subtle) {
return;
}
t.sprs = response.sprs;
var rsp_purl = url_enc(response.purl);
if (rsp_purl !== t.purl || response.name !== t.name) {
// server renamed us (file exists / path restrictions)
@@ -1824,6 +1828,20 @@ function up2k_init(subtle) {
/// upload
//
// decide whether the next queued upload may start now:
// files with the sprs flag (set from the server handshake) can always
// go; otherwise hold off while another chunk of the same file is busy
function can_upload_next() {
    var next = st.todo.upload[0],
        finfo = st.files[next.nfile];

    if (finfo.sprs)
        return true;

    var busy = st.busy.upload;
    for (var i = 0; i < busy.length; i++)
        if (busy[i].nfile == next.nfile)
            return false;

    return true;
}
function exec_upload() {
var upt = st.todo.upload.shift();
st.busy.upload.push(upt);
@@ -2121,8 +2139,8 @@ favico.init();
ebi('ico1').onclick = function () {
var a = favico.txt == this.textContent;
swrite('icot', a ? 'c' : this.textContent);
swrite('icof', a ? null : '000');
swrite('icob', a ? null : '');
swrite('icof', a ? 'fc5' : '000');
swrite('icob', a ? '222' : '');
favico.init();
};

View File

@@ -6,7 +6,8 @@ if (!window['console'])
};
var is_touch = 'ontouchstart' in window,
var wah = '',
is_touch = 'ontouchstart' in window,
is_https = (window.location + '').indexOf('https:') === 0,
IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
@@ -85,15 +86,18 @@ catch (ex) {
}
var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) {
if ((msg + '').indexOf('ResizeObserver') !== -1)
if ((msg + '').indexOf('ResizeObserver') + 1)
return; // chrome issue 809574 (benign, from <video>)
if ((msg + '').indexOf('l2d.js') !== -1)
if ((msg + '').indexOf('l2d.js') + 1)
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
if (!/\.js($|\?)/.exec('' + url))
return; // chrome debugger
if ((url + '').indexOf(' > eval') + 1)
return; // md timer
var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed)
return;
@@ -815,6 +819,14 @@ function sethash(hv) {
}
}
// start a browser download of `url` by clicking
// a synthetic <a download> element
function dl_file(url) {
    console.log('DL [%s]', url);
    var link = mknod('a');
    link.setAttribute('download', '');
    link.setAttribute('href', url);
    link.click();
}
var timer = (function () {
var r = {};
@@ -1347,6 +1359,49 @@ if (ebi('repl'))
ebi('repl').onclick = repl;
var md_plug = {};
// default error reporter for markdown plugins;
// a function expression so other code can replace it
var md_plug_err = function (ex, js) {
    if (!ex)
        return;
    console.log(ex, js);
};
// find a fenced block ```copyparty_<plug_type> inside md_text,
// eval its contents as an object literal (caching the result in the
// global md_plug), and return the markdown with that block removed
function load_md_plug(md_text, plug_type) {
    if (!have_emp)
        return md_text;  // have_emp: template flag gating md plugins (assumed from name — verify)

    var find = '\n```copyparty_' + plug_type + '\n';
    var ofs = md_text.indexOf(find);
    if (ofs === -1)
        return md_text;  // no plugin block of this type

    var ofs2 = md_text.indexOf('\n```', ofs + 1);
    if (ofs2 == -1)
        return md_text;  // unterminated fence; leave document untouched

    // js = fenced body (incl. trailing newline), md = document with the fence cut out
    var js = md_text.slice(ofs + find.length, ofs2 + 1);
    var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);

    var old_plug = md_plug[plug_type];
    if (!old_plug || old_plug[1] != js) {
        // plugin source changed since last load; (re)evaluate it
        // NOTE: eval of document-supplied code, gated only by have_emp above
        js = 'const x = { ' + js + ' }; x;';
        try {
            var x = eval(js);
            if (x['ctor']) {
                x['ctor']();  // run the one-shot constructor, then drop it
                delete x['ctor'];
            }
        }
        catch (ex) {
            md_plug[plug_type] = null;  // forget the broken plugin
            md_plug_err(ex, js);
            return md;
        }
        md_plug[plug_type] = [x, js];  // cache [instance, source] for change detection
    }

    return md;
}
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
@@ -1372,12 +1427,24 @@ var favico = (function () {
var b64;
try {
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
//console.log('f1');
}
catch (ex) {
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
function x(m, v) { return String.fromCharCode('0x' + v); });
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
catch (e1) {
try {
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
function x(m, v) { return String.fromCharCode('0x' + v); })));
//console.log('f2');
}
catch (e2) {
try {
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
//console.log('f3');
}
catch (e3) {
//console.log('fe');
return;
}
}
}
if (!r.tag) {
@@ -1390,9 +1457,13 @@ var favico = (function () {
r.init = function () {
clearTimeout(r.to);
scfg_bind(r, 'txt', 'icot', '', r.upd);
scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
scfg_bind(r, 'bg', 'icob', '222', r.upd);
var dv = (window.dfavico || '').trim().split(/ +/),
fg = dv.length < 2 ? 'fc5' : dv[1].toLowerCase() == 'none' ? '' : dv[1],
bg = dv.length < 3 ? '222' : dv[2].toLowerCase() == 'none' ? '' : dv[2];
scfg_bind(r, 'txt', 'icot', dv[0], r.upd);
scfg_bind(r, 'fg', 'icof', fg, r.upd);
scfg_bind(r, 'bg', 'icob', bg, r.upd);
r.upd();
};
@@ -1401,6 +1472,7 @@ var favico = (function () {
})();
var cf_cha_t = 0;
function xhrchk(xhr, prefix, e404) {
if (xhr.status < 400 && xhr.status >= 200)
return true;
@@ -1411,6 +1483,24 @@ function xhrchk(xhr, prefix, e404) {
if (xhr.status == 404)
return toast.err(0, prefix + e404);
return toast.err(0, prefix + xhr.status + ": " + (
(xhr.response && xhr.response.err) || xhr.responseText));
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
fun = toast.err;
if (xhr.status == 503 && /\bDD(?:wah){0}[o]S [Pp]rote[c]tion|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
var now = Date.now(), td = now - cf_cha_t;
if (td < 15000)
return;
cf_cha_t = now;
errtxt = 'Cloudflare DD' + wah + 'oS protection kicked in\n\n<strong>trying to fix it...</strong>';
fun = toast.warn;
qsr('#cf_frame');
var fr = mknod('iframe');
fr.src = '/?cf_challenge';
fr.setAttribute('id', 'cf_frame');
document.body.appendChild(fr);
}
return fun(0, prefix + xhr.status + ": " + errtxt);
}

View File

@@ -13,6 +13,9 @@
# other stuff
## [`changelog.md`](changelog.md)
* occasionally grabbed from github release notes
## [`rclone.md`](rclone.md)
* notes on using rclone as a fuse client/server

2626
docs/changelog.md Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -200,6 +200,9 @@ git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
# convert releasenotes to changelog
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | "▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ \n# \(.created_at) `\(.tag_name)` \(.name)\n\n\(.body)\n\n\n"' | sed -r 's/^# ([0-9]{4}-)([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})Z /# \1\2\3-\4\5 /' > changelog.md
# push to multiple git remotes
git config -l | grep '^remote'
git remote add all git@github.com:9001/copyparty.git

View File

@@ -1,10 +1,10 @@
FROM alpine:3.15
FROM alpine:3.16
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \
ver_marked=4.0.16 \
ver_marked=4.0.17 \
ver_mde=2.16.1 \
ver_codemirror=5.65.4 \
ver_codemirror=5.65.6 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
@@ -32,7 +32,7 @@ RUN mkdir -p /z/dist/no-pk \
&& npm install \
&& npm i grunt uglify-js -g ) \
&& (tar -xf codemirror.tgz \
&& cd CodeMirror-$ver_codemirror \
&& cd codemirror5-$ver_codemirror \
&& npm install ) \
&& (tar -xf mde.tgz \
&& cd easy-markdown-editor* \
@@ -87,7 +87,7 @@ RUN cd marked-$ver_marked \
# build codemirror
COPY codemirror.patch /z/
RUN cd CodeMirror-$ver_codemirror \
RUN cd codemirror5-$ver_codemirror \
&& patch -p1 < /z/codemirror.patch \
&& sed -ri '/^var urlRE = /d' mode/gfm/gfm.js \
&& npm run build \

View File

@@ -23,4 +23,4 @@ purge:
sh:
@printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
docker run --rm -it `docker images -aq | head -n 1` /bin/bash
docker run --rm -it `docker images -aq | head -n 1` /bin/ash

View File

@@ -1,5 +1,5 @@
diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v4.0.6;
adds linetracking to marked.js v4.0.17;
add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js
+++ b/src/Lexer.js

View File

@@ -90,6 +90,15 @@ function have() {
have setuptools
have wheel
have twine
# remove type hints to support python < 3.9
rm -rf build/pypi
mkdir -p build/pypi
cp -pR setup.py README.md LICENSE copyparty tests bin scripts/strip_hints build/pypi/
cd build/pypi
tar --strip-components=2 -xf ../strip-hints-0.1.10.tar.gz strip-hints-0.1.10/src/strip_hints
python3 -c 'from strip_hints.a import uh; uh("copyparty")'
./setup.py clean2
./setup.py sdist bdist_wheel --universal

View File

@@ -76,7 +76,7 @@ while [ ! -z "$1" ]; do
no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
fast) zopf=100 ; ;;
fast) zopf= ; ;;
lang) shift;langs="$1"; ;;
*) help ; ;;
esac
@@ -106,7 +106,7 @@ tmpdir="$(
[ $repack ] && {
old="$tmpdir/pe-copyparty"
echo "repack of files in $old"
cp -pR "$old/"*{dep-j2,dep-ftp,copyparty} .
cp -pR "$old/"*{j2,ftp,copyparty} .
}
[ $repack ] || {
@@ -130,8 +130,8 @@ tmpdir="$(
mv MarkupSafe-*/src/markupsafe .
rm -rf MarkupSafe-* markupsafe/_speedups.c
mkdir dep-j2/
mv {markupsafe,jinja2} dep-j2/
mkdir j2/
mv {markupsafe,jinja2} j2/
echo collecting pyftpdlib
f="../build/pyftpdlib-1.5.6.tar.gz"
@@ -143,8 +143,8 @@ tmpdir="$(
mv pyftpdlib-release-*/pyftpdlib .
rm -rf pyftpdlib-release-* pyftpdlib/test
mkdir dep-ftp/
mv pyftpdlib dep-ftp/
mkdir ftp/
mv pyftpdlib ftp/
echo collecting asyncore, asynchat
for n in asyncore.py asynchat.py; do
@@ -154,6 +154,24 @@ tmpdir="$(
wget -O$f "$url" || curl -L "$url" >$f)
done
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
true || (
echo collecting strip-hints
f=../build/strip-hints-0.1.10.tar.gz
[ -e $f ] ||
(url=https://files.pythonhosted.org/packages/9c/d4/312ddce71ee10f7e0ab762afc027e07a918f1c0e1be5b0069db5b0e7542d/strip-hints-0.1.10.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv strip-hints-0.1.10/src/strip_hints .
rm -rf strip-hints-* strip_hints/import_hooks*
sed -ri 's/[a-z].* as import_hooks$/"""a"""/' strip_hints/*.py
cp -pR ../scripts/strip_hints/ .
)
cp -pR ../scripts/py2/ .
# msys2 tar is bad, make the best of it
echo collecting source
[ $clean ] && {
@@ -170,6 +188,9 @@ tmpdir="$(
for n in asyncore.py asynchat.py; do
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
done
# remove type hints before build instead
(cd copyparty; python3 ../../scripts/strip_hints/a.py; rm uh)
}
ver=
@@ -274,17 +295,23 @@ rm have
tmv "$f"
done
[ $repack ] ||
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 "$pybin" ../scripts/uncomment.py
[ $repack ] || {
# uncomment
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 "$pybin" ../scripts/uncomment.py
f=dep-j2/jinja2/constants.py
# py2-compat
#find | grep -E '\.py$' | while IFS= read -r x; do
# sed -ri '/: TypeAlias = /d' "$x"; done
}
f=j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"
grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 |
grep -rLE '^#[^a-z]*coding: utf-8' j2 |
while IFS= read -r f; do
(echo "# coding: utf-8"; cat "$f") >t
tmv "$f"
@@ -313,7 +340,7 @@ find | grep -E '\.(js|html)$' | while IFS= read -r f; do
done
gzres() {
command -v pigz &&
command -v pigz && [ $zopf ] &&
pk="pigz -11 -I $zopf" ||
pk='gzip'
@@ -354,7 +381,8 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
}
[ $use_zdir ] && {
arcs=("$zdir"/arc.*)
arc="${arcs[$RANDOM % ${#arcs[@]} ] }"
n=$(( $RANDOM % ${#arcs[@]} ))
arc="${arcs[n]}"
echo "using $arc"
tar -xf "$arc"
for f in copyparty/web/*.gz; do
@@ -364,7 +392,7 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
echo gen tarlist
for d in copyparty dep-j2 dep-ftp; do find $d -type f; done |
for d in copyparty j2 ftp py2; do find $d -type f; done | # strip_hints
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1

View File

@@ -1,13 +1,23 @@
#!/bin/bash
set -ex
rm -rf unt
mkdir -p unt/srv
cp -pR copyparty tests unt/
cd unt
python3 ../scripts/strip_hints/a.py
pids=()
for py in python{2,3}; do
PYTHONPATH=
[ $py = python2 ] && PYTHONPATH=../scripts/py2
export PYTHONPATH
nice $py -m unittest discover -s tests >/dev/null &
pids+=($!)
done
python3 scripts/test/smoketest.py &
python3 ../scripts/test/smoketest.py &
pids+=($!)
for pid in ${pids[@]}; do

View File

@@ -11,6 +11,7 @@ copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/fsutil.py,
copyparty/ftpd.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
@@ -42,6 +43,7 @@ copyparty/web/browser.html,
copyparty/web/browser.js,
copyparty/web/browser2.html,
copyparty/web/copyparty.gif,
copyparty/web/cf.html,
copyparty/web/dd,
copyparty/web/dd/2.png,
copyparty/web/dd/3.png,

View File

@@ -379,9 +379,20 @@ def run(tmp, j2, ftp):
t.daemon = True
t.start()
ld = (("", ""), (j2, "dep-j2"), (ftp, "dep-ftp"))
ld = (("", ""), (j2, "j2"), (ftp, "ftp"), (not PY2, "py2"))
ld = [os.path.join(tmp, b) for a, b in ld if not a]
# skip 1
# enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance
if sys.version_info < (3, 10) and False:
sys.path.insert(0, ld[0])
from strip_hints.a import uh
uh(tmp + "/copyparty")
# skip 0
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
run_s(ld)
else:

View File

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
$_py -c 'import jinja2' 2>/dev/null || continue
printf '%s\n' "$_py"
mv $dir/{,x.}dep-j2
mv $dir/{,x.}j2
break
done)"

72
scripts/strip_hints/a.py Normal file
View File

@@ -0,0 +1,72 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
from strip_hints import strip_file_to_string
# list unique types used in hints:
# rm -rf unt && cp -pR copyparty unt && (cd unt && python3 ../scripts/strip_hints/a.py)
# diff -wNarU1 copyparty unt | grep -E '^\-' | sed -r 's/[^][, ]+://g; s/[^][, ]+[[(]//g; s/[],()<>{} -]/\n/g' | grep -E .. | sort | uniq -c | sort -n
def pr(m):
    """Emit progress text *m* on stderr, flushing so it shows immediately."""
    err = sys.stderr
    err.write(m)
    err.flush()
def uh(top):
    """Strip type hints from every .py file under *top*, in-place.

    Writes a marker file "<top>/uh" on completion so repeated calls are
    no-ops; uses a multiprocessing pool when available, falling back to
    a serial loop otherwise.
    """
    if os.path.exists(top + "/uh"):
        return  # already unhinted by a previous run

    # pr("building support for your python ver")
    pr("unhinting")

    # collect every python file below top
    files = []
    for (dp, _, fns) in os.walk(top):
        for fn in fns:
            if not fn.endswith(".py"):
                continue

            fp = os.path.join(dp, fn)
            files.append(fp)

    try:
        import multiprocessing as mp

        with mp.Pool(os.cpu_count()) as pool:
            pool.map(uh1, files)
    except Exception as ex:
        # pool (or its context-manager form) unavailable; do it serially
        print("\nnon-mp fallback due to {}\n".format(ex))
        for fp in files:
            uh1(fp)

    pr("k\n\n")
    with open(top + "/uh", "wb") as f:
        f.write(b"a")  # marker file: existence is the flag, content is irrelevant
def uh1(fp):
    """Strip type hints from the single file *fp*, rewriting it in-place.

    Also neuters ``typing``/``types``/``collections.abc`` imports, replacing
    each with a ``raise`` of the same indentation (keeping the line count,
    and thus tracebacks, unchanged).
    """
    pr(".")
    cs = strip_file_to_string(fp, no_ast=True, to_empty=True)

    # raw string so "\." reaches the regex engine as an escaped dot;
    # a plain literal here is an invalid escape (SyntaxWarning on py3.12+)
    libs = r"typing|types|collections\.abc"
    ptn = re.compile(r"^(\s*)(from (?:{0}) import |import (?:{0})\b).*".format(libs))

    # remove expensive imports too
    lns = []
    for ln in cs.split("\n"):
        m = ptn.match(ln)
        if m:
            ln = m.group(1) + "raise Exception()"

        lns.append(ln)

    cs = "\n".join(lns)
    with open(fp, "wb") as f:
        f.write(cs.encode("utf-8"))
if __name__ == "__main__":
    # CLI entrypoint: unhint the tree rooted at the current directory
    uh(".")

View File

@@ -58,13 +58,13 @@ class CState(threading.Thread):
remotes.append("?")
remotes_ok = False
m = []
ta = []
for conn, remote in zip(self.cs, remotes):
stage = len(conn.st)
m.append(f"\033[3{colors[stage]}m{remote}")
ta.append(f"\033[3{colors[stage]}m{remote}")
m = " ".join(m)
print(f"{m}\033[0m\n\033[A", end="")
t = " ".join(ta)
print(f"{t}\033[0m\n\033[A", end="")
def allget(cs, urls):

View File

@@ -72,6 +72,8 @@ def tc1(vflags):
for _ in range(10):
try:
os.mkdir(td)
if os.path.exists(td):
break
except:
time.sleep(0.1) # win10

View File

@@ -3,6 +3,7 @@
from __future__ import print_function, unicode_literals
import io
import os
import sys
import tokenize
@@ -10,6 +11,7 @@ import tokenize
def uncomment(fpath):
"""modified https://stackoverflow.com/a/62074206"""
print(".", end="", flush=True)
with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")
@@ -66,9 +68,15 @@ def uncomment(fpath):
def main():
print("uncommenting", end="", flush=True)
for f in sys.argv[1:]:
print(".", end="", flush=True)
uncomment(f)
try:
import multiprocessing as mp
with mp.Pool(os.cpu_count()) as pool:
pool.map(uncomment, sys.argv[1:])
except Exception as ex:
print("\nnon-mp fallback due to {}\n".format(ex))
for f in sys.argv[1:]:
uncomment(f)
print("k")

View File

@@ -56,6 +56,7 @@ class Cfg(Namespace):
textfiles="",
doctitle="",
html_head="",
lang="eng",
theme=0,
themes=0,
turbo=0,

View File

@@ -36,6 +36,7 @@ class Cfg(Namespace):
"rsp_slp": 0,
"s_wr_slp": 0,
"s_wr_sz": 512 * 1024,
"lang": "eng",
"theme": 0,
"themes": 0,
"turbo": 0,
@@ -84,7 +85,7 @@ class TestVFS(unittest.TestCase):
pass
def assertAxs(self, dct, lst):
t1 = list(sorted(dct.keys()))
t1 = list(sorted(dct))
t2 = list(sorted(lst))
self.assertEqual(t1, t2)
@@ -207,10 +208,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/", "*"), (False, False, False, False, False))
self.assertEqual(vfs.can_access("/", "k"), (True, True, False, False, False))
self.assertEqual(vfs.can_access("/a", "*"), (True, False, False, False, False))
self.assertEqual(vfs.can_access("/a", "k"), (True, False, False, False, False))
# breadth-first construction
vfs = AuthSrv(
@@ -278,7 +279,7 @@ class TestVFS(unittest.TestCase):
n = au.vfs
# root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, None)
self.assertEqual(n.realpath, "")
self.assertAxs(n.axs.uread, [])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(len(n.nodes), 1)

View File

@@ -90,7 +90,10 @@ def get_ramdisk():
class NullBroker(object):
def put(*args):
def say(*args):
pass
def ask(*args):
pass
@@ -128,7 +131,7 @@ class VHttpSrv(object):
class VHttpConn(object):
def __init__(self, args, asrv, log, buf):
self.s = VSock(buf)
self.sr = Unrecv(self.s)
self.sr = Unrecv(self.s, None)
self.addr = ("127.0.0.1", "42069")
self.args = args
self.asrv = asrv