# Compare commits

This comparison spans 425 commits, from `7000123a8b` through `a2af61cf6f`; the file changes are summarized below.
## .gitignore (vendored), 13 changes
    @@ -5,13 +5,16 @@ __pycache__/
    MANIFEST.in
    MANIFEST
    copyparty.egg-info/
    buildenv/
    build/
    dist/
    sfx/
    py2/
    .venv/

    /buildenv/
    /build/
    /dist/
    /py2/
    /sfx/
    /unt/
    /log/

    # ide
    *.sublime-workspace
## .vscode/settings.json (vendored), 25 changes
    @@ -23,7 +23,6 @@
    "terminal.ansiBrightWhite": "#ffffff",
    },
    "python.testing.pytestEnabled": false,
    "python.testing.nosetestsEnabled": false,
    "python.testing.unittestEnabled": true,
    "python.testing.unittestArgs": [
    "-v",
    @@ -35,18 +34,40 @@
    "python.linting.pylintEnabled": true,
    "python.linting.flake8Enabled": true,
    "python.linting.banditEnabled": true,
    "python.linting.mypyEnabled": true,
    "python.linting.mypyArgs": [
    "--ignore-missing-imports",
    "--follow-imports=silent",
    "--show-column-numbers",
    "--strict"
    ],
    "python.linting.flake8Args": [
    "--max-line-length=120",
    "--ignore=E722,F405,E203,W503,W293,E402",
    "--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
    ],
    "python.linting.banditArgs": [
    "--ignore=B104"
    ],
    "python.linting.pylintArgs": [
    "--disable=missing-module-docstring",
    "--disable=missing-class-docstring",
    "--disable=missing-function-docstring",
    "--disable=wrong-import-position",
    "--disable=raise-missing-from",
    "--disable=bare-except",
    "--disable=invalid-name",
    "--disable=line-too-long",
    "--disable=consider-using-f-string"
    ],
    // python3 -m isort --py=27 --profile=black copyparty/
    "python.formatting.provider": "black",
    "editor.formatOnSave": true,
    "[html]": {
    "editor.formatOnSave": false,
    },
    "[css]": {
    "editor.formatOnSave": false,
    },
    "files.associations": {
    "*.makefile": "makefile"
    },
## README.md, 376 changes
@@ -9,11 +9,19 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser

* server only needs `py2.7` or `py3.3+`, all dependencies optional
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+`

📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland

📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)


## get the app

<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>

(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)


## readme toc
@@ -36,7 +44,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [tabs](#tabs) - the main tabs in the ui
* [hotkeys](#hotkeys) - the browser has the following hotkeys
* [navpane](#navpane) - switching between breadcrumbs or navpane
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
* [thumbnails](#thumbnails) - press `g` or `田` to toggle grid-view instead of the file listing
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
* [uploading](#uploading) - drag files/folders into the web-browser to upload
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
@@ -47,13 +55,19 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config) - using arguments or config files, or a mix of both
* [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags
* [ftp-server](#ftp-server) - an FTP server can be started using `--ftp 3921`
* [file indexing](#file-indexing) - enables dedup and music search ++
* [exclude-patterns](#exclude-patterns) - to save some time
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
* [periodic rescan](#periodic-rescan) - filesystem monitoring
* [upload rules](#upload-rules) - set upload rules using volflags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
* [upload events](#upload-events) - trigger a script/program on each upload
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [themes](#themes)
* [complete examples](#complete-examples)
* [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -75,9 +89,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - there are two self-contained "binaries"
* [sfx](#sfx) - the self-contained "binary"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
@@ -90,7 +105,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down

download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!

running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; see `-h` for help if you want [accounts and volumes](#accounts-and-volumes) etc
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)

some recommended options:
* `-e2dsa` enables general [file indexing](#file-indexing)
@@ -98,7 +113,7 @@ some recommended options:
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access (see `--help-ls`)
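for instance, a single command combining the recommended options above might look like this (the path, username, and password are placeholders):

```
# share /mnt/music read-only for everyone and read-write for user foo,
# with general file indexing and music-tag indexing enabled
python3 copyparty-sfx.py -e2dsa -e2ts -a foo:bar -v /mnt/music:/music:r:rw,foo
```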
### on servers
@@ -146,6 +161,7 @@ feature summary
* ☑ multiprocessing (actual multithreading)
* ☑ volumes (mountpoints)
* ☑ [accounts](#accounts-and-volumes)
* ☑ [ftp-server](#ftp-server)
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ [up2k](#uploading): js, resumable, multithreaded
@@ -155,18 +171,19 @@ feature summary
* download
* ☑ single files in browser
* ☑ [folders as zip / tar files](#zip-downloads)
* ☑ FUSE client (read-only)
* ☑ [FUSE client](https://github.com/9001/copyparty/tree/hovudstraum/bin#copyparty-fusepy) (read-only)
* browser
* ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls and opus transcoding)
* ☑ image gallery with webm player
* ☑ textfile browser with syntax hilighting
* ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow
* ☑ ...of images using Pillow, pyvips, or FFmpeg
* ☑ ...of videos using FFmpeg
* ☑ ...of audio (spectrograms) using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
* server indexing
* ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size
@@ -190,6 +207,7 @@ project goals / philosophy
* inverse linux philosophy -- do all the things, and do an *okay* job
* quick drop-in service to get a lot of features in a pinch
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
* but the resumable multithreaded uploads are p slick ngl
* run anywhere, support everything
* as many web-browsers and python versions as possible
* every browser should at least be able to browse, download, upload files
@@ -228,11 +246,25 @@ some improvement ideas

## general bugs

* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* [the database can get stuck](https://github.com/9001/copyparty/issues/10)
* has only happened once but that is once too many
* luckily not dangerous for file integrity and doesn't really stop uploads or anything like that
* but would really appreciate some logs if anyone ever runs into it again
* probably more, pls let me know

## not my bugs

* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded

* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)

* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...

* Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug

@@ -249,14 +281,20 @@ some improvement ideas

* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty

* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
* not really, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible


# accounts and volumes

per-folder, per-user permissions
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)

a quick summary can be seen using `--help-accounts`

configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
@@ -280,7 +318,7 @@ examples:
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable accesskeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
* `c,fk=4` sets the `fk` volume-flag to 4, meaning each file gets a 4-character accesskey
* `c,fk=4` sets the `fk` volflag to 4, meaning each file gets a 4-character accesskey
* `u1` can upload files, browse the folder, and see the generated accesskeys
* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
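a compact sketch of those pieces combined (users, passwords, and paths are invented for illustration):

```
# alice can read and write the webroot, everyone else can only read it;
# "inc" is write-only for anonymous users and read-write for alice
python3 copyparty-sfx.py \
  -a alice:hunter2 \
  -v /srv/pub::r:rw,alice \
  -v /srv/inc:inc:w:rw,alice
```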
@@ -312,8 +350,9 @@ the browser has the following hotkeys (always qwerty)
* `I/K` prev/next folder
* `M` parent folder (or unexpand current)
* `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails)
* `G` toggle list / [grid view](#thumbnails) -- same as `田` bottom-right
* `T` toggle thumbnails / icons
* `ESC` close various things
* `ctrl-X` cut selected files/folders
* `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder
@@ -332,19 +371,24 @@ the browser has the following hotkeys (always qwerty)
* `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P` play/pause (also starts playing the folder)
* `Y` download file
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `F` toggle fullscreen
* `S` toggle selection
* `R` rotate clockwise (shift=ccw)
* `Y` download file
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `V` loop
* `M` mute
* `C` continue playing next video
* `V` loop entire file
* `[` loop range (start)
* `]` loop range (end)
* when the navpane is open:
* `A/D` adjust tree width
* in the [grid view](#thumbnails):
@@ -365,24 +409,31 @@ switching between breadcrumbs or navpane

click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing)

* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder
* `[+]` and `[-]` (or hotkeys `A`/`D`) adjust the size
* `[🎯]` jumps to the currently open folder
* `[📃]` toggles between showing folders and textfiles
* `[📌]` shows the name of all parent folders in a docked panel
* `[a]` toggles automatic widening as you go deeper
* `[↵]` toggles wordwrap
* `[👀]` show full name on hover (if wordwrap is off)


## thumbnails

press `g` to toggle grid-view instead of the file listing, and `t` toggles icons / thumbnails
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails



it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
* pyvips is 3x faster than Pillow, Pillow is 3x faster than FFmpeg
* disable thumbnails for specific volumes with volflag `dthumb` for all, or `dvthumb` / `dathumb` / `dithumb` for video/audio/images only

audio files are covnerted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)

images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`

in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾
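as an illustration of the volflags above (the volume path is hypothetical):

```
# keep image thumbnails but skip video thumbnails and audio spectrograms
# for this one volume, without disabling thumbnails globally
python3 copyparty-sfx.py -v /mnt/camdump:cams:r:c,dvthumb:c,dathumb
```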
## zip downloads
@@ -411,13 +462,13 @@ you can also zip a selection of files or folders by clicking them in the browser

## uploading

drag files/folders into the web-browser to upload
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy))

this initiates an upload using `up2k`; there are two uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
* `[🚀] up2k`, the fancy one
* `[🚀] up2k`, the good / fancy one

you can also undo/delete uploads by using `[🧯]` [unpost](#unpost)
NB: you can undo/delete your own uploads with `[🧯]` [unpost](#unpost)

up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
@@ -429,19 +480,19 @@ up2k has several advantages:
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved

see [up2k](#up2k) for details on how it works
see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)



**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)

**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress

the up2k UI is the epitome of polished inutitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[🥔]` shows a simpler UI for faster uploads from slow devices
* `[💭]` ask for confirmation before files are added to the queue
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser

@@ -453,7 +504,7 @@ and then theres the tabs below it,
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued

note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
note that since up2k has to read each file twice, `[🎈] bup` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)

if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
@@ -473,8 +524,6 @@ the files will be hashed on the client-side, and each hash is sent to the server
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much

adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)


### unpost

@@ -562,9 +611,11 @@ and there are *two* editors

* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`

* enabling the audio equalizer can help make gapless albums fully gapless in some browsers (chrome), so consider leaving it on with all the values at zero

* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)

* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider [./contrib/media-osd-bgone.ps1](contrib/#media-osd-bgoneps1)

* click the bottom-left `π` to open a javascript prompt for debugging
@@ -587,18 +638,36 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)

add the argument `-e2ts` to also scan/index tags from music files, which brings us over to:
the `raw` field allows for more complex stuff such as `( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )` which finds all songs by either nhato or taishi, excluding collabs (terrible example, why would you do that)

for the above example to work, add the commandline argument `-e2ts` to also scan/index tags from music files, which brings us over to:


# server config

using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
* or click the `[reload cfg]` button in the control-panel when logged in as admin


## ftp-server

an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit TLS (ftpes)

* based on [pyftpdlib](https://github.com/giampaolo/pyftpdlib)
* needs a dedicated port (cannot share with the HTTP/HTTPS API)
* uploads are not resumable -- delete and restart if necessary
* runs in active mode by default, you probably want `--ftp-pr 12000-13000`
* if you enable both `ftp` and `ftps`, the port-range will be divided in half
* some older software (filezilla on debian-stable) cannot passive-mode with TLS
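a minimal invocation based on the flags above (port numbers are only examples):

```
# plain FTP on port 3921; active mode is the default so also open a passive range,
# and add --ftps on top of this for explicit TLS (needs pyopenssl)
python3 copyparty-sfx.py --ftp 3921 --ftp-pr 12000-13000
```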
## file indexing

file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
enables dedup and music search ++

file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volflags, or a mix of both.

through arguments:
* `-e2d` enables file indexing on upload
@@ -607,18 +676,25 @@ through arguments:
* `-e2t` enables metadata indexing on upload
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex
* `-e2v` verfies file integrity at startup, comparing hashes from the db
* `-e2vu` patches the database with the new hashes from the filesystem
* `-e2vp` panics and kills copyparty instead

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
* `-v ~/music::r:c,d2ts` same except only affecting tags

note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences:
### exclude-patterns

to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
@@ -627,12 +703,29 @@ similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noi

if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`

### filesystem guards

avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, skipping any symlinks or bind-mounts to another HDD for example

and/or you can `--xvol` / `:c,xvol` to ignore all symlinks leaving the volume's top directory, but still allow bind-mounts pointing elsewhere

**NB: only affects the indexer** -- users can still access anything inside a volume, unless shadowed by another volume

### periodic rescan

filesystem monitoring; if copyparty is not the only software doing stuff on your filesystem, you may want to enable periodic rescans to keep the index up to date

argument `--re-maxage 60` will rescan all volumes every 60 sec, same as volflag `:c,scan=60` to specify it per-volume

uploads are disabled while a rescan is happening, so rescans will be delayed by `--db-act` (default 10 sec) when there is write-activity going on (uploads, renames, ...)


## upload rules

set upload rules using volume flags, some examples:
set upload rules using volflags, some examples:

* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
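a sketch combining several of these volflags on one write-only volume (the paths are made up):

```
# anonymous uploads between 1 KiB and 3 MiB, keep at least 4 GiB free,
# and sort incoming files into per-day subfolders
python3 copyparty-sfx.py -v /mnt/inc:inc:w:c,sz=1k-3m:c,df=4g:c,rotf=%Y/%m/%d
```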
@@ -651,21 +744,27 @@ you can also set transaction limits which apply per-IP and per-volume, but these

files can be autocompressed on upload, either on user-request (if config allows) or forced by server-config

* volume flag `gz` allows gz compression
* volume flag `xz` allows lzma compression
* volume flag `pk` **forces** compression on all files
* volflag `gz` allows gz compression
* volflag `xz` allows lzma compression
* volflag `pk` **forces** compression on all files
* url parameter `pk` requests compression with server-default algorithm
* url parameter `gz` or `xz` requests compression with a specific algorithm
* url parameter `xz` requests xz compression

things to note,
* the `gz` and `xz` arguments take a single optional argument, the compression level (range 0 to 9)
* the `pk` volume flag takes the optional argument `ALGORITHM,LEVEL` which will then be forced for all uploads, for example `gz,9` or `xz,0`
* the `pk` volflag takes the optional argument `ALGORITHM,LEVEL` which will then be forced for all uploads, for example `gz,9` or `xz,0`
* default compression is gzip level 9
* all upload methods except up2k are supported
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected

some examples,
* `-v inc:inc:w:c,pk=xz,0`
  folder named inc, shared at inc, write-only for everyone, forces xz compression at level 0
* `-v inc:inc:w:c,pk`
  same write-only inc, but forces gz compression (default) instead of xz
* `-v inc:inc:w:c,gz`
  allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4`
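assuming the last volume above (`-v inc:inc:w:c,gz`), a client can then request compression per-upload; hostname, password, and filename are placeholders:

```
# ask the server to gzip this upload at level 4
curl -b cppwd=hunter2 -F act=bput -F f=@bigfile.log 'http://127.0.0.1:3923/inc?gz=4'
```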
## database location
@@ -674,7 +773,7 @@ in-volume (`.hist/up2k.db`, default) or somewhere else

copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff

this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
this can instead be kept in a single place using the `--hist` argument, or the `hist=` volflag, or a mix of both:
* `--hist ~/.cache/copyparty -v ~/music::r:c,hist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)

note:
@@ -710,29 +809,34 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy

## file parser plugins

provide custom parsers to index additional tags
provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)

copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volflag), there is a default timeout of 30sec, and only files which contain audio get analyzed by default (see ay/an/ad below)

* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:c,mtp=.bpm=~/bin/audio-bpm.py:c,mtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly

*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files (default), `an` only do non-audio files, or `ad` do all files (d as in dontcare)

* "audio file" also means videos btw, as long as there is an audio stream
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll

you can control how the parser is killed if it times out with option `kt` killing the entire process tree (default), `km` just the main process, or `kn` let it continue running until copyparty is terminated

if something doesn't work, try `--mtag-v` for verbose error messages


## upload events

trigger a script/program on each upload like so:

```
-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
-v /mnt/inc:inc:w:c,mte=+x1:c,mtp=x1=ad,kn,/usr/bin/notify-send
```

so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `x1` to the list of tags to index (`mte`), and using `/usr/bin/notify-send` to "provide" tag `x1` for any filetype (`ad`) with kill-on-timeout disabled (`kn`)

that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
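since the hook only receives the uploaded file's path as argument 1, any small script can stand in for `notify-send`; a hypothetical `/usr/local/bin/on-upload.sh` could look like this (treating stdout as the tag value is an assumption, not stated above):

```
#!/bin/bash
# log each upload with a timestamp, then emit a value for the x1 tag
# (assumption: the parser's stdout is used as the tag value)
fn="$1"
printf '%s uploaded: %s\n' "$(date -u +%FT%TZ)" "$fn" >> /var/log/copyparty-uploads.log
echo "ok"
```

wired up the same way as above: `-v /mnt/inc:inc:w:c,mte=+x1:c,mtp=x1=ad,kn,/usr/local/bin/on-upload.sh`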
@@ -743,10 +847,53 @@ and it will occupy the parsing threads, so fork anything expensive, or if you wa
if this becomes popular maybe there should be a less janky way to do it actually


## hiding from google

tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:

* `--no-robots` adds HTTP (`X-Robots-Tag`) and HTML (`<meta>`) headers with `noindex, nofollow` globally
* volflag `[...]:c,norobots` does the same thing for that single volume
* volflag `[...]:c,robots` ALLOWS search-engine crawling for that volume, even if `--no-robots` is set globally

also, `--force-js` disables the plain HTML folder listing, making things harder to parse for search engines
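the robots.txt route is just a file in the webroot; for example (the webroot path is hypothetical):

```
# ask all well-behaved crawlers to stay out of the whole site
printf 'User-agent: *\nDisallow: /\n' > /srv/pub/robots.txt
```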
## themes

you can change the default theme with `--theme 2`, and add your own themes by modifying `browser.css` or providing your own css to `--css-browser`, then telling copyparty they exist by increasing `--themes`

<table><tr><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/165864907-17e2ac7d-319d-4f25-8718-2f376f614b51.png"><img src="https://user-images.githubusercontent.com/241032/165867551-fceb35dd-38f0-42bb-bef3-25ba651ca69b.png"></a>
0. classic dark</td><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/168644399-68938de5-da9b-445f-8d92-b51c74b5f345.png"><img src="https://user-images.githubusercontent.com/241032/168644404-8e1a2fdc-6e59-4c41-905e-ba5399ed686f.png"></a>
2. flat pm-monokai</td><td width="33%" align="center"><a href="https://user-images.githubusercontent.com/241032/165864901-db13a429-a5da-496d-8bc6-ce838547f69d.png"><img src="https://user-images.githubusercontent.com/241032/165867560-aa834aef-58dc-4abe-baef-7e562b647945.png"></a>
4. vice</td></tr><tr><td align="center"><a href="https://user-images.githubusercontent.com/241032/165864905-692682eb-6fb4-4d40-b6fe-27d2c7d3e2a7.png"><img src="https://user-images.githubusercontent.com/241032/165867555-080b73b6-6d85-41bb-a7c6-ad277c608365.png"></a>
1. classic light</td><td align="center"><a href="https://user-images.githubusercontent.com/241032/168645276-fb02fd19-190a-407a-b8d3-d58fee277e02.png"><img src="https://user-images.githubusercontent.com/241032/168645280-f0662b3c-9764-4875-a2e2-d91cc8199b23.png"></a>
3. flat light
</td><td align="center"><a href="https://user-images.githubusercontent.com/241032/165864898-10ce7052-a117-4fcf-845b-b56c91687908.png"><img src="https://user-images.githubusercontent.com/241032/165867562-f3003d45-dd2a-4564-8aae-fed44c1ae064.png"></a>
5. <a href="https://blog.codinghorror.com/a-tribute-to-the-windows-31-hot-dog-stand-color-scheme/">hotdog stand</a></td></tr></table>

the classname of the HTML tag is set according to the selected theme, which is used to set colors as css variables ++

* each theme *generally* has a dark theme (even numbers) and a light theme (odd numbers), showing in pairs
* the first theme (theme 0 and 1) is `html.a`, second theme (2 and 3) is `html.b`
* if a light theme is selected, `html.y` is set, otherwise `html.z` is
* so if the dark edition of the 2nd theme is selected, you use any of `html.b`, `html.z`, `html.bz` to specify rules

see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where the color variables are set, and there's layout-specific stuff near the bottom


## complete examples

* read-only music server with bpm and key scanning
  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`
* read-only music server
  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`

* ...with bpm and key scanning
  `-mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`

* ...with a read-write folder for `kevin` whose password is `okgo`
  `-a kevin:okgo -v /mnt/nas/inc:/inc:rw,kevin`

* ...with logging to disk
  `-lo log/cpp-%Y-%m%d-%H%M%S.txt.xz`


# browser support
@@ -781,7 +928,7 @@ TLDR: yes
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*3` using a wasm decoder which consumes a bit more power
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server

quick summary of more eccentric web-browsers trying to view a directory index:

@@ -792,7 +939,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u` |
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
| **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |


@@ -801,8 +949,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
interact with copyparty using non-browser clients

* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
@@ -831,7 +979,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv

you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
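all three password mechanisms from curl (password and URL are placeholders):

```
curl -b cppwd=hunter2 http://127.0.0.1:3923/music/     # cookie
curl 'http://127.0.0.1:3923/music/?pw=hunter2'         # url query
curl -u ignored:hunter2 http://127.0.0.1:3923/music/   # basic-auth, password field
```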
# up2k
@@ -851,17 +999,29 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie

up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)

regarding the frequent server log message during uploads;
`6.0M 106M/s 2.77G 102.9M/s n948 thank 4/0/3/1 10042/7198 00:01:09`
* this chunk was `6 MiB`, uploaded at `106 MiB/s`
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left


## why chunk-hashes

a single sha512 would be better, right?

this is due to `crypto.subtle` not providing a streaming api (or the option to seed the sha512 hasher with a starting hash)
this is due to `crypto.subtle` [not yet](https://github.com/w3c/webcrypto/issues/73) providing a streaming api (or the option to seed the sha512 hasher with a starting hash)

as a result, the hashes are much less useful than they could have been (search the server by sha512, provide the sha512 in the response http headers, ...)

however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such)

* both the [browser uploader](#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) does this now, allowing for fast uploading even from plaintext http

hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported

* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids


# performance
@@ -891,6 +1051,7 @@ when uploading files,


* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
* optionally you can switch to the lightweight potato ui by clicking the `[🥔]`
* switching to another browser-tab also works, the favicon will update every 10 seconds in that case
* unlikely to be a problem, but can happen when uploding many small files, or your internet is too fast, or PC too slow
@@ -899,16 +1060,28 @@ when uploading files,
|
||||
|
||||
some notes on hardening
|
||||
|
||||
on public copyparty instances with anonymous upload enabled:
|
||||
* option `-s` is a shortcut to set the following options:
|
||||
* `--no-thumb` disables thumbnails and audio transcoding to stop copyparty from running `FFmpeg`/`Pillow`/`VIPS` on uploaded files, which is a [good idea](https://www.cvedetails.com/vulnerability-list.php?vendor_id=3611) if anonymous upload is enabled
|
||||
* `--no-mtag-ff` uses `mutagen` to grab music tags instead of `FFmpeg`, which is safer and faster but less accurate
|
||||
* `--dotpart` hides uploads from directory listings while they're still incoming
|
||||
* `--no-robots` and `--force-js` makes life harder for crawlers, see [hiding from google](#hiding-from-google)
|
||||
|
||||
* users can upload html/css/js which will evaluate for other visitors in a few ways,
|
||||
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
|
||||
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
|
||||
* option `-ss` is a shortcut for the above plus:
|
||||
  * `--no-logues` and `--no-readme` disable support for readmes and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary `<script>` tags
|
||||
  * `--unpost 0`, `--no-del`, `--no-mv` disable all move/delete support
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
    * however, note that if you edit one file it will also affect the other copies
|
||||
* `--vague-403` returns a "404 not found" instead of "403 forbidden" which is a common enterprise meme
|
||||
* `--nih` removes the server hostname from directory listings
|
||||
|
||||
other misc:
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
* `-lo cpp-%Y-%m%d-%H%M%S.txt.xz` enables logging to disk
|
||||
* `-ls **,*,ln,p,r` does a scan on startup for any dangerous symlinks
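as a rough example (the folder and volume names are made up), a throwaway public dump using the strictest shortcut could be launched like this:

```sh
# hypothetical: anonymous read-write dump folder with the -sss hardening preset
python3 copyparty-sfx.py -sss -v /srv/dump:dump:rw
```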
|
||||
|
||||
other misc notes:
|
||||
|
||||
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
|
||||
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
* combine this with volflag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
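a sketch of what that could look like (account name, password and paths are made up, and it assumes the usual `-a user:password` account syntax):

```sh
# hypothetical: "ed" gets read-write, anonymous visitors only get `g` (direct links only),
# and the fk volflag appends per-file accesskeys to the URLs that read-access users see
python3 copyparty-sfx.py -a ed:wark -v /srv/pub:pub:g:rw,ed:c,fk
```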
|
||||
|
||||
|
||||
## gotchas
|
||||
@@ -970,6 +1143,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
| GET | `?txt=iso-8859-1` | ...with specific charset |
|
||||
| GET | `?th` | get image/video at URL as thumbnail |
|
||||
| GET | `?th=opus` | convert audio file to 128kbps opus |
|
||||
| GET | `?th=caf` | ...in the iOS-proprietary container |
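for example, grabbing a file as 128kbps opus with curl, authenticating through the `pw` url-parameter (hostname, filepath and password are made up):

```sh
curl 'http://127.0.0.1:3923/music/song.flac?th=opus&pw=wark' -o song.opus
```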
|
||||
|
||||
| method | body | result |
|
||||
|--|--|--|
|
||||
@@ -1025,15 +1199,22 @@ mandatory deps:
|
||||
|
||||
install these to enable bonus features
|
||||
|
||||
enable ftp-server:
|
||||
* for just plaintext FTP, `pyftpdlib` (already built into the SFX)
|
||||
* with TLS encryption, `pyftpdlib pyopenssl`
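for example, grabbing both with pip (adjust to your python setup):

```sh
python3 -m pip install --user -U pyftpdlib pyopenssl
```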
|
||||
|
||||
enable music tags:
|
||||
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||
|
||||
enable [thumbnails](#thumbnails) of...
|
||||
* **images:** `Pillow` (requires py2.7 or py3.5+)
|
||||
* **images:** `Pillow` and/or `pyvips` and/or `ffmpeg` (requires py2.7 or py3.5+)
|
||||
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
|
||||
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
|
||||
* **AVIF pictures:** `pillow-avif-plugin`
|
||||
* **HEIF pictures:** `pyvips` or `ffmpeg` or `pyheif-pillow-opener` (requires Linux or a C compiler)
|
||||
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
|
||||
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
|
||||
|
||||
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
|
||||
|
||||
|
||||
## install recommended deps
|
||||
@@ -1051,13 +1232,7 @@ these are standalone programs and will never be imported / evaluated by copypart
|
||||
|
||||
# sfx
|
||||
|
||||
there are two self-contained "binaries":
|
||||
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
|
||||
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
|
||||
|
||||
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
|
||||
|
||||
pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky
|
||||
the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
|
||||
|
||||
|
||||
## sfx repack
|
||||
@@ -1065,46 +1240,66 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
|
||||
reduce the size of an sfx by removing features
|
||||
|
||||
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, unless you're on windows, in which case you need msys2 or WSL)
|
||||
* `525k` size of original sfx.py as of v0.11.30
|
||||
* `315k` after `./scripts/make-sfx.sh re no-ogv`
|
||||
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
|
||||
* `393k` size of original sfx.py as of v1.1.3
|
||||
* `310k` after `./scripts/make-sfx.sh re no-cm`
|
||||
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
|
||||
|
||||
the features you can opt to drop are
|
||||
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
|
||||
* `cm`/easymde, the "fancy" markdown editor, saves ~92k
|
||||
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
|
||||
* `hl`, prism, the syntax highlighter, saves ~41k
|
||||
* `fnt`, source-code-pro, the monospace font, saves ~9k
|
||||
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
|
||||
|
||||
for the `re`pack to work, first run one of the sfx'es once to unpack it
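a possible sequence, reusing one of the repack commands from the list above:

```sh
# any run unpacks the sfx; starting it and hitting ctrl-c is enough
python3 copyparty-sfx.py
# then repack without easymde and prism
./scripts/make-sfx.sh re no-cm no-hl
```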
|
||||
|
||||
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
|
||||
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL)
|
||||
|
||||
|
||||
# install on android
|
||||
|
||||
install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
|
||||
```sh
|
||||
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
|
||||
apt update && apt -y full-upgrade && apt update && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install --user -U copyparty
|
||||
echo $?
|
||||
```
|
||||
|
||||
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
|
||||
|
||||
if you want thumbnails, `apt -y install ffmpeg`
|
||||
|
||||
* or if you want to use vips instead, `apt -y install libvips && python -m pip install --user -U wheel && python -m pip install --user -U pyvips && (cd /data/data/com.termux/files/usr/lib/; ln -s libgobject-2.0.so{,.0}; ln -s libvips.so{,.42})`
|
||||
|
||||
|
||||
# reporting bugs
|
||||
|
||||
ideas for context to include in bug reports
|
||||
|
||||
in general, commandline arguments (and config file if any)
|
||||
|
||||
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
|
||||
```
|
||||
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
|
||||
```
|
||||
|
||||
if there's a wall of base64 in the log (thread stacks) then please include that, especially if you run into something freezing up or getting stuck, for example `OperationalError('database is locked')` -- alternatively you can visit `/?stack` to see the stacks live, for example http://127.0.0.1:3923/?stack
|
||||
|
||||
|
||||
# building
|
||||
|
||||
## dev env setup
|
||||
|
||||
mostly optional; if you need a working env for vscode or similar
|
||||
you need python 3.9 or newer due to type hints
|
||||
|
||||
the rest is mostly optional; if you need a working env for vscode or similar
|
||||
|
||||
```sh
|
||||
python3 -m venv .venv
|
||||
. .venv/bin/activate
|
||||
pip install jinja2 # mandatory
|
||||
pip install jinja2 strip_hints # MANDATORY
|
||||
pip install mutagen # audio metadata
|
||||
pip install pyftpdlib # ftp server
|
||||
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
|
||||
pip install black bandit pylint flake8 # vscode tooling
|
||||
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
|
||||
```
|
||||
|
||||
|
||||
@@ -1123,8 +1318,8 @@ mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
|
||||
then build the sfx using any of the following examples:
|
||||
|
||||
```sh
|
||||
./scripts/make-sfx.sh # both python and sh editions
|
||||
./scripts/make-sfx.sh no-sh gz # just python with gzip
|
||||
./scripts/make-sfx.sh # regular edition
|
||||
./scripts/make-sfx.sh gz no-cm # gzip-compressed + no fancy markdown editor
|
||||
```
|
||||
|
||||
|
||||
@@ -1135,10 +1330,7 @@ also builds the sfx so skip the sfx section above
|
||||
in the `scripts` folder:
|
||||
|
||||
* run `make -C deps-docker` to build all dependencies
|
||||
* `git tag v1.2.3 && git push origin --tags`
|
||||
* upload to pypi with `make-pypi-release.(sh|bat)`
|
||||
* create github release with `make-tgz-release.sh`
|
||||
* create sfx with `make-sfx.sh`
|
||||
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx
|
||||
|
||||
|
||||
# todo
|
||||
@@ -1165,7 +1357,7 @@ roughly sorted by priority
|
||||
* up2k partials ui
|
||||
* feels like there isn't much point
|
||||
* cache sha512 chunks on client
|
||||
* too dangerous
|
||||
* too dangerous -- overtaken by turbo mode
|
||||
* comment field
|
||||
* nah
|
||||
* look into android thumbnail cache file format
|
||||
|
||||
@@ -2,9 +2,14 @@
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* faster than browsers
|
||||
* early beta, if something breaks just restart it
|
||||
* if something breaks just restart it
|
||||
|
||||
|
||||
# [`partyjournal.py`](partyjournal.py)
|
||||
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
|
||||
* outputs a standalone html file
|
||||
* optional mapping from IP-addresses to nicknames
|
||||
|
||||
|
||||
# [`copyparty-fuse.py`](copyparty-fuse.py)
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
|
||||
@@ -42,6 +42,7 @@ import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
@@ -495,7 +496,7 @@ class Gateway(object):
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
@@ -45,6 +45,7 @@ import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
@@ -443,7 +444,7 @@ class Gateway(object):
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
@@ -11,14 +11,18 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import platform
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
@@ -38,7 +42,7 @@ except:
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
|
||||
"""
|
||||
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
@@ -93,6 +101,41 @@ def html_dec(txt):
|
||||
)
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
def __init__(self, base_url, pw):
|
||||
self.base_url = base_url
|
||||
self.pw = pw
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
@@ -135,8 +179,7 @@ class Gateway(object):
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -159,20 +202,29 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, **ka):
|
||||
tid = get_tid()
|
||||
if self.pw:
|
||||
ck = "cppwd=" + self.pw
|
||||
try:
|
||||
ka["headers"]["Cookie"] = ck
|
||||
except:
|
||||
ka["headers"] = {"Cookie": ck}
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
@@ -182,9 +234,12 @@ class Gateway(object):
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
return self.parse_jls(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
@@ -200,40 +255,27 @@ class Gateway(object):
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
def parse_jls(self, datasrc):
|
||||
rsp = b""
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
buf = datasrc.read(1024 * 32)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
rsp += buf
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
@@ -262,6 +304,7 @@ class CPPF(Fuse):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
self.pw = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -271,7 +314,7 @@ class CPPF(Fuse):
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
@@ -536,6 +579,8 @@ class CPPF(Fuse):
|
||||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
@@ -568,9 +613,25 @@ class CPPF(Fuse):
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
register_wtf8()
|
||||
if WINDOWS:
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
@@ -578,7 +639,7 @@ def main():
|
||||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
bin/dbtool.py
@@ -8,7 +8,10 @@ import sqlite3
|
||||
import argparse
|
||||
|
||||
DB_VER1 = 3
|
||||
DB_VER2 = 4
|
||||
DB_VER2 = 5
|
||||
|
||||
BY_PATH = None
|
||||
NC = None
|
||||
|
||||
|
||||
def die(msg):
|
||||
@@ -57,8 +60,13 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
hit = d2.execute(q, (w1[:16], w1)).fetchone()
|
||||
|
||||
if not hit:
|
||||
miss += 1
|
||||
if verbose:
|
||||
@@ -70,27 +78,32 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
n = 0
|
||||
miss = {}
|
||||
nmiss = 0
|
||||
for w1, k, v in d1.execute("select * from mt"):
|
||||
for w1s, k, v in d1.execute("select * from mt"):
|
||||
|
||||
n += 1
|
||||
if n % 100_000 == 0:
|
||||
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
if w2:
|
||||
w2 = w2[0]
|
||||
|
||||
v2 = None
|
||||
if w2:
|
||||
v2 = d2.execute(
|
||||
"select v from mt where w = ? and +k = ?", (w2, k)
|
||||
"select v from mt where w = ? and +k = ?", (w2[:16], k)
|
||||
).fetchone()
|
||||
if v2:
|
||||
v2 = v2[0]
|
||||
@@ -124,7 +137,7 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
|
||||
for k, v in sorted(miss.items()):
|
||||
if v:
|
||||
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
|
||||
print(f"{n1} has {v:7} more {k:<7} tags than {n2}")
|
||||
|
||||
print(f"in total, {nmiss} missing tags in {n2}\n")
|
||||
|
||||
@@ -132,47 +145,75 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
def copy_mtp(d1, d2, tag, rm):
|
||||
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
|
||||
n = 0
|
||||
ndone = 0
|
||||
for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
ncopy = 0
|
||||
nskip = 0
|
||||
for w1s, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
n += 1
|
||||
if n % 25_000 == 0:
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ncopy} copied, {nskip} skipped\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
if not w2:
|
||||
continue
|
||||
|
||||
w2 = w2[0]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
|
||||
w2s = w2[0][:16]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2s, k)).fetchone()
|
||||
if hit:
|
||||
hit = hit[0]
|
||||
|
||||
if hit != v:
|
||||
ndone += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
|
||||
if NC and hit is not None:
|
||||
nskip += 1
|
||||
continue
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w2, k, v))
|
||||
ncopy += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2s, k))
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w2s, k, v))
|
||||
if rm:
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2s,))
|
||||
|
||||
d2.commit()
|
||||
print(f"copied {ndone} {tag} tags over")
|
||||
print(f"copied {ncopy} {tag} tags over, skipped {nskip}")
|
||||
|
||||
|
||||
def examples():
|
||||
print(
|
||||
"""
|
||||
# clearing the journal
|
||||
./dbtool.py up2k.db
|
||||
|
||||
# copy tags ".bpm" and "key" from old.db to up2k.db, and remove the mtp flag from matching files (so copyparty won't run any mtps on it)
|
||||
./dbtool.py -ls up2k.db
|
||||
./dbtool.py -src old.db up2k.db -cmp
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy key
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
global NC, BY_PATH
|
||||
os.system("")
|
||||
print()
|
||||
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db", help="database to work on")
|
||||
ap.add_argument("-h2", action="store_true", help="show examples")
|
||||
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
|
||||
|
||||
ap2 = ap.add_argument_group("informational / read-only stuff")
|
||||
@@ -185,11 +226,29 @@ def main():
|
||||
ap2.add_argument(
|
||||
"-rm-mtp-flag",
|
||||
action="store_true",
|
||||
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
|
||||
help="when an mtp tag is copied over, also mark that file as done, so copyparty won't run any mtps on those files",
|
||||
)
|
||||
ap2.add_argument("-vac", action="store_true", help="optimize DB")
|
||||
|
||||
ap2 = ap.add_argument_group("behavior modifiers")
|
||||
ap2.add_argument(
|
||||
"-nc",
|
||||
action="store_true",
|
||||
help="no-clobber; don't replace/overwrite existing tags",
|
||||
)
|
||||
ap2.add_argument(
|
||||
"-by-path",
|
||||
action="store_true",
|
||||
help="match files based on location rather than warks (content-hash), use this if the databases have different wark salts",
|
||||
)
|
||||
|
||||
ar = ap.parse_args()
|
||||
if ar.h2:
|
||||
examples()
|
||||
return
|
||||
|
||||
NC = ar.nc
|
||||
BY_PATH = ar.by_path
|
||||
|
||||
for v in [ar.db, ar.src]:
|
||||
if v and not os.path.exists(v):
|
||||
|
||||
@@ -6,9 +6,13 @@ some of these rely on libraries which are not MIT-compatible
|
||||
|
||||
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
|
||||
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
|
||||
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
|
||||
|
||||
these do not have any problematic dependencies:
|
||||
these invoke standalone programs which are GPL or similar, so they are legally fine for most purposes:
|
||||
|
||||
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
|
||||
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)
|
||||
|
||||
these do not have any problematic dependencies at all:
|
||||
|
||||
* [cksum.py](./cksum.py) computes various checksums
|
||||
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
|
||||
@@ -38,7 +42,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
||||
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options
|
||||
|
||||
|
||||
## usage with volume-flags
|
||||
## usage with volflags
|
||||
|
||||
instead of affecting all volumes, you can set the options for just one volume like so:
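for example (the volume path and the tag name `x1` are just placeholders, following the mousepad.py example further down):

```sh
# hypothetical: provide tag "x1" via mousepad.py only for the /v1 volume
python3 -m copyparty -v srv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py
```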
|
||||
|
||||
|
||||
@@ -19,18 +19,18 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-ss", "13",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-map", "0:a:0",
|
||||
"-ac", "1",
|
||||
"-ar", "22050",
|
||||
"-t", "300",
|
||||
"-f", "f32le",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"300",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
@@ -23,15 +23,15 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-map", "0:a:0",
|
||||
"-t", "300",
|
||||
"-sample_fmt", "s16",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-t", b"300",
|
||||
b"-sample_fmt", b"s16",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ except:
|
||||
|
||||
"""
|
||||
calculates various checksums for uploads,
|
||||
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
|
||||
usage: -mtp crc32,md5,sha1,sha256b=ad,bin/mtag/cksum.py
|
||||
"""
|
||||
|
||||
|
||||
|
||||
bin/mtag/image-noexif.py (new file)
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
remove exif tags from uploaded images
|
||||
|
||||
dependencies:
|
||||
exiftool
|
||||
|
||||
about:
|
||||
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
|
||||
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
|
||||
|
||||
if the original image is modified in-place, then copyparty will keep the original
|
||||
hash in up2k.db for a while (until the next volume rescan), so if the image is
|
||||
reuploaded after a rescan then the upload will be renamed and kept as a dupe
|
||||
|
||||
alternatively you could switch the logic around, making a copy of the original
|
||||
image into a subfolder named "exif" and modify the original in-place, but then
|
||||
up2k.db will be out of sync until the next rescan, so any additional uploads
|
||||
of the same image will get symlinked (deduplicated) to the modified copy
|
||||
instead of the original in "exif"
|
||||
|
||||
or maybe delete the original image after processing, that would kinda work too
|
||||
|
||||
example copyparty config to use this:
|
||||
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
|
||||
|
||||
explained:
|
||||
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
|
||||
enable file analysis on upload (e2ts),
|
||||
append "noexif" to the list of known tags (mtp),
|
||||
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
|
||||
do this on all uploads with the file extension "jpg" or "jpeg",
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
and your python must have sqlite3 support compiled in
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import filecmp
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
cwd, fn = os.path.split(sys.argv[1])
|
||||
if os.path.basename(cwd) == "noexif":
|
||||
return
|
||||
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
f2 = os.path.join(b"noexif", f1)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-o",
|
||||
b"noexif/",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
if not os.path.exists(f2):
|
||||
print("failed")
|
||||
return
|
||||
|
||||
if filecmp.cmp(f1, f2, shallow=False):
|
||||
print("clean")
|
||||
else:
|
||||
print("exif")
|
||||
|
||||
# lastmod = os.path.getmtime(f1)
|
||||
# times = (int(time.time()), int(lastmod))
|
||||
# os.utime(f2, times)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
||||
@@ -4,8 +4,8 @@ set -e
|
||||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
@@ -101,8 +101,11 @@ export -f dl_files
|
||||
|
||||
|
||||
github_tarball() {
|
||||
rm -rf g
|
||||
mkdir g
|
||||
cd g
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
tee ../json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
@@ -111,8 +114,11 @@ github_tarball() {
|
||||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
mv * ../tgz
|
||||
cd ..
|
||||
}
|
||||
|
||||
|
||||
@@ -127,6 +133,7 @@ gitlab_tarball() {
|
||||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
@@ -138,10 +145,17 @@ install_keyfinder() {
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
[ -e $HOME/pe/keyfinder ] && {
|
||||
echo found a keyfinder build in ~/pe, skipping
|
||||
return
|
||||
}
|
||||
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd "$td"
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
ls -al
|
||||
|
||||
tar -xf tgz
|
||||
rm tgz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
@@ -208,6 +222,22 @@ install_vamp() {
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
|
||||
) <vamp-plugin-sdk-2.9.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
./configure --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
@@ -215,8 +245,11 @@ install_vamp() {
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
make -f Makefile.linux -j4
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
@@ -230,6 +263,7 @@ install_vamp() {
|
||||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
cd "$td"
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
|
||||
@@ -13,7 +13,7 @@ try:
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
"""
|
||||
@@ -24,13 +24,13 @@ dep: ffmpeg
|
||||
def det():
|
||||
# fmt: off
|
||||
cmd = [
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-i", fsenc(sys.argv[1]),
|
||||
"-f", "framemd5",
|
||||
"-"
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-i", fsenc(sys.argv[1]),
|
||||
b"-f", b"framemd5",
|
||||
b"-"
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
|
||||
bin/mtag/mousepad.py (new file)
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
"""
|
||||
mtp test -- opens a texteditor
|
||||
|
||||
usage:
|
||||
-vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py
|
||||
|
||||
explained:
|
||||
c,mte: list of tags to index in this volume
|
||||
c,mtp: add new tag provider
|
||||
x1: dummy tag to provide
|
||||
ad: dontcare if audio or not
|
||||
p: priority 1 (run after initial tag-scan with ffprobe or mutagen)
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
env = os.environ.copy()
|
||||
env["DISPLAY"] = ":0.0"
|
||||
|
||||
if False:
|
||||
# open the uploaded file
|
||||
fp = sys.argv[-1]
|
||||
else:
|
||||
# display stdin contents (`oth_tags`)
|
||||
fp = "/dev/stdin"
|
||||
|
||||
p = sp.Popen(["/usr/bin/mousepad", fp])
|
||||
p.communicate()
|
||||
|
||||
|
||||
main()
|
||||
bin/mtag/rclone-upload.py (new file)
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess as sp
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
_ = r"""
|
||||
first checks the tag "vidchk" which must be "ok" to continue,
|
||||
then uploads all files to some cloud storage (RCLONE_REMOTE)
|
||||
and DELETES THE ORIGINAL FILES if rclone returns 0 ("success")
|
||||
|
||||
deps:
|
||||
rclone
|
||||
|
||||
usage:
|
||||
-mtp x2=t43200,ay,p2,bin/mtag/rclone-upload.py
|
||||
|
||||
explained:
|
||||
t43200: timeout 12h
|
||||
ay: only process files which contain audio (including video with audio)
|
||||
p2: set priority 2 (after vidchk's suggested priority of 1),
|
||||
so the output of vidchk will be passed in here
|
||||
|
||||
complete usage example as vflags along with vidchk:
|
||||
-vsrv/vidchk:vidchk:r:rw,ed:c,e2dsa,e2ts,mtp=vidchk=t600,p,bin/mtag/vidchk.py:c,mtp=rupload=t43200,ay,p2,bin/mtag/rclone-upload.py:c,mte=+vidchk,rupload
|
||||
|
||||
setup: see https://rclone.org/drive/
|
||||
|
||||
if you wanna use this script standalone / separately from copyparty,
|
||||
either set CONDITIONAL_UPLOAD False or provide the following stdin:
|
||||
{"vidchk":"ok"}
|
||||
"""
|
||||
|
||||
|
||||
RCLONE_REMOTE = "notmybox"
|
||||
CONDITIONAL_UPLOAD = True
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
if CONDITIONAL_UPLOAD:
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
chk = md.get("vidchk", None)
|
||||
if chk != "ok":
|
||||
print(f"vidchk={chk}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
dst = f"{RCLONE_REMOTE}:".encode("utf-8")
|
||||
cmd = [b"rclone", b"copy", b"--", fsenc(fp), dst]
|
||||
|
||||
t0 = time.time()
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
print("rclone failed", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"{time.time() - t0:.1f} sec")
|
||||
os.unlink(fsenc(fp))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/mtag/res/twitter-unmute.user.js (new file)
@@ -0,0 +1,21 @@
|
||||
// ==UserScript==
|
||||
// @name twitter-unmute
|
||||
// @namespace http://ocv.me/
|
||||
// @version 0.1
|
||||
// @description memes
|
||||
// @author ed <irc.rizon.net>
|
||||
// @match https://twitter.com/*
|
||||
// @icon https://www.google.com/s2/favicons?domain=twitter.com
|
||||
// @grant GM_addStyle
|
||||
// ==/UserScript==
|
||||
|
||||
function grunnur() {
|
||||
setInterval(function () {
|
||||
//document.querySelector('div[aria-label="Unmute"]').click();
|
||||
document.querySelector('video').muted = false;
|
||||
}, 200);
|
||||
}
|
||||
|
||||
var scr = document.createElement('script');
|
||||
scr.textContent = '(' + grunnur.toString() + ')();';
|
||||
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
|
||||
bin/mtag/very-bad-idea.py (new executable file)
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
* the `key` command simulates keyboard input
|
||||
* the `x` command executes other xdotool commands
|
||||
* the `c` command executes arbitrary unix commands
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
(iOS devices have to rely on the web-UI)
|
||||
|
||||
goes without saying, but this is HELLA DANGEROUS,
|
||||
GIVES RCE TO ANYONE WHO HAS UPLOAD PERMISSIONS
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- startup script:
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# create qr code
|
||||
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
|
||||
/usr/bin/feh -x /dev/shm/cpp-qr.png &
|
||||
|
||||
# reposition and make topmost (with janky raspbian support)
|
||||
( sleep 0.5
|
||||
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
|
||||
wmctrl -r :ACTIVE: -b toggle,above || true
|
||||
|
||||
ps aux | grep -E 'sleep[ ]7\.27' ||
|
||||
while true; do
|
||||
w=$(xdotool getactivewindow)
|
||||
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
|
||||
xdotool windowactivate $w
|
||||
xdotool windowfocus $w
|
||||
sleep 7.27 || break
|
||||
done &
|
||||
xeyes # distraction window to prevent ^w from closing the qr-code
|
||||
) &
|
||||
|
||||
# bail if copyparty is already running
|
||||
ps aux | grep -E '[3] copy[p]arty' && exit 0
|
||||
|
||||
# dumb chrome wrapper to allow autoplay
|
||||
cat >/usr/local/bin/chromium-browser <<'EOF'
|
||||
#!/bin/bash
|
||||
set -e
|
||||
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
|
||||
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
|
||||
if txt.startswith(b"msg="):
|
||||
open_post(txt)
|
||||
else:
|
||||
open_url(fp)
|
||||
|
||||
|
||||
def open_post(txt):
|
||||
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
open_url(txt)
return
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
elif k == "x":
|
||||
sp.call(["xdotool"] + v.split(" "))
|
||||
elif k == "c":
|
||||
env = os.environ.copy()
|
||||
while " " in v:
|
||||
v1, v2 = v.split(" ", 1)
|
||||
if "=" not in v1:
|
||||
break
|
||||
|
||||
ek, ev = v1.split("=", 1)
|
||||
env[ek] = ev
|
||||
v = v2
|
||||
|
||||
sp.call(v.split(" "), env=env)
|
||||
else:
|
||||
open_url(txt)
|
||||
|
||||
|
||||
def open_url(txt):
|
||||
ext = txt.rsplit(".")[-1].lower()
|
||||
sp.call(["notify-send", "--", txt])
|
||||
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
|
||||
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
|
||||
sp.call(["killall", "vlc"])
|
||||
sp.call(["killall", "mpv"])
|
||||
sp.call(["killall", "feh"])
|
||||
time.sleep(0.5)
|
||||
for _ in range(20):
|
||||
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused window
|
||||
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
|
||||
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
main()
|
||||
bin/mtag/vidchk.py (new executable file)
@@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
_ = r"""
|
||||
inspects video files for errors and such
|
||||
plus stores a bunch of metadata to filename.ff.json
|
||||
|
||||
usage:
|
||||
-mtp vidchk=t600,ay,p,bin/mtag/vidchk.py
|
||||
|
||||
explained:
|
||||
t600: timeout 10min
|
||||
ay: only process files which contain audio (including video with audio)
|
||||
p: set priority 1 (lowest priority after initial ffprobe/mutagen for base tags),
|
||||
makes copyparty feed base tags into this script as json
|
||||
|
||||
if you wanna use this script standalone / separately from copyparty,
|
||||
provide the video resolution on stdin as json: {"res":"1920x1080"}
|
||||
"""
|
||||
|
||||
|
||||
FAST = True # parse entire file at container level
|
||||
# FAST = False # fully decode audio and video streams
|
||||
|
||||
|
||||
# warnings to ignore
|
||||
harmless = re.compile("^Unsupported codec with id ")
|
||||
|
||||
|
||||
def wfilter(lines):
|
||||
return [x for x in lines if not harmless.search(x)]
|
||||
|
||||
|
||||
def errchk(so, se, rc):
|
||||
if rc:
|
||||
err = (so + se).decode("utf-8", "replace").split("\n", 1)
|
||||
err = wfilter(err) or err
|
||||
return f"ERROR {rc}: {err[0]}"
|
||||
|
||||
if se:
|
||||
err = se.decode("utf-8", "replace").split("\n", 1)
|
||||
err = wfilter(err)
|
||||
if err:
|
||||
return f"Warning: {err[0]}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
try:
|
||||
w, h = [int(x) for x in md["res"].split("x")]
|
||||
if not w + h:
|
||||
raise Exception()
|
||||
except:
|
||||
return "could not determine resolution"
|
||||
|
||||
# grab streams/format metadata + 2 seconds of frames at the start and end
|
||||
zs = "ffprobe -hide_banner -v warning -of json -show_streams -show_format -show_packets -show_data_hash crc32 -read_intervals %+2,999999%+2"
|
||||
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
so, se = p.communicate()
|
||||
|
||||
# spaces to tabs, drops filesize from 69k to 48k
|
||||
so = b"\n".join(
|
||||
[
|
||||
b"\t" * int((len(x) - len(x.lstrip())) / 4) + x.lstrip()
|
||||
for x in (so or b"").split(b"\n")
|
||||
]
|
||||
)
|
||||
with open(fsenc(f"{fp}.ff.json"), "wb") as f:
|
||||
f.write(so)
|
||||
|
||||
err = errchk(so, se, p.returncode)
|
||||
if err:
|
||||
return err
|
||||
|
||||
if min(w, h) < 1080:
|
||||
return "resolution too small"
|
||||
|
||||
zs = (
|
||||
"ffmpeg -y -hide_banner -nostdin -v warning"
|
||||
+ " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
|
||||
+ " -xerror -i"
|
||||
)
|
||||
|
||||
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
|
||||
|
||||
if FAST:
|
||||
zs = "-c copy -f null -"
|
||||
else:
|
||||
zs = "-vcodec rawvideo -acodec pcm_s16le -f null -"
|
||||
|
||||
cmd += zs.encode("ascii").split(b" ")
|
||||
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
so, se = p.communicate()
|
||||
return errchk(so, se, p.returncode)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(main() or "ok")
|
||||
bin/partyjournal.py (new executable file)
@@ -0,0 +1,177 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
partyjournal.py: chronological history of uploads
|
||||
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
|
||||
|
||||
produces a chronological list of all uploads,
|
||||
by collecting info from up2k databases and the filesystem
|
||||
|
||||
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
|
||||
affecting all successive mappings
|
||||
|
||||
usage:
|
||||
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
##
|
||||
## snibbed from copyparty
|
||||
|
||||
|
||||
def s3dec(v):
|
||||
if not v.startswith("//"):
|
||||
return v
|
||||
|
||||
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
|
||||
return v.decode(FS_ENCODING, "replace")
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
btxt = txt.encode("utf-8", "replace")
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
quot1 = quot1.encode("ascii")
|
||||
quot2 = quot1.replace(b" ", b"+")
|
||||
return quot2.decode("utf-8", "replace")
|
||||
|
||||
|
||||
def html_escape(s, quote=False, crlf=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = s.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
if crlf:
|
||||
s = s.replace("\r", " ").replace("\n", " ")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
## end snibs
|
||||
##
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(formatter_class=APF)
|
||||
ap.add_argument("who", nargs="*")
|
||||
ar = ap.parse_args()
|
||||
|
||||
imap = {}
|
||||
subnet = ""
|
||||
for v in ar.who:
|
||||
if "=" not in v:
|
||||
raise Exception("bad who: " + v)
|
||||
|
||||
k, v = v.split("=")
|
||||
if k == ".":
|
||||
subnet = v
|
||||
continue
|
||||
|
||||
imap["{}{}".format(subnet, v)] = k
|
||||
|
||||
print(repr(imap), file=sys.stderr)
|
||||
|
||||
print(
|
||||
"""\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head><meta charset="utf-8"><style>
|
||||
|
||||
html, body {
|
||||
color: #ccc;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
td, th {
|
||||
padding: .2em .5em;
|
||||
border: 1px solid #999;
|
||||
border-width: 0 1px 1px 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td:nth-child(1),
|
||||
td:nth-child(2),
|
||||
td:nth-child(3) {
|
||||
font-family: monospace, monospace;
|
||||
text-align: right;
|
||||
}
|
||||
tr:first-child {
|
||||
position: sticky;
|
||||
top: -1px;
|
||||
}
|
||||
th {
|
||||
background: #222;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
</style></head><body><table><tr>
|
||||
<th>wark</th>
|
||||
<th>time</th>
|
||||
<th>size</th>
|
||||
<th>who</th>
|
||||
<th>link</th>
|
||||
</tr>"""
|
||||
)
|
||||
|
||||
db_path = ".hist/up2k.db"
|
||||
conn = sqlite3.connect(db_path)
|
||||
q = r"pragma table_info(up)"
|
||||
inf = conn.execute(q).fetchall()
|
||||
cols = [x[1] for x in inf]
|
||||
print("<!-- " + str(cols) + " -->")
|
||||
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
|
||||
|
||||
q = r"select * from up order by case when at > 0 then at else mt end"
|
||||
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
|
||||
link = "/".join([s3dec(x) for x in [rd, fn] if x])
|
||||
if fn.startswith("put-") and sz < 4096:
|
||||
try:
|
||||
with open(link, "rb") as f:
|
||||
txt = f.read().decode("utf-8", "replace")
|
||||
except:
|
||||
continue
|
||||
|
||||
if txt.startswith("msg="):
|
||||
txt = txt.encode("utf-8", "replace")
|
||||
txt = unquote(txt.replace(b"+", b" "))
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
v = [
|
||||
w[:16],
|
||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
row = "<tr>\n "
|
||||
row += "\n ".join(["<td>{}</td>".format(x) for x in v])
|
||||
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
|
||||
row += "\n</tr>"
|
||||
print(row)
|
||||
|
||||
print("</table></body></html>")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/prisonparty.sh (now executable)
@@ -11,10 +11,16 @@ sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
|
||||
example for running straight from source (instead of using an sfx):
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
|
||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||
you should add /home/foo/.local as a VOLDIR
|
||||
|
||||
EOF
|
||||
exit 1
|
||||
@@ -35,10 +41,20 @@ while true; do
|
||||
vols+=( "$(realpath "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(realpath "$pybin")"
|
||||
pybin="$(command -v "$pybin")"
|
||||
pyarg=
|
||||
while true; do
|
||||
v="$1"
|
||||
[ "${v:0:1}" = - ] || break
|
||||
pyarg="$pyarg $v"
|
||||
shift
|
||||
done
|
||||
cpp="$1"; shift
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
[ -d "$cpp" ] && cppdir="$PWD" || {
|
||||
# sfx, not module
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
}
|
||||
trap - EXIT
|
||||
|
||||
|
||||
@@ -60,11 +76,10 @@ echo
|
||||
|
||||
# remove any trailing slashes
|
||||
jail="${jail%/}"
|
||||
cppdir="${cppdir%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
@@ -72,6 +87,7 @@ while IFS= read -r v; do
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mkdir -p "$jail$v"
|
||||
@@ -79,21 +95,34 @@ while IFS= read -r v; do
|
||||
done
|
||||
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
|
||||
|
||||
# create a tmp
|
||||
mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
|
||||
|
||||
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -qF " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
#echo "cpp [$cpp]"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill $p' INT TERM
|
||||
wait
|
||||
|
||||
312
bin/up2k.py
@@ -3,11 +3,11 @@ from __future__ import print_function, unicode_literals

"""
up2k.py: upload to copyparty
2021-10-31, v0.11, ed <irc.rizon.net>, MIT-Licensed
2022-08-13, v0.18, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py

- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10
- supports python 2.6, 2.7, and 3.3 through 3.11

- almost zero error-handling
- but if something breaks just try again and it'll autoresume
@@ -22,12 +22,30 @@ import atexit
|
||||
import signal
|
||||
import base64
|
||||
import hashlib
|
||||
import argparse
|
||||
import platform
|
||||
import threading
|
||||
import requests
|
||||
import datetime
|
||||
|
||||
try:
|
||||
import argparse
|
||||
except:
|
||||
m = "\n ERROR: need 'argparse'; download it here:\n https://github.com/ThomasWaldmann/argparse/raw/master/argparse.py\n"
|
||||
print(m)
|
||||
raise
|
||||
|
||||
try:
|
||||
import requests
|
||||
except:
|
||||
if sys.version_info > (2, 7):
|
||||
m = "\n ERROR: need 'requests'; run this:\n python -m pip install --user requests\n"
|
||||
else:
|
||||
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
|
||||
m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
|
||||
print(m)
|
||||
raise
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
PY2 = sys.version_info[0] == 2
|
||||
@@ -76,15 +94,15 @@ class File(object):
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
|
||||
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
|
||||
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
# type: (File, str) -> FileSlice
|
||||
# type: (File, str) -> None
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.cdr = self.car + self.len
|
||||
@@ -125,6 +143,89 @@ class FileSlice(object):
|
||||
return ret
|
||||
|
||||
|
||||
class MTHash(object):
|
||||
def __init__(self, cores):
|
||||
self.f = None
|
||||
self.sz = 0
|
||||
self.csz = 0
|
||||
self.omutex = threading.Lock()
|
||||
self.imutex = threading.Lock()
|
||||
self.work_q = Queue()
|
||||
self.done_q = Queue()
|
||||
self.thrs = []
|
||||
for _ in range(cores):
|
||||
t = threading.Thread(target=self.worker)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
self.thrs.append(t)
|
||||
|
||||
def hash(self, f, fsz, chunksz, pcb=None, pcb_opaque=None):
|
||||
with self.omutex:
|
||||
self.f = f
|
||||
self.sz = fsz
|
||||
self.csz = chunksz
|
||||
|
||||
chunks = {}
|
||||
nchunks = int(math.ceil(fsz / chunksz))
|
||||
for nch in range(nchunks):
|
||||
self.work_q.put(nch)
|
||||
|
||||
ex = ""
|
||||
for nch in range(nchunks):
|
||||
qe = self.done_q.get()
|
||||
try:
|
||||
nch, dig, ofs, csz = qe
|
||||
chunks[nch] = [dig, ofs, csz]
|
||||
except:
|
||||
ex = ex or qe
|
||||
|
||||
if pcb:
|
||||
pcb(pcb_opaque, chunksz * nch)
|
||||
|
||||
if ex:
|
||||
raise Exception(ex)
|
||||
|
||||
ret = []
|
||||
for n in range(nchunks):
|
||||
ret.append(chunks[n])
|
||||
|
||||
self.f = None
|
||||
self.csz = 0
|
||||
self.sz = 0
|
||||
return ret
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
ofs = self.work_q.get()
|
||||
try:
|
||||
v = self.hash_at(ofs)
|
||||
except Exception as ex:
|
||||
v = str(ex)
|
||||
|
||||
self.done_q.put(v)
|
||||
|
||||
def hash_at(self, nch):
|
||||
f = self.f
|
||||
ofs = ofs0 = nch * self.csz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||
while chunk_rem > 0:
|
||||
with self.imutex:
|
||||
f.seek(ofs)
|
||||
buf = f.read(min(chunk_rem, 1024 * 1024 * 12))
|
||||
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(ofs))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
ofs += len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
return nch, digest, ofs0, chunk_sz
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
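As a reference for the digest format used by hash_at() above: every up2k chunk hash is a sha-512 digest truncated to 33 bytes and then urlsafe-base64 encoded. A minimal standalone sketch of that computation (the function name and parameters are illustrative, not from the source):

import base64
import hashlib

def chunk_digest(path, ofs, length, bufsz=1024 * 1024 * 12):
    # hash one chunk the same way hash_at() does:
    # sha-512, digest truncated to 33 bytes, urlsafe-base64
    h = hashlib.sha512()
    rem = length
    with open(path, "rb") as f:
        f.seek(ofs)
        while rem > 0:
            buf = f.read(min(rem, bufsz))
            if not buf:
                raise Exception("EOF at " + str(ofs + length - rem))
            h.update(buf)
            rem -= len(buf)
    return base64.urlsafe_b64encode(h.digest()[:33]).decode("utf-8")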
@@ -150,13 +251,11 @@ if not VT100:
|
||||
|
||||
|
||||
def termsize():
|
||||
import os
|
||||
|
||||
env = os.environ
|
||||
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
import fcntl, termios, struct, os
|
||||
import fcntl, termios, struct
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except:
|
||||
@@ -217,36 +316,60 @@ class CTermsize(object):
|
||||
eprint("\033[s\033[r\033[u")
|
||||
else:
|
||||
self.g = 1 + self.h - margin
|
||||
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||
t = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(t, self.g - 1))
|
||||
|
||||
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def statdir(top):
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
if hasattr(os, "scandir"):
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
yield [os.path.join(top, fh.name), fh.stat()]
|
||||
else:
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
abspath = os.path.join(top, fh.name)
|
||||
try:
|
||||
yield [abspath, fh.stat()]
|
||||
except Exception as ex:
|
||||
err.append((abspath, str(ex)))
|
||||
|
||||
|
||||
def _lsd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
try:
|
||||
yield [abspath, os.stat(abspath)]
|
||||
except Exception as ex:
|
||||
err.append((abspath, str(ex)))
|
||||
|
||||
|
||||
def walkdir(top):
|
||||
if hasattr(os, "scandir"):
|
||||
statdir = _scd
|
||||
else:
|
||||
statdir = _lsd
|
||||
|
||||
|
||||
def walkdir(err, top, seen):
|
||||
"""recursive statdir"""
|
||||
for ap, inf in sorted(statdir(top)):
|
||||
atop = os.path.abspath(os.path.realpath(top))
|
||||
if atop in seen:
|
||||
err.append((top, "recursive-symlink"))
|
||||
return
|
||||
|
||||
seen = seen[:] + [atop]
|
||||
for ap, inf in sorted(statdir(err, top)):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
for x in walkdir(ap):
|
||||
yield x
|
||||
try:
|
||||
for x in walkdir(err, ap, seen):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
err.append((ap, str(ex)))
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(tops):
|
||||
def walkdirs(err, tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
@@ -256,7 +379,7 @@ def walkdirs(tops):
|
||||
stop = os.path.dirname(top)
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(top):
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
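A brief reading of the walkdir() changes above: each call resolves its directory to a realpath and appends it to a copy of the seen list, so a symlink pointing back at one of its own ancestors is recorded as a "recursive-symlink" error instead of recursing forever, and per-entry stat() or descent failures are collected in err rather than aborting the whole scan.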
@@ -305,8 +428,8 @@ def up2k_chunksize(filesize):
|
||||
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb):
|
||||
# type: (File, any) -> None
|
||||
def get_hashlist(file, pcb, mth):
|
||||
# type: (File, any, any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
@@ -314,7 +437,12 @@ def get_hashlist(file, pcb):
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
if mth and file.size >= 1024 * 512:
|
||||
ret = mth.hash(f, file.size, chunk_sz, pcb, file)
|
||||
file_rem = 0
|
||||
|
||||
while file_rem > 0:
|
||||
# same as `hash_at` except for `imutex` / bufsz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||
while chunk_rem > 0:
|
||||
@@ -342,7 +470,7 @@ def get_hashlist(file, pcb):
|
||||
|
||||
|
||||
def handshake(req_ses, url, file, pw, search):
|
||||
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||
# type: (requests.Session, str, File, any, bool) -> list[str]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
@@ -371,8 +499,9 @@ def handshake(req_ses, url, file, pw, search):
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except:
|
||||
eprint("handshake failed, retry...\n")
|
||||
except Exception as ex:
|
||||
em = str(ex).split("SSLError(")[-1]
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
@@ -381,7 +510,7 @@ def handshake(req_ses, url, file, pw, search):
|
||||
raise Exception(r.text)
|
||||
|
||||
if search:
|
||||
return r["hits"]
|
||||
return r["hits"], False
|
||||
|
||||
try:
|
||||
pre, url = url.split("://")
|
||||
@@ -393,7 +522,7 @@ def handshake(req_ses, url, file, pw, search):
|
||||
file.name = r["name"]
|
||||
file.wark = r["wark"]
|
||||
|
||||
return r["hash"]
|
||||
return r["hash"], r["sprs"]
|
||||
|
||||
|
||||
def upload(req_ses, file, cid, pw):
|
||||
@@ -446,10 +575,28 @@ class Ctl(object):
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
for _, _, inf in walkdirs(ar.files):
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
if err:
|
||||
eprint("\n# failed to access {0} paths:\n".format(len(err)))
|
||||
for ap, msg in err:
|
||||
if ar.v:
|
||||
eprint("{0}\n `-{1}\n\n".format(ap.decode("utf-8", "replace"), msg))
|
||||
else:
|
||||
eprint(ap.decode("utf-8", "replace") + "\n")
|
||||
|
||||
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
|
||||
|
||||
if not ar.v:
|
||||
eprint("hint: set -v for detailed error messages\n")
|
||||
|
||||
if not ar.ok:
|
||||
eprint("hint: aborting because --ok is not set\n")
|
||||
return
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
@@ -460,13 +607,39 @@ class Ctl(object):
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs(ar.files)
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
if ar.safe:
|
||||
self.safe()
|
||||
self._safe()
|
||||
else:
|
||||
self.fancy()
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
self.serialized = False
|
||||
|
||||
def safe(self):
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
|
||||
self.mth = MTHash(ar.J) if ar.J > 1 else None
|
||||
|
||||
self._fancy()
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
@@ -474,12 +647,12 @@ class Ctl(object):
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None)
|
||||
get_hashlist(file, None, None)
|
||||
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
hs, _ = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
@@ -500,29 +673,7 @@ class Ctl(object):
|
||||
|
||||
print(" ok!")
|
||||
|
||||
def fancy(self):
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
def _fancy(self):
|
||||
if VT100:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
@@ -568,8 +719,8 @@ class Ctl(object):
|
||||
if "/" in name:
|
||||
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
|
||||
|
||||
m = "{0:6.1f}% {1} {2}\033[K"
|
||||
txt += m.format(p, self.nfiles - f, name)
|
||||
t = "{0:6.1f}% {1} {2}\033[K"
|
||||
txt += t.format(p, self.nfiles - f, name)
|
||||
|
||||
txt += "\033[{0}H ".format(ss.g + 2)
|
||||
else:
|
||||
@@ -585,11 +736,12 @@ class Ctl(object):
|
||||
|
||||
spd = humansize(spd)
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
left = humansize(self.nbytes - self.up_b)
|
||||
sleft = humansize(self.nbytes - self.up_b)
|
||||
nleft = self.nfiles - self.up_f
|
||||
tail = "\033[K\033[u" if VT100 else "\r"
|
||||
|
||||
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||
|
||||
def cleanup_vt100(self):
|
||||
ss.scroll_region(None)
|
||||
@@ -648,7 +800,7 @@ class Ctl(object):
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
get_hashlist(file, self.cb_hasher)
|
||||
get_hashlist(file, self.cb_hasher, self.mth)
|
||||
with self.mutex:
|
||||
self.hash_f += 1
|
||||
self.hash_c += len(file.cids)
|
||||
@@ -680,7 +832,7 @@ class Ctl(object):
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
try:
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
hs, sprs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
except Exception as ex:
|
||||
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
|
||||
self.q_recheck.put(file)
|
||||
@@ -691,8 +843,8 @@ class Ctl(object):
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
m = "found: {0}\n {1}{2}\n"
|
||||
print(m.format(upath, burl, hit["rp"]), end="")
|
||||
t = "found: {0}\n {1}{2}\n"
|
||||
print(t.format(upath, burl, hit["rp"]), end="")
|
||||
else:
|
||||
print("NOT found: {0}\n".format(upath), end="")
|
||||
|
||||
@@ -705,6 +857,12 @@ class Ctl(object):
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
if not sprs and not self.serialized:
|
||||
t = "server filesystem does not support sparse files; serializing uploads\n"
|
||||
eprint(t)
|
||||
self.serialized = True
|
||||
for _ in range(self.ar.j - 1):
|
||||
self.q_upload.put(None)
|
||||
if not hs:
|
||||
# all chunks done
|
||||
self.up_f += 1
|
||||
@@ -744,7 +902,7 @@ class Ctl(object):
|
||||
try:
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
except:
|
||||
eprint("upload failed, retry...\n")
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
pass # handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
@@ -771,6 +929,9 @@ def main():
|
||||
if not VT100:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||
NOTE:
|
||||
@@ -781,10 +942,13 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap = app.add_argument_group("performance tweaks")
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
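For reference, an invocation using the options added above might look like `python3 up2k.py -J 2 -a hunter2 https://example.com/inc/ ./music`; the URL, password, and path are placeholders for illustration rather than values taken from the source.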
@@ -1,3 +1,6 @@
### [`plugins/`](plugins/)
* example extensions

### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
@@ -19,6 +22,9 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)

### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
@@ -26,6 +32,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config
* systemd service at [`systemd/cfssl.service`](systemd/cfssl.service)
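* for reference, the bundled systemd unit further down invokes it as `./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y`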

# OS integration
init-scripts to start copyparty as a service
@@ -7,7 +7,7 @@ srv_fqdn="$2"

[ -z "$srv_fqdn" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server fqdn"
echo "need arg 2: server fqdn and/or IPs, comma-separated"
echo "optional arg 3: if set, write cert into copyparty cfg"
exit 1
}

104
contrib/media-osd-bgone.ps1
Normal file
@@ -0,0 +1,104 @@
|
||||
# media-osd-bgone.ps1: disable media-control OSD on win10do
|
||||
# v1.1, 2021-06-25, ed <irc.rizon.net>, MIT-licensed
|
||||
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/media-osd-bgone.ps1
|
||||
#
|
||||
# locates the first window that looks like the media OSD and minimizes it;
|
||||
# doing this once after each reboot should do the trick
|
||||
# (adjust the width/height filter if it doesn't work)
|
||||
#
|
||||
# ---------------------------------------------------------------------
|
||||
#
|
||||
# tip: save the following as "media-osd-bgone.bat" next to this script:
|
||||
# start cmd /c "powershell -command ""set-executionpolicy -scope process bypass; .\media-osd-bgone.ps1"" & ping -n 2 127.1 >nul"
|
||||
#
|
||||
# then create a shortcut to that bat-file and move the shortcut here:
|
||||
# %appdata%\Microsoft\Windows\Start Menu\Programs\Startup
|
||||
#
|
||||
# and now this will autorun on bootup
|
||||
|
||||
|
||||
Add-Type -TypeDefinition @"
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Windows.Forms;
|
||||
|
||||
namespace A {
|
||||
public class B : Control {
|
||||
|
||||
[DllImport("user32.dll")]
|
||||
static extern void keybd_event(byte bVk, byte bScan, uint dwFlags, int dwExtraInfo);
|
||||
|
||||
[DllImport("user32.dll", SetLastError = true)]
|
||||
static extern IntPtr FindWindowEx(IntPtr hwndParent, IntPtr hwndChildAfter, string lpszClass, string lpszWindow);
|
||||
|
||||
[DllImport("user32.dll", SetLastError=true)]
|
||||
static extern bool GetWindowRect(IntPtr hwnd, out RECT lpRect);
|
||||
|
||||
[DllImport("user32.dll")]
|
||||
static extern bool ShowWindow(IntPtr hWnd, int nCmdShow);
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
public struct RECT {
|
||||
public int x;
|
||||
public int y;
|
||||
public int x2;
|
||||
public int y2;
|
||||
}
|
||||
|
||||
bool fa() {
|
||||
RECT r;
|
||||
IntPtr it = IntPtr.Zero;
|
||||
while ((it = FindWindowEx(IntPtr.Zero, it, "NativeHWNDHost", "")) != IntPtr.Zero) {
|
||||
if (FindWindowEx(it, IntPtr.Zero, "DirectUIHWND", "") == IntPtr.Zero)
|
||||
continue;
|
||||
|
||||
if (!GetWindowRect(it, out r))
|
||||
continue;
|
||||
|
||||
int w = r.x2 - r.x + 1;
|
||||
int h = r.y2 - r.y + 1;
|
||||
|
||||
Console.WriteLine("[*] hwnd {0:x} @ {1}x{2} sz {3}x{4}", it, r.x, r.y, w, h);
|
||||
if (h != 141)
|
||||
continue;
|
||||
|
||||
ShowWindow(it, 6);
|
||||
Console.WriteLine("[+] poof");
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void fb() {
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
|
||||
Thread.Sleep(500);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
|
||||
|
||||
while (true) {
|
||||
if (fa()) {
|
||||
break;
|
||||
}
|
||||
Console.WriteLine("[!] not found");
|
||||
Thread.Sleep(1000);
|
||||
}
|
||||
this.Invoke((MethodInvoker)delegate {
|
||||
Application.Exit();
|
||||
});
|
||||
}
|
||||
|
||||
public void Run() {
|
||||
Console.WriteLine("[+] hi");
|
||||
new Thread(new ThreadStart(fb)).Start();
|
||||
Application.Run();
|
||||
Console.WriteLine("[+] bye");
|
||||
}
|
||||
}
|
||||
}
|
||||
"@ -ReferencedAssemblies System.Windows.Forms
|
||||
|
||||
(New-Object -TypeName A.B).Run()
|
||||
@@ -13,7 +13,7 @@

upstream cpp {
server 127.0.0.1:3923;
keepalive 120;
keepalive 1;
}
server {
listen 443 ssl;

33
contrib/plugins/README.md
Normal file
@@ -0,0 +1,33 @@
# example resource files

can be provided to copyparty to tweak things


## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:

* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)


## example browser-js
point `--js-browser` to one of these by URL:

* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API


## example browser-css
point `--css-browser` to one of these by URL:

* [`browser-icons.css`](browser-icons.css) adds filetype icons


## meadup.js

* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`
506
contrib/plugins/meadup.js
Normal file
@@ -0,0 +1,506 @@
|
||||
// USAGE:
|
||||
// place this file somewhere in the webroot and then
|
||||
// python3 -m copyparty --js-browser /memes/meadup.js
|
||||
//
|
||||
// FEATURES:
|
||||
// * adds an onscreen keyboard for operating a media center remotely,
|
||||
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
|
||||
// * adds an interactive anime girl (if you can find the dependencies)
|
||||
|
||||
var hambagas = [
|
||||
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
|
||||
];
|
||||
|
||||
// keybaord,
|
||||
// onscreen keyboard by @steinuil
|
||||
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
|
||||
document.querySelector('.keybaord-container').innerHTML = `
|
||||
<div class="keybaord-body">
|
||||
<div class="keybaord-row keybaord-row-1">
|
||||
<div class="keybaord-key" data-keybaord-key="Escape">
|
||||
esc
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F1">
|
||||
F1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F2">
|
||||
F2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F3">
|
||||
F3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F4">
|
||||
F4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F5">
|
||||
F5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F6">
|
||||
F6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F7">
|
||||
F7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F8">
|
||||
F8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F9">
|
||||
F9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F10">
|
||||
F10
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F11">
|
||||
F11
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F12">
|
||||
F12
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Insert">
|
||||
ins
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Delete">
|
||||
del
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-2">
|
||||
<div class="keybaord-key" data-keybaord-key="\`">
|
||||
\`
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="1">
|
||||
1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="2">
|
||||
2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="3">
|
||||
3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="4">
|
||||
4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="5">
|
||||
5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="6">
|
||||
6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="7">
|
||||
7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="8">
|
||||
8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="9">
|
||||
9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="0">
|
||||
0
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="-">
|
||||
-
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="=">
|
||||
=
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
|
||||
backspace
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-3">
|
||||
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
|
||||
tab
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="q">
|
||||
q
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="w">
|
||||
w
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="e">
|
||||
e
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="r">
|
||||
r
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="t">
|
||||
t
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="y">
|
||||
y
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="u">
|
||||
u
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="i">
|
||||
i
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="o">
|
||||
o
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="p">
|
||||
p
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="[">
|
||||
[
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="]">
|
||||
]
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
|
||||
enter
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-4">
|
||||
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
|
||||
🍔
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="a">
|
||||
a
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="s">
|
||||
s
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="d">
|
||||
d
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="f">
|
||||
f
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="g">
|
||||
g
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="h">
|
||||
h
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="j">
|
||||
j
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="k">
|
||||
k
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="l">
|
||||
l
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=";">
|
||||
;
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="'">
|
||||
'
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-5">
|
||||
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
|
||||
shift
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="z">
|
||||
z
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="x">
|
||||
x
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="c">
|
||||
c
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="v">
|
||||
v
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="b">
|
||||
b
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="n">
|
||||
n
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="m">
|
||||
m
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=",">
|
||||
,
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=".">
|
||||
.
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="/">
|
||||
/
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
|
||||
shift
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-6">
|
||||
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
|
||||
ctrl
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
|
||||
win
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
|
||||
alt
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
|
||||
space
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
|
||||
altgr
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
|
||||
menu
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
|
||||
ctrl
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row">
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
|
||||
🔉
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
|
||||
🔊
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Left">
|
||||
⬅️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Down">
|
||||
⬇️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Up">
|
||||
⬆️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Right">
|
||||
➡️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Up">
|
||||
PgUp
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Down">
|
||||
PgDn
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Home">
|
||||
🏠
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="End">
|
||||
End
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
`;
|
||||
|
||||
function arraySample(array) {
|
||||
return array[Math.floor(Math.random() * array.length)];
|
||||
}
|
||||
|
||||
function sendMessage(msg) {
|
||||
return fetch(BASE_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
|
||||
},
|
||||
body: "msg=" + encodeURIComponent(msg),
|
||||
}).then(
|
||||
(r) => r.text(), // so the response body shows up in network tab
|
||||
(err) => consoleError(err)
|
||||
);
|
||||
}
|
||||
const MODIFIER_ON_CLASS = "keybaord-modifier-on";
|
||||
const KEY_DATASET = "data-keybaord-key";
|
||||
const KEY_CLASS = "keybaord-key";
|
||||
|
||||
const modifiers = new Set()
|
||||
|
||||
function toggleModifier(button, key) {
|
||||
button.classList.toggle(MODIFIER_ON_CLASS);
|
||||
if (modifiers.has(key)) {
|
||||
modifiers.delete(key);
|
||||
} else {
|
||||
modifiers.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
function popModifiers() {
|
||||
let modifierString = "";
|
||||
|
||||
modifiers.forEach((mod) => {
|
||||
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
|
||||
.classList.remove(MODIFIER_ON_CLASS);
|
||||
|
||||
modifierString += mod + "+";
|
||||
});
|
||||
|
||||
modifiers.clear();
|
||||
|
||||
return modifierString;
|
||||
}
|
||||
|
||||
Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
|
||||
const key = button.dataset.keybaordKey;
|
||||
|
||||
button.addEventListener("click", (ev) => {
|
||||
switch (key) {
|
||||
case "HAMBAGA":
|
||||
sendMessage(arraySample(HAMBAGA));
|
||||
break;
|
||||
|
||||
case "Shift_L":
|
||||
case "Shift_R":
|
||||
|
||||
case "Control_L":
|
||||
case "Control_R":
|
||||
|
||||
case "Meta_L":
|
||||
|
||||
case "Alt_L":
|
||||
case "Alt_R":
|
||||
toggleModifier(button, key);
|
||||
break;
|
||||
|
||||
default: {
|
||||
const keyWithModifiers = popModifiers() + key;
|
||||
|
||||
consoleLog(keyWithModifiers);
|
||||
|
||||
sendMessage("key " + keyWithModifiers)
|
||||
.then(() => consoleLog(keyWithModifiers + " OK"));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// keybaord integration
|
||||
(function () {
|
||||
var o = mknod('div');
|
||||
clmod(o, 'keybaord-container', 1);
|
||||
ebi('op_msg').appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = `
|
||||
.keybaord-body {
|
||||
display: flex;
|
||||
flex-flow: column nowrap;
|
||||
margin: .6em 0;
|
||||
}
|
||||
|
||||
.keybaord-row {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.keybaord-key {
|
||||
border: 1px solid rgba(128,128,128,0.2);
|
||||
width: 41px;
|
||||
height: 40px;
|
||||
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.keybaord-key:active {
|
||||
background-color: lightgrey;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-modifier-on {
|
||||
background-color: lightblue;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backspace {
|
||||
width: 82px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-tab {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-enter {
|
||||
width: 69px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-capslock {
|
||||
width: 80px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backslash {
|
||||
width: 88px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lshift {
|
||||
width: 65px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rshift {
|
||||
width: 103px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-super {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-alt {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-altgr {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-what {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-spacebar {
|
||||
width: 302px;
|
||||
}
|
||||
`;
|
||||
document.head.appendChild(o);
|
||||
|
||||
initKeybaord('/', hambagas,
|
||||
(msg) => { toast.inf(2, msg.toString()) },
|
||||
(msg) => { toast.err(30, msg.toString()) });
|
||||
})();
|
||||
|
||||
|
||||
// live2d (dumb pointless meme)
|
||||
// dependencies for this part are not tracked in git
|
||||
// so delete this section if you wanna use this file
|
||||
// (or supply your own l2d model and js)
|
||||
(function () {
|
||||
var o = mknod('link');
|
||||
o.setAttribute('rel', 'stylesheet');
|
||||
o.setAttribute('href', "/bad-memes/pio.css");
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('div');
|
||||
clmod(o, 'pio-container', 1);
|
||||
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
|
||||
document.body.appendChild(o);
|
||||
|
||||
var remaining = 3;
|
||||
for (var a of ['pio', 'l2d', 'fireworks']) {
|
||||
import_js(`/bad-memes/${a}.js`, function () {
|
||||
if (remaining --> 1)
|
||||
return;
|
||||
|
||||
o = mknod('script');
|
||||
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
|
||||
document.body.appendChild(o);
|
||||
});
|
||||
}
|
||||
})();
|
||||
@@ -7,9 +7,9 @@
|
||||
|
||||
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
|
||||
|
||||
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
|
||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
|
||||
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||
|
||||
59
contrib/plugins/minimal-up2k.js
Normal file
@@ -0,0 +1,59 @@
/*

makes the up2k ui REALLY minimal by hiding a bunch of stuff

almost the same as minimal-up2k.html except this one...:

-- applies to every write-only folder when used with --js-browser

-- only applies if javascript is enabled

-- doesn't hide the total upload ETA display

-- looks slightly better

*/

var u2min = `
|
||||
<style>
|
||||
|
||||
#ops, #path, #tree, #files, #epi+div+h2,
|
||||
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
|
||||
display: none !important;
|
||||
}
|
||||
#u2conf {margin:5em auto 0 auto !important}
|
||||
#u2conf.ww {width:70em}
|
||||
#u2conf.w {width:50em}
|
||||
#u2conf.w .c,
|
||||
#u2conf.w #u2btn_cw {text-align:left}
|
||||
#u2conf.w #u2btn_cw {width:70%}
|
||||
#u2etaw {margin:3em auto}
|
||||
#u2etaw.w {
|
||||
text-align: center;
|
||||
margin: -3.5em auto 5em auto;
|
||||
}
|
||||
#u2etaw.w #u2etas {margin-right:-37em}
|
||||
#u2etaw.w #u2etas.o {margin-top:-2.2em}
|
||||
#u2etaw.ww {margin:-1em auto}
|
||||
#u2etaw.ww #u2etas {padding-left:4em}
|
||||
#u2etas {
|
||||
background: none !important;
|
||||
border: none !important;
|
||||
}
|
||||
#wrap {margin-left:2em !important}
|
||||
.logue {
|
||||
border: none !important;
|
||||
margin: 2em auto !important;
|
||||
}
|
||||
.logue:before {content:'' !important}
|
||||
|
||||
</style>
|
||||
|
||||
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
|
||||
`;
|
||||
|
||||
if (!has(perms, 'read')) {
|
||||
var e2 = mknod('div');
|
||||
e2.innerHTML = u2min;
|
||||
ebi('wrap').insertBefore(e2, QS('#epi+h2'));
|
||||
}
|
||||
220
contrib/plugins/up2k-hook-ytid.js
Normal file
@@ -0,0 +1,220 @@
// way more specific example --
// assumes all files dropped into the uploader have a youtube-id somewhere in the filename,
// locates the youtube-ids and passes them to an API which returns a list of IDs which should be uploaded
//
// also tries to find the youtube-id in the embedded metadata
//
// assumes copyparty is behind nginx as /ytq is a standalone service which must be rproxied in place

function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
var passthru = up2k.uc.fsearch;
|
||||
if (passthru)
|
||||
return hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
|
||||
a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { });
|
||||
}
|
||||
|
||||
function bstrpos(buf, ptn) {
|
||||
var ofs = 0,
|
||||
ch0 = ptn[0],
|
||||
sz = buf.byteLength;
|
||||
|
||||
while (true) {
|
||||
ofs = buf.indexOf(ch0, ofs);
|
||||
if (ofs < 0 || ofs >= sz)
|
||||
return -1;
|
||||
|
||||
for (var a = 1; a < ptn.length; a++)
|
||||
if (buf[ofs + a] !== ptn[a])
|
||||
break;
|
||||
|
||||
if (a === ptn.length)
|
||||
return ofs;
|
||||
|
||||
++ofs;
|
||||
}
|
||||
}
|
||||
|
||||
async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
var t0 = Date.now(),
|
||||
yt_ids = new Set(),
|
||||
textdec = new TextDecoder('latin1'),
|
||||
md_ptn = new TextEncoder().encode('youtube.com/watch?v='),
|
||||
file_ids = [], // all IDs found for each good_files
|
||||
md_only = [], // `${id} ${fn}` where ID was only found in metadata
|
||||
mofs = 0,
|
||||
mnchk = 0,
|
||||
mfile = '';
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var [fobj, name] = good_files[a],
|
||||
cname = name, // will clobber
|
||||
sz = fobj.size,
|
||||
ids = [],
|
||||
id_ok = false,
|
||||
m;
|
||||
|
||||
// all IDs found in this file
|
||||
file_ids.push(ids);
|
||||
|
||||
// look for ID in filename; reduce the
|
||||
// metadata-scan intensity if the id looks safe
|
||||
m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
|
||||
id_ok = !!m;
|
||||
|
||||
while (true) {
|
||||
// fuzzy catch-all;
|
||||
// some ytdl fork did %(title)-%(id).%(ext) ...
|
||||
m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(cname);
|
||||
if (!m)
|
||||
break;
|
||||
|
||||
cname = cname.replace(m[1], '');
|
||||
yt_ids.add(m[1]);
|
||||
ids.push(m[1]);
|
||||
}
|
||||
|
||||
// look for IDs in video metadata,
|
||||
if (/\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name)) {
|
||||
toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`);
|
||||
|
||||
// check first and last 128 MiB;
|
||||
// pWxOroN5WCo.mkv @ 6edb98 (6.92M)
|
||||
// Nf-nN1wF5Xo.mp4 @ 4a98034 (74.6M)
|
||||
var chunksz = 1024 * 1024 * 2, // byte
|
||||
aspan = id_ok ? 128 : 512; // MiB
|
||||
|
||||
aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz;
|
||||
|
||||
for (var side = 0; side < 2; side++) {
|
||||
var ofs = side ? Math.max(0, sz - aspan) : 0,
|
||||
nchunks = aspan / chunksz;
|
||||
|
||||
for (var chunk = 0; chunk < nchunks; chunk++) {
|
||||
var bchunk = await fobj.slice(ofs, ofs + chunksz + 16).arrayBuffer(),
|
||||
uchunk = new Uint8Array(bchunk, 0, bchunk.byteLength),
|
||||
bofs = bstrpos(uchunk, md_ptn),
|
||||
absofs = Math.min(ofs + bofs, (sz - ofs) + bofs),
|
||||
txt = bofs < 0 ? '' : textdec.decode(uchunk.subarray(bofs)),
|
||||
m;
|
||||
|
||||
//console.log(`side ${ side }, chunk ${ chunk }, ofs ${ ofs }, bchunk ${ bchunk.byteLength }, txt ${ txt.length }`);
|
||||
while (true) {
|
||||
// mkv/webm have [a-z] immediately after url
|
||||
m = /(youtube\.com\/watch\?v=[\w-]{11})/.exec(txt);
|
||||
if (!m)
|
||||
break;
|
||||
|
||||
txt = txt.replace(m[1], '');
|
||||
m = m[1].slice(-11);
|
||||
|
||||
console.log(`found ${m} @${bofs}, ${name} `);
|
||||
yt_ids.add(m);
|
||||
if (!has(ids, m)) {
|
||||
ids.push(m);
|
||||
md_only.push(`${m} ${name}`);
|
||||
}
|
||||
|
||||
// bail after next iteration
|
||||
chunk = nchunks - 1;
|
||||
side = 9;
|
||||
|
||||
if (mofs < absofs) {
|
||||
mofs = absofs;
|
||||
mfile = name;
|
||||
}
|
||||
}
|
||||
ofs += chunksz;
|
||||
if (ofs >= sz)
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (md_only.length)
|
||||
console.log('recovered the following youtube-IDs by inspecting metadata:\n\n' + md_only.join('\n'));
|
||||
else if (yt_ids.size)
|
||||
console.log('did not discover any additional youtube-IDs by inspecting metadata; all the IDs also existed in the filenames');
|
||||
else
|
||||
console.log('failed to find any youtube-IDs at all, sorry');
|
||||
|
||||
if (false) {
|
||||
var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`,
|
||||
mfun = function () { toast.ok(0, msg); };
|
||||
|
||||
mfun();
|
||||
setTimeout(mfun, 200);
|
||||
|
||||
return hooks[0]([], [], [], hooks.slice(1));
|
||||
}
|
||||
|
||||
toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`);
|
||||
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', '/ytq', true);
|
||||
xhr.setRequestHeader('Content-Type', 'text/plain');
|
||||
xhr.onload = xhr.onerror = function () {
|
||||
if (this.status != 200)
|
||||
return toast.err(0, `sorry, database query failed ;_;\n\nplease let us know so we can look at it, thx!!\n\nerror ${this.status}: ${(this.response && this.response.err) || this.responseText}`);
|
||||
|
||||
process_id_list(this.responseText);
|
||||
};
|
||||
xhr.send(Array.from(yt_ids).join('\n'));
|
||||
|
||||
function process_id_list(txt) {
|
||||
var wanted_ids = new Set(txt.trim().split('\n')),
|
||||
wanted_names = new Set(), // basenames with a wanted ID
|
||||
wanted_files = new Set(); // filedrops
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var name = good_files[a][1];
|
||||
for (var b = 0; b < file_ids[a].length; b++)
|
||||
if (wanted_ids.has(file_ids[a][b])) {
|
||||
wanted_files.add(good_files[a]);
|
||||
|
||||
var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
|
||||
if (m)
|
||||
wanted_names.add(m[1]);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// add all files with the same basename as each explicitly wanted file
|
||||
// (infojson/chatlog/etc when ID was discovered from metadata)
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var name = good_files[a][1];
|
||||
for (var b = 0; b < 3; b++) {
|
||||
name = name.replace(/\.[^\.]+$/, '');
|
||||
if (wanted_names.has(name)) {
|
||||
wanted_files.add(good_files[a]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function upload_filtered() {
|
||||
if (!wanted_files.size)
|
||||
return modal.alert('Good news -- turns out we already have all those.\n\nBut thank you for checking in!');
|
||||
|
||||
hooks[0](Array.from(wanted_files), nil_files, bad_files, hooks.slice(1));
|
||||
}
|
||||
|
||||
function upload_all() {
|
||||
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
}
|
||||
|
||||
var n_skip = good_files.length - wanted_files.size,
|
||||
msg = `you added ${good_files.length} files; ${good_files.length == n_skip ? 'all' : n_skip} of them were skipped --\neither because we already have them,\nor because there is no youtube-ID in your filenames.\n\n<code>OK</code> / <code>Enter</code> = continue uploading just the ${wanted_files.size} files we definitely need\n\n<code>Cancel</code> / <code>ESC</code> = override the filter; upload ALL the files you added`;
|
||||
|
||||
if (!n_skip)
|
||||
upload_filtered();
|
||||
else
|
||||
modal.confirm(msg, upload_filtered, upload_all);
|
||||
};
|
||||
}
|
||||
|
||||
up2k_hooks.push(function () {
|
||||
up2k.gotallfiles.unshift(up2k_namefilter);
|
||||
});
|
||||
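The /ytq endpoint queried by up2k-hook-ytid.js above is not part of copyparty; the plugin only assumes some service that accepts a newline-separated list of youtube-IDs via POST and replies with the subset it still wants uploaded. A hypothetical minimal backend honouring that contract could look like the following sketch; the port number and the HAVE set are made up for illustration, and in the setup described above nginx would proxy /ytq alongside copyparty:

from http.server import BaseHTTPRequestHandler, HTTPServer

HAVE = {"dQw4w9WgXcQ"}  # IDs already archived (placeholder)

class YtqHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        if self.path != "/ytq":
            self.send_response(404)
            self.end_headers()
            return
        # request body: newline-separated youtube-IDs
        n = int(self.headers.get("Content-Length", 0))
        ids = self.rfile.read(n).decode("utf-8").split("\n")
        # response body: the IDs we do not have yet, one per line
        body = "\n".join(x for x in ids if x and x not in HAVE).encode("utf-8")
        self.send_response(200)
        self.send_header("Content-Type", "text/plain")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == "__main__":
    HTTPServer(("127.0.0.1", 8011), YtqHandler).serve_forever()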
45
contrib/plugins/up2k-hooks.js
Normal file
@@ -0,0 +1,45 @@
|
||||
// hooks into up2k
|
||||
|
||||
function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
// is called when stuff is dropped into the browser,
|
||||
// after iterating through the directory tree and discovering all files,
|
||||
// before the upload confirmation dialogue is shown
|
||||
|
||||
// good_files will successfully upload
|
||||
// nil_files are empty files and will show an alert in the final hook
|
||||
// bad_files are unreadable and cannot be uploaded
|
||||
var file_lists = [good_files, nil_files, bad_files];
|
||||
|
||||
// build a list of filenames
|
||||
var filenames = [];
|
||||
for (var lst of file_lists)
|
||||
for (var ent of lst)
|
||||
filenames.push(ent[1]);
|
||||
|
||||
toast.inf(5, "running database query...");
|
||||
|
||||
// simulate delay while passing the list to some api for checking
|
||||
setTimeout(function () {
|
||||
|
||||
// only keep webm files as an example
|
||||
var new_lists = [];
|
||||
for (var lst of file_lists) {
|
||||
var keep = [];
|
||||
new_lists.push(keep);
|
||||
|
||||
for (var ent of lst)
|
||||
if (/\.webm$/.test(ent[1]))
|
||||
keep.push(ent);
|
||||
}
|
||||
|
||||
// finally, call the next hook in the chain
|
||||
[good_files, nil_files, bad_files] = new_lists;
|
||||
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// register
|
||||
up2k_hooks.push(function () {
|
||||
up2k.gotallfiles.unshift(up2k_namefilter);
|
||||
});
|
||||
23
contrib/systemd/cfssl.service
Normal file
@@ -0,0 +1,23 @@
# systemd service which generates a new TLS certificate on each boot,
# that way the one-year expiry time won't cause any issues --
# just have everyone trust the ca.pem once every 10 years
#
# assumptions/placeholder values:
# * this script and copyparty run as user "cpp"
# * copyparty repo is at ~cpp/dev/copyparty
# * CA is named partylan
# * server IPs = 10.1.2.3 and 192.168.123.1
# * server hostname = party.lan

[Unit]
Description=copyparty certificate generator
Before=copyparty.service

[Service]
User=cpp
Type=oneshot
SyslogIdentifier=cpp-cert
ExecStart=/bin/bash -c 'cd ~/dev/copyparty/contrib && ./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y'

[Install]
WantedBy=multi-user.target
@@ -2,17 +2,22 @@
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
# cp -pv copyparty.service /etc/systemd/system
# restorecon -vr /etc/systemd/system/copyparty.service
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# firewall-cmd --reload
# systemctl daemon-reload && systemctl enable --now copyparty
#
# you may want to:
# change "User=cpp" and "/home/cpp/" to another user
# remove the nft lines to only listen on port 3923
# and in the ExecStart= line:
# change '/usr/bin/python3' to another interpreter
# change '/mnt::rw' to another location or permission-set
# remove '-p 80,443,3923' to only listen on port 3923
# add '-q' to disable logging on busy servers
# add '-i 127.0.0.1' to only allow local connections
# add '--use-fpool' if uploading into nfs locations
# add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
@@ -20,9 +25,11 @@
|
||||
# python disabling line-buffering, so messages are out-of-order:
|
||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||
#
|
||||
# if you remove -q to enable logging, you may also want to remove the
|
||||
# following line to enable buffering (slightly better performance):
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
#
|
||||
# keep ExecStartPre before ExecStart, at least on rhel8
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
@@ -32,8 +39,23 @@ Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
|
||||
|
||||
# user to run as + where the TLS certificate is (if any)
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# setup forwarding from ports 80 and 443 to port 3923
|
||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
||||
ExecStartPre=+nft add table ip nat
|
||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# copyparty settings
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
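The `Type=notify` comment above refers to systemd's readiness protocol: once its listening sockets are up, the service writes READY=1 to the datagram socket whose path systemd publishes in the NOTIFY_SOCKET environment variable, and only then are units ordered after copyparty.service started. A minimal sketch of that handshake in Python (illustration only; this is not copyparty's actual notification code):

import os
import socket

def sd_notify(msg: bytes = b"READY=1") -> None:
    # systemd exports the notification socket path in NOTIFY_SOCKET;
    # a leading "@" means an abstract unix socket (replace with a NUL byte)
    addr = os.environ.get("NOTIFY_SOCKET")
    if not addr:
        return  # not running under systemd with Type=notify
    if addr.startswith("@"):
        addr = "\0" + addr[1:]
    s = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    try:
        s.connect(addr)
        s.sendall(msg)
    finally:
        s.close()

# call once the listening sockets are bound and accepting connections:
# sd_notify(b"READY=1")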
@@ -1,54 +1,71 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import platform
import time
import sys
import os
import platform
import sys
import time

try:
    from collections.abc import Callable

    from typing import TYPE_CHECKING, Any
except:
    TYPE_CHECKING = False

PY2 = sys.version_info[0] == 2
if PY2:
    sys.dont_write_bytecode = True
    unicode = unicode
    unicode = unicode  # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
else:
    unicode = str

WINDOWS = False
if platform.system() == "Windows":
    WINDOWS = [int(x) for x in platform.version().split(".")]
WINDOWS: Any = (
    [int(x) for x in platform.version().split(".")]
    if platform.system() == "Windows"
    else False
)

VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update

ANYWIN = WINDOWS or sys.platform in ["msys"]
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]

MACOS = platform.system() == "Darwin"


def get_unix_home():
    try:
        v = os.environ["XDG_CONFIG_HOME"]
        if not v:
            raise Exception()
        ret = os.path.normpath(v)
        os.listdir(ret)
        return ret
    except:
        pass
def get_unixdir() -> str:
    paths: list[tuple[Callable[..., str], str]] = [
        (os.environ.get, "XDG_CONFIG_HOME"),
        (os.path.expanduser, "~/.config"),
        (os.environ.get, "TMPDIR"),
        (os.environ.get, "TEMP"),
        (os.environ.get, "TMP"),
        (unicode, "/tmp"),
    ]
    for chk in [os.listdir, os.mkdir]:
        for pf, pa in paths:
            try:
                p = pf(pa)
                # print(chk.__name__, p, pa)
                if not p or p.startswith("~"):
                    continue

    try:
        v = os.path.expanduser("~/.config")
        if v.startswith("~"):
            raise Exception()
        ret = os.path.normpath(v)
        os.listdir(ret)
        return ret
    except:
        return "/tmp"
                p = os.path.normpath(p)
                chk(p)  # type: ignore
                p = os.path.join(p, "copyparty")
                if not os.path.isdir(p):
                    os.mkdir(p)

                return p
            except:
                pass

    raise Exception("could not find a writable path for config")


class EnvParams(object):
    def __init__(self):
    def __init__(self) -> None:
        self.t0 = time.time()
        self.mod = os.path.dirname(os.path.realpath(__file__))
        if self.mod.endswith("__init__"):
@@ -59,7 +76,7 @@ class EnvParams(object):
        elif sys.platform == "darwin":
            self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
        else:
            self.cfg = get_unix_home() + "/copyparty"
            self.cfg = get_unixdir()

        self.cfg = self.cfg.replace("\\", "/")
        try:
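The rewritten get_unixdir walks an ordered list of candidate directories (XDG_CONFIG_HOME, ~/.config, the TMP variables, /tmp) and accepts the first one it can actually list or create a "copyparty" subfolder in, rather than the old two-branch fallback. A condensed standalone sketch of the same probing idea (hypothetical names, not the copyparty API itself):

import os

def pick_config_dir(app: str = "copyparty") -> str:
    # candidates in priority order; unset/empty entries are skipped
    candidates = [
        os.environ.get("XDG_CONFIG_HOME"),
        os.path.expanduser("~/.config"),
        os.environ.get("TMPDIR"),
        "/tmp",
    ]
    for base in candidates:
        if not base or base.startswith("~"):
            continue
        d = os.path.join(os.path.normpath(base), app)
        try:
            # create the per-app subfolder (or accept it if it already exists);
            # a read-only or missing base directory raises and we move on
            os.makedirs(d, exist_ok=True)
            return d
        except OSError:
            continue
    raise RuntimeError("no writable config directory found")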
@@ -8,35 +8,59 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import argparse
|
||||
import filecmp
|
||||
import locale
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
|
||||
from .__init__ import ANYWIN, PY2, VT100, WINDOWS, E, unicode
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import re_vol
|
||||
from .svchub import SvcHub
|
||||
from .util import (
|
||||
IMPLICATIONS,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
SQLITE_VER,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
min_ex,
|
||||
py_desc,
|
||||
termsize,
|
||||
wrap,
|
||||
)
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
from types import FrameType
|
||||
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
printed = ""
|
||||
printed: list[str] = []
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
def _get_help_string(self, action):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
if PY2:
|
||||
kwargs["width"] = termsize()[0]
|
||||
|
||||
super(RiceFormatter, self).__init__(*args, **kwargs)
|
||||
|
||||
def _get_help_string(self, action: argparse.Action) -> str:
|
||||
"""
|
||||
same as ArgumentDefaultsHelpFormatter(HelpFormatter)
|
||||
except the help += [...] line now has colors
|
||||
@@ -45,41 +69,69 @@ class RiceFormatter(argparse.HelpFormatter):
|
||||
if not VT100:
|
||||
fmt = " (default: %(default)s)"
|
||||
|
||||
help = action.help
|
||||
if "%(default)" not in action.help:
|
||||
ret = unicode(action.help)
|
||||
if "%(default)" not in ret:
|
||||
if action.default is not argparse.SUPPRESS:
|
||||
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
|
||||
if action.option_strings or action.nargs in defaulting_nargs:
|
||||
help += fmt
|
||||
return help
|
||||
ret += fmt
|
||||
return ret
|
||||
|
||||
def _fill_text(self, text, width, indent):
|
||||
def _fill_text(self, text: str, width: int, indent: str) -> str:
|
||||
"""same as RawDescriptionHelpFormatter(HelpFormatter)"""
|
||||
return "".join(indent + line + "\n" for line in text.splitlines())
|
||||
|
||||
def __add_whitespace(self, idx: int, iWSpace: int, text: str) -> str:
|
||||
return (" " * iWSpace) + text if idx else text
|
||||
|
||||
def _split_lines(self, text: str, width: int) -> list[str]:
|
||||
# https://stackoverflow.com/a/35925919
|
||||
textRows = text.splitlines()
|
||||
ptn = re.compile(r"\s*[0-9\-]{0,}\.?\s*")
|
||||
for idx, line in enumerate(textRows):
|
||||
search = ptn.search(line)
|
||||
if not line.strip():
|
||||
textRows[idx] = " "
|
||||
elif search:
|
||||
lWSpace = search.end()
|
||||
lines = [
|
||||
self.__add_whitespace(i, lWSpace, x)
|
||||
for i, x in enumerate(wrap(line, width, width - 1))
|
||||
]
|
||||
textRows[idx] = lines
|
||||
|
||||
return [item for sublist in textRows for item in sublist]
|
||||
|
||||
|
||||
class Dodge11874(RiceFormatter):
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
kwargs["width"] = 9003
|
||||
super(Dodge11874, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def lprint(*a, **ka):
|
||||
global printed
|
||||
class BasicDodge11874(
|
||||
argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
|
||||
):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
kwargs["width"] = 9003
|
||||
super(BasicDodge11874, self).__init__(*args, **kwargs)
|
||||
|
||||
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
|
||||
printed += txt
|
||||
|
||||
def lprint(*a: Any, **ka: Any) -> None:
|
||||
eol = ka.pop("end", "\n")
|
||||
txt: str = " ".join(unicode(x) for x in a) + eol
|
||||
printed.append(txt)
|
||||
if not VT100:
|
||||
txt = ansi_re.sub("", txt)
|
||||
|
||||
print(txt, **ka)
|
||||
print(txt, end="", **ka)
|
||||
|
||||
|
||||
def warn(msg):
|
||||
def warn(msg: str) -> None:
|
||||
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def ensure_locale():
|
||||
def ensure_locale() -> None:
|
||||
for x in [
|
||||
"en_US.UTF-8",
|
||||
"English_United States.UTF8",
|
||||
@@ -87,13 +139,13 @@ def ensure_locale():
|
||||
]:
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, x)
|
||||
lprint("Locale:", x)
|
||||
lprint("Locale: {}\n".format(x))
|
||||
break
|
||||
except:
|
||||
continue
|
||||
|
||||
|
||||
def ensure_cert():
|
||||
def ensure_cert() -> None:
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
@@ -119,8 +171,8 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
def terse_sslver(txt: str) -> str:
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
@@ -135,8 +187,8 @@ def configure_ssl_ver(al):
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
avail1 = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail1) + ["all"])
|
||||
lprint("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
@@ -157,12 +209,12 @@ def configure_ssl_ver(al):
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
lprint("{}: {:8x} ({})".format(k, num, num))
|
||||
lprint("{0}: {1:8x} ({1})".format(k, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -186,8 +238,8 @@ def configure_ssl_ciphers(al):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def args_from_cfg(cfg_path):
|
||||
ret = []
|
||||
def args_from_cfg(cfg_path: str) -> list[str]:
|
||||
ret: list[str] = []
|
||||
skip = False
|
||||
with open(cfg_path, "rb") as f:
|
||||
for ln in [x.decode("utf-8").strip() for x in f]:
|
||||
@@ -212,17 +264,66 @@ def args_from_cfg(cfg_path):
|
||||
return ret
|
||||
|
||||
|
||||
def sighandler(sig=None, frame=None):
|
||||
def sighandler(sig: Optional[int] = None, frame: Optional[FrameType] = None) -> None:
|
||||
msg = [""] * 5
|
||||
for th in threading.enumerate():
|
||||
stk = sys._current_frames()[th.ident] # type: ignore
|
||||
msg.append(str(th))
|
||||
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
|
||||
msg.extend(traceback.format_stack(stk))
|
||||
|
||||
msg.append("\n")
|
||||
print("\n".join(msg))
|
||||
|
||||
|
||||
def run_argparse(argv, formatter):
|
||||
def disable_quickedit() -> None:
|
||||
import atexit
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
|
||||
def ecb(ok: bool, fun: Any, args: list[Any]) -> list[Any]:
|
||||
if not ok:
|
||||
err: int = ctypes.get_last_error() # type: ignore
|
||||
if err:
|
||||
raise ctypes.WinError(err) # type: ignore
|
||||
return args
|
||||
|
||||
k32 = ctypes.WinDLL(str("kernel32"), use_last_error=True) # type: ignore
|
||||
if PY2:
|
||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||
|
||||
k32.GetStdHandle.errcheck = ecb
|
||||
k32.GetConsoleMode.errcheck = ecb
|
||||
k32.SetConsoleMode.errcheck = ecb
|
||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
||||
|
||||
def cmode(out: bool, mode: Optional[int] = None) -> int:
|
||||
h = k32.GetStdHandle(-11 if out else -10)
|
||||
if mode:
|
||||
return k32.SetConsoleMode(h, mode) # type: ignore
|
||||
|
||||
cmode = wintypes.DWORD()
|
||||
k32.GetConsoleMode(h, ctypes.byref(cmode))
|
||||
return cmode.value
|
||||
|
||||
# disable quickedit
|
||||
mode = orig_in = cmode(False)
|
||||
quickedit = 0x40
|
||||
extended = 0x80
|
||||
mask = quickedit + extended
|
||||
if mode & mask != extended:
|
||||
atexit.register(cmode, False, orig_in)
|
||||
cmode(False, mode & ~mask | extended)
|
||||
|
||||
# enable colors in case the os.system("rem") trick ever stops working
|
||||
if VT100:
|
||||
mode = orig_out = cmode(True)
|
||||
if mode & 4 != 4:
|
||||
atexit.register(cmode, True, orig_out)
|
||||
cmode(True, mode | 4)
|
||||
|
||||
|
||||
def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Namespace:
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
prog="copyparty",
|
||||
@@ -234,7 +335,8 @@ def run_argparse(argv, formatter):
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 4
|
||||
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||
|
||||
sects = [
|
||||
[
|
||||
@@ -243,10 +345,10 @@ def run_argparse(argv, formatter):
|
||||
dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
|
||||
where "perm" is "permissions,username1,username2,..."
|
||||
and "volflag" is config flags to set on this volume
|
||||
|
||||
-v takes src:dst:\033[33mperm\033[0m1:\033[33mperm\033[0m2:\033[33mperm\033[0mN:\033[32mvolflag\033[0m1:\033[32mvolflag\033[0m2:\033[32mvolflag\033[0mN:...
|
||||
* "\033[33mperm\033[0m" is "permissions,username1,username2,..."
|
||||
* "\033[32mvolflag\033[0m" is config flags to set on this volume
|
||||
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
@@ -265,7 +367,7 @@ def run_argparse(argv, formatter):
|
||||
* w (write-only) for everyone
|
||||
* rw (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
@@ -288,33 +390,50 @@ def run_argparse(argv, formatter):
|
||||
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
|
||||
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
|
||||
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
|
||||
|
||||
|
||||
\033[0mupload rules:
|
||||
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
|
||||
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
|
||||
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
|
||||
|
||||
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
|
||||
|
||||
\033[0mupload rotation:
|
||||
(moves all uploads into the specified folder structure)
|
||||
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
|
||||
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
|
||||
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
|
||||
|
||||
|
||||
\033[0mdatabase, general:
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2ts\033[35m disables metadata collection for existing files
|
||||
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2v\033[35m disables file verification, overrides -e2v*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mxdev\033[35m do not descend into other filesystems
|
||||
\033[36mxvol\033[35m skip symlinks leaving the volume root
|
||||
|
||||
\033[0mdatabase, audio tags:
|
||||
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
|
||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||
|
||||
|
||||
\033[0mthumbnails:
|
||||
\033[36mdthumb\033[35m disables all thumbnails
|
||||
\033[36mdvthumb\033[35m disables video thumbnails
|
||||
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
|
||||
\033[36mdithumb\033[35m disables image thumbnails
|
||||
|
||||
\033[0mclient and ux:
|
||||
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
|
||||
\033[36mrobots\033[35m allows indexing by search engines (default)
|
||||
\033[36mnorobots\033[35m kindly asks search engines to leave
|
||||
|
||||
\033[0mothers:
|
||||
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||
which will then be required at the "g" permission
|
||||
@@ -323,7 +442,7 @@ def run_argparse(argv, formatter):
|
||||
],
|
||||
[
|
||||
"urlform",
|
||||
"",
|
||||
"how to handle url-form POSTs",
|
||||
dedent(
|
||||
"""
|
||||
values for --urlform:
|
||||
@@ -362,63 +481,88 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; examples [.::r], [/mnt/nas/music:/music:r:aed]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
|
||||
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
|
||||
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without -- probably useful on nfs and cow filesystems (zfs, btrfs)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; s=smallest-first, n=alphabetical, fs=force-s, fn=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
|
||||
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example 3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example 3990")
|
||||
ap2.add_argument("--ftp-dbg", action="store_true", help="enable debug logging")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example 12000-13000")
|
||||
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
|
||||
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity (0.0028=10sec, 0.1=6min, 24=day, 168=week, 720=month, 8760=year)")
|
||||
|
||||
ap2 = ap.add_argument_group('yolo options')
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example 'idx' will do volume indexing + metadata analysis")
|
||||
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||
|
||||
@@ -433,56 +577,84 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
|
||||
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...)")
|
||||
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="favicon text [ foreground [ background ] ], set blank to disable")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; '.' (a single dot) = all files")
|
||||
# fmt: on
|
||||
|
||||
ap2 = ap.add_argument_group("help sections")
|
||||
@@ -500,7 +672,7 @@ def run_argparse(argv, formatter):
|
||||
return ret
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
def main(argv: Optional[list[str]] = None) -> None:
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("rem") # enables colors
|
||||
@@ -508,21 +680,28 @@ def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
|
||||
lprint(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite v{} | jinja2 v{} | pyftpd v{}\n\033[0m'
|
||||
f = f.format(
|
||||
S_VERSION,
|
||||
CODENAME,
|
||||
S_BUILD_DT,
|
||||
py_desc().replace("[", "\033[1;30m["),
|
||||
SQLITE_VER,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
)
|
||||
lprint(f)
|
||||
|
||||
ensure_locale()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
for k, v in zip(argv, argv[1:]):
|
||||
for k, v in zip(argv[1:], argv[2:]):
|
||||
if k == "-c":
|
||||
supp = args_from_cfg(v)
|
||||
argv.extend(supp)
|
||||
|
||||
deprecated = []
|
||||
deprecated: list[tuple[str, str]] = []
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = argv.index(dk)
|
||||
@@ -540,12 +719,28 @@ def main(argv=None):
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
al = run_argparse(argv, RiceFormatter)
|
||||
except AssertionError:
|
||||
al = run_argparse(argv, Dodge11874)
|
||||
retry = False
|
||||
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
||||
try:
|
||||
al = run_argparse(argv, fmtr, retry)
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
retry = True
|
||||
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
|
||||
|
||||
nstrs = []
|
||||
assert al
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
try:
|
||||
disable_quickedit()
|
||||
except:
|
||||
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||
|
||||
if not VT100:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs: list[str] = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
m = re_vol.match(ostr)
|
||||
@@ -596,6 +791,12 @@ def main(argv=None):
|
||||
except:
|
||||
raise Exception("invalid value for -p")
|
||||
|
||||
for arg, kname, okays in [["--u2sort", "u2sort", "s n fs fn"]]:
|
||||
val = unicode(getattr(al, kname))
|
||||
if val not in okays.split():
|
||||
zs = "argument {} cannot be '{}'; try one of these: {}"
|
||||
raise Exception(zs.format(arg, val, okays))
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
@@ -616,7 +817,7 @@ def main(argv=None):
|
||||
|
||||
# signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
SvcHub(al, argv, printed).run()
|
||||
SvcHub(al, argv, "".join(printed)).run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 1, 0)
CODENAME = "opus"
BUILD_DT = (2021, 11, 6)
VERSION = (1, 3, 14)
CODENAME = "god dag"
BUILD_DT = (2022, 8, 15)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
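For reference, the derived strings from the new tuples work out as follows (a quick check of the join/format expressions shown above, nothing more):

VERSION = (1, 3, 14)
BUILD_DT = (2022, 8, 15)
S_VERSION = ".".join(map(str, VERSION))                    # -> "1.3.14"
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)   # -> "2022-08-15"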
File diff suppressed because it is too large
@@ -2,27 +2,30 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
from ..util import fsenc, fsdec
|
||||
|
||||
from ..util import SYMTIME, fsdec, fsenc
|
||||
from . import path
|
||||
|
||||
try:
|
||||
from typing import Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
_ = (path,)
|
||||
|
||||
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
|
||||
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
|
||||
|
||||
|
||||
def chmod(p, mode):
|
||||
def chmod(p: str, mode: int) -> None:
|
||||
return os.chmod(fsenc(p), mode)
|
||||
|
||||
|
||||
def listdir(p="."):
|
||||
def listdir(p: str = ".") -> list[str]:
|
||||
return [fsdec(x) for x in os.listdir(fsenc(p))]
|
||||
|
||||
|
||||
def lstat(p):
|
||||
return os.lstat(fsenc(p))
|
||||
|
||||
|
||||
def makedirs(name, mode=0o755, exist_ok=True):
|
||||
def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode)
|
||||
@@ -31,29 +34,43 @@ def makedirs(name, mode=0o755, exist_ok=True):
|
||||
raise
|
||||
|
||||
|
||||
def mkdir(p, mode=0o755):
|
||||
def mkdir(p: str, mode: int = 0o755) -> None:
|
||||
return os.mkdir(fsenc(p), mode)
|
||||
|
||||
|
||||
def rename(src, dst):
|
||||
def rename(src: str, dst: str) -> None:
|
||||
return os.rename(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
def replace(src, dst):
|
||||
def replace(src: str, dst: str) -> None:
|
||||
return os.replace(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
def rmdir(p):
|
||||
def rmdir(p: str) -> None:
|
||||
return os.rmdir(fsenc(p))
|
||||
|
||||
|
||||
def stat(p):
|
||||
def stat(p: str) -> os.stat_result:
|
||||
return os.stat(fsenc(p))
|
||||
|
||||
|
||||
def unlink(p):
|
||||
def unlink(p: str) -> None:
|
||||
return os.unlink(fsenc(p))
|
||||
|
||||
|
||||
def utime(p, times=None):
|
||||
return os.utime(fsenc(p), times)
|
||||
def utime(
|
||||
p: str, times: Optional[tuple[float, float]] = None, follow_symlinks: bool = True
|
||||
) -> None:
|
||||
if SYMTIME:
|
||||
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
|
||||
else:
|
||||
return os.utime(fsenc(p), times)
|
||||
|
||||
|
||||
if hasattr(os, "lstat"):
|
||||
|
||||
def lstat(p: str) -> os.stat_result:
|
||||
return os.lstat(fsenc(p))
|
||||
|
||||
else:
|
||||
lstat = stat
|
||||
|
||||
@@ -2,36 +2,44 @@
from __future__ import print_function, unicode_literals

import os
from ..util import fsenc, fsdec

from ..util import SYMTIME, fsdec, fsenc


def abspath(p):
def abspath(p: str) -> str:
    return fsdec(os.path.abspath(fsenc(p)))


def exists(p):
def exists(p: str) -> bool:
    return os.path.exists(fsenc(p))


def getmtime(p):
    return os.path.getmtime(fsenc(p))
def getmtime(p: str, follow_symlinks: bool = True) -> float:
    if not follow_symlinks and SYMTIME:
        return os.lstat(fsenc(p)).st_mtime
    else:
        return os.path.getmtime(fsenc(p))


def getsize(p):
def getsize(p: str) -> int:
    return os.path.getsize(fsenc(p))


def isfile(p):
def isfile(p: str) -> bool:
    return os.path.isfile(fsenc(p))


def isdir(p):
def isdir(p: str) -> bool:
    return os.path.isdir(fsenc(p))


def islink(p):
def islink(p: str) -> bool:
    return os.path.islink(fsenc(p))


def realpath(p):
def lexists(p: str) -> bool:
    return os.path.lexists(fsenc(p))


def realpath(p: str) -> str:
    return fsdec(os.path.realpath(fsenc(p)))
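These wrappers funnel every path through fsenc/fsdec so that filenames which are not valid text in the filesystem encoding still round-trip, and the new getmtime can stat the symlink itself when asked to. A minimal sketch of the idea using only stdlib helpers (an illustration under the assumption that fsenc/fsdec behave like os.fsencode/os.fsdecode; the real helpers live in util.py and the SYMTIME capability check is omitted here for brevity):

import os

def fsenc(p: str) -> bytes:
    # surrogateescape keeps undecodable bytes reversible
    return os.fsencode(p)

def fsdec(p: bytes) -> str:
    return os.fsdecode(p)

def getmtime(p: str, follow_symlinks: bool = True) -> float:
    # stat the link itself when the caller asks for it,
    # otherwise follow it like os.path.getmtime does
    st = os.stat(fsenc(p)) if follow_symlinks else os.lstat(fsenc(p))
    return st.st_mtime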
@@ -1,37 +1,56 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import time
|
||||
import threading
|
||||
import time
|
||||
|
||||
from .broker_util import try_exec
|
||||
import queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .broker_mpw import MpWorker
|
||||
from .broker_util import try_exec
|
||||
from .util import mp
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class MProcess(mp.Process):
|
||||
def __init__(
|
||||
self,
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]],
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]],
|
||||
target: Any,
|
||||
args: Any,
|
||||
) -> None:
|
||||
super(MProcess, self).__init__(target=target, args=args)
|
||||
self.q_pend = q_pend
|
||||
self.q_yield = q_yield
|
||||
|
||||
|
||||
class BrokerMp(object):
|
||||
"""external api; manages MpWorkers"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.log = hub.log
|
||||
self.args = hub.args
|
||||
|
||||
self.procs = []
|
||||
self.retpend = {}
|
||||
self.retpend_mutex = threading.Lock()
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
self.num_workers = self.args.j or mp.cpu_count()
|
||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||
for n in range(1, self.num_workers + 1):
|
||||
q_pend = mp.Queue(1)
|
||||
q_yield = mp.Queue(64)
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
|
||||
|
||||
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
|
||||
proc.q_pend = q_pend
|
||||
proc.q_yield = q_yield
|
||||
proc.clients = {}
|
||||
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
||||
|
||||
thr = threading.Thread(
|
||||
target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
|
||||
@@ -42,11 +61,11 @@ class BrokerMp(object):
|
||||
self.procs.append(proc)
|
||||
proc.start()
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.log("broker", "shutting down")
|
||||
for n, proc in enumerate(self.procs):
|
||||
thr = threading.Thread(
|
||||
target=proc.q_pend.put([0, "shutdown", []]),
|
||||
target=proc.q_pend.put((0, "shutdown", [])),
|
||||
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
|
||||
)
|
||||
thr.start()
|
||||
@@ -62,12 +81,12 @@ class BrokerMp(object):
|
||||
|
||||
procs.pop()
|
||||
|
||||
def reload(self):
|
||||
def reload(self) -> None:
|
||||
self.log("broker", "reloading")
|
||||
for _, proc in enumerate(self.procs):
|
||||
proc.q_pend.put([0, "reload", []])
|
||||
proc.q_pend.put((0, "reload", []))
|
||||
|
||||
def collector(self, proc):
|
||||
def collector(self, proc: MProcess) -> None:
|
||||
"""receive message from hub in other process"""
|
||||
while True:
|
||||
msg = proc.q_yield.get()
|
||||
@@ -78,10 +97,7 @@ class BrokerMp(object):
|
||||
|
||||
elif dest == "retq":
|
||||
# response from previous ipc call
|
||||
with self.retpend_mutex:
|
||||
retq = self.retpend.pop(retq_id)
|
||||
|
||||
retq.put(args)
|
||||
raise Exception("invalid broker_mp usage")
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
@@ -93,9 +109,9 @@ class BrokerMp(object):
|
||||
rv = try_exec(retq_id, obj, *args)
|
||||
|
||||
if retq_id:
|
||||
proc.q_pend.put([retq_id, "retq", rv])
|
||||
proc.q_pend.put((retq_id, "retq", rv))
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
"""
|
||||
send message to non-hub component in other process,
|
||||
returns a Queue object which eventually contains the response if want_retval
|
||||
@@ -103,7 +119,7 @@ class BrokerMp(object):
|
||||
"""
|
||||
if dest == "listen":
|
||||
for p in self.procs:
|
||||
p.q_pend.put([0, dest, [args[0], len(self.procs)]])
|
||||
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
|
||||
|
||||
elif dest == "cb_httpsrv_up":
|
||||
self.hub.cb_httpsrv_up()
|
||||
|
||||
@@ -1,20 +1,38 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .broker_util import ExceptionalQueue
|
||||
import queue
|
||||
|
||||
from .authsrv import AuthSrv
|
||||
from .broker_util import BrokerCli, ExceptionalQueue
|
||||
from .httpsrv import HttpSrv
|
||||
from .util import FAKE_MP
|
||||
from copyparty.authsrv import AuthSrv
|
||||
|
||||
try:
|
||||
from types import FrameType
|
||||
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class MpWorker(object):
|
||||
class MpWorker(BrokerCli):
|
||||
"""one single mp instance"""
|
||||
|
||||
def __init__(self, q_pend, q_yield, args, n):
|
||||
def __init__(
|
||||
self,
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]],
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]],
|
||||
args: argparse.Namespace,
|
||||
n: int,
|
||||
) -> None:
|
||||
super(MpWorker, self).__init__()
|
||||
|
||||
self.q_pend = q_pend
|
||||
self.q_yield = q_yield
|
||||
self.args = args
|
||||
@@ -22,7 +40,7 @@ class MpWorker(object):
|
||||
|
||||
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
|
||||
|
||||
self.retpend = {}
|
||||
self.retpend: dict[int, Any] = {}
|
||||
self.retpend_mutex = threading.Lock()
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
@@ -45,20 +63,20 @@ class MpWorker(object):
|
||||
thr.start()
|
||||
thr.join()
|
||||
|
||||
def signal_handler(self, sig, frame):
|
||||
def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None:
|
||||
# print('k')
|
||||
pass
|
||||
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
self.q_yield.put([0, "log", [src, msg, c]])
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.q_yield.put((0, "log", [src, msg, c]))
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
pass
|
||||
|
||||
def logw(self, msg, c=0):
|
||||
def logw(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("mp{}".format(self.n), msg, c)
|
||||
|
||||
def main(self):
|
||||
def main(self) -> None:
|
||||
while True:
|
||||
retq_id, dest, args = self.q_pend.get()
|
||||
|
||||
@@ -87,15 +105,14 @@ class MpWorker(object):
|
||||
else:
|
||||
raise Exception("what is " + str(dest))
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
if want_retval:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
self.retpend[retq_id] = retq
|
||||
else:
|
||||
retq = None
|
||||
retq_id = 0
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
self.retpend[retq_id] = retq
|
||||
|
||||
self.q_yield.put([retq_id, dest, args])
|
||||
self.q_yield.put((retq_id, dest, list(args)))
|
||||
return retq
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
self.q_yield.put((0, dest, list(args)))
|
||||
|
||||
@@ -3,14 +3,25 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import threading
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
|
||||
from .httpsrv import HttpSrv
|
||||
from .broker_util import ExceptionalQueue, try_exec
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class BrokerThr(object):
|
||||
class BrokerThr(BrokerCli):
|
||||
"""external api; behaves like BrokerMP but using plain threads"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
super(BrokerThr, self).__init__()
|
||||
|
||||
self.hub = hub
|
||||
self.log = hub.log
|
||||
self.args = hub.args
|
||||
@@ -23,29 +34,35 @@ class BrokerThr(object):
|
||||
self.httpsrv = HttpSrv(self, None)
|
||||
self.reload = self.noop
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
# self.log("broker", "shutting down")
|
||||
self.httpsrv.shutdown()
|
||||
|
||||
def noop(self):
|
||||
def noop(self) -> None:
|
||||
pass
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
rv = try_exec(True, obj, *args)
|
||||
|
||||
# pretend we're broker_mp
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
if dest == "listen":
|
||||
self.httpsrv.listen(args[0], 1)
|
||||
return
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
# TODO will deadlock if dest performs another ipc
|
||||
rv = try_exec(want_retval, obj, *args)
|
||||
if not want_retval:
|
||||
return
|
||||
|
||||
# pretend we're broker_mp
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
|
||||
try_exec(False, obj, *args)
|
||||
|
||||
@@ -1,17 +1,30 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
|
||||
import argparse
|
||||
import traceback
|
||||
|
||||
from .util import Pebkac, Queue
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .authsrv import AuthSrv
|
||||
from .util import Pebkac
|
||||
|
||||
try:
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class ExceptionalQueue(Queue, object):
|
||||
def get(self, block=True, timeout=None):
|
||||
def get(self, block: bool = True, timeout: Optional[float] = None) -> Any:
|
||||
rv = super(ExceptionalQueue, self).get(block, timeout)
|
||||
|
||||
# TODO: how expensive is this?
|
||||
if isinstance(rv, list):
|
||||
if rv[0] == "exception":
|
||||
if rv[1] == "pebkac":
|
||||
@@ -22,7 +35,26 @@ class ExceptionalQueue(Queue, object):
|
||||
return rv
|
||||
|
||||
|
||||
def try_exec(want_retval, func, *args):
|
||||
class BrokerCli(object):
|
||||
"""
|
||||
helps mypy understand httpsrv.broker but still fails a few levels deeper,
|
||||
for example resolving httpconn.* in httpcli -- see lines tagged #mypy404
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.log: "RootLogger" = None
|
||||
self.args: argparse.Namespace = None
|
||||
self.asrv: AuthSrv = None
|
||||
self.httpsrv: "HttpSrv" = None
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
return ExceptionalQueue(1)
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:
|
||||
try:
|
||||
return func(*args)
|
||||
|
||||
|
||||
copyparty/fsutil.py (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, MACOS
|
||||
from .authsrv import AXS, VFS
|
||||
from .bos import bos
|
||||
from .util import chkcmd, min_ex
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Fstab(object):
|
||||
def __init__(self, log: "RootLogger"):
|
||||
self.log_func = log
|
||||
|
||||
self.trusted = False
|
||||
self.tab: Optional[VFS] = None
|
||||
self.cache: dict[str, str] = {}
|
||||
self.age = 0.0
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("fstab", msg + "\033[K", c)
|
||||
|
||||
def get(self, path: str) -> str:
|
||||
if len(self.cache) > 9000:
|
||||
self.age = time.time()
|
||||
self.tab = None
|
||||
self.cache = {}
|
||||
|
||||
fs = "ext4"
|
||||
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
|
||||
|
||||
if ANYWIN:
|
||||
fs = "vfat"
|
||||
try:
|
||||
path = self._winpath(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
return fs
|
||||
|
||||
path = path.lstrip("/")
|
||||
try:
|
||||
return self.cache[path]
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
|
||||
fs = fs.lower()
|
||||
self.cache[path] = fs
|
||||
self.log("found {} at {}".format(fs, path))
|
||||
return fs
|
||||
|
||||
def _winpath(self, path: str) -> str:
|
||||
# try to combine volume-label + st_dev (vsn)
|
||||
path = path.replace("/", "\\")
|
||||
vid = path.split(":", 1)[0].strip("\\").split("\\", 1)[0]
|
||||
try:
|
||||
return "{}*{}".format(vid, bos.stat(path).st_dev)
|
||||
except:
|
||||
return vid
|
||||
|
||||
def build_fallback(self) -> None:
|
||||
self.tab = VFS(self.log_func, "idk", "/", AXS(), {})
|
||||
self.trusted = False
|
||||
|
||||
def build_tab(self) -> None:
|
||||
self.log("building tab")
|
||||
|
||||
sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
|
||||
if MACOS:
|
||||
sptn = r"^.*? on (.*) \(([^ ]+), .*"
|
||||
|
||||
ptn = re.compile(sptn)
|
||||
so, _ = chkcmd(["mount"])
|
||||
tab1: list[tuple[str, str]] = []
|
||||
for ln in so.split("\n"):
|
||||
m = ptn.match(ln)
|
||||
if not m:
|
||||
continue
|
||||
|
||||
zs1, zs2 = m.groups()
|
||||
tab1.append((str(zs1), str(zs2)))
|
||||
|
||||
tab1.sort(key=lambda x: (len(x[0]), x[0]))
|
||||
path1, fs1 = tab1[0]
|
||||
tab = VFS(self.log_func, fs1, path1, AXS(), {})
|
||||
for path, fs in tab1[1:]:
|
||||
tab.add(fs, path.lstrip("/"))
|
||||
|
||||
self.tab = tab
|
||||
|
||||
def relabel(self, path: str, nval: str) -> None:
|
||||
assert self.tab
|
||||
self.cache = {}
|
||||
if ANYWIN:
|
||||
path = self._winpath(path)
|
||||
|
||||
path = path.lstrip("/")
|
||||
ptn = re.compile(r"^[^\\/]*")
|
||||
vn, rem = self.tab._find(path)
|
||||
if not self.trusted:
|
||||
# no mtab access; have to build as we go
|
||||
if "/" in rem:
|
||||
self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0]))
|
||||
if rem:
|
||||
self.tab.add(nval, path)
|
||||
else:
|
||||
vn.realpath = nval
|
||||
|
||||
return
|
||||
|
||||
visit = [vn]
|
||||
while visit:
|
||||
vn = visit.pop()
|
||||
vn.realpath = ptn.sub(nval, vn.realpath)
|
||||
visit.extend(list(vn.nodes.values()))
|
||||
|
||||
def get_unix(self, path: str) -> str:
|
||||
if not self.tab:
|
||||
try:
|
||||
self.build_tab()
|
||||
self.trusted = True
|
||||
except:
|
||||
# prisonparty or other restrictive environment
|
||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
ret = self.tab._find(path)[0]
|
||||
if self.trusted or path == ret.vpath:
|
||||
return ret.realpath.split("/")[0]
|
||||
else:
|
||||
return "idk"
|
||||
|
||||
def get_w32(self, path: str) -> str:
|
||||
if not self.tab:
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
ret = self.tab._find(path)[0]
|
||||
return ret.realpath
|
||||
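Aside (not part of the diff): the new Fstab class above caches a filesystem-type lookup per path, parsing `mount` output on unix and combining volume label with st_dev on windows. A small usage sketch; the log callable and the caller's motive are assumptions, not taken from the diff:

    # hypothetical caller; `log` is any RootLogger-style callable
    fstab = Fstab(log)
    fs = fstab.get("/mnt/media/music")   # e.g. "ext4", "vfat", or "idk" when mtab is unreadable
    if fs == "vfat":
        pass  # a caller could disable features the filesystem cannot handle (assumption)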
copyparty/ftpd.py (new file, 401 lines)
@@ -0,0 +1,401 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.log import config_logging
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import PY2, TYPE_CHECKING, E
|
||||
from .bos import bos
|
||||
from .util import Pebkac, exclude_dotfiles, fsenc
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
import typing
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class FtpAuth(DummyAuthorizer):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
super(FtpAuth, self).__init__()
|
||||
self.hub = hub
|
||||
|
||||
def validate_authentication(
|
||||
self, username: str, password: str, handler: Any
|
||||
) -> None:
|
||||
asrv = self.hub.asrv
|
||||
if username == "anonymous":
|
||||
password = ""
|
||||
|
||||
uname = "*"
|
||||
if password:
|
||||
uname = asrv.iacct.get(password, "")
|
||||
|
||||
handler.username = uname
|
||||
|
||||
if password and not uname:
|
||||
raise AuthenticationFailed("Authentication failed.")
|
||||
|
||||
def get_home_dir(self, username: str) -> str:
|
||||
return "/"
|
||||
|
||||
def has_user(self, username: str) -> bool:
|
||||
asrv = self.hub.asrv
|
||||
return username in asrv.acct
|
||||
|
||||
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
|
||||
return True # handled at filesystem layer
|
||||
|
||||
def get_perms(self, username: str) -> str:
|
||||
return "elradfmwMT"
|
||||
|
||||
def get_msg_login(self, username: str) -> str:
|
||||
return "sup {}".format(username)
|
||||
|
||||
def get_msg_quit(self, username: str) -> str:
|
||||
return "cya"
|
||||
|
||||
|
||||
class FtpFs(AbstractedFS):
|
||||
def __init__(
|
||||
self, root: str, cmd_channel: Any
|
||||
) -> None: # pylint: disable=super-init-not-called
|
||||
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.hub: "SvcHub" = cmd_channel.hub
|
||||
self.args = cmd_channel.args
|
||||
|
||||
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
|
||||
|
||||
self.cwd = "/" # pyftpdlib convention of leading slash
|
||||
self.root = "/var/lib/empty"
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
|
||||
def v2a(
|
||||
self,
|
||||
vpath: str,
|
||||
r: bool = False,
|
||||
w: bool = False,
|
||||
m: bool = False,
|
||||
d: bool = False,
|
||||
) -> str:
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||
if not vfs.realpath:
|
||||
raise FilesystemError("no filesystem mounted at this path")
|
||||
|
||||
return os.path.join(vfs.realpath, rem)
|
||||
except Pebkac as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def rv2a(
|
||||
self,
|
||||
vpath: str,
|
||||
r: bool = False,
|
||||
w: bool = False,
|
||||
m: bool = False,
|
||||
d: bool = False,
|
||||
) -> str:
|
||||
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
||||
|
||||
def ftp2fs(self, ftppath: str) -> str:
|
||||
# return self.v2a(ftppath)
|
||||
return ftppath # self.cwd must be vpath
|
||||
|
||||
def fs2ftp(self, fspath: str) -> str:
|
||||
# raise NotImplementedError()
|
||||
return fspath
|
||||
|
||||
def validpath(self, path: str) -> bool:
|
||||
if "/.hist/" in path:
|
||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||
raise FilesystemError("access to this file is forbidden")
|
||||
|
||||
return True
|
||||
|
||||
def open(self, filename: str, mode: str) -> typing.IO[Any]:
|
||||
r = "r" in mode
|
||||
w = "w" in mode or "a" in mode or "+" in mode
|
||||
|
||||
ap = self.rv2a(filename, r, w)
|
||||
if w and bos.path.exists(ap):
|
||||
raise FilesystemError("cannot open existing file for writing")
|
||||
|
||||
self.validpath(ap)
|
||||
return open(fsenc(ap), mode)
|
||||
|
||||
def chdir(self, path: str) -> None:
|
||||
self.cwd = join(self.cwd, path)
|
||||
x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
||||
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, w=True)
|
||||
bos.mkdir(ap)
|
||||
|
||||
def listdir(self, path: str) -> list[str]:
|
||||
vpath = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
|
||||
|
||||
fsroot, vfs_ls1, vfs_virt = vfs.ls(
|
||||
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
|
||||
)
|
||||
vfs_ls = [x[0] for x in vfs_ls1]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
if not self.args.ed:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
vfs_ls.sort()
|
||||
return vfs_ls
|
||||
except:
|
||||
if vpath:
|
||||
# display write-only folders as empty
|
||||
return []
|
||||
|
||||
# return list of volumes
|
||||
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
|
||||
return list(sorted(list(r.keys())))
|
||||
|
||||
def rmdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, d=True)
|
||||
bos.rmdir(ap)
|
||||
|
||||
def remove(self, path: str) -> None:
|
||||
if self.args.no_del:
|
||||
raise FilesystemError("the delete feature is disabled in server config")
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp])
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def rename(self, src: str, dst: str) -> None:
|
||||
if not self.can_move:
|
||||
raise FilesystemError("not allowed for user " + self.h.username)
|
||||
|
||||
if self.args.no_mv:
|
||||
t = "the rename/move feature is disabled in server config"
|
||||
raise FilesystemError(t)
|
||||
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def chmod(self, path: str, mode: str) -> None:
|
||||
pass
|
||||
|
||||
def stat(self, path: str) -> os.stat_result:
|
||||
try:
|
||||
ap = self.rv2a(path, r=True)
|
||||
return bos.stat(ap)
|
||||
except:
|
||||
ap = self.rv2a(path)
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise
|
||||
|
||||
return st
|
||||
|
||||
def utime(self, path: str, timeval: float) -> None:
|
||||
ap = self.rv2a(path, w=True)
|
||||
return bos.utime(ap, (timeval, timeval))
|
||||
|
||||
def lstat(self, path: str) -> os.stat_result:
|
||||
ap = self.rv2a(path)
|
||||
return bos.lstat(ap)
|
||||
|
||||
def isfile(self, path: str) -> bool:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISREG(st.st_mode)
|
||||
|
||||
def islink(self, path: str) -> bool:
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.islink(ap)
|
||||
|
||||
def isdir(self, path: str) -> bool:
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
except:
|
||||
return True
|
||||
|
||||
def getsize(self, path: str) -> int:
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.getsize(ap)
|
||||
|
||||
def getmtime(self, path: str) -> float:
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.getmtime(ap)
|
||||
|
||||
def realpath(self, path: str) -> str:
|
||||
return path
|
||||
|
||||
def lexists(self, path: str) -> bool:
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.lexists(ap)
|
||||
|
||||
def get_user_by_uid(self, uid: int) -> str:
|
||||
return "root"
|
||||
|
||||
def get_group_by_uid(self, gid: int) -> str:
|
||||
return "root"
|
||||
|
||||
|
||||
class FtpHandler(FTPHandler):
|
||||
abstracted_fs = FtpFs
|
||||
hub: "SvcHub" = None
|
||||
args: argparse.Namespace = None
|
||||
|
||||
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
|
||||
self.hub: "SvcHub" = FtpHandler.hub
|
||||
self.args: argparse.Namespace = FtpHandler.args
|
||||
|
||||
if PY2:
|
||||
FTPHandler.__init__(self, conn, server, ioloop)
|
||||
else:
|
||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||
|
||||
# abspath->vpath mapping to resolve log_transfer paths
|
||||
self.vfs_map: dict[str, str] = {}
|
||||
|
||||
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
||||
# Optional[str]
|
||||
vp = join(self.fs.cwd, file).lstrip("/")
|
||||
ap = self.fs.v2a(vp)
|
||||
self.vfs_map[ap] = vp
|
||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
||||
return ret
|
||||
|
||||
def log_transfer(
|
||||
self,
|
||||
cmd: str,
|
||||
filename: bytes,
|
||||
receive: bool,
|
||||
completed: bool,
|
||||
elapsed: float,
|
||||
bytes: int,
|
||||
) -> Any:
|
||||
# None
|
||||
ap = filename.decode("utf-8", "replace")
|
||||
vp = self.vfs_map.pop(ap, None)
|
||||
# print("xfer_end: {} => {}".format(ap, vp))
|
||||
if vp:
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
vfs.flags,
|
||||
rem,
|
||||
fn,
|
||||
self.remote_ip,
|
||||
time.time(),
|
||||
)
|
||||
|
||||
return FTPHandler.log_transfer(
|
||||
self, cmd, filename, receive, completed, elapsed, bytes
|
||||
)
|
||||
|
||||
|
||||
try:
|
||||
from pyftpdlib.handlers import TLS_FTPHandler
|
||||
|
||||
class SftpHandler(FtpHandler, TLS_FTPHandler):
|
||||
pass
|
||||
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Ftpd(object):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
|
||||
hs = []
|
||||
if self.args.ftp:
|
||||
hs.append([FtpHandler, self.args.ftp])
|
||||
if self.args.ftps:
|
||||
try:
|
||||
h1 = SftpHandler
|
||||
except:
|
||||
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
||||
print(t.format(sys.executable))
|
||||
sys.exit(1)
|
||||
|
||||
h1.certfile = os.path.join(E.cfg, "cert.pem")
|
||||
h1.tls_control_required = True
|
||||
h1.tls_data_required = True
|
||||
|
||||
hs.append([h1, self.args.ftps])
|
||||
|
||||
for h_lp in hs:
|
||||
h2, lp = h_lp
|
||||
h2.hub = hub
|
||||
h2.args = hub.args
|
||||
h2.authorizer = FtpAuth(hub)
|
||||
|
||||
if self.args.ftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
||||
if self.args.ftp and self.args.ftps:
|
||||
# divide port range in half
|
||||
d = int((p2 - p1) / 2)
|
||||
if lp == self.args.ftp:
|
||||
p2 = p1 + d
|
||||
else:
|
||||
p1 += d + 1
|
||||
|
||||
h2.passive_ports = list(range(p1, p2 + 1))
|
||||
|
||||
if self.args.ftp_nat:
|
||||
h2.masquerade_address = self.args.ftp_nat
|
||||
|
||||
if self.args.ftp_dbg:
|
||||
config_logging(level=logging.DEBUG)
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in self.args.i:
|
||||
for h, lp in hs:
|
||||
FTPServer((ip, int(lp)), h, ioloop)
|
||||
|
||||
thr = threading.Thread(target=ioloop.loop, name="ftp")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
|
||||
def join(p1: str, p2: str) -> str:
|
||||
w = os.path.join(p1, p2.replace("\\", "/"))
|
||||
return os.path.normpath(w).replace("\\", "/")
|
||||
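Aside (not part of the diff): one detail in Ftpd.__init__ above that is easy to misread is the passive-port split when both --ftp and --ftps are enabled. A worked example with an assumed port range:

    # assumed ftp_pr value "12000-12099" for the split shown above
    p1, p2 = 12000, 12099
    d = int((p2 - p1) / 2)                          # 49
    ftp_ports = list(range(p1, p1 + d + 1))         # 12000..12049 for plain FTP
    ftps_ports = list(range(p1 + d + 1, p2 + 1))    # 12050..12099 for FTPS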
copyparty/httpcli.py (1271 lines changed; diff suppressed because it is too large)
@@ -1,24 +1,36 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import argparse # typechk
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
import socket
|
||||
import threading # typechk
|
||||
import time
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from . import util as Util
|
||||
from .__init__ import TYPE_CHECKING, E
|
||||
from .authsrv import AuthSrv # typechk
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL
|
||||
from .ico import Ico
|
||||
from .mtag import HAVE_FFMPEG
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS
|
||||
from .u2idx import U2idx
|
||||
|
||||
try:
|
||||
from typing import Optional, Pattern, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -27,31 +39,37 @@ class HttpConn(object):
|
||||
creates an HttpCli for each request (Connection: Keep-Alive)
|
||||
"""
|
||||
|
||||
def __init__(self, sck, addr, hsrv):
|
||||
def __init__(
|
||||
self, sck: socket.socket, addr: tuple[str, int], hsrv: "HttpSrv"
|
||||
) -> None:
|
||||
self.s = sck
|
||||
self.sr: Optional[Util._Unrecv] = None
|
||||
self.addr = addr
|
||||
self.hsrv = hsrv
|
||||
|
||||
self.mutex = hsrv.mutex
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
self.mutex: threading.Lock = hsrv.mutex # mypy404
|
||||
self.args: argparse.Namespace = hsrv.args # mypy404
|
||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||
self.cert_path = hsrv.cert_path
|
||||
self.u2fh = hsrv.u2fh
|
||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv) if enth else None
|
||||
self.ico = Ico(self.args)
|
||||
enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb
|
||||
self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None # mypy404
|
||||
self.ico: Ico = Ico(self.args) # mypy404
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0: float = time.time() # mypy404
|
||||
self.stopping = False
|
||||
self.nreq = 0
|
||||
self.nbyte = 0
|
||||
self.u2idx = None
|
||||
self.log_func = hsrv.log
|
||||
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||
self.nreq: int = 0 # mypy404
|
||||
self.nbyte: int = 0 # mypy404
|
||||
self.u2idx: Optional[U2idx] = None
|
||||
self.log_func: "Util.RootLogger" = hsrv.log # mypy404
|
||||
self.log_src: str = "httpconn" # mypy404
|
||||
self.lf_url: Optional[Pattern[str]] = (
|
||||
re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||
) # mypy404
|
||||
self.set_rproxy()
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
try:
|
||||
self.s.shutdown(socket.SHUT_RDWR)
|
||||
@@ -59,7 +77,7 @@ class HttpConn(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
def set_rproxy(self, ip: Optional[str] = None) -> str:
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
@@ -72,35 +90,37 @@ class HttpConn(object):
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
def respath(self, res_name: str) -> str:
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def get_u2idx(self):
|
||||
def get_u2idx(self) -> U2idx:
|
||||
# one u2idx per tcp connection;
|
||||
# sqlite3 fully parallelizes under python threads
|
||||
if not self.u2idx:
|
||||
self.u2idx = U2idx(self)
|
||||
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self):
|
||||
def _detect_https(self) -> bool:
|
||||
method = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Unrecv(self.s)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
@@ -110,17 +130,18 @@ class HttpConn(object):
|
||||
self.log(err)
|
||||
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
return False
|
||||
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
# raise Exception("asdf")
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
@@ -149,14 +170,15 @@ class HttpConn(object):
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher())
|
||||
for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
ciphers = self.s.shared_ciphers()
|
||||
assert ciphers
|
||||
overlap = [str(y[::-1]) for y in ciphers]
|
||||
self.log("TLS cipher overlap:" + "\n".join(overlap))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
@@ -181,7 +203,7 @@ class HttpConn(object):
|
||||
return
|
||||
|
||||
if not self.sr:
|
||||
self.sr = Unrecv(self.s)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
|
||||
while not self.stopping:
|
||||
self.nreq += 1
|
||||
|
||||
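Aside (not part of the diff): the _detect_https changes above keep the same sniffing idea, peek at the first four bytes and treat anything that is not a plaintext HTTP method prefix as a TLS ClientHello. A stripped-down sketch of that check; socket setup, timeouts and the jython fallback are omitted:

    import socket
    head = sck.recv(4, socket.MSG_PEEK)   # sck: an accepted TCP socket (assumed)
    is_https = head not in (b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI")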
@@ -1,13 +1,15 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import math
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import queue
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
@@ -26,15 +28,18 @@ except ImportError:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E, PY2, MACOS
|
||||
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .__init__ import MACOS, TYPE_CHECKING, E
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .util import FHC, min_ex, spack, start_log_thrs, start_stackmon
|
||||
|
||||
if PY2:
|
||||
import Queue as queue
|
||||
else:
|
||||
import queue
|
||||
if TYPE_CHECKING:
|
||||
from .broker_util import BrokerCli
|
||||
|
||||
try:
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class HttpSrv(object):
|
||||
@@ -43,7 +48,7 @@ class HttpSrv(object):
|
||||
relying on MpSrv for performance (HttpSrv is just plain threads)
|
||||
"""
|
||||
|
||||
def __init__(self, broker, nid):
|
||||
def __init__(self, broker: "BrokerCli", nid: Optional[int]) -> None:
|
||||
self.broker = broker
|
||||
self.nid = nid
|
||||
self.args = broker.args
|
||||
@@ -58,23 +63,25 @@ class HttpSrv(object):
|
||||
|
||||
self.tp_nthr = 0 # actual
|
||||
self.tp_ncli = 0 # fading
|
||||
self.tp_time = None # latest worker collect
|
||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||
self.t_periodic = None
|
||||
self.tp_time = 0.0 # latest worker collect
|
||||
self.tp_q: Optional[queue.LifoQueue[Any]] = (
|
||||
None if self.args.no_htp else queue.LifoQueue()
|
||||
)
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.srvs = []
|
||||
self.srvs: list[socket.socket] = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients = {} # laggy
|
||||
self.clients: set[HttpConn] = set() # laggy
|
||||
self.nclimax = 0
|
||||
self.cb_ts = 0
|
||||
self.cb_v = 0
|
||||
self.cb_ts = 0.0
|
||||
self.cb_v = ""
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
self.j2 = {
|
||||
x: env.get_template(x + ".html")
|
||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
}
|
||||
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
|
||||
|
||||
@@ -82,7 +89,7 @@ class HttpSrv(object):
|
||||
if bos.path.exists(cert_path):
|
||||
self.cert_path = cert_path
|
||||
else:
|
||||
self.cert_path = None
|
||||
self.cert_path = ""
|
||||
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
@@ -94,7 +101,19 @@ class HttpSrv(object):
|
||||
if self.args.log_thrs:
|
||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||
|
||||
def start_threads(self, n):
|
||||
self.th_cfg: dict[str, Any] = {}
|
||||
t = threading.Thread(target=self.post_init, name="hsrv-init2")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def post_init(self) -> None:
|
||||
try:
|
||||
x = self.broker.ask("thumbsrv.getcfg")
|
||||
self.th_cfg = x.get()
|
||||
except:
|
||||
pass
|
||||
|
||||
def start_threads(self, n: int) -> None:
|
||||
self.tp_nthr += n
|
||||
if self.args.log_htp:
|
||||
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
|
||||
@@ -107,15 +126,16 @@ class HttpSrv(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def stop_threads(self, n):
|
||||
def stop_threads(self, n: int) -> None:
|
||||
self.tp_nthr -= n
|
||||
if self.args.log_htp:
|
||||
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
||||
|
||||
assert self.tp_q
|
||||
for _ in range(n):
|
||||
self.tp_q.put(None)
|
||||
|
||||
def periodic(self):
|
||||
def periodic(self) -> None:
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
@@ -129,7 +149,7 @@ class HttpSrv(object):
|
||||
self.t_periodic = None
|
||||
return
|
||||
|
||||
def listen(self, sck, nlisteners):
|
||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||
ip, port = sck.getsockname()
|
||||
self.srvs.append(sck)
|
||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||
@@ -141,17 +161,17 @@ class HttpSrv(object):
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def thr_listen(self, srv_sck):
|
||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||
"""listens on a shared tcp server"""
|
||||
ip, port = srv_sck.getsockname()
|
||||
fno = srv_sck.fileno()
|
||||
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
||||
msg = "subscribed @ {}:{} f{} p{}".format(ip, port, fno, os.getpid())
|
||||
self.log(self.name, msg)
|
||||
|
||||
def fun():
|
||||
self.broker.put(False, "cb_httpsrv_up")
|
||||
def fun() -> None:
|
||||
self.broker.say("cb_httpsrv_up")
|
||||
|
||||
threading.Thread(target=fun).start()
|
||||
threading.Thread(target=fun, name="sig-hsrv-up1").start()
|
||||
|
||||
while not self.stopping:
|
||||
if self.args.log_conn:
|
||||
@@ -173,21 +193,21 @@ class HttpSrv(object):
|
||||
continue
|
||||
|
||||
if self.args.log_conn:
|
||||
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, ip, port % 8, port
|
||||
)
|
||||
self.log("%s %s" % addr, m, c="1;30")
|
||||
self.log("%s %s" % addr, t, c="1;30")
|
||||
|
||||
self.accept(sck, addr)
|
||||
|
||||
def accept(self, sck, addr):
|
||||
def accept(self, sck: socket.socket, addr: tuple[str, int]) -> None:
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
now = time.time()
|
||||
|
||||
if now - (self.tp_time or now) > 300:
|
||||
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
|
||||
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
|
||||
self.tp_time = None
|
||||
t = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
|
||||
self.log(self.name, t.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
|
||||
self.tp_time = 0
|
||||
self.tp_q = None
|
||||
|
||||
with self.mutex:
|
||||
@@ -197,10 +217,10 @@ class HttpSrv(object):
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
|
||||
t = threading.Thread(target=self.periodic, name=name)
|
||||
self.t_periodic = t
|
||||
t.daemon = True
|
||||
t.start()
|
||||
thr = threading.Thread(target=self.periodic, name=name)
|
||||
self.t_periodic = thr
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_time = self.tp_time or now
|
||||
@@ -212,8 +232,8 @@ class HttpSrv(object):
|
||||
return
|
||||
|
||||
if not self.args.no_htp:
|
||||
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
|
||||
self.log(self.name, m, 1)
|
||||
t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
|
||||
self.log(self.name, t, 1)
|
||||
|
||||
thr = threading.Thread(
|
||||
target=self.thr_client,
|
||||
@@ -223,14 +243,15 @@ class HttpSrv(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def thr_poolw(self):
|
||||
def thr_poolw(self) -> None:
|
||||
assert self.tp_q
|
||||
while True:
|
||||
task = self.tp_q.get()
|
||||
if not task:
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.tp_time = None
|
||||
self.tp_time = 0
|
||||
|
||||
try:
|
||||
sck, addr = task
|
||||
@@ -240,10 +261,13 @@ class HttpSrv(object):
|
||||
)
|
||||
self.thr_client(sck, addr)
|
||||
me.name = self.name + "-poolw"
|
||||
except:
|
||||
self.log(self.name, "thr_client: " + min_ex(), 3)
|
||||
except Exception as ex:
|
||||
if str(ex).startswith("client d/c "):
|
||||
self.log(self.name, "thr_client: " + str(ex), 6)
|
||||
else:
|
||||
self.log(self.name, "thr_client: " + min_ex(), 3)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
for srv in self.srvs:
|
||||
try:
|
||||
@@ -251,7 +275,7 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
clients = list(self.clients.keys())
|
||||
clients = list(self.clients)
|
||||
for cli in clients:
|
||||
try:
|
||||
cli.shutdown()
|
||||
@@ -267,13 +291,13 @@ class HttpSrv(object):
|
||||
|
||||
self.log(self.name, "ok bye")
|
||||
|
||||
def thr_client(self, sck, addr):
|
||||
def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None:
|
||||
"""thread managing one tcp client"""
|
||||
sck.settimeout(120)
|
||||
|
||||
cli = HttpConn(sck, addr, self)
|
||||
with self.mutex:
|
||||
self.clients[cli] = 0
|
||||
self.clients.add(cli)
|
||||
|
||||
fno = sck.fileno()
|
||||
try:
|
||||
@@ -316,10 +340,10 @@ class HttpSrv(object):
|
||||
raise
|
||||
finally:
|
||||
with self.mutex:
|
||||
del self.clients[cli]
|
||||
self.clients.remove(cli)
|
||||
self.ncli -= 1
|
||||
|
||||
def cachebuster(self):
|
||||
def cachebuster(self) -> str:
|
||||
if time.time() - self.cb_ts < 1:
|
||||
return self.cb_v
|
||||
|
||||
|
||||
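Aside (not part of the diff): the threadpool changes above keep the original shutdown mechanism, where workers block on tp_q.get() and retire when they receive a falsy task, so stop_threads() only has to enqueue one None per worker. A reduced sketch of that sentinel pattern; handle() is a stand-in, not a copyparty function:

    import queue

    tp_q = queue.LifoQueue()

    def thr_poolw():
        while True:
            task = tp_q.get()
            if not task:
                break          # sentinel received: retire this worker
            handle(task)       # hypothetical work function

    def stop_threads(n):
        for _ in range(n):
            tp_q.put(None)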
@@ -1,28 +1,28 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import hashlib
|
||||
import argparse # typechk
|
||||
import colorsys
|
||||
import hashlib
|
||||
|
||||
from .__init__ import PY2
|
||||
|
||||
|
||||
class Ico(object):
|
||||
def __init__(self, args):
|
||||
def __init__(self, args: argparse.Namespace) -> None:
|
||||
self.args = args
|
||||
|
||||
def get(self, ext, as_thumb):
|
||||
def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]:
|
||||
"""placeholder to make thumbnails not break"""
|
||||
|
||||
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
|
||||
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
|
||||
if PY2:
|
||||
h = [ord(x) for x in h]
|
||||
zb = [ord(x) for x in zb]
|
||||
|
||||
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
|
||||
c = list(c1) + list(c2)
|
||||
c = [int(x * 255) for x in c]
|
||||
c = "".join(["{:02x}".format(x) for x in c])
|
||||
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
|
||||
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
||||
c = "".join(["{:02x}".format(x) for x in ci])
|
||||
|
||||
h = 30
|
||||
if not self.args.th_no_crop and as_thumb:
|
||||
@@ -37,6 +37,6 @@ class Ico(object):
|
||||
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
|
||||
</g></svg>
|
||||
"""
|
||||
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
|
||||
svg = svg.format(h, c[:6], c[6:], ext)
|
||||
|
||||
return ["image/svg+xml", svg]
|
||||
return "image/svg+xml", svg.encode("utf-8")
|
||||
|
||||
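Aside (not part of the diff): for reference, the color math the Ico.get rewrite above settles on takes two bytes of the extension's sha1, picks a hue from the first, renders it once dark and once bright, and hex-packs the result for the SVG fill and text colors. A standalone repro with an assumed extension:

    import colorsys, hashlib
    zb = hashlib.sha1(b"flac").digest()[2:4]          # "flac" is just an example extension
    c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)   # dark variant
    c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)     # bright variant
    ci = [int(x * 255) for x in list(c1) + list(c2)]
    c = "".join("{:02x}".format(x) for x in ci)       # 12 hex chars: c[:6] fill, c[6:] text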
@@ -1,18 +1,26 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
|
||||
from .__init__ import PY2, WINDOWS, unicode
|
||||
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
|
||||
from .bos import bos
|
||||
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
|
||||
|
||||
try:
|
||||
from typing import Any, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def have_ff(cmd):
|
||||
def have_ff(cmd: str) -> bool:
|
||||
if PY2:
|
||||
print("# checking {}".format(cmd))
|
||||
cmd = (cmd + " -version").encode("ascii").split(b" ")
|
||||
@@ -30,13 +38,15 @@ HAVE_FFPROBE = have_ff("ffprobe")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
def __init__(self, cmdline):
|
||||
def __init__(self, cmdline: str) -> None:
|
||||
self.tag, args = cmdline.split("=", 1)
|
||||
self.tags = self.tag.split(",")
|
||||
|
||||
self.timeout = 30
|
||||
self.force = False
|
||||
self.kill = "t" # tree; all children recursively
|
||||
self.audio = "y"
|
||||
self.pri = 0 # priority; higher = later
|
||||
self.ext = []
|
||||
|
||||
while True:
|
||||
@@ -58,6 +68,10 @@ class MParser(object):
|
||||
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
|
||||
continue
|
||||
|
||||
if arg.startswith("k"):
|
||||
self.kill = arg[1:] # [t]ree [m]ain [n]one
|
||||
continue
|
||||
|
||||
if arg == "f":
|
||||
self.force = True
|
||||
continue
|
||||
@@ -70,10 +84,16 @@ class MParser(object):
|
||||
self.ext.append(arg[1:])
|
||||
continue
|
||||
|
||||
if arg.startswith("p"):
|
||||
self.pri = int(arg[1:] or "1")
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
|
||||
def ffprobe(abspath):
|
||||
def ffprobe(
|
||||
abspath: str, timeout: int = 10
|
||||
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
|
||||
cmd = [
|
||||
b"ffprobe",
|
||||
b"-hide_banner",
|
||||
@@ -82,21 +102,20 @@ def ffprobe(abspath):
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[0].decode("utf-8", "replace")
|
||||
return parse_ffprobe(txt)
|
||||
rc, so, se = runcmd(cmd, timeout=timeout)
|
||||
retchk(rc, cmd, se)
|
||||
return parse_ffprobe(so)
|
||||
|
||||
|
||||
def parse_ffprobe(txt):
|
||||
def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
|
||||
"""ffprobe -show_format -show_streams"""
|
||||
streams = []
|
||||
fmt = {}
|
||||
g = None
|
||||
g = {}
|
||||
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
|
||||
try:
|
||||
k, v = ln.split("=", 1)
|
||||
g[k] = v
|
||||
sk, sv = ln.split("=", 1)
|
||||
g[sk] = sv
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
@@ -110,8 +129,8 @@ def parse_ffprobe(txt):
|
||||
fmt = g
|
||||
|
||||
streams = [fmt] + streams
|
||||
ret = {} # processed
|
||||
md = {} # raw tags
|
||||
ret: dict[str, Any] = {} # processed
|
||||
md: dict[str, list[Any]] = {} # raw tags
|
||||
|
||||
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
|
||||
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
|
||||
@@ -159,52 +178,55 @@ def parse_ffprobe(txt):
|
||||
]
|
||||
|
||||
if typ == "format":
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"], ["format_name", "fmt"]]
|
||||
|
||||
for sk, rk in kvm:
|
||||
v = strm.get(sk)
|
||||
if v is None:
|
||||
v1 = strm.get(sk)
|
||||
if v1 is None:
|
||||
continue
|
||||
|
||||
if rk.startswith("."):
|
||||
try:
|
||||
v = float(v)
|
||||
zf = float(v1)
|
||||
v2 = ret.get(rk)
|
||||
if v2 is None or v > v2:
|
||||
ret[rk] = v
|
||||
if v2 is None or zf > v2:
|
||||
ret[rk] = zf
|
||||
except:
|
||||
# sqlite doesnt care but the code below does
|
||||
if v not in ["N/A"]:
|
||||
ret[rk] = v
|
||||
if v1 not in ["N/A"]:
|
||||
ret[rk] = v1
|
||||
else:
|
||||
ret[rk] = v
|
||||
ret[rk] = v1
|
||||
|
||||
if ret.get("vc") == "ansi": # shellscript
|
||||
return {}, {}
|
||||
|
||||
for strm in streams:
|
||||
for k, v in strm.items():
|
||||
if not k.startswith("TAG:"):
|
||||
for sk, sv in strm.items():
|
||||
if not sk.startswith("TAG:"):
|
||||
continue
|
||||
|
||||
k = k[4:].strip()
|
||||
v = v.strip()
|
||||
if k and v and k not in md:
|
||||
md[k] = [v]
|
||||
sk = sk[4:].strip()
|
||||
sv = sv.strip()
|
||||
if sk and sv and sk not in md:
|
||||
md[sk] = [sv]
|
||||
|
||||
for k in [".q", ".vq", ".aq"]:
|
||||
if k in ret:
|
||||
ret[k] /= 1000 # bit_rate=320000
|
||||
for sk in [".q", ".vq", ".aq"]:
|
||||
if sk in ret:
|
||||
ret[sk] /= 1000 # bit_rate=320000
|
||||
|
||||
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if k in ret:
|
||||
ret[k] = int(ret[k])
|
||||
for sk in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if sk in ret:
|
||||
ret[sk] = int(ret[sk])
|
||||
|
||||
if ".fps" in ret:
|
||||
fps = ret[".fps"]
|
||||
if "/" in fps:
|
||||
fa, fb = fps.split("/")
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
try:
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
except:
|
||||
fps = 9001
|
||||
|
||||
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
|
||||
ret[".fps"] = round(fps, 3)
|
||||
@@ -217,16 +239,19 @@ def parse_ffprobe(txt):
|
||||
if ".q" in ret:
|
||||
del ret[".q"]
|
||||
|
||||
if "fmt" in ret:
|
||||
ret["fmt"] = ret["fmt"].split(",")[0]
|
||||
|
||||
if ".resw" in ret and ".resh" in ret:
|
||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
zd = {k: (0, v) for k, v in ret.items()}
|
||||
|
||||
return ret, md
|
||||
return zd, md
|
||||
|
||||
|
||||
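Aside (not part of the diff): two normalizations in parse_ffprobe above are easier to follow with numbers; the values below are assumed, not from the diff:

    # fractional frame rates are reduced to a float rounded to 3 decimals
    fa, fb = "30000/1001".split("/")
    fps = round(int(fa) * 1.0 / int(fb), 3)   # 29.97

    # bit_rate arrives in bits per second; ".q" is stored in kbps
    q = 320000 / 1000                         # 320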
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
def __init__(self, log_func: "RootLogger", args: argparse.Namespace) -> None:
|
||||
self.log_func = log_func
|
||||
self.args = args
|
||||
self.usable = True
|
||||
@@ -243,7 +268,7 @@ class MTag(object):
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
import mutagen
|
||||
import mutagen # noqa: F401 # pylint: disable=unused-import,import-outside-toplevel
|
||||
except:
|
||||
self.log("could not load Mutagen, trying FFprobe instead", c=3)
|
||||
self.backend = "ffprobe"
|
||||
@@ -340,31 +365,33 @@ class MTag(object):
|
||||
}
|
||||
# self.get = self.compare
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("mtag", msg, c)
|
||||
|
||||
def normalize_tags(self, ret, md):
|
||||
for k, v in dict(md).items():
|
||||
if not v:
|
||||
def normalize_tags(
|
||||
self, parser_output: dict[str, tuple[int, Any]], md: dict[str, list[Any]]
|
||||
) -> dict[str, Union[str, float]]:
|
||||
for sk, tv in dict(md).items():
|
||||
if not tv:
|
||||
continue
|
||||
|
||||
k = k.lower().split("::")[0].strip()
|
||||
mk = self.rmap.get(k)
|
||||
if not mk:
|
||||
sk = sk.lower().split("::")[0].strip()
|
||||
key_mapping = self.rmap.get(sk)
|
||||
if not key_mapping:
|
||||
continue
|
||||
|
||||
pref, mk = mk
|
||||
if mk not in ret or ret[mk][0] > pref:
|
||||
ret[mk] = [pref, v[0]]
|
||||
priority, alias = key_mapping
|
||||
if alias not in parser_output or parser_output[alias][0] > priority:
|
||||
parser_output[alias] = (priority, tv[0])
|
||||
|
||||
# take first value
|
||||
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
|
||||
# take first value (lowest priority / most preferred)
|
||||
ret = {sk: unicode(tv[1]).strip() for sk, tv in parser_output.items()}
|
||||
|
||||
# track 3/7 => track 3
|
||||
for k, v in ret.items():
|
||||
if k[0] == ".":
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
for sk, tv in ret.items():
|
||||
if sk[0] == ".":
|
||||
sv = str(tv).split("/")[0].strip().lstrip("0")
|
||||
ret[sk] = sv or 0
|
||||
|
||||
# normalize key notation to rkeobo
|
||||
okey = ret.get("key")
|
||||
@@ -374,7 +401,7 @@ class MTag(object):
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
def compare(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
if abspath.endswith(".au"):
|
||||
return {}
|
||||
|
||||
@@ -412,7 +439,9 @@ class MTag(object):
|
||||
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
ret: dict[str, tuple[int, Any]] = {}
|
||||
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
@@ -420,12 +449,16 @@ class MTag(object):
|
||||
|
||||
try:
|
||||
md = mutagen.File(fsenc(abspath), easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
if not md.info.length and not md.info.codec:
|
||||
raise Exception()
|
||||
except:
|
||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||
|
||||
sz = bos.path.getsize(abspath)
|
||||
ret = {".q": [0, int((sz / md.info.length) / 128)]}
|
||||
try:
|
||||
ret[".q"] = (0, int((sz / md.info.length) / 128))
|
||||
except:
|
||||
pass
|
||||
|
||||
for attr, k, norm in [
|
||||
["codec", "ac", unicode],
|
||||
@@ -456,54 +489,65 @@ class MTag(object):
|
||||
if k == "ac" and v.startswith("mp4a.40."):
|
||||
v = "aac"
|
||||
|
||||
ret[k] = [0, norm(v)]
|
||||
ret[k] = (0, norm(v))
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
def get_ffprobe(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
ret, md = ffprobe(abspath)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
def get_bin(
|
||||
self, parsers: dict[str, MParser], abspath: str, oth_tags: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(zsl))
|
||||
env = os.environ.copy()
|
||||
env["PYTHONPATH"] = pypath
|
||||
|
||||
ret = {}
|
||||
for tagname, mp in parsers.items():
|
||||
ret: dict[str, Any] = {}
|
||||
for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
|
||||
try:
|
||||
cmd = [mp.bin, abspath]
|
||||
if mp.bin.endswith(".py"):
|
||||
cmd = [parser.bin, abspath]
|
||||
if parser.bin.endswith(".py"):
|
||||
cmd = [sys.executable] + cmd
|
||||
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
args = {"env": env, "timeout": parser.timeout, "kill": parser.kill}
|
||||
|
||||
if parser.pri:
|
||||
zd = oth_tags.copy()
|
||||
zd.update(ret)
|
||||
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
bcmd = [fsenc(x) for x in cmd]
|
||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||
v = v.strip()
|
||||
if not v:
|
||||
continue
|
||||
|
||||
if "," not in tagname:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
ret[tagname] = v
|
||||
else:
|
||||
v = json.loads(v)
|
||||
zj = json.loads(v)
|
||||
for tag in tagname.split(","):
|
||||
if tag and tag in v:
|
||||
ret[tag] = v[tag]
|
||||
if tag and tag in zj:
|
||||
ret[tag] = zj[tag]
|
||||
except:
|
||||
pass
|
||||
if self.args.mtag_v:
|
||||
t = "mtag error: tagname {}, parser {}, file {} => {}"
|
||||
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
|
||||
|
||||
return ret
|
||||
|
||||
@@ -4,20 +4,29 @@ from __future__ import print_function, unicode_literals
|
||||
import tarfile
|
||||
import threading
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import Queue, fsenc
|
||||
from queue import Queue
|
||||
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import fsenc, min_ex
|
||||
|
||||
try:
|
||||
from typing import Any, Generator, Optional
|
||||
|
||||
from .util import NamedLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class QFile(object):
|
||||
class QFile(object): # inherit io.StringIO for painful typing
|
||||
"""file-like object which buffers writes into a queue"""
|
||||
|
||||
def __init__(self):
|
||||
self.q = Queue(64)
|
||||
self.bq = []
|
||||
def __init__(self) -> None:
|
||||
self.q: Queue[Optional[bytes]] = Queue(64)
|
||||
self.bq: list[bytes] = []
|
||||
self.nq = 0
|
||||
|
||||
def write(self, buf):
|
||||
def write(self, buf: Optional[bytes]) -> None:
|
||||
if buf is None or self.nq >= 240 * 1024:
|
||||
self.q.put(b"".join(self.bq))
|
||||
self.bq = []
|
||||
@@ -30,40 +39,47 @@ class QFile(object):
|
||||
self.nq += len(buf)
|
||||
|
||||
|
||||
class StreamTar(object):
|
||||
class StreamTar(StreamArc):
|
||||
"""construct in-memory tar file from the given path"""
|
||||
|
||||
def __init__(self, log, fgen, **kwargs):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
**kwargs: Any
|
||||
):
|
||||
super(StreamTar, self).__init__(log, fgen)
|
||||
|
||||
self.ci = 0
|
||||
self.co = 0
|
||||
self.qfile = QFile()
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
self.errf = None
|
||||
self.errf: dict[str, Any] = {}
|
||||
|
||||
# python 3.8 changed to PAX_FORMAT as default,
|
||||
# waste of space and don't care about the new features
|
||||
fmt = tarfile.GNU_FORMAT
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
|
||||
|
||||
w = threading.Thread(target=self._gen, name="star-gen")
|
||||
w.daemon = True
|
||||
w.start()
|
||||
|
||||
def gen(self):
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
if not buf:
|
||||
break
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
try:
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
if not buf:
|
||||
break
|
||||
|
||||
self.co += len(buf)
|
||||
yield buf
|
||||
self.co += len(buf)
|
||||
yield buf
|
||||
|
||||
yield None
|
||||
if self.errf:
|
||||
bos.unlink(self.errf["ap"])
|
||||
yield None
|
||||
finally:
|
||||
if self.errf:
|
||||
bos.unlink(self.errf["ap"])
|
||||
|
||||
def ser(self, f):
|
||||
def ser(self, f: dict[str, Any]) -> None:
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
fsi = f["st"]
|
||||
@@ -76,20 +92,21 @@ class StreamTar(object):
|
||||
inf.gid = 0
|
||||
|
||||
self.ci += inf.size
|
||||
with open(fsenc(src), "rb", 512 * 1024) as f:
|
||||
self.tar.addfile(inf, f)
|
||||
with open(fsenc(src), "rb", 512 * 1024) as fo:
|
||||
self.tar.addfile(inf, fo)
|
||||
|
||||
def _gen(self):
|
||||
def _gen(self) -> None:
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
try:
|
||||
self.ser(f)
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if errors:
|
||||
self.errf, txt = errdesc(errors)
|
||||
|
||||
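Aside (not part of the diff): StreamTar above is a producer/consumer pair; _gen() runs on the "star-gen" thread and writes the tar into QFile, whose queue gen() drains chunk by chunk with None as the end-of-stream marker. A consumption sketch, with the fgen input and the writer (here a socket) assumed for illustration:

    tar = StreamTar(log, fgen)        # log: NamedLogger, fgen: yields {"vp", "ap", "st"} dicts
    for buf in tar.gen():
        if buf is None:
            break                     # end of archive; error-report cleanup has already run
        sck.sendall(buf)              # sck: wherever the archive is being streamed to (assumed)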
@@ -12,23 +12,28 @@ Original source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/m
|
||||
|
||||
# This code is released under the Python license and the BSD 2-clause license
|
||||
|
||||
import platform
|
||||
import codecs
|
||||
import platform
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] > 2
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
FS_ERRORS = "surrogateescape"
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
def u(text):
|
||||
|
||||
def u(text: Any) -> str:
|
||||
if PY3:
|
||||
return text
|
||||
else:
|
||||
return text.decode("unicode_escape")
|
||||
|
||||
|
||||
def b(data):
|
||||
def b(data: Any) -> bytes:
|
||||
if PY3:
|
||||
return data.encode("latin1")
|
||||
else:
|
||||
@@ -43,7 +48,7 @@ else:
|
||||
bytes_chr = chr
|
||||
|
||||
|
||||
def surrogateescape_handler(exc):
|
||||
def surrogateescape_handler(exc: Any) -> tuple[str, int]:
|
||||
"""
|
||||
Pure Python implementation of the PEP 383: the "surrogateescape" error
|
||||
handler of Python 3. Undecodable bytes will be replaced by a Unicode
|
||||
@@ -74,7 +79,7 @@ class NotASurrogateError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def replace_surrogate_encode(mystring):
|
||||
def replace_surrogate_encode(mystring: str) -> str:
|
||||
"""
|
||||
Returns a (unicode) string, not the more logical bytes, because the codecs
|
||||
register_error functionality expects this.
|
||||
@@ -100,7 +105,7 @@ def replace_surrogate_encode(mystring):
|
||||
return str().join(decoded)
|
||||
|
||||
|
||||
def replace_surrogate_decode(mybytes):
|
||||
def replace_surrogate_decode(mybytes: bytes) -> str:
|
||||
"""
|
||||
Returns a (unicode) string
|
||||
"""
|
||||
@@ -121,7 +126,7 @@ def replace_surrogate_decode(mybytes):
|
||||
return str().join(decoded)
|
||||
|
||||
|
||||
def encodefilename(fn):
|
||||
def encodefilename(fn: str) -> bytes:
|
||||
if FS_ENCODING == "ascii":
|
||||
# ASCII encoder of Python 2 expects that the error handler returns a
|
||||
# Unicode string encodable to ASCII, whereas our surrogateescape error
|
||||
@@ -161,7 +166,7 @@ def encodefilename(fn):
|
||||
return fn.encode(FS_ENCODING, FS_ERRORS)
|
||||
|
||||
|
||||
def decodefilename(fn):
|
||||
def decodefilename(fn: bytes) -> str:
|
||||
return fn.decode(FS_ENCODING, FS_ERRORS)
|
||||
|
||||
|
||||
@@ -181,7 +186,7 @@ if WINDOWS and not PY3:
|
||||
FS_ENCODING = codecs.lookup(FS_ENCODING).name
|
||||
|
||||
|
||||
def register_surrogateescape():
|
||||
def register_surrogateescape() -> None:
|
||||
"""
|
||||
Registers the surrogateescape error handler on Python 2 (only)
|
||||
"""
|
||||
|
||||
@@ -1,14 +1,34 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import time
import tempfile
from datetime import datetime

from .bos import bos

try:
    from typing import Any, Generator, Optional

def errdesc(errors):
    from .util import NamedLogger
except:
    pass


class StreamArc(object):
    def __init__(
        self,
        log: "NamedLogger",
        fgen: Generator[dict[str, Any], None, None],
        **kwargs: Any
    ):
        self.log = log
        self.fgen = fgen

    def gen(self) -> Generator[Optional[bytes], None, None]:
        pass

|
||||
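# illustrative sketch of the StreamArc contract above (hypothetical subclass,
# not from this changeset); fgen yields dicts carrying "vp" (path inside the
# archive), "ap" (absolute path) and "st" (stat result), or "err" on failure,
# and gen() turns them into the archive byte stream:
#
#     class StreamFoo(StreamArc):
#         def gen(self) -> Generator[Optional[bytes], None, None]:
#             for f in self.fgen:
#                 if "err" in f:
#                     continue              # reported via errdesc() below
#                 yield b""                 # ...emit archive bytes for f["ap"]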
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
    report = ["copyparty failed to add the following files to the archive:", ""]

    for fn, err in errors:

@@ -1,51 +1,98 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import calendar
|
||||
import gzip
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import re
|
||||
import shlex
|
||||
import string
|
||||
import signal
|
||||
import socket
|
||||
import string
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
|
||||
from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
|
||||
try:
|
||||
from types import FrameType
|
||||
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, E, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
VERSIONS,
|
||||
alltrace,
|
||||
ansi_re,
|
||||
min_ex,
|
||||
mp,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
)
|
||||
|
||||
|
||||
class SvcHub(object):
    """
    Hosts all services which cannot be parallelized due to reliance on monolithic resources.
    Creates a Broker which does most of the heavy stuff; hosted services can use this to perform work:
    hub.broker.put(want_reply, destination, args_list).
    hub.broker.<say|ask>(destination, args_list).

    Either BrokerThr (plain threads) or BrokerMP (multiprocessing) is used depending on configuration.
    Nothing is returned synchronously; if you want any value returned from the call,
    put() can return a queue (if want_reply=True) which has a blocking get() with the response.
    """
|
||||
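    # illustrative use of the broker calls described in the docstring above,
    # based on call sites elsewhere in this changeset ("up2k.handle_json" is a
    # made-up destination):
    #
    #     q = hub.broker.ask("up2k.handle_json", body)   # want_reply -> queue-like
    #     resp = q.get()                                  # blocking get()
    #     hub.broker.say("cb_httpsrv_up")                 # fire-and-forget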
def __init__(self, args, argv, printed):
|
||||
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
|
||||
self.args = args
|
||||
self.argv = argv
|
||||
self.logf = None
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.stop_req = False
|
||||
self.reload_req = False
|
||||
self.stopping = False
|
||||
self.stopped = False
|
||||
self.reload_req = False
|
||||
self.reloading = False
|
||||
self.stop_cond = threading.Condition()
|
||||
self.nsigs = 3
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.log_mutex = threading.Lock()
|
||||
self.next_day = 0
|
||||
self.tstack = 0.0
|
||||
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
|
||||
args.ls = args.ls or "**,*,ln,p,r"
|
||||
|
||||
if args.ss or args.s >= 2:
|
||||
args.s = True
|
||||
args.no_logues = True
|
||||
args.no_readme = True
|
||||
args.unpost = 0
|
||||
args.no_del = True
|
||||
args.no_mv = True
|
||||
args.hardlink = True
|
||||
args.vague_403 = True
|
||||
args.nih = True
|
||||
|
||||
if args.s:
|
||||
args.dotpart = True
|
||||
args.no_thumb = True
|
||||
args.no_mtag_ff = True
|
||||
args.no_robots = True
|
||||
args.force_js = True
|
||||
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
@@ -57,16 +104,28 @@ class SvcHub(object):
|
||||
if args.log_thrs:
|
||||
start_log_thrs(self.log, args.log_thrs, 0)
|
||||
|
||||
if not ANYWIN and not args.use_fpool:
|
||||
if not args.use_fpool and args.j != 1:
|
||||
args.no_fpool = True
|
||||
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
|
||||
self.log("root", t.format(args.j))
|
||||
|
||||
if not args.no_fpool and args.j != 1:
|
||||
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", m, c=3)
|
||||
self.log("root", t, c=3)
|
||||
|
||||
bri = "zy"[args.theme % 2 :][:1]
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if not args.hardlink and args.never_symlink:
|
||||
args.no_dedup = True
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
# initiate all services to manage
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
@@ -76,20 +135,30 @@ class SvcHub(object):
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
if not HAVE_VIPS:
|
||||
decs.pop("vips", None)
|
||||
if not HAVE_PIL:
|
||||
decs.pop("pil", None)
|
||||
if not HAVE_FFMPEG or not HAVE_FFPROBE:
|
||||
decs.pop("ff", None)
|
||||
|
||||
self.args.th_dec = list(decs.keys())
|
||||
self.thumbsrv = None
|
||||
if not args.no_thumb:
|
||||
if HAVE_PIL:
|
||||
if not HAVE_WEBP:
|
||||
args.th_no_webp = True
|
||||
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
t = ", ".join(self.args.th_dec) or "(None available)"
|
||||
self.log("thumb", "decoder preference: {}".format(t))
|
||||
|
||||
if "pil" in self.args.th_dec and not HAVE_WEBP:
|
||||
msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
if self.args.th_dec:
|
||||
self.thumbsrv = ThumbSrv(self)
|
||||
else:
|
||||
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
|
||||
self.log(
|
||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||
)
|
||||
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
|
||||
msg = msg.format(" " * 37, os.path.basename(sys.executable))
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
if not args.no_acode and args.no_thumb:
|
||||
msg = "setting --no-acode because --no-thumb (sorry)"
|
||||
@@ -103,35 +172,46 @@ class SvcHub(object):
|
||||
|
||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||
|
||||
if args.ftp or args.ftps:
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
else:
|
||||
from .broker_thr import BrokerThr as Broker
|
||||
from .broker_thr import BrokerThr as Broker # type: ignore
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def thr_httpsrv_up(self):
|
||||
time.sleep(5)
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
failed = expected - self.httpsrv_up
|
||||
if not failed:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind_all:
|
||||
if not self.tcpsrv.srv:
|
||||
for _ in range(self.broker.num_workers):
|
||||
self.broker.say("cb_httpsrv_up")
|
||||
return
|
||||
|
||||
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||
return
|
||||
|
||||
m = "{}/{} workers failed to start"
|
||||
m = m.format(failed, expected)
|
||||
self.log("root", m, 1)
|
||||
t = "{}/{} workers failed to start"
|
||||
t = t.format(failed, expected)
|
||||
self.log("root", t, 1)
|
||||
|
||||
self.retcode = 1
|
||||
self.sigterm()
|
||||
|
||||
def sigterm(self) -> None:
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def cb_httpsrv_up(self):
|
||||
def cb_httpsrv_up(self) -> None:
|
||||
self.httpsrv_up += 1
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
@@ -144,9 +224,9 @@ class SvcHub(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def _logname(self):
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.utcnow()
|
||||
fn = self.args.lo
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
if fs in fn:
|
||||
@@ -154,7 +234,7 @@ class SvcHub(object):
|
||||
|
||||
return fn
|
||||
|
||||
def _setup_logfile(self, printed):
|
||||
def _setup_logfile(self, printed: str) -> None:
|
||||
base_fn = fn = sel_fn = self._logname()
|
||||
if fn != self.args.lo:
|
||||
ctr = 0
|
||||
@@ -176,8 +256,6 @@ class SvcHub(object):
|
||||
|
||||
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
|
||||
|
||||
lh.base_fn = base_fn
|
||||
|
||||
argv = [sys.executable] + self.argv
|
||||
if hasattr(shlex, "quote"):
|
||||
argv = [shlex.quote(x) for x in argv]
|
||||
@@ -188,12 +266,13 @@ class SvcHub(object):
|
||||
printed += msg
|
||||
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
|
||||
self.logf = lh
|
||||
self.logf_base_fn = base_fn
|
||||
print(msg, end="")
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self.tcpsrv.run()
|
||||
|
||||
thr = threading.Thread(target=self.thr_httpsrv_up)
|
||||
thr = threading.Thread(target=self.thr_httpsrv_up, name="sig-hsrv-up2")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
@@ -221,21 +300,23 @@ class SvcHub(object):
|
||||
pass
|
||||
|
||||
self.shutdown()
|
||||
thr.join()
|
||||
# cant join; eats signals on win10
|
||||
while not self.stopped:
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
self.stop_thr()
|
||||
|
||||
def reload(self):
|
||||
def reload(self) -> str:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
|
||||
self.reloading = True
|
||||
t = threading.Thread(target=self._reload)
|
||||
t = threading.Thread(target=self._reload, name="reloading")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self):
|
||||
def _reload(self) -> None:
|
||||
self.log("root", "reload scheduled")
|
||||
with self.up2k.mutex:
|
||||
self.asrv.reload()
|
||||
@@ -244,7 +325,7 @@ class SvcHub(object):
|
||||
|
||||
self.reloading = False
|
||||
|
||||
def stop_thr(self):
|
||||
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
with self.stop_cond:
|
||||
self.stop_cond.wait(9001)
|
||||
@@ -255,11 +336,24 @@ class SvcHub(object):
|
||||
|
||||
self.shutdown()
|
||||
|
||||
def signal_handler(self, sig, frame):
|
||||
def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
|
||||
if self.stopping:
|
||||
return
|
||||
if self.nsigs <= 0:
|
||||
try:
|
||||
threading.Thread(target=self.pr, args=("OMBO BREAKER",)).start()
|
||||
time.sleep(0.1)
|
||||
except:
|
||||
pass
|
||||
|
||||
if sig == signal.SIGUSR1:
|
||||
if ANYWIN:
|
||||
os.system("taskkill /f /pid {}".format(os.getpid()))
|
||||
else:
|
||||
os.kill(os.getpid(), signal.SIGKILL)
|
||||
else:
|
||||
self.nsigs -= 1
|
||||
return
|
||||
|
||||
if not ANYWIN and sig == signal.SIGUSR1:
|
||||
self.reload_req = True
|
||||
else:
|
||||
self.stop_req = True
|
||||
@@ -267,7 +361,7 @@ class SvcHub(object):
|
||||
with self.stop_cond:
|
||||
self.stop_cond.notify_all()
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
@@ -280,9 +374,7 @@ class SvcHub(object):
|
||||
|
||||
ret = 1
|
||||
try:
|
||||
with self.log_mutex:
|
||||
print("OPYTHAT")
|
||||
|
||||
self.pr("OPYTHAT")
|
||||
self.tcpsrv.shutdown()
|
||||
self.broker.shutdown()
|
||||
self.up2k.shutdown()
|
||||
@@ -295,18 +387,26 @@ class SvcHub(object):
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
print("waiting for thumbsrv (10sec)...")
|
||||
self.pr("waiting for thumbsrv (10sec)...")
|
||||
|
||||
print("nailed it", end="")
|
||||
self.pr("nailed it", end="")
|
||||
ret = self.retcode
|
||||
except:
|
||||
self.pr("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
|
||||
raise
|
||||
finally:
|
||||
print("\033[0m")
|
||||
if self.args.wintitle:
|
||||
print("\033]0;\033\\", file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
self.pr("\033[0m")
|
||||
if self.logf:
|
||||
self.logf.close()
|
||||
|
||||
self.stopped = True
|
||||
sys.exit(ret)
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
if not self.logf:
|
||||
return
|
||||
|
||||
@@ -318,8 +418,8 @@ class SvcHub(object):
|
||||
if now >= self.next_day:
|
||||
self._set_next_day()
|
||||
|
||||
def _set_next_day(self):
|
||||
if self.next_day and self.logf and self.logf.base_fn != self._logname():
|
||||
def _set_next_day(self) -> None:
|
||||
if self.next_day and self.logf and self.logf_base_fn != self._logname():
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
@@ -333,7 +433,7 @@ class SvcHub(object):
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
@@ -351,7 +451,7 @@ class SvcHub(object):
|
||||
src = ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}".format(c, msg)
|
||||
msg = "\033[3{}m{}\033[0m".format(c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
else:
|
||||
@@ -370,7 +470,11 @@ class SvcHub(object):
|
||||
if self.logf:
|
||||
self.logf.write(msg)
|
||||
|
||||
def check_mp_support(self):
|
||||
def pr(self, *a: Any, **ka: Any) -> None:
|
||||
with self.log_mutex:
|
||||
print(*a, **ka)
|
||||
|
||||
def check_mp_support(self) -> str:
|
||||
vmin = sys.version_info[1]
|
||||
if WINDOWS:
|
||||
msg = "need python 3.3 or newer for multiprocessing;"
|
||||
@@ -384,18 +488,17 @@ class SvcHub(object):
|
||||
return msg
|
||||
|
||||
try:
|
||||
x = mp.Queue(1)
|
||||
x.put(["foo", "bar"])
|
||||
x: mp.Queue[tuple[str, str]] = mp.Queue(1)
|
||||
x.put(("foo", "bar"))
|
||||
if x.get()[0] != "foo":
|
||||
raise Exception()
|
||||
except:
|
||||
return "multiprocessing is not supported on your platform;"
|
||||
|
||||
return None
|
||||
return ""
|
||||
|
||||
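    # note (not from this changeset): multiprocessing support is probed
    # empirically above by round-tripping a tuple through an mp.Queue;
    # platforms where that raises fall back to the thread-based broker
    # chosen in check_mp_enable() below.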
def check_mp_enable(self):
|
||||
def check_mp_enable(self) -> bool:
|
||||
if self.args.j == 1:
|
||||
self.log("svchub", "multiprocessing disabled by argument -j 1")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
@@ -417,21 +520,34 @@ class SvcHub(object):
|
||||
self.log("svchub", "cannot efficiently use multiple CPU cores")
|
||||
return False
|
||||
|
||||
def sd_notify(self):
|
||||
def sd_notify(self) -> None:
|
||||
try:
|
||||
addr = os.getenv("NOTIFY_SOCKET")
|
||||
if not addr:
|
||||
zb = os.getenv("NOTIFY_SOCKET")
|
||||
if not zb:
|
||||
return
|
||||
|
||||
addr = unicode(addr)
|
||||
addr = unicode(zb)
|
||||
if addr.startswith("@"):
|
||||
addr = "\0" + addr[1:]
|
||||
|
||||
m = "".join(x for x in addr if x in string.printable)
|
||||
self.log("sd_notify", m)
|
||||
t = "".join(x for x in addr if x in string.printable)
|
||||
self.log("sd_notify", t)
|
||||
|
||||
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
|
||||
sck.connect(addr)
|
||||
sck.sendall(b"READY=1")
|
||||
except:
|
||||
self.log("sd_notify", min_ex())
|
||||
|
||||
def log_stacks(self) -> None:
|
||||
td = time.time() - self.tstack
|
||||
if td < 300:
|
||||
self.log("stacks", "cooldown {}".format(td))
|
||||
return
|
||||
|
||||
self.tstack = time.time()
|
||||
zs = "{}\n{}".format(VERSIONS, alltrace())
|
||||
zb = zs.encode("utf-8", "replace")
|
||||
zb = gzip.compress(zb)
|
||||
zs = base64.b64encode(zb).decode("ascii")
|
||||
self.log("stacks", zs)
|
||||
|
||||
@@ -1,17 +1,23 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import calendar
|
||||
import time
|
||||
import zlib
|
||||
from datetime import datetime
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import yieldfile, sanitize_fn, spack, sunpack
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
|
||||
|
||||
try:
|
||||
from typing import Any, Generator, Optional
|
||||
|
||||
from .util import NamedLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def dostime2unix(buf):
def dostime2unix(buf: bytes) -> int:
    t, d = sunpack(b"<HH", buf)

    ts = (t & 0x1F) * 2
@@ -26,27 +32,38 @@ def dostime2unix(buf):
    tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
    iso = tf.format(*tt)

    dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
    return int(dt.timestamp())
    dt = time.strptime(iso, "%Y-%m-%d %H:%M:%S")
    return int(calendar.timegm(dt))


def unixtime2dos(ts):
    tt = time.gmtime(ts)
def unixtime2dos(ts: int) -> bytes:
    tt = time.gmtime(ts + 1)
    dy, dm, dd, th, tm, ts = list(tt)[:6]

    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    bt = (th << 11) + (tm << 5) + ts // 2
    return spack(b"<HH", bt, bd)
    try:
        return spack(b"<HH", bt, bd)
    except:
        return b"\x00\x00\x21\x00"

|
||||
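# a worked sketch (not from this changeset) of the 2-second-resolution DOS
# timestamp layout that unixtime2dos() packs above:
#   time word: 5 bits hours | 6 bits minutes | 5 bits seconds/2
#   date word: 7 bits years since 1980 | 4 bits month | 5 bits day
import calendar
import struct
import time

tt = time.gmtime(calendar.timegm((2021, 6, 15, 12, 34, 56, 0, 0, 0)))
dy, dm, dd, th, tm, ts = list(tt)[:6]
bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
buf = struct.pack("<HH", bt, bd)  # plain struct.pack standing in for spack()
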
def gen_fdesc(sz, crc32, z64):
def gen_fdesc(sz: int, crc32: int, z64: bool) -> bytes:
    ret = b"\x50\x4b\x07\x08"
    fmt = b"<LQQ" if z64 else b"<LLL"
    ret += spack(fmt, crc32, sz, sz)
    return ret


def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
def gen_hdr(
    h_pos: Optional[int],
    fn: str,
    sz: int,
    lastmod: int,
    utf8: bool,
    icrc32: int,
    pre_crc: bool,
) -> bytes:
    """
    does regular file headers
    and the central directory meme if h_pos is set
|
||||
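    (as StreamZip.gen() further down illustrates, this single helper emits both
    record types: it is called with h_pos=None while each file is streamed out,
    producing the local file header, and called again with the recorded h_pos
    when the central directory is written at the end of the archive)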
@@ -65,8 +82,8 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# confusingly this doesn't bump if h_pos
|
||||
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
|
||||
|
||||
if crc32:
|
||||
crc32 = spack(b"<L", crc32)
|
||||
if icrc32:
|
||||
crc32 = spack(b"<L", icrc32)
|
||||
else:
|
||||
crc32 = b"\x00" * 4
|
||||
|
||||
@@ -74,7 +91,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
# 4b magic, 2b min-ver
|
||||
ret = b"\x50\x4b\x03\x04" + req_ver
|
||||
else:
|
||||
# 4b magic, 2b spec-ver, 2b min-ver
|
||||
# 4b magic, 2b spec-ver (1b compat, 1b os (00 dos, 03 unix)), 2b min-ver
|
||||
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
|
||||
|
||||
ret += b"\x00" if pre_crc else b"\x08" # streaming
|
||||
@@ -93,30 +110,43 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
fn = sanitize_fn(fn, "/", [])
|
||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||
|
||||
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
|
||||
z64_len = len(z64v) * 8 + 4 if z64v else 0
|
||||
ret += spack(b"<HH", len(bfn), z64_len)
|
||||
ret += spack(b"<HH", len(bfn), 0x10 + z64_len)
|
||||
|
||||
if h_pos is not None:
|
||||
# 2b comment, 2b diskno
|
||||
ret += b"\x00" * 4
|
||||
|
||||
# 2b internal.attr, 4b external.attr
|
||||
# infozip-macos: 0100 0000 a481 file:644
|
||||
# infozip-macos: 0100 0100 0080 file:000
|
||||
ret += b"\x01\x00\x00\x00\xa4\x81"
|
||||
# infozip-macos: 0100 0000 a481 (spec-ver 1e03) file:644
|
||||
# infozip-macos: 0100 0100 0080 (spec-ver 1e03) file:000
|
||||
# win10-zip: 0000 2000 0000 (spec-ver xx00) FILE_ATTRIBUTE_ARCHIVE
|
||||
ret += b"\x00\x00\x00\x00\xa4\x81" # unx
|
||||
# ret += b"\x00\x00\x20\x00\x00\x00" # fat
|
||||
|
||||
# 4b local-header-ofs
|
||||
ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))
|
||||
|
||||
ret += bfn
|
||||
|
||||
# ntfs: type 0a, size 20, rsvd, attr1, len 18, mtime, atime, ctime
|
||||
# b"\xa3\x2f\x82\x41\x55\x68\xd8\x01" 1652616838.798941100 ~5.861518 132970904387989411 ~58615181
|
||||
# nt = int((lastmod + 11644473600) * 10000000)
|
||||
# ret += spack(b"<HHLHHQQQ", 0xA, 0x20, 0, 1, 0x18, nt, nt, nt)
|
||||
|
||||
# unix: type 0d, size 0c, atime, mtime, uid, gid
|
||||
ret += spack(b"<HHLLHH", 0xD, 0xC, int(lastmod), int(lastmod), 1000, 1000)
|
||||
|
||||
if z64v:
|
||||
ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr(items, cdir_pos, cdir_end):
|
||||
def gen_ecdr(
|
||||
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
|
||||
) -> tuple[bytes, bool]:
|
||||
"""
|
||||
summary of all file headers,
|
||||
usually the zipfile footer unless something clamps
|
||||
@@ -141,10 +171,12 @@ def gen_ecdr(items, cdir_pos, cdir_end):
|
||||
# 2b comment length
|
||||
ret += b"\x00\x00"
|
||||
|
||||
return [ret, need_64]
|
||||
return ret, need_64
|
||||
|
||||
|
||||
def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||
def gen_ecdr64(
|
||||
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
|
||||
) -> bytes:
|
||||
"""
|
||||
z64 end of central directory
|
||||
added when numfiles or a headerptr clamps
|
||||
@@ -168,7 +200,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr64_loc(ecdr64_pos):
|
||||
def gen_ecdr64_loc(ecdr64_pos: int) -> bytes:
|
||||
"""
|
||||
z64 end of central directory locator
|
||||
points to ecdr64
|
||||
@@ -183,31 +215,36 @@ def gen_ecdr64_loc(ecdr64_pos):
|
||||
return ret
|
||||
|
||||
|
||||
class StreamZip(object):
|
||||
def __init__(self, log, fgen, utf8=False, pre_crc=False):
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
class StreamZip(StreamArc):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
|
||||
self.utf8 = utf8
|
||||
self.pre_crc = pre_crc
|
||||
|
||||
self.pos = 0
|
||||
self.items = []
|
||||
self.items: list[tuple[str, int, int, int, int]] = []
|
||||
|
||||
def _ct(self, buf):
|
||||
def _ct(self, buf: bytes) -> bytes:
|
||||
self.pos += len(buf)
|
||||
return buf
|
||||
|
||||
def ser(self, f):
|
||||
def ser(self, f: dict[str, Any]) -> Generator[bytes, None, None]:
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
st = f["st"]
|
||||
|
||||
sz = st.st_size
|
||||
ts = st.st_mtime + 1
|
||||
ts = st.st_mtime
|
||||
|
||||
crc = None
|
||||
crc = 0
|
||||
if self.pre_crc:
|
||||
crc = 0
|
||||
for buf in yieldfile(src):
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
@@ -217,7 +254,6 @@ class StreamZip(object):
|
||||
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
|
||||
crc = crc or 0
|
||||
for buf in yieldfile(src):
|
||||
if not self.pre_crc:
|
||||
crc = zlib.crc32(buf, crc)
|
||||
@@ -226,7 +262,7 @@ class StreamZip(object):
|
||||
|
||||
crc &= 0xFFFFFFFF
|
||||
|
||||
self.items.append([name, sz, ts, crc, h_pos])
|
||||
self.items.append((name, sz, ts, crc, h_pos))
|
||||
|
||||
z64 = sz >= 4 * 1024 * 1024 * 1024
|
||||
|
||||
@@ -234,42 +270,46 @@ class StreamZip(object):
|
||||
buf = gen_fdesc(sz, crc, z64)
|
||||
yield self._ct(buf)
|
||||
|
||||
def gen(self):
|
||||
def gen(self) -> Generator[bytes, None, None]:
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
continue
|
||||
try:
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
yield x
|
||||
except GeneratorExit:
|
||||
raise
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
cdir_end = self.pos
|
||||
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
cdir_end = self.pos
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
|
||||
if errors:
|
||||
bos.unlink(errf["ap"])
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
finally:
|
||||
if errors:
|
||||
bos.unlink(errf["ap"])
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from .__init__ import MACOS, ANYWIN
|
||||
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, unicode
|
||||
from .util import chkcmd
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
|
||||
class TcpSrv(object):
|
||||
"""
|
||||
@@ -14,16 +19,16 @@ class TcpSrv(object):
|
||||
which then uses the least busy HttpSrv to handle it
|
||||
"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub"):
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.log = hub.log
|
||||
|
||||
self.stopping = False
|
||||
|
||||
self.srv = []
|
||||
self.srv: list[socket.socket] = []
|
||||
self.nsrv = 0
|
||||
ok = {}
|
||||
ok: dict[str, list[int]] = {}
|
||||
for ip in self.args.i:
|
||||
ok[ip] = []
|
||||
for port in self.args.p:
|
||||
@@ -33,8 +38,8 @@ class TcpSrv(object):
|
||||
ok[ip].append(port)
|
||||
except Exception as ex:
|
||||
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||
m = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", m.format(ip, port, ex), c=3)
|
||||
t = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", t.format(ip, port, ex), c=3)
|
||||
else:
|
||||
raise
|
||||
|
||||
@@ -54,20 +59,56 @@ class TcpSrv(object):
|
||||
eps[x] = "external"
|
||||
|
||||
msgs = []
|
||||
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
|
||||
title_tab: dict[str, dict[str, int]] = {}
|
||||
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
|
||||
t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
for port in sorted(self.args.p):
|
||||
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
|
||||
continue
|
||||
|
||||
msgs.append(m.format(ip, port, desc))
|
||||
proto = " http"
|
||||
if self.args.http_only:
|
||||
pass
|
||||
elif self.args.https_only or port == 443:
|
||||
proto = "https"
|
||||
|
||||
msgs.append(t.format(proto, ip, port, desc))
|
||||
|
||||
if not self.args.wintitle:
|
||||
continue
|
||||
|
||||
if port in [80, 443]:
|
||||
ep = ip
|
||||
else:
|
||||
ep = "{}:{}".format(ip, port)
|
||||
|
||||
hits = []
|
||||
if "pub" in title_vars and "external" in unicode(desc):
|
||||
hits.append(("pub", ep))
|
||||
|
||||
if "pub" in title_vars or "all" in title_vars:
|
||||
hits.append(("all", ep))
|
||||
|
||||
for var in title_vars:
|
||||
if var.startswith("ip-") and ep.startswith(var[3:]):
|
||||
hits.append((var, ep))
|
||||
|
||||
for tk, tv in hits:
|
||||
try:
|
||||
title_tab[tk][tv] = 1
|
||||
except:
|
||||
title_tab[tk] = {tv: 1}
|
||||
|
||||
if msgs:
|
||||
msgs[-1] += "\n"
|
||||
for m in msgs:
|
||||
self.log("tcpsrv", m)
|
||||
for t in msgs:
|
||||
self.log("tcpsrv", t)
|
||||
|
||||
def _listen(self, ip, port):
|
||||
if self.args.wintitle:
|
||||
self._set_wintitle(title_tab)
|
||||
|
||||
def _listen(self, ip: str, port: int) -> None:
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
@@ -83,19 +124,19 @@ class TcpSrv(object):
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
fno = srv.fileno()
|
||||
msg = "listening @ {}:{} f{}".format(ip, port, fno)
|
||||
msg = "listening @ {}:{} f{} p{}".format(ip, port, fno, os.getpid())
|
||||
self.log("tcpsrv", msg)
|
||||
if self.args.q:
|
||||
print(msg)
|
||||
|
||||
self.hub.broker.put(False, "listen", srv)
|
||||
self.hub.broker.say("listen", srv)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
try:
|
||||
for srv in self.srv:
|
||||
@@ -105,25 +146,59 @@ class TcpSrv(object):
|
||||
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def ips_linux(self):
|
||||
eps = {}
|
||||
def ips_linux_ifconfig(self) -> dict[str, str]:
|
||||
# for termux
|
||||
try:
|
||||
txt, _ = chkcmd(["ifconfig"])
|
||||
except:
|
||||
return {}
|
||||
|
||||
eps: dict[str, str] = {}
|
||||
dev = None
|
||||
ip = None
|
||||
up = None
|
||||
for ln in (txt + "\n").split("\n"):
|
||||
if not ln.strip() and dev and ip:
|
||||
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
|
||||
dev = ip = up = None
|
||||
continue
|
||||
|
||||
if ln == ln.lstrip():
|
||||
dev = re.split(r"[: ]", ln)[0]
|
||||
|
||||
if "UP" in re.split(r"[<>, \t]", ln):
|
||||
up = True
|
||||
|
||||
m = re.match(r"^\s+inet\s+([^ ]+)", ln)
|
||||
if m:
|
||||
ip = m.group(1)
|
||||
|
||||
return eps
|
||||
|
||||
    def ips_linux(self) -> dict[str, str]:
        try:
            txt, _ = chkcmd(["ip", "addr"])
        except:
            return eps
            return self.ips_linux_ifconfig()

        r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
        ri = re.compile(r"^\s*[0-9]+\s*:.*")
        up = False
        eps: dict[str, str] = {}
        for ln in txt.split("\n"):
            if ri.match(ln):
                up = "UP" in re.split("[>,< ]", ln)

            try:
                ip, dev = r.match(ln.rstrip()).groups()
                eps[ip] = dev
                ip, dev = r.match(ln.rstrip()).groups() # type: ignore
                eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
            except:
                pass

        return eps
|
||||
|
||||
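    # sample "ip addr" output the two regexes above are written against
    # (illustrative, not from this changeset); the numbered interface line
    # toggles `up`, the indented inet line yields (ip, dev):
    #
    #     2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ...
    #         inet 192.168.1.5/24 brd 192.168.1.255 scope global eth0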
def ips_macos(self):
|
||||
eps = {}
|
||||
def ips_macos(self) -> dict[str, str]:
|
||||
eps: dict[str, str] = {}
|
||||
try:
|
||||
txt, _ = chkcmd(["ifconfig"])
|
||||
except:
|
||||
@@ -131,7 +206,7 @@ class TcpSrv(object):
|
||||
|
||||
rdev = re.compile(r"^([^ ]+):")
|
||||
rip = re.compile(r"^\tinet ([0-9\.]+) ")
|
||||
dev = None
|
||||
dev = "UNKNOWN"
|
||||
for ln in txt.split("\n"):
|
||||
m = rdev.match(ln)
|
||||
if m:
|
||||
@@ -140,34 +215,46 @@ class TcpSrv(object):
|
||||
m = rip.match(ln)
|
||||
if m:
|
||||
eps[m.group(1)] = dev
|
||||
dev = None
|
||||
dev = "UNKNOWN"
|
||||
|
||||
return eps
|
||||
|
||||
def ips_windows_ipconfig(self):
|
||||
eps = {}
|
||||
def ips_windows_ipconfig(self) -> tuple[dict[str, str], set[str]]:
|
||||
eps: dict[str, str] = {}
|
||||
offs: set[str] = set()
|
||||
try:
|
||||
txt, _ = chkcmd(["ipconfig"])
|
||||
except:
|
||||
return eps
|
||||
return eps, offs
|
||||
|
||||
rdev = re.compile(r"(^[^ ].*):$")
|
||||
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
|
||||
roff = re.compile(r".*: Media disconnected$")
|
||||
dev = None
|
||||
for ln in txt.replace("\r", "").split("\n"):
|
||||
m = rdev.match(ln)
|
||||
if m:
|
||||
if dev and dev not in eps.values():
|
||||
offs.add(dev)
|
||||
|
||||
dev = m.group(1).split(" adapter ", 1)[-1]
|
||||
|
||||
if dev and roff.match(ln):
|
||||
offs.add(dev)
|
||||
dev = None
|
||||
|
||||
m = rip.match(ln)
|
||||
if m and dev:
|
||||
eps[m.group(1)] = dev
|
||||
dev = None
|
||||
|
||||
return eps
|
||||
if dev and dev not in eps.values():
|
||||
offs.add(dev)
|
||||
|
||||
def ips_windows_netsh(self):
|
||||
eps = {}
|
||||
return eps, offs
|
||||
|
||||
def ips_windows_netsh(self) -> dict[str, str]:
|
||||
eps: dict[str, str] = {}
|
||||
try:
|
||||
txt, _ = chkcmd("netsh interface ip show address".split())
|
||||
except:
|
||||
@@ -184,16 +271,18 @@ class TcpSrv(object):
|
||||
m = rip.match(ln)
|
||||
if m and dev:
|
||||
eps[m.group(1)] = dev
|
||||
dev = None
|
||||
|
||||
return eps
|
||||
|
||||
def detect_interfaces(self, listen_ips):
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, str]:
|
||||
if MACOS:
|
||||
eps = self.ips_macos()
|
||||
elif ANYWIN:
|
||||
eps = self.ips_windows_ipconfig() # sees more interfaces
|
||||
eps, off = self.ips_windows_ipconfig() # sees more interfaces + link state
|
||||
eps.update(self.ips_windows_netsh()) # has better names
|
||||
for k, v in eps.items():
|
||||
if v in off:
|
||||
eps[k] += ", \033[31mLINK-DOWN"
|
||||
else:
|
||||
eps = self.ips_linux()
|
||||
|
||||
@@ -212,7 +301,6 @@ class TcpSrv(object):
|
||||
]:
|
||||
try:
|
||||
s.connect((ip, 1))
|
||||
# raise OSError(13, "a")
|
||||
default_route = s.getsockname()[0]
|
||||
break
|
||||
except (OSError, socket.error) as ex:
|
||||
@@ -232,3 +320,26 @@ class TcpSrv(object):
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
def _set_wintitle(self, vs: dict[str, dict[str, int]]) -> None:
|
||||
vs["all"] = vs.get("all", {"Local-Only": 1})
|
||||
vs["pub"] = vs.get("pub", vs["all"])
|
||||
|
||||
vs2 = {}
|
||||
for k, eps in vs.items():
|
||||
vs2[k] = {
|
||||
ep: 1
|
||||
for ep in eps.keys()
|
||||
if ":" not in ep or ep.split(":")[0] not in eps
|
||||
}
|
||||
|
||||
title = ""
|
||||
vs = vs2
|
||||
for p in self.args.wintitle.split(" "):
|
||||
if p.startswith("$"):
|
||||
p = " and ".join(sorted(vs.get(p[1:], {"(None)": 1}).keys()))
|
||||
|
||||
title += "{} ".format(p)
|
||||
|
||||
print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
@@ -3,13 +3,23 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
from .util import Cooldown
|
||||
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .th_srv import HAVE_WEBP, thumb_path
|
||||
from .util import Cooldown
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class ThumbCli(object):
|
||||
def __init__(self, hsrv):
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.broker = hsrv.broker
|
||||
self.log_func = hsrv.log
|
||||
self.args = hsrv.args
|
||||
@@ -18,30 +28,53 @@ class ThumbCli(object):
|
||||
# cache on both sides for less broker spam
|
||||
self.cooldown = Cooldown(self.args.th_poke)
|
||||
|
||||
def log(self, msg, c=0):
|
||||
try:
|
||||
c = hsrv.th_cfg
|
||||
except:
|
||||
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
|
||||
self.thumbable = c["thumbable"]
|
||||
self.fmt_pil = c["pil"]
|
||||
self.fmt_vips = c["vips"]
|
||||
self.fmt_ffi = c["ffi"]
|
||||
self.fmt_ffv = c["ffv"]
|
||||
self.fmt_ffa = c["ffa"]
|
||||
|
||||
# defer args.th_ff_jpg, can change at runtime
|
||||
d = next((x for x in self.args.th_dec if x in ("vips", "pil")), None)
|
||||
self.can_webp = HAVE_WEBP or d == "vips"
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("thumbcli", msg, c)
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
def get(self, dbv: VFS, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
ptop = dbv.realpath
|
||||
ext = rem.rsplit(".")[-1].lower()
|
||||
if ext not in THUMBABLE:
|
||||
if ext not in self.thumbable or "dthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
is_vid = ext in FMT_FFV
|
||||
if is_vid and self.args.no_vthumb:
|
||||
is_vid = ext in self.fmt_ffv
|
||||
if is_vid and "dvthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
want_opus = fmt == "opus"
|
||||
is_au = ext in FMT_FFA
|
||||
want_opus = fmt in ("opus", "caf")
|
||||
is_au = ext in self.fmt_ffa
|
||||
if is_au:
|
||||
if want_opus:
|
||||
if self.args.no_acode:
|
||||
return None
|
||||
else:
|
||||
if self.args.no_athumb:
|
||||
if "dathumb" in dbv.flags:
|
||||
return None
|
||||
elif want_opus:
|
||||
return None
|
||||
|
||||
is_img = not is_vid and not is_au
|
||||
if is_img and "dithumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
preferred = self.args.th_dec[0] if self.args.th_dec else ""
|
||||
|
||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
|
||||
return os.path.join(ptop, rem)
|
||||
|
||||
@@ -49,7 +82,11 @@ class ThumbCli(object):
|
||||
fmt = "w"
|
||||
|
||||
if fmt == "w":
|
||||
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
fmt = "j"
|
||||
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
@@ -58,27 +95,41 @@ class ThumbCli(object):
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpaths = [tpath]
|
||||
if fmt == "w":
|
||||
# also check for jpg (maybe webp is unavailable)
|
||||
tpaths.append(tpath.rsplit(".", 1)[0] + ".jpg")
|
||||
|
||||
ret = None
|
||||
try:
|
||||
st = bos.stat(tpath)
|
||||
if st.st_size:
|
||||
ret = tpath
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
pass
|
||||
abort = False
|
||||
for tp in tpaths:
|
||||
try:
|
||||
st = bos.stat(tp)
|
||||
if st.st_size:
|
||||
ret = tpath = tp
|
||||
fmt = ret.rsplit(".")[1]
|
||||
else:
|
||||
abort = True
|
||||
except:
|
||||
pass
|
||||
|
||||
if ret:
|
||||
tdir = os.path.dirname(tpath)
|
||||
if self.cooldown.poke(tdir):
|
||||
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||
self.broker.say("thumbsrv.poke", tdir)
|
||||
|
||||
if want_opus:
|
||||
# audio files expire individually
|
||||
if self.cooldown.poke(tpath):
|
||||
self.broker.put(False, "thumbsrv.poke", tpath)
|
||||
self.broker.say("thumbsrv.poke", tpath)
|
||||
|
||||
return ret
|
||||
|
||||
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
return x.get()
|
||||
if abort:
|
||||
return None
|
||||
|
||||
if not bos.path.getsize(os.path.join(ptop, rem)):
|
||||
return None
|
||||
|
||||
x = self.broker.ask("thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
return x.get() # type: ignore
|
||||
|
||||
@@ -1,19 +1,28 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import threading
|
||||
import time
|
||||
|
||||
from .__init__ import PY2, unicode
|
||||
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import BytesIO, Cooldown, fsenc, min_ex, runcmd, statdir, vsplit
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_HEIF = False
|
||||
@@ -21,7 +30,7 @@ HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import Image, ImageOps, ExifTags
|
||||
from PIL import ExifTags, Image, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
@@ -39,7 +48,7 @@ try:
|
||||
pass
|
||||
|
||||
try:
|
||||
import pillow_avif
|
||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||
|
||||
HAVE_AVIF = True
|
||||
except:
|
||||
@@ -47,34 +56,14 @@ try:
|
||||
except:
|
||||
pass
|
||||
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# ffmpeg -formats
|
||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
|
||||
|
||||
if HAVE_HEIF:
|
||||
FMT_PIL += " heif heifs heic heics"
|
||||
|
||||
if HAVE_AVIF:
|
||||
FMT_PIL += " avif avifs"
|
||||
|
||||
FMT_PIL, FMT_FFV, FMT_FFA = [
|
||||
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
|
||||
]
|
||||
try:
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
|
||||
THUMBABLE = {}
|
||||
|
||||
if HAVE_PIL:
|
||||
THUMBABLE.update(FMT_PIL)
|
||||
|
||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||
THUMBABLE.update(FMT_FFV)
|
||||
THUMBABLE.update(FMT_FFA)
|
||||
|
||||
|
||||
def thumb_path(histpath, rem, mtime, fmt):
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
    # base16 = 16 = 256
    # b64-lc = 38 = 1444
    # base64 = 64 = 4096
@@ -90,7 +79,7 @@ def thumb_path(histpath, rem, mtime, fmt):
    h = hashlib.sha512(fsenc(fn)).digest()
    fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

    if fmt == "opus":
    if fmt in ("opus", "caf"):
        cat = "ac"
    else:
        fmt = "webp" if fmt == "w" else "jpg"
|
||||
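# a minimal sketch (not from this changeset) of the basename-hashing step used
# by thumb_path() above; the filename is hypothetical:
import base64
import hashlib

name = "cover.flac"
digest = hashlib.sha512(name.encode("utf-8")).digest()
tname = base64.urlsafe_b64encode(digest).decode("ascii")[:24]
# audio transcodes ("opus"/"caf") are filed under the "ac" category, image and
# video thumbnails under "th", with the extension chosen a few lines up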
@@ -100,7 +89,7 @@ def thumb_path(histpath, rem, mtime, fmt):
|
||||
|
||||
|
||||
class ThumbSrv(object):
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.asrv = hub.asrv
|
||||
self.args = hub.args
|
||||
@@ -111,17 +100,17 @@ class ThumbSrv(object):
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.busy: dict[str, list[threading.Condition]] = {}
|
||||
self.stopping = False
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q = Queue(self.nthr * 4)
|
||||
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
t = threading.Thread(
|
||||
thr = threading.Thread(
|
||||
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
|
||||
)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
want_ff = not self.args.no_vthumb or not self.args.no_athumb
|
||||
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
@@ -141,19 +130,50 @@ class ThumbSrv(object):
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.fmt_pil, self.fmt_vips, self.fmt_ffi, self.fmt_ffv, self.fmt_ffa = [
|
||||
set(y.split(","))
|
||||
for y in [
|
||||
self.args.th_r_pil,
|
||||
self.args.th_r_vips,
|
||||
self.args.th_r_ffi,
|
||||
self.args.th_r_ffv,
|
||||
self.args.th_r_ffa,
|
||||
]
|
||||
]
|
||||
|
||||
if not HAVE_HEIF:
|
||||
for f in "heif heifs heic heics".split(" "):
|
||||
self.fmt_pil.discard(f)
|
||||
|
||||
if not HAVE_AVIF:
|
||||
for f in "avif avifs".split(" "):
|
||||
self.fmt_pil.discard(f)
|
||||
|
||||
self.thumbable: set[str] = set()
|
||||
|
||||
if "pil" in self.args.th_dec:
|
||||
self.thumbable |= self.fmt_pil
|
||||
|
||||
if "vips" in self.args.th_dec:
|
||||
self.thumbable |= self.fmt_vips
|
||||
|
||||
if "ff" in self.args.th_dec:
|
||||
for zss in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
|
||||
self.thumbable |= zss
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("thumb", msg, c)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
for _ in range(self.nthr):
|
||||
self.q.put(None)
|
||||
|
||||
def stopped(self):
|
||||
def stopped(self) -> bool:
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
@@ -180,7 +200,7 @@ class ThumbSrv(object):
|
||||
do_conv = True
|
||||
|
||||
if do_conv:
|
||||
self.q.put([abspath, tpath])
|
||||
self.q.put((abspath, tpath))
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
@@ -201,7 +221,17 @@ class ThumbSrv(object):
|
||||
|
||||
return None
|
||||
|
||||
def worker(self):
|
||||
def getcfg(self) -> dict[str, set[str]]:
|
||||
return {
|
||||
"thumbable": self.thumbable,
|
||||
"pil": self.fmt_pil,
|
||||
"vips": self.fmt_vips,
|
||||
"ffi": self.fmt_ffi,
|
||||
"ffv": self.fmt_ffv,
|
||||
"ffa": self.fmt_ffa,
|
||||
}
|
||||
|
||||
def worker(self) -> None:
|
||||
while not self.stopping:
|
||||
task = self.q.get()
|
||||
if not task:
|
||||
@@ -211,22 +241,29 @@ class ThumbSrv(object):
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
fun = None
|
||||
if not bos.path.exists(tpath):
|
||||
if ext in FMT_PIL:
|
||||
fun = self.conv_pil
|
||||
elif ext in FMT_FFV:
|
||||
fun = self.conv_ffmpeg
|
||||
elif ext in FMT_FFA:
|
||||
if tpath.endswith(".opus"):
|
||||
fun = self.conv_opus
|
||||
else:
|
||||
fun = self.conv_spec
|
||||
for lib in self.args.th_dec:
|
||||
if fun:
|
||||
break
|
||||
elif lib == "pil" and ext in self.fmt_pil:
|
||||
fun = self.conv_pil
|
||||
elif lib == "vips" and ext in self.fmt_vips:
|
||||
fun = self.conv_vips
|
||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
||||
fun = self.conv_ffmpeg
|
||||
elif lib == "ff" and ext in self.fmt_ffa:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
fun = self.conv_opus
|
||||
else:
|
||||
fun = self.conv_spec
|
||||
|
||||
if fun:
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
except:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
self.log(msg.format(fun.__name__, abspath, min_ex()), "1;30")
|
||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "1;30"
|
||||
self.log(msg, c)
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
|
||||
@@ -241,7 +278,7 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def fancy_pillow(self, im):
|
||||
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
r = max(*self.res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
@@ -267,7 +304,7 @@ class ThumbSrv(object):
|
||||
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath, tpath):
|
||||
def conv_pil(self, abspath: str, tpath: str) -> None:
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im = self.fancy_pillow(im)
|
||||
@@ -296,16 +333,35 @@ class ThumbSrv(object):
|
||||
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_ffmpeg(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
def conv_vips(self, abspath: str, tpath: str) -> None:
|
||||
crops = ["centre", "none"]
|
||||
if self.args.th_no_crop:
|
||||
crops = ["none"]
|
||||
|
||||
ext = abspath.rsplit(".")[-1]
|
||||
if ext in ["h264", "h265"]:
|
||||
seek = []
|
||||
w, h = self.res
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
try:
|
||||
kw["crop"] = c
|
||||
img = pyvips.Image.thumbnail(abspath, w, **kw)
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
ext = abspath.rsplit(".")[-1].lower()
|
||||
if ext in ["h264", "h265"] or ext in self.fmt_ffi:
|
||||
seek: list[bytes] = []
|
||||
else:
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 4
|
||||
seek = "{:.0f}".format(dur / 3)
|
||||
seek = [b"-ss", seek.encode("utf-8")]
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
@@ -313,7 +369,7 @@ class ThumbSrv(object):
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
scale = scale.format(*list(self.res)).encode("utf-8")
|
||||
bscale = scale.format(*list(self.res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -325,7 +381,7 @@ class ThumbSrv(object):
|
||||
cmd += [
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map", b"0:v:0",
|
||||
b"-vf", scale,
|
||||
b"-vf", bscale,
|
||||
b"-frames:v", b"1",
|
||||
b"-metadata:s:v:0", b"rotate=0",
|
||||
]
|
||||
@@ -347,22 +403,55 @@ class ThumbSrv(object):
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
def _run_ff(self, cmd):
|
||||
def _run_ff(self, cmd: list[bytes]) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, sout, serr = runcmd(cmd)
|
||||
if ret != 0:
|
||||
m = "FFmpeg failed (probably a corrupt video file):\n"
|
||||
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
|
||||
self.log(m, c="1;30")
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
def conv_spec(self, abspath, tpath):
|
||||
c: Union[str, int] = "1;30"
|
||||
t = "FFmpeg failed (probably a corrupt video file):\n"
|
||||
if cmd[-1].lower().endswith(b".webp") and (
|
||||
"Error selecting an encoder" in serr
|
||||
or "Automatic encoder selection failed" in serr
|
||||
or "Default encoder for format webp" in serr
|
||||
or "Please choose an encoder manually" in serr
|
||||
):
|
||||
self.args.th_ff_jpg = True
|
||||
t = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
|
||||
c = 1
|
||||
|
||||
if (
|
||||
"Requested resampling engine is unavailable" in serr
|
||||
or "output pad on Parsed_aresample_" in serr
|
||||
):
|
||||
t = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
|
||||
c = 1
|
||||
|
||||
lines = serr.strip("\n").split("\n")
|
||||
if len(lines) > 50:
|
||||
lines = lines[:25] + ["[...]"] + lines[-25:]
|
||||
|
||||
txt = "\n".join(["ff: " + str(x) for x in lines])
|
||||
if len(txt) > 5000:
|
||||
txt = txt[:2500] + "...\nff: [...]\nff: ..." + txt[-2500:]
|
||||
|
||||
self.log(t + txt, c=c)
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath)
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||
fc = fc.format("" if self.args.th_ff_swr else ":resampler=soxr")
|
||||
|
||||
if self.args.th_ff_swr:
|
||||
fco = ":filter_size=128:cutoff=0.877"
|
||||
else:
|
||||
fco = ":resampler=soxr"
|
||||
|
||||
fc = fc.format(fco)
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
@@ -392,7 +481,7 @@ class ThumbSrv(object):
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
def conv_opus(self, abspath, tpath):
|
||||
def conv_opus(self, abspath: str, tpath: str) -> None:
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
@@ -400,23 +489,47 @@ class ThumbSrv(object):
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
|
||||
|
||||
self._run_ff(cmd)
|
||||
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
def poke(self, tdir):
|
||||
if want_caf:
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath if src_opus else tmp_opus),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"copy",
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
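# summary of the branch above (wording not from this changeset): when the
# requested target is a .caf, presumably for clients that cannot play a bare
# .opus, the source is first transcoded to an intermediate .opus unless it
# already is opus, and that result is remuxed into a CAF container with
# "-c:a copy -f caf"; plain .opus targets skip this second ffmpeg pass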
def poke(self, tdir: str) -> None:
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
|
||||
@@ -428,7 +541,7 @@ class ThumbSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def cleaner(self):
|
||||
def cleaner(self) -> None:
|
||||
interval = self.args.th_clean
|
||||
while True:
|
||||
time.sleep(interval)
|
||||
@@ -443,26 +556,27 @@ class ThumbSrv(object):
|
||||
|
||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||
|
||||
def clean(self, histpath):
|
||||
def clean(self, histpath: str) -> int:
|
||||
ret = 0
|
||||
for cat in ["th", "ac"]:
|
||||
ret += self._clean(histpath, cat, None)
|
||||
top = os.path.join(histpath, cat)
|
||||
if not bos.path.isdir(top):
|
||||
continue
|
||||
|
||||
ret += self._clean(cat, top)
|
||||
|
||||
return ret
|
||||
|
||||
def _clean(self, histpath, cat, thumbpath):
|
||||
if not thumbpath:
|
||||
thumbpath = os.path.join(histpath, cat)
|
||||
|
||||
def _clean(self, cat: str, thumbpath: str) -> int:
|
||||
# self.log("cln {}".format(thumbpath))
|
||||
exts = ["jpg", "webp"] if cat == "th" else ["opus"]
|
||||
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
|
||||
maxage = getattr(self.args, cat + "_maxage")
|
||||
now = time.time()
|
||||
prev_b64 = None
|
||||
prev_fp = None
|
||||
prev_fp = ""
|
||||
try:
|
||||
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
|
||||
ents = sorted(list(ents))
|
||||
t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
|
||||
ents = sorted(list(t1))
|
||||
except:
|
||||
return 0
|
||||
|
||||
@@ -477,7 +591,7 @@ class ThumbSrv(object):
|
||||
if age > maxage:
|
||||
with self.mutex:
|
||||
safe = True
|
||||
for k in self.busy.keys():
|
||||
for k in self.busy:
|
||||
if k.lower().replace("\\", "/").startswith(cmp):
|
||||
safe = False
|
||||
break
|
||||
@@ -487,7 +601,7 @@ class ThumbSrv(object):
|
||||
self.log("rm -rf [{}]".format(fp))
|
||||
shutil.rmtree(fp, ignore_errors=True)
|
||||
else:
|
||||
self._clean(histpath, cat, fp)
|
||||
ndirs += self._clean(cat, fp)
|
||||
|
||||
continue
|
||||
|
||||
|
||||
@@ -1,28 +1,37 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import calendar
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
import threading
|
||||
from datetime import datetime
|
||||
import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import ANYWIN, unicode
|
||||
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
from .util import HAVE_SQLITE3, Pebkac, absreal, gen_filekey, min_ex, quotep, s3dec
|
||||
|
||||
if HAVE_SQLITE3:
|
||||
import sqlite3
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
pass
|
||||
|
||||
try:
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpconn import HttpConn
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, conn):
|
||||
def __init__(self, conn: "HttpConn") -> None:
|
||||
self.log_func = conn.log_func
|
||||
self.asrv = conn.asrv
|
||||
self.args = conn.args
|
||||
@@ -32,17 +41,21 @@ class U2idx(object):
|
||||
self.log("your python does not have sqlite3; searching will be disabled")
|
||||
return
|
||||
|
||||
self.cur = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:")
|
||||
self.active_id = ""
|
||||
self.active_cur: Optional["sqlite3.Cursor"] = None
|
||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = None
|
||||
self.p_dur = 0
|
||||
self.p_end = 0.0
|
||||
self.p_dur = 0.0
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(self, vols, body):
|
||||
def fsearch(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
@@ -51,15 +64,15 @@ class U2idx(object):
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "where substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv)[0]
|
||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
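fsearch above looks files up by their up2k wark, matching a 16-character prefix before comparing the full value, presumably so sqlite can narrow the scan cheaply; a self-contained toy version of that lookup (table layout reduced to the columns used here, placeholder wark instead of a real hash):

```python
import sqlite3

# toy stand-in for the per-volume up2k index
db = sqlite3.connect(":memory:")
db.execute("create table up (w text, rd text, fn text)")
db.execute("insert into up values (?,?,?)", ("A" * 44, "music", "song.opus"))

wark = "A" * 44  # placeholder; real warks come from up2k_wark_from_hashlist()
uq = "substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
print(db.execute("select rd, fn from up where " + uq, uv).fetchall())
# -> [('music', 'song.opus')]
```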
def get_cur(self, ptop):
|
||||
def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
|
||||
if not HAVE_SQLITE3:
|
||||
return None
|
||||
|
||||
@@ -76,28 +89,45 @@ class U2idx(object):
|
||||
if not bos.path.exists(db_path):
|
||||
return None
|
||||
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
cur = None
|
||||
if ANYWIN:
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
cur = sqlite3.connect(uri, 2, uri=True).cursor()
|
||||
self.log("ro: {}".format(db_path))
|
||||
except:
|
||||
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
|
||||
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
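get_cur above first tries, on Windows, to open the per-volume database read-only through an SQLite URI so the search path cannot steal up2k's write lock, and only then falls back to a regular connection. A minimal standalone reproduction of that connect attempt (db_path is a hypothetical stand-in):

```python
import sqlite3
from pathlib import Path

db_path = r"C:\hist\up2k.db"  # hypothetical; copyparty derives this per volume
try:
    # mode=ro opens the file read-only, nolock=1 skips file locking entirely
    uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
    cur = sqlite3.connect(uri, 2, uri=True).cursor()
except Exception:
    # fall back to a normal connection (this one may take the write lock)
    cur = sqlite3.connect(db_path, 2).cursor()
```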
def search(self, vols, uq):
|
||||
def search(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
return [], []
|
||||
|
||||
q = ""
|
||||
va = []
|
||||
joins = ""
|
||||
v: Union[str, int] = ""
|
||||
va: list[Union[str, int]] = []
|
||||
have_up = False # query has up.* operands
|
||||
have_mt = False
|
||||
is_key = True
|
||||
is_size = False
|
||||
is_date = False
|
||||
field_end = "" # closing parenthesis or whatever
|
||||
kw_key = ["(", ")", "and ", "or ", "not "]
|
||||
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
|
||||
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
|
||||
mt_ctr = 0
|
||||
mt_keycmp = "substr(up.w,1,16)"
|
||||
mt_keycmp2 = None
|
||||
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
|
||||
ptn_lc = re.compile(r" (mt\.v) ([=<!>]+) \? \) $")
|
||||
ptn_lcv = re.compile(r"[a-zA-Z]")
|
||||
|
||||
while True:
|
||||
@@ -117,35 +147,47 @@ class U2idx(object):
|
||||
if ok:
|
||||
continue
|
||||
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
if uq.startswith('"'):
|
||||
v, uq = uq[1:].split('"', 1)
|
||||
while v.endswith("\\"):
|
||||
v2, uq = uq.split('"', 1)
|
||||
v = v[:-1] + '"' + v2
|
||||
uq = uq.strip()
|
||||
else:
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
v = v.replace('\\"', '"')
|
||||
|
||||
if is_key:
|
||||
is_key = False
|
||||
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
have_up = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
have_up = True
|
||||
|
||||
elif v == "path":
|
||||
v = "up.rd"
|
||||
v = "trim(?||up.rd,'/')"
|
||||
va.append("\nrd")
|
||||
have_up = True
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
have_up = True
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
mt_ctr += 1
|
||||
mt_keycmp2 = "mt{}.w".format(mt_ctr)
|
||||
joins += "inner join mt mt{} on {} = {} ".format(
|
||||
mt_ctr, mt_keycmp, mt_keycmp2
|
||||
)
|
||||
mt_keycmp = mt_keycmp2
|
||||
have_mt = True
|
||||
field_end = ") "
|
||||
if v == "tags":
|
||||
v = "mt{0}.v".format(mt_ctr)
|
||||
vq = "mt.v"
|
||||
else:
|
||||
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
|
||||
vq = "+mt.k = '{}' and mt.v".format(v)
|
||||
|
||||
v = "exists(select 1 from mt where mt.w = mtw and " + vq
|
||||
|
||||
else:
|
||||
raise Pebkac(400, "invalid key [" + v + "]")
|
||||
@@ -158,18 +200,17 @@ class U2idx(object):
|
||||
|
||||
if is_date:
|
||||
is_date = False
|
||||
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
v = re.sub(r"[tzTZ, ]+", " ", v).strip()
|
||||
for fmt in [
|
||||
"%Y-%m-%d %H:%M:%S",
|
||||
"%Y-%m-%d %H:%M",
|
||||
"%Y-%m-%d %H",
|
||||
"%Y-%m-%d",
|
||||
"%Y-%m",
|
||||
"%Y",
|
||||
]:
|
||||
try:
|
||||
v = datetime.strptime(v, fmt).timestamp()
|
||||
v = calendar.timegm(time.strptime(str(v), fmt))
|
||||
break
|
||||
except:
|
||||
pass
|
||||
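A behavioral note on the date parsing swapped in above: datetime.strptime(...).timestamp() interprets a naive timestamp in the server's local timezone, whereas calendar.timegm(time.strptime(...)) treats it as UTC, so date: filters are evaluated the same way regardless of where the server runs. For example:

```python
import calendar
import time
from datetime import datetime

s, fmt = "2022-05-01 12:00", "%Y-%m-%d %H:%M"

utc_epoch = calendar.timegm(time.strptime(s, fmt))   # always 1651406400
local_epoch = datetime.strptime(s, fmt).timestamp()  # shifts with the server's TZ
print(utc_epoch, local_epoch)
```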
@@ -191,28 +232,41 @@ class U2idx(object):
|
||||
va.append(v)
|
||||
is_key = True
|
||||
|
||||
if field_end:
|
||||
q += field_end
|
||||
field_end = ""
|
||||
|
||||
# lowercase tag searches
|
||||
m = ptn_lc.search(q)
|
||||
if not m or not ptn_lcv.search(unicode(v)):
|
||||
zs = unicode(v)
|
||||
if not m or not ptn_lcv.search(zs):
|
||||
continue
|
||||
|
||||
va.pop()
|
||||
va.append(v.lower())
|
||||
va.append(zs.lower())
|
||||
q = q[: m.start()]
|
||||
|
||||
field, oper = m.groups()
|
||||
if oper in ["=", "=="]:
|
||||
q += " {} like ? ".format(field)
|
||||
q += " {} like ? ) ".format(field)
|
||||
else:
|
||||
q += " lower({}) {} ? ".format(field, oper)
|
||||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(vols, joins + "where " + q, va)
|
||||
return self.run_query(vols, q, va, have_up, have_mt, lim)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv):
|
||||
done_flag = []
|
||||
def run_query(
|
||||
self,
|
||||
vols: list[tuple[str, str, dict[str, Any]]],
|
||||
uq: str,
|
||||
uv: list[Union[str, int]],
|
||||
have_up: bool,
|
||||
have_mt: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
done_flag: list[bool] = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
)
|
||||
@@ -228,16 +282,17 @@ class U2idx(object):
|
||||
thr.start()
|
||||
|
||||
if not uq or not uv:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
uq = "select * from up"
|
||||
uv = []
|
||||
elif have_mt:
|
||||
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
|
||||
else:
|
||||
q = "select up.* from up " + uq
|
||||
v = tuple(uv)
|
||||
uq = "select up.* from up where " + uq
|
||||
|
||||
self.log("qs: {!r} {!r}".format(q, v))
|
||||
self.log("qs: {!r} {!r}".format(uq, uv))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
lim = min(lim, int(self.args.srch_hits))
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
cur = self.get_cur(ptop)
|
||||
@@ -246,13 +301,20 @@ class U2idx(object):
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
vuv = []
|
||||
for v in uv:
|
||||
if v == "\nrd":
|
||||
v = vtop + "/"
|
||||
|
||||
vuv.append(v)
|
||||
|
||||
sret = []
|
||||
fk = flags.get("fk")
|
||||
c = cur.execute(q, v)
|
||||
c = cur.execute(uq, tuple(vuv))
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
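The "\nrd" sentinel appended for path: queries earlier is resolved per volume right here, just before the statement runs, so trim(?||up.rd,'/') compares against the path as the user sees it. A toy version of that substitution step (hypothetical parameter list):

```python
def bind_volume_args(uv, vtop):
    # swap the "\nrd" placeholder for this volume's mount point
    return tuple(vtop + "/" if v == "\nrd" else v for v in uv)

uv = ["\nrd", "%2022%"]               # hypothetical: a path: term plus a name: term
print(bind_volume_args(uv, "share"))  # ('share/', '%2022%')
```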
@@ -281,7 +343,7 @@ class U2idx(object):
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q2 = "select k, v from mt where w = ? and k != 'x'"
|
||||
q2 = "select k, v from mt where w = ? and +k != 'x'"
|
||||
for k, v2 in cur.execute(q2, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v2
|
||||
@@ -292,7 +354,7 @@ class U2idx(object):
|
||||
# print("[{}] {}".format(ptop, sret))
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = None
|
||||
self.active_id = ""
|
||||
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
@@ -306,11 +368,12 @@ class U2idx(object):
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||
for _ in range(self.timeout):
|
||||
time.sleep(1)
|
||||
if done_flag:
|
||||
return
|
||||
|
||||
if identifier == self.active_id:
|
||||
assert self.active_cur
|
||||
self.active_cur.connection.interrupt()
|
||||
|
||||
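terminator above acts as a per-query watchdog: run_query registers an active id and cursor, and if the done flag has not been set within the timeout, the watchdog interrupts the sqlite connection so a runaway search aborts. A reduced sketch of the same pattern (hypothetical timeout and a trivial query in place of the real search):

```python
import sqlite3
import threading
import time

def watchdog(conn: sqlite3.Connection, done: list, timeout: int = 30) -> None:
    for _ in range(timeout):
        time.sleep(1)
        if done:
            return
    conn.interrupt()  # makes the blocked execute() raise sqlite3.OperationalError

conn = sqlite3.connect(":memory:")
done: list = []
threading.Thread(target=watchdog, args=(conn, done), daemon=True).start()
try:
    conn.execute("select 1").fetchall()  # stand-in for the real search query
finally:
    done.append(True)
```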
copyparty/up2k.py (1953 lines changed): file diff suppressed because it is too large
copyparty/util.py (1239 lines changed): file diff suppressed because it is too large
@@ -17,12 +17,11 @@ window.baguetteBox = (function () {
|
||||
titleTag: false,
|
||||
async: false,
|
||||
preload: 2,
|
||||
animation: 'slideIn',
|
||||
afterShow: null,
|
||||
afterHide: null,
|
||||
onChange: null,
|
||||
},
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
|
||||
currentGallery = [],
|
||||
currentIndex = 0,
|
||||
isOverlayVisible = false,
|
||||
@@ -30,13 +29,17 @@ window.baguetteBox = (function () {
|
||||
touchFlag = false, // busy
|
||||
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
|
||||
re_v = /.+\.(webm|mp4)(\?|$)/i,
|
||||
anims = ['slideIn', 'fadeIn', 'none'],
|
||||
data = {}, // all galleries
|
||||
imagesElements = [],
|
||||
documentLastFocus = null,
|
||||
isFullscreen = false,
|
||||
vmute = false,
|
||||
vloop = false,
|
||||
vnext = false,
|
||||
vloop = sread('vmode') == 'L',
|
||||
vnext = sread('vmode') == 'C',
|
||||
loopA = null,
|
||||
loopB = null,
|
||||
url_ts = null,
|
||||
resume_mp = false;
|
||||
|
||||
var onFSC = function (e) {
|
||||
@@ -178,9 +181,11 @@ window.baguetteBox = (function () {
|
||||
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">></button>' +
|
||||
'<div id="bbox-btns">' +
|
||||
'<button id="bbox-help" type="button">?</button>' +
|
||||
'<button id="bbox-anim" type="button" tt="a">-</button>' +
|
||||
'<button id="bbox-rotl" type="button">↶</button>' +
|
||||
'<button id="bbox-rotr" type="button">↷</button>' +
|
||||
'<button id="bbox-tsel" type="button">sel</button>' +
|
||||
'<button id="bbox-full" type="button">⛶</button>' +
|
||||
'<button id="bbox-vmode" type="button" tt="a"></button>' +
|
||||
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
|
||||
'</div></div>'
|
||||
@@ -193,12 +198,13 @@ window.baguetteBox = (function () {
|
||||
btnPrev = ebi('bbox-prev');
|
||||
btnNext = ebi('bbox-next');
|
||||
btnHelp = ebi('bbox-help');
|
||||
btnAnim = ebi('bbox-anim');
|
||||
btnRotL = ebi('bbox-rotl');
|
||||
btnRotR = ebi('bbox-rotr');
|
||||
btnSel = ebi('bbox-tsel');
|
||||
btnFull = ebi('bbox-full');
|
||||
btnVmode = ebi('bbox-vmode');
|
||||
btnClose = ebi('bbox-close');
|
||||
bindEvents();
|
||||
}
|
||||
|
||||
function halp() {
|
||||
@@ -213,23 +219,26 @@ window.baguetteBox = (function () {
|
||||
['home', 'first file'],
|
||||
['end', 'last file'],
|
||||
['R', 'rotate (shift=ccw)'],
|
||||
['F', 'toggle fullscreen'],
|
||||
['S', 'toggle file selection'],
|
||||
['space, P, K', 'video: play / pause'],
|
||||
['U', 'video: seek 10sec back'],
|
||||
['P', 'video: seek 10sec ahead'],
|
||||
['0..9', 'video: seek 0%..90%'],
|
||||
['M', 'video: toggle mute'],
|
||||
['V', 'video: toggle loop'],
|
||||
['C', 'video: toggle auto-next'],
|
||||
['F', 'video: toggle fullscreen'],
|
||||
['<code>[</code>, <code>]</code>', 'video: loop start / end'],
|
||||
],
|
||||
d = mknod('table'),
|
||||
d = mknod('table', 'bbox-halp'),
|
||||
html = ['<tbody>'];
|
||||
|
||||
for (var a = 0; a < list.length; a++)
|
||||
html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');
|
||||
|
||||
html.push('<tr><td colspan="2">tap middle of img to hide btns</td></tr>');
|
||||
html.push('<tr><td colspan="2">tap left/right sides for prev/next</td></tr>');
|
||||
d.innerHTML = html.join('\n') + '</tbody>';
|
||||
d.setAttribute('id', 'bbox-halp');
|
||||
d.onclick = function () {
|
||||
overlay.removeChild(d);
|
||||
};
|
||||
@@ -237,10 +246,10 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function keyDownHandler(e) {
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
|
||||
return;
|
||||
|
||||
var k = e.code + '', v = vid();
|
||||
var k = e.code + '', v = vid(), pos = -1;
|
||||
|
||||
if (k == "ArrowLeft" || k == "KeyJ")
|
||||
showPreviousImage();
|
||||
@@ -256,6 +265,8 @@ window.baguetteBox = (function () {
|
||||
playpause();
|
||||
else if (k == "KeyU" || k == "KeyO")
|
||||
relseek(k == "KeyU" ? -10 : 10);
|
||||
else if (k.indexOf('Digit') === 0)
|
||||
vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
|
||||
else if (k == "KeyM" && v) {
|
||||
v.muted = vmute = !vmute;
|
||||
mp_ctl();
|
||||
@@ -271,17 +282,27 @@ window.baguetteBox = (function () {
|
||||
setVmode();
|
||||
}
|
||||
else if (k == "KeyF")
|
||||
try {
|
||||
if (isFullscreen)
|
||||
document.exitFullscreen();
|
||||
else
|
||||
v.requestFullscreen();
|
||||
}
|
||||
catch (ex) { }
|
||||
tglfull();
|
||||
else if (k == "KeyS")
|
||||
tglsel();
|
||||
else if (k == "KeyR")
|
||||
rotn(e.shiftKey ? -1 : 1);
|
||||
else if (k == "KeyY")
|
||||
dlpic();
|
||||
else if (k == "BracketLeft")
|
||||
setloop(1);
|
||||
else if (k == "BracketRight")
|
||||
setloop(2);
|
||||
}
|
||||
|
||||
function anim() {
|
||||
var i = (anims.indexOf(options.animation) + 1) % anims.length,
|
||||
o = options;
|
||||
swrite('ganim', anims[i]);
|
||||
options = {};
|
||||
setOptions(o);
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -308,6 +329,7 @@ window.baguetteBox = (function () {
|
||||
btnVmode.setAttribute('aria-label', msg);
|
||||
btnVmode.setAttribute('tt', msg + tts);
|
||||
btnVmode.textContent = lbl;
|
||||
swrite('vmode', lbl[0]);
|
||||
|
||||
v.loop = vloop
|
||||
if (vloop && v.paused)
|
||||
@@ -329,19 +351,39 @@ window.baguetteBox = (function () {
|
||||
tt.show.bind(this)();
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
function findfile() {
|
||||
var thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1].split('?')[0],
|
||||
files = msel.getall();
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
if (vsplit(files[a].vp)[1] == name)
|
||||
clmod(ebi(files[a].id).closest('tr'), 'sel', 't');
|
||||
return [name, a, files, ebi(files[a].id)];
|
||||
}
|
||||
|
||||
function tglfull() {
|
||||
try {
|
||||
if (isFullscreen)
|
||||
document.exitFullscreen();
|
||||
else
|
||||
(vid() || ebi('bbox-overlay')).requestFullscreen();
|
||||
}
|
||||
catch (ex) { alert(ex); }
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
var o = findfile()[3];
|
||||
clmod(o.closest('tr'), 'sel', 't');
|
||||
msel.selui();
|
||||
selbg();
|
||||
}
|
||||
|
||||
function dlpic() {
|
||||
var url = findfile()[3].href;
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
|
||||
dl_file(url);
|
||||
}
|
||||
|
||||
function selbg() {
|
||||
var img = vidimg(),
|
||||
thumb = currentGallery[currentIndex].imageElement,
|
||||
@@ -391,15 +433,20 @@ window.baguetteBox = (function () {
|
||||
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
|
||||
|
||||
function bindEvents() {
|
||||
bind(document, 'keydown', keyDownHandler);
|
||||
bind(document, 'keyup', keyUpHandler);
|
||||
bind(document, 'fullscreenchange', onFSC);
|
||||
bind(overlay, 'click', overlayClickHandler);
|
||||
bind(btnPrev, 'click', showPreviousImage);
|
||||
bind(btnNext, 'click', showNextImage);
|
||||
bind(btnClose, 'click', hideOverlay);
|
||||
bind(btnVmode, 'click', tglVmode);
|
||||
bind(btnHelp, 'click', halp);
|
||||
bind(btnAnim, 'click', anim);
|
||||
bind(btnRotL, 'click', rotl);
|
||||
bind(btnRotR, 'click', rotr);
|
||||
bind(btnSel, 'click', tglsel);
|
||||
bind(btnFull, 'click', tglfull);
|
||||
bind(slider, 'contextmenu', contextmenuHandler);
|
||||
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
@@ -408,15 +455,20 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function unbindEvents() {
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
unbind(document, 'fullscreenchange', onFSC);
|
||||
unbind(overlay, 'click', overlayClickHandler);
|
||||
unbind(btnPrev, 'click', showPreviousImage);
|
||||
unbind(btnNext, 'click', showNextImage);
|
||||
unbind(btnClose, 'click', hideOverlay);
|
||||
unbind(btnVmode, 'click', tglVmode);
|
||||
unbind(btnHelp, 'click', halp);
|
||||
unbind(btnAnim, 'click', anim);
|
||||
unbind(btnRotL, 'click', rotl);
|
||||
unbind(btnRotR, 'click', rotr);
|
||||
unbind(btnSel, 'click', tglsel);
|
||||
unbind(btnFull, 'click', tglfull);
|
||||
unbind(slider, 'contextmenu', contextmenuHandler);
|
||||
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
@@ -437,9 +489,8 @@ window.baguetteBox = (function () {
|
||||
var imagesFiguresIds = [];
|
||||
var imagesCaptionsIds = [];
|
||||
for (var i = 0, fullImage; i < gallery.length; i++) {
|
||||
fullImage = mknod('div');
|
||||
fullImage = mknod('div', 'baguette-img-' + i);
|
||||
fullImage.className = 'full-image';
|
||||
fullImage.id = 'baguette-img-' + i;
|
||||
imagesElements.push(fullImage);
|
||||
|
||||
imagesFiguresIds.push('bbox-figure-' + i);
|
||||
@@ -459,7 +510,12 @@ window.baguetteBox = (function () {
|
||||
if (typeof newOptions[item] !== 'undefined')
|
||||
options[item] = newOptions[item];
|
||||
}
|
||||
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
|
||||
|
||||
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
|
||||
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
|
||||
btnAnim.setAttribute('tt', 'animation: ' + an);
|
||||
|
||||
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .3s ease' :
|
||||
options.animation === 'slideIn' ? '' : 'none');
|
||||
|
||||
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
|
||||
@@ -476,9 +532,7 @@ window.baguetteBox = (function () {
|
||||
if (overlay.style.display === 'block')
|
||||
return;
|
||||
|
||||
bind(document, 'keydown', keyDownHandler);
|
||||
bind(document, 'keyup', keyUpHandler);
|
||||
bind(document, 'fullscreenchange', onFSC);
|
||||
bindEvents();
|
||||
currentIndex = chosenImageIndex;
|
||||
touch = {
|
||||
count: 0,
|
||||
@@ -490,6 +544,10 @@ window.baguetteBox = (function () {
|
||||
preloadPrev(currentIndex);
|
||||
});
|
||||
|
||||
clmod(ebi('bbox-btns'), 'off');
|
||||
clmod(btnPrev, 'off');
|
||||
clmod(btnNext, 'off');
|
||||
|
||||
updateOffset();
|
||||
overlay.style.display = 'block';
|
||||
// Fade in overlay
|
||||
@@ -502,9 +560,10 @@ window.baguetteBox = (function () {
|
||||
options.afterShow();
|
||||
}, 50);
|
||||
|
||||
if (options.onChange)
|
||||
if (options.onChange && !url_ts)
|
||||
options.onChange(currentIndex, imagesElements.length);
|
||||
|
||||
url_ts = null;
|
||||
documentLastFocus = document.activeElement;
|
||||
btnClose.focus();
|
||||
isOverlayVisible = true;
|
||||
@@ -520,9 +579,14 @@ window.baguetteBox = (function () {
|
||||
if (overlay.style.display === 'none')
|
||||
return;
|
||||
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
unbind(document, 'fullscreenchange', onFSC);
|
||||
sethash('');
|
||||
unbindEvents();
|
||||
try {
|
||||
document.exitFullscreen();
|
||||
isFullscreen = false;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
// Fade out and hide the overlay
|
||||
overlay.className = '';
|
||||
setTimeout(function () {
|
||||
@@ -568,16 +632,14 @@ window.baguetteBox = (function () {
|
||||
if (is_vid && index != currentIndex)
|
||||
return; // no preload
|
||||
|
||||
var figure = mknod('figure');
|
||||
figure.id = 'bbox-figure-' + index;
|
||||
var figure = mknod('figure', 'bbox-figure-' + index);
|
||||
figure.innerHTML = '<div class="bbox-spinner">' +
|
||||
'<div class="bbox-double-bounce1"></div>' +
|
||||
'<div class="bbox-double-bounce2"></div>' +
|
||||
'</div>';
|
||||
|
||||
if (options.captions && imageCaption) {
|
||||
var figcaption = mknod('figcaption');
|
||||
figcaption.id = 'bbox-figcaption-' + index;
|
||||
var figcaption = mknod('figcaption', 'bbox-figcaption-' + index);
|
||||
figcaption.innerHTML = imageCaption;
|
||||
figure.appendChild(figcaption);
|
||||
}
|
||||
@@ -637,18 +699,12 @@ window.baguetteBox = (function () {
|
||||
showOverlay(index);
|
||||
return true;
|
||||
}
|
||||
if (index < 0) {
|
||||
if (options.animation)
|
||||
bounceAnimation('left');
|
||||
|
||||
return false;
|
||||
}
|
||||
if (index >= imagesElements.length) {
|
||||
if (options.animation)
|
||||
bounceAnimation('right');
|
||||
if (index < 0)
|
||||
return bounceAnimation('left');
|
||||
|
||||
return false;
|
||||
}
|
||||
if (index >= imagesElements.length)
|
||||
return bounceAnimation('right');
|
||||
|
||||
var v = vid();
|
||||
if (v) {
|
||||
@@ -756,8 +812,18 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function playvid(play) {
|
||||
if (vid())
|
||||
vid()[play ? 'play' : 'pause']();
|
||||
if (!play) {
|
||||
timer.rm(loopchk);
|
||||
loopA = loopB = null;
|
||||
}
|
||||
|
||||
var v = vid();
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
v[play ? 'play' : 'pause']();
|
||||
if (play && loopA !== null && v.currentTime < loopA)
|
||||
v.currentTime = loopA;
|
||||
}
|
||||
|
||||
function playpause() {
|
||||
@@ -776,6 +842,38 @@ window.baguetteBox = (function () {
|
||||
showNextImage();
|
||||
}
|
||||
|
||||
function setloop(side) {
|
||||
var v = vid();
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
var t = v.currentTime;
|
||||
if (side == 1) loopA = t;
|
||||
if (side == 2) loopB = t;
|
||||
if (side)
|
||||
toast.inf(5, 'Loop' + (side == 1 ? 'A' : 'B') + ': ' + f2f(t, 2));
|
||||
|
||||
if (loopB !== null) {
|
||||
timer.add(loopchk);
|
||||
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
}
|
||||
}
|
||||
|
||||
function loopchk() {
|
||||
if (loopB === null)
|
||||
return;
|
||||
|
||||
var v = vid();
|
||||
if (!v || v.paused || v.currentTime < loopB)
|
||||
return;
|
||||
|
||||
v.currentTime = loopA || 0;
|
||||
}
|
||||
|
||||
function urltime(txt) {
|
||||
url_ts = txt;
|
||||
}
|
||||
|
||||
function mp_ctl() {
|
||||
var v = vid();
|
||||
if (!vmute && v && mp.au && !mp.au.paused) {
|
||||
@@ -789,10 +887,11 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function bounceAnimation(direction) {
|
||||
slider.className = 'bounce-from-' + direction;
|
||||
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
|
||||
setTimeout(function () {
|
||||
slider.className = '';
|
||||
}, 400);
|
||||
}, 300);
|
||||
return false;
|
||||
}
|
||||
|
||||
function updateOffset() {
|
||||
@@ -806,7 +905,7 @@ window.baguetteBox = (function () {
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
slider.style.left = offset;
|
||||
slider.style.opacity = 1;
|
||||
}, 400);
|
||||
}, 100);
|
||||
} else {
|
||||
xform ?
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
@@ -818,6 +917,15 @@ window.baguetteBox = (function () {
|
||||
playvid(true);
|
||||
v.muted = vmute;
|
||||
v.loop = vloop;
|
||||
if (url_ts) {
|
||||
var seek = ('' + url_ts).split('-');
|
||||
v.currentTime = seek[0];
|
||||
if (seek.length > 1) {
|
||||
loopA = parseFloat(seek[0]);
|
||||
loopB = parseFloat(seek[1]);
|
||||
setloop();
|
||||
}
|
||||
}
|
||||
}
|
||||
selbg();
|
||||
mp_ctl();
|
||||
@@ -829,6 +937,28 @@ window.baguetteBox = (function () {
|
||||
else
|
||||
timer.rm(rotn);
|
||||
|
||||
var ctime = 0;
|
||||
el.onclick = v ? null : function (e) {
|
||||
var rc = e.target.getBoundingClientRect(),
|
||||
x = e.clientX - rc.left,
|
||||
fx = x / (rc.right - rc.left);
|
||||
|
||||
if (fx < 0.3)
|
||||
return showPreviousImage();
|
||||
|
||||
if (fx > 0.7)
|
||||
return showNextImage();
|
||||
|
||||
clmod(ebi('bbox-btns'), 'off', 't');
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
|
||||
if (Date.now() - ctime <= 500)
|
||||
tglfull();
|
||||
|
||||
ctime = Date.now();
|
||||
};
|
||||
|
||||
var prev = QS('.full-image.vis');
|
||||
if (prev)
|
||||
clmod(prev, 'vis');
|
||||
@@ -865,8 +995,6 @@ window.baguetteBox = (function () {
|
||||
function destroyPlugin() {
|
||||
unbindEvents();
|
||||
clearCachedData();
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
|
||||
data = {};
|
||||
currentGallery = [];
|
||||
@@ -879,6 +1007,7 @@ window.baguetteBox = (function () {
|
||||
showNext: showNextImage,
|
||||
showPrevious: showPreviousImage,
|
||||
relseek: relseek,
|
||||
urltime: urltime,
|
||||
playpause: playpause,
|
||||
hide: hideOverlay,
|
||||
destroy: destroyPlugin
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,9 +3,10 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>⇆🎉 {{ title }}</title>
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
||||
{%- if css %}
|
||||
@@ -34,6 +35,7 @@
|
||||
<input type="file" name="f" multiple /><br />
|
||||
<input type="submit" value="start upload">
|
||||
</form>
|
||||
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
@@ -66,7 +68,7 @@
|
||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree" tt="show navpane (directory tree sidebar)$NHotkey: B">🌲</a>
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
@@ -118,7 +120,7 @@
|
||||
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
|
||||
<h2><a href="/?h">control-panel</a></h2>
|
||||
<h2><a href="/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
@@ -133,6 +135,11 @@
|
||||
<script>
|
||||
var acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
@@ -141,13 +148,18 @@
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost|tojson }},
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
{% if no_prism %}no_prism = 1,{% endif %}
|
||||
readme = {{ readme|tojson }};
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
|
||||
document.documentElement.className = localStorage.theme || dtheme;
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
|
||||
File diff suppressed because it is too large
@@ -6,6 +6,7 @@
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<style>
|
||||
html{font-family:sans-serif}
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
@@ -44,7 +45,9 @@
|
||||
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{
|
||||
'&' + url_suf[1:] if url_suf[:1] == '?' and '?' in f.href else url_suf
|
||||
}}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
|
||||
copyparty/web/cf.html (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="box" style="opacity: 0; font-family: sans-serif">
|
||||
<h3>please press F5 to reload the page</h3>
|
||||
<p>sorry for the inconvenience</p>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
document.getElementById('box').style.opacity = 1;
|
||||
}, 500);
|
||||
|
||||
parent.toast.ok(30, parent.L.cf_ok);
|
||||
parent.qsr('#cf_frame');
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -13,8 +13,7 @@ audio_eq.apply = function () {
|
||||
|
||||
var can = ebi('fft_can');
|
||||
if (!can) {
|
||||
can = mknod('canvas');
|
||||
can.setAttribute('id', 'fft_can');
|
||||
can = mknod('canvas', 'fft_can');
|
||||
can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
|
||||
document.body.appendChild(can);
|
||||
can.width = w;
|
||||
|
||||
@@ -161,7 +161,7 @@ blink {
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border: 1px solid rgba(154,154,154,0.6);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
}
|
||||
@@ -219,48 +219,45 @@ blink {
|
||||
|
||||
|
||||
|
||||
html.dark,
|
||||
html.dark body {
|
||||
html.z,
|
||||
html.z body {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #toc a {
|
||||
html.z #toc a {
|
||||
color: #ccc;
|
||||
border-left: .4em solid #444;
|
||||
border-bottom: .1em solid #333;
|
||||
}
|
||||
html.dark #toc a.act {
|
||||
html.z #toc a.act {
|
||||
color: #fff;
|
||||
border-left: .4em solid #3ad;
|
||||
}
|
||||
html.dark #toc li {
|
||||
html.z #toc li {
|
||||
border-width: 0;
|
||||
}
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark #mn a {
|
||||
html.z #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn {
|
||||
html.z #mn {
|
||||
border-bottom: 1px solid #333;
|
||||
}
|
||||
html.dark #mn,
|
||||
html.dark #mh {
|
||||
html.z #mn,
|
||||
html.z #mh {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #mh a {
|
||||
html.z #mh a {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
html.z #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
html.z #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
html.z #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
@@ -307,24 +304,24 @@ blink {
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #toc {
|
||||
|
||||
html.z #toc {
|
||||
background: #282828;
|
||||
border-top: 1px solid #2c2c2c;
|
||||
box-shadow: 0 0 1em #181818;
|
||||
}
|
||||
html.dark #toc,
|
||||
html.dark #mw {
|
||||
html.z #toc,
|
||||
html.z #mw {
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-track {
|
||||
html.z #toc::-webkit-scrollbar-track {
|
||||
background: #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar {
|
||||
html.z #toc::-webkit-scrollbar {
|
||||
background: #282828;
|
||||
width: .8em;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-thumb {
|
||||
html.z #toc::-webkit-scrollbar-thumb {
|
||||
background: #b80;
|
||||
}
|
||||
}
|
||||
@@ -431,17 +428,17 @@ blink {
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .mdo a {
|
||||
|
||||
html.z .mdo a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
html.z .mdo pre,
|
||||
html.z .mdo code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
html.z .mdo p>em,
|
||||
html.z .mdo li>em,
|
||||
html.z .mdo td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title>
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
@@ -10,7 +11,7 @@
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn">navbar</div>
|
||||
<div id="mn"></div>
|
||||
<div id="mh">
|
||||
<a id="lightswitch" href="#">go dark</a>
|
||||
<a id="navtoggle" href="#">hide nav</a>
|
||||
@@ -126,30 +127,32 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
var last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var l = localStorage,
|
||||
drk = l.lightmode != 1,
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.lightmode = drk? 0:1;
|
||||
};
|
||||
l.light = drk? 0:1;
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
f();
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/md.js?_={{ ts }}"></script>
|
||||
{%- if edit %}
|
||||
|
||||
@@ -20,10 +20,6 @@ var dbg = function () { };
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
@@ -39,20 +35,14 @@ var md_plug = {};
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
|
||||
n[0] = 'top';
|
||||
var loc = [];
|
||||
var nav = [];
|
||||
for (var a = 0; a < n.length; a++) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = esc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
var parts = get_evpath().split('/'), link = '', o;
|
||||
for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
|
||||
link += parts[a] + (a < aa ? '/' : '');
|
||||
o = mknod('a');
|
||||
o.setAttribute('href', link);
|
||||
o.textContent = uricom_dec(parts[a])[0] || 'top';
|
||||
dom_nav.appendChild(o);
|
||||
}
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
|
||||
@@ -91,13 +81,13 @@ function copydom(src, dst, lv) {
|
||||
|
||||
var rpl = [];
|
||||
for (var a = sc.length - 1; a >= 0; a--) {
|
||||
var st = sc[a].tagName,
|
||||
dt = dc[a].tagName;
|
||||
var st = sc[a].tagName || sc[a].nodeType,
|
||||
dt = dc[a].tagName || dc[a].nodeType;
|
||||
|
||||
if (st !== dt) {
|
||||
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
|
||||
rpl.push(a);
|
||||
continue;
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
var sa = sc[a].attributes || [],
|
||||
@@ -146,8 +136,11 @@ function copydom(src, dst, lv) {
|
||||
// repl is reversed; build top-down
|
||||
var nbytes = 0;
|
||||
for (var a = rpl.length - 1; a >= 0; a--) {
|
||||
var html = sc[rpl[a]].outerHTML;
|
||||
dc[rpl[a]].outerHTML = html;
|
||||
var i = rpl[a],
|
||||
prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
|
||||
|
||||
var html = sc[i][prop];
|
||||
dc[i][prop] = html;
|
||||
nbytes += html.length;
|
||||
}
|
||||
if (nbytes > 0)
|
||||
@@ -163,7 +156,7 @@ function copydom(src, dst, lv) {
|
||||
}
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
md_plug_err = function (ex, js) {
|
||||
qsr('#md_errbox');
|
||||
if (!ex)
|
||||
return;
|
||||
@@ -180,8 +173,7 @@ function md_plug_err(ex, js) {
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
var errbox = mknod('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
var errbox = mknod('div', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
@@ -200,50 +192,12 @@ function md_plug_err(ex, js) {
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
md_text = load_md_plug(md_text, 'pre');
|
||||
md_text = load_md_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
@@ -251,12 +205,12 @@ function convert_markdown(md_text, dest_dom) {
|
||||
gfm: true
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
var ext = md_plug.pre;
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
@@ -281,7 +235,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
nodes[a].setAttribute('class', 'vis');
|
||||
nodes[a].className = 'vis';
|
||||
}
|
||||
|
||||
// todo-lists (should probably be a marked extension)
|
||||
@@ -297,7 +251,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
var clas = done ? 'done' : 'pend';
|
||||
var char = done ? 'Y' : 'N';
|
||||
|
||||
dom_li.setAttribute('class', 'task-list-item');
|
||||
dom_li.className = 'task-list-item';
|
||||
dom_li.style.listStyleType = 'none';
|
||||
var html = dom_li.innerHTML;
|
||||
dom_li.innerHTML =
|
||||
@@ -352,7 +306,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
ext = md_plug.post;
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
@@ -471,11 +425,11 @@ function init_toc() {
|
||||
for (var a = 0; a < anchors.length; a++) {
|
||||
if (anchors[a].active) {
|
||||
anchors[a].active = false;
|
||||
links[a].setAttribute('class', '');
|
||||
links[a].className = '';
|
||||
}
|
||||
}
|
||||
anchors[hit].active = true;
|
||||
links[hit].setAttribute('class', 'act');
|
||||
links[hit].className = 'act';
|
||||
}
|
||||
|
||||
var pane_height = parseInt(getComputedStyle(dom_toc).height);
|
||||
|
||||
@@ -36,6 +36,11 @@
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
#mtw.single.editor,
|
||||
#mw.single.editor {
|
||||
width: calc(100% - 1em);
|
||||
left: .5em;
|
||||
}
|
||||
|
||||
|
||||
#mp {
|
||||
@@ -61,7 +66,7 @@
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
html.dark #mt {
|
||||
html.z #mt {
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
@@ -77,7 +82,7 @@ html.dark #mt {
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
html.z #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
@@ -102,7 +107,7 @@ html.dark #save.force-save {
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
html.z #helpbox {
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
|
||||
@@ -16,8 +16,7 @@ var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = mknod('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
var d = mknod('div', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = ebi('mtr');
|
||||
// hide behind the textarea (offsetTop is not computed if display:none)
|
||||
@@ -144,16 +143,16 @@ redraw = (function () {
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}
|
||||
function setsbs() {
|
||||
dom_wrap.setAttribute('class', '');
|
||||
dom_swrap.setAttribute('class', '');
|
||||
dom_wrap.className = '';
|
||||
dom_swrap.className = '';
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
dom_wrap.setAttribute('class', mode);
|
||||
dom_swrap.setAttribute('class', mode);
|
||||
dom_wrap.className = mode;
|
||||
dom_swrap.className = mode;
|
||||
onresize();
|
||||
}
|
||||
|
||||
@@ -255,10 +254,10 @@ function Modpoll() {
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = r.cb;
|
||||
xhr.onload = xhr.onerror = r.cb;
|
||||
xhr.send();
|
||||
};
|
||||
|
||||
@@ -268,9 +267,6 @@ function Modpoll() {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
console.log('modpoll err ' + this.status + ": " + this.responseText);
|
||||
return;
|
||||
@@ -309,7 +305,7 @@ var modpoll = new Modpoll();
|
||||
|
||||
|
||||
window.onbeforeunload = function (e) {
|
||||
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
|
||||
if ((ebi("save").className + '').indexOf('disabled') >= 0)
|
||||
return; //nice (todo)
|
||||
|
||||
e.preventDefault(); //ff
|
||||
@@ -321,7 +317,7 @@ window.onbeforeunload = function (e) {
|
||||
function save(e) {
|
||||
if (e) e.preventDefault();
|
||||
var save_btn = ebi("save"),
|
||||
save_cls = save_btn.getAttribute('class') + '';
|
||||
save_cls = save_btn.className + '';
|
||||
|
||||
if (save_cls.indexOf('disabled') >= 0)
|
||||
return toast.inf(2, "no changes");
|
||||
@@ -336,10 +332,10 @@ function save(e) {
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.onload = xhr.onerror = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
@@ -356,9 +352,6 @@ function save(e) {
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
@@ -397,10 +390,10 @@ function save_cb() {
|
||||
function run_savechk(lastmod, txt, btn, ntry) {
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = savechk_cb;
|
||||
xhr.onload = xhr.onerror = savechk_cb;
|
||||
xhr.lastmod = lastmod;
|
||||
xhr.txt = txt;
|
||||
xhr.btn = btn;
|
||||
@@ -409,9 +402,6 @@ function run_savechk(lastmod, txt, btn, ntry) {
|
||||
}
|
||||
|
||||
function savechk_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
@@ -518,6 +508,20 @@ function setsel(s) {
|
||||
}
|
||||
|
||||
|
||||
// cut/copy current line
|
||||
function md_cut(cut) {
|
||||
var s = linebounds();
|
||||
if (s.car != s.cdr)
|
||||
return;
|
||||
|
||||
dom_src.setSelectionRange(s.n1, s.n2 + 1, 'forward');
|
||||
setTimeout(function () {
|
||||
var i = cut ? s.n1 : s.car;
|
||||
dom_src.setSelectionRange(i, i, 'forward');
|
||||
}, 1);
|
||||
}
|
||||
|
||||
|
||||
// indent/dedent
|
||||
function md_indent(dedent) {
|
||||
var s = getsel(),
|
||||
@@ -678,7 +682,7 @@ function reLastIndexOf(txt, ptn, end) {
|
||||
// table formatter
|
||||
function fmt_table(e) {
|
||||
if (e) e.preventDefault();
|
||||
//dom_tbox.setAttribute('class', '');
|
||||
//dom_tbox.className = '';
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart,
|
||||
@@ -829,7 +833,7 @@ function fmt_table(e) {
|
||||
// show unicode
|
||||
function mark_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
dom_tbox.className = '';
|
||||
|
||||
var txt = dom_src.value,
|
||||
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
|
||||
@@ -964,6 +968,10 @@ var set_lno = (function () {
|
||||
md_p_jump(dn);
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "KeyX" || ev.code == "KeyC") {
|
||||
md_cut(ev.code == "KeyX");
|
||||
return true; //sic
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (ev.code == "Tab" || kc == 9) {
|
||||
@@ -989,14 +997,14 @@ var set_lno = (function () {
|
||||
|
||||
ebi('tools').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var is_open = dom_tbox.getAttribute('class') != 'open';
|
||||
dom_tbox.setAttribute('class', is_open ? 'open' : '');
|
||||
var is_open = dom_tbox.className != 'open';
|
||||
dom_tbox.className = is_open ? 'open' : '';
|
||||
};
|
||||
|
||||
|
||||
ebi('help').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
dom_tbox.className = '';
|
||||
|
||||
var dom = ebi('helpbox');
|
||||
var dtxt = dom.getElementsByTagName('textarea');
|
||||
|
||||
@@ -84,24 +84,24 @@ html .editor-toolbar>button.save.force-save {
|
||||
|
||||
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
html.z .mdo,
|
||||
html.z .CodeMirror {
|
||||
border-color: #222;
|
||||
}
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark .CodeMirror {
|
||||
html.z,
|
||||
html.z body,
|
||||
html.z .CodeMirror {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark .CodeMirror-cursor {
|
||||
html.z .CodeMirror-cursor {
|
||||
border-color: #fff;
|
||||
}
|
||||
html.dark .CodeMirror-selected {
|
||||
html.z .CodeMirror-selected {
|
||||
box-shadow: 0 0 1px #0cf inset;
|
||||
}
|
||||
html.dark .CodeMirror-selected,
|
||||
html.dark .CodeMirror-selectedtext {
|
||||
html.z .CodeMirror-selected,
|
||||
html.z .CodeMirror-selectedtext {
|
||||
border-radius: .1em;
|
||||
background: #246;
|
||||
color: #fff;
|
||||
@@ -109,37 +109,37 @@ html.dark .CodeMirror-selectedtext {
|
||||
|
||||
|
||||
|
||||
html.dark #mn a {
|
||||
html.z #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
html.z #mn a:not(:last-child):after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark .editor-toolbar {
|
||||
html.z .editor-toolbar {
|
||||
border-color: #2c2c2c;
|
||||
background: #1c1c1c;
|
||||
}
|
||||
html.dark .editor-toolbar>i.separator {
|
||||
html.z .editor-toolbar>i.separator {
|
||||
border-left: 1px solid #444;
|
||||
border-right: 1px solid #111;
|
||||
}
|
||||
html.dark .editor-toolbar>button {
|
||||
html.z .editor-toolbar>button {
|
||||
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .editor-toolbar>button:hover {
|
||||
html.z .editor-toolbar>button:hover {
|
||||
color: #333;
|
||||
}
|
||||
html.dark .editor-toolbar>button.active {
|
||||
html.z .editor-toolbar>button.active {
|
||||
color: #333;
|
||||
border-color: #ec1;
|
||||
background: #c90;
|
||||
}
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
html.z .editor-toolbar::after,
|
||||
html.z .editor-toolbar::before {
|
||||
background: none;
|
||||
}
|
||||
|
||||
@@ -150,6 +150,6 @@ html.dark .editor-toolbar::before {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
html.z .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title>
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
@@ -24,27 +25,29 @@
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
var last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var l = localStorage,
|
||||
drk = l.lightmode != 1,
|
||||
drk = l.light != 1,
|
||||
f = function (e) {
|
||||
if (e) drk = !drk;
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
l.lightmode = drk? 0:1;
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
l.light = drk? 0:1;
|
||||
};
|
||||
f();
|
||||
return f;
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||
|
||||
@@ -114,10 +114,10 @@ function save(mde) {
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.onload = xhr.onerror = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
@@ -133,9 +133,6 @@ function save(mde) {
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
@@ -170,10 +167,10 @@ function save_cb() {
|
||||
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_chk;
|
||||
xhr.onload = xhr.onerror = save_chk;
|
||||
xhr.btn = this.save_btn;
|
||||
xhr.mde = this.mde;
|
||||
xhr.txt = this.txt;
|
||||
@@ -182,9 +179,6 @@ function save_cb() {
|
||||
}
|
||||
|
||||
function save_chk() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
|
||||
@@ -2,48 +2,49 @@
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="box">
|
||||
|
||||
{%- if h1 %}
|
||||
<h1>{{ h1 }}</h1>
|
||||
{%- endif %}
|
||||
|
||||
{%- if h2 %}
|
||||
<h2>{{ h2 }}</h2>
|
||||
{%- endif %}
|
||||
|
||||
{%- if p %}
|
||||
<p>{{ p }}</p>
|
||||
{%- endif %}
|
||||
<div id="box">
|
||||
|
||||
{%- if pre %}
|
||||
<pre>{{ pre }}</pre>
|
||||
{%- endif %}
|
||||
{%- if h1 %}
|
||||
<h1>{{ h1 }}</h1>
|
||||
{%- endif %}
|
||||
|
||||
{%- if html %}
|
||||
{{ html }}
|
||||
{%- endif %}
|
||||
{%- if h2 %}
|
||||
<h2>{{ h2 }}</h2>
|
||||
{%- endif %}
|
||||
|
||||
{%- if click %}
|
||||
<script>document.getElementsByTagName("a")[0].click()</script>
|
||||
{%- endif %}
|
||||
</div>
|
||||
{%- if p %}
|
||||
<p>{{ p }}</p>
|
||||
{%- endif %}
|
||||
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
{%- if pre %}
|
||||
<pre>{{ pre }}</pre>
|
||||
{%- endif %}
|
||||
|
||||
{%- if html %}
|
||||
{{ html }}
|
||||
{%- endif %}
|
||||
|
||||
{%- if click %}
|
||||
<script>document.getElementsByTagName("a")[0].click()</script>
|
||||
{%- endif %}
|
||||
</div>
|
||||
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -1,9 +1,7 @@
|
||||
html, body, #wrap {
|
||||
html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
@@ -37,7 +35,8 @@ a+a {
|
||||
float: right;
|
||||
margin: -.2em 0 0 .5em;
|
||||
}
|
||||
.logout {
|
||||
.logout,
|
||||
a.r {
|
||||
color: #c04;
|
||||
border-color: #c7a;
|
||||
}
|
||||
@@ -78,27 +77,32 @@ table {
|
||||
margin-top: .3em;
|
||||
text-align: right;
|
||||
}
|
||||
blockquote {
|
||||
margin: 0 0 1.6em .6em;
|
||||
padding: .7em 1em 0 1em;
|
||||
border-left: .3em solid rgba(128,128,128,0.5);
|
||||
border-radius: 0 0 0 .25em;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark #wrap {
|
||||
html.z {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark h1 {
|
||||
html.z h1 {
|
||||
border-color: #777;
|
||||
}
|
||||
html.dark a {
|
||||
html.z a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.dark .logout {
|
||||
html.z .logout,
|
||||
html.z a.r {
|
||||
background: #804;
|
||||
border-color: #c28;
|
||||
}
|
||||
html.dark input {
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
border: 1px solid #c2c;
|
||||
@@ -107,6 +111,12 @@ html.dark input {
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
}
|
||||
html.dark .num {
|
||||
html.z .num {
|
||||
border-color: #777;
|
||||
}
|
||||
|
||||
|
||||
html.bz {
|
||||
color: #bbd;
|
||||
background: #11121d;
|
||||
}
|
||||
|
||||
@@ -2,93 +2,109 @@
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a href="/?h" class="refresh">refresh</a>
|
||||
<div id="wrap">
|
||||
<a id="a" href="/?h" class="refresh">refresh</a>
|
||||
|
||||
{%- if this.uname == '*' %}
|
||||
<p>howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a href="/?pw=x" class="logout">logout</a>
|
||||
<p>welcome back, <strong>{{ this.uname }}</strong></p>
|
||||
{%- endif %}
|
||||
{%- if this.uname == '*' %}
|
||||
<p id="b">howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a id="c" href="/?pw=x" class="logout">logout</a>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
<div id="msg">
|
||||
{{ msg }}
|
||||
</div>
|
||||
{%- endif %}
|
||||
{%- if msg %}
|
||||
<div id="msg">
|
||||
{{ msg }}
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
<table><tr><td> <!-- hehehe -->
|
||||
<table class="num">
|
||||
<tr><td>scanning</td><td>{{ scanning }}</td></tr>
|
||||
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
|
||||
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
|
||||
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
|
||||
</table>
|
||||
</td><td>
|
||||
<table class="vols">
|
||||
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
|
||||
<tbody>
|
||||
{% for mp in avol %}
|
||||
{%- if mp in vstate and vstate[mp] %}
|
||||
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</td></tr></table>
|
||||
<div class="btns">
|
||||
<a href="/?stack" tt="shows the state of all active threads">dump stack</a>
|
||||
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
<table><tr><td> <!-- hehehe -->
|
||||
<table class="num">
|
||||
<tr><td>scanning</td><td>{{ scanning }}</td></tr>
|
||||
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
|
||||
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
|
||||
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
|
||||
<tr><td>db-act</td><td id="u">{{ dbwt }}</td></tr>
|
||||
</table>
|
||||
</td><td>
|
||||
<table class="vols">
|
||||
<thead><tr><th>vol</th><th id="t">action</th><th>status</th></tr></thead>
|
||||
<tbody>
|
||||
{% for mp in avol %}
|
||||
{%- if mp in vstate and vstate[mp] %}
|
||||
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</td></tr></table>
|
||||
<div class="btns">
|
||||
<a id="d" href="/?stack">dump stack</a>
|
||||
<a id="e" href="/?reload=cfg">reload cfg</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if rvol %}
|
||||
<h1>you can browse these:</h1>
|
||||
<ul>
|
||||
{% for mp in rvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
{%- if rvol %}
|
||||
<h1 id="f">you can browse:</h1>
|
||||
<ul>
|
||||
{% for mp in rvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
{%- if wvol %}
|
||||
<h1>you can upload to:</h1>
|
||||
<ul>
|
||||
{% for mp in wvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
{%- if wvol %}
|
||||
<h1 id="g">you can upload to:</h1>
|
||||
<ul>
|
||||
{% for mp in wvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1>login for more:</h1>
|
||||
<ul>
|
||||
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
<h1 id="cc">client config:</h1>
|
||||
<ul>
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
|
||||
<li><a id="k" href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
<ul>
|
||||
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
<script>
|
||||
|
||||
if (localStorage.lightmode != 1)
|
||||
document.documentElement.setAttribute("class", "dark");
|
||||
var lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script>tt.init();</script>
|
||||
<script src="/.cpr/splash.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
copyparty/web/splash.js (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
var Ls = {
|
||||
"nor": {
|
||||
"a1": "oppdater",
|
||||
"b1": "halloien <small>(du er ikke logget inn)</small>",
|
||||
"c1": "logg ut",
|
||||
"d1": "tilstand",
|
||||
"d2": "vis tilstanden til alle tråder",
|
||||
"e1": "last innst.",
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer",
|
||||
"f1": "du kan betrakte:",
|
||||
"g1": "du kan laste opp til:",
|
||||
"cc1": "klient-konfigurasjon",
|
||||
"h1": "skru av k304",
|
||||
"i1": "skru på k304",
|
||||
"j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
|
||||
"k1": "nullstill innstillinger",
|
||||
"l1": "logg inn:",
|
||||
"m1": "velkommen tilbake,",
|
||||
"n1": "404: filen finnes ikke ┐( ´ -`)┌",
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="/?h">gå hjem</a>',
|
||||
"p1": "403: tilgang nektet ~┻━┻",
|
||||
"q1": 'du må logge inn eller <a href="/?h">gå hjem</a>',
|
||||
"r1": "gå hjem",
|
||||
".s1": "kartlegg",
|
||||
"t1": "handling",
|
||||
"u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes",
|
||||
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
|
||||
}
|
||||
},
|
||||
d = Ls[sread("lang") || lang];
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
i = k.slice(0, -1),
|
||||
o = QSA(i.startsWith('.') ? i : '#' + i);
|
||||
|
||||
for (var a = 0; a < o.length; a++)
|
||||
if (f == 1)
|
||||
o[a].innerHTML = d[k];
|
||||
else if (f == 2)
|
||||
o[a].setAttribute("tt", d[k]);
|
||||
}
|
||||
|
||||
tt.init();
|
||||
var o = QS('input[name="cppwd"]');
|
||||
if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
|
||||
o.focus();
|
||||
|
||||
o = ebi('u');
|
||||
if (o && /[0-9]+$/.exec(o.innerHTML))
|
||||
o.innerHTML = shumantime(o.innerHTML);
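The key naming in the `Ls` table above drives the small loop that follows it: the last character of each key decides what gets patched, and the rest of the key decides where. A hedged sketch of the convention (the "swe" language code and its strings are invented for illustration):

```js
// "a1"  -> innerHTML of the element with id="a"      (suffix 1 = visible text)
// "d2"  -> the "tt" tooltip attribute of id="d"      (suffix 2 = tooltip text)
// ".s1" -> innerHTML of every element with class "s" (leading dot = class selector)
Ls["swe"] = {
    "a1": "uppdatera",                // would replace the text of the refresh link
    "d2": "visa alla aktiva trådar",  // would replace the tooltip of the stack-dump link
};
// only applied when sread("lang") or the server-provided `lang` equals "swe"
```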
|
||||
@@ -11,6 +11,7 @@ html {
|
||||
max-width: 34em;
|
||||
max-width: min(34em, 90%);
|
||||
max-width: min(34em, calc(100% - 7em));
|
||||
color: #ddd;
|
||||
background: #333;
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #111;
|
||||
@@ -74,6 +75,9 @@ html {
|
||||
margin-right: -1.2em;
|
||||
padding-right: .7em;
|
||||
}
|
||||
#toast.r #toastb {
|
||||
text-align: right;
|
||||
}
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
@@ -116,6 +120,20 @@ html {
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#tth {
|
||||
color: #fff;
|
||||
background: #111;
|
||||
font-size: .9em;
|
||||
padding: 0 .26em;
|
||||
line-height: .97em;
|
||||
border-radius: 1em;
|
||||
position: absolute;
|
||||
display: none;
|
||||
}
|
||||
#tth.act {
|
||||
display: block;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt.b {
|
||||
padding: 0 2em;
|
||||
border-radius: .5em;
|
||||
@@ -133,7 +151,8 @@ html {
|
||||
}
|
||||
#modalc code,
|
||||
#tt code {
|
||||
background: #3c3c3c;
|
||||
color: #eee;
|
||||
background: #444;
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
@@ -142,22 +161,47 @@ html {
|
||||
#tt em {
|
||||
color: #f6a;
|
||||
}
|
||||
html.light #tt {
|
||||
html.y #tt {
|
||||
color: #333;
|
||||
background: #fff;
|
||||
border-color: #888 #000 #777 #000;
|
||||
}
|
||||
html.light #tt,
|
||||
html.light #toast {
|
||||
html.bz #tt {
|
||||
background: #202231;
|
||||
border-color: #3b3f58;
|
||||
}
|
||||
html.y #tt,
|
||||
html.y #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
#modalc code,
|
||||
html.light #tt code {
|
||||
html.y #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #tt em {
|
||||
#modalc code {
|
||||
color: #060;
|
||||
background: transparent;
|
||||
border: 1px solid #ccc;
|
||||
}
|
||||
html.y #tt em {
|
||||
color: #d38;
|
||||
}
|
||||
html.y #tth {
|
||||
color: #000;
|
||||
background: #fff;
|
||||
}
|
||||
#cf_frame {
|
||||
position: fixed;
|
||||
z-index: 573;
|
||||
top: 3em;
|
||||
left: 50%;
|
||||
width: 40em;
|
||||
height: 30em;
|
||||
margin-left: -20.2em;
|
||||
border-radius: .4em;
|
||||
border: .4em solid var(--fg);
|
||||
box-shadow: 0 2em 4em 1em var(--bg-max);
|
||||
}
|
||||
#modal {
|
||||
position: fixed;
|
||||
overflow: auto;
|
||||
@@ -249,24 +293,28 @@ html.light #tt em {
|
||||
max-width: 24em;
|
||||
}
|
||||
*:focus,
|
||||
*:focus+label,
|
||||
#pctl *:focus,
|
||||
.btn:focus {
|
||||
box-shadow: 0 .1em .2em #fc0 inset;
|
||||
outline: #fc0 solid .1em;
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.light *:focus,
|
||||
html.light #pctl *:focus,
|
||||
html.light .btn:focus {
|
||||
html.y *:focus,
|
||||
html.y *:focus+label,
|
||||
html.y #pctl *:focus,
|
||||
html.y .btn:focus {
|
||||
box-shadow: 0 .1em .2em #037 inset;
|
||||
outline: #037 solid .1em;
|
||||
}
|
||||
input[type="text"]:focus,
|
||||
input:not([type]):focus,
|
||||
textarea:focus {
|
||||
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
|
||||
}
|
||||
html.light input[type="text"]:focus,
|
||||
html.light input:not([type]):focus,
|
||||
html.light textarea:focus {
|
||||
html.y input[type="text"]:focus,
|
||||
html.y input:not([type]):focus,
|
||||
html.y textarea:focus {
|
||||
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
|
||||
}
|
||||
|
||||
@@ -335,13 +383,22 @@ html.light textarea:focus {
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
padding-left: 1em;
|
||||
}
|
||||
.mdo ul ul,
|
||||
.mdo ul ol,
|
||||
.mdo ol ul,
|
||||
.mdo ol ol {
|
||||
padding-left: 2em;
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
.mdo ul>li {
|
||||
margin: .7em 0;
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo ol>li {
|
||||
margin: .7em 0 .7em 2em;
|
||||
}
|
||||
.mdo strong {
|
||||
color: #000;
|
||||
}
|
||||
@@ -388,7 +445,7 @@ html.light textarea:focus {
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
html.light .mdo a,
|
||||
html.y .mdo a,
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
@@ -417,48 +474,58 @@ html.light textarea:focus {
|
||||
|
||||
|
||||
|
||||
html.dark .mdo a {
|
||||
html.z .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
html.z .mdo h1 a, html.z .mdo h4 a,
|
||||
html.z .mdo h2 a, html.z .mdo h5 a,
|
||||
html.z .mdo h3 a, html.z .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
html.z .mdo pre,
|
||||
html.z .mdo code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
html.z .mdo ul,
|
||||
html.z .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo strong {
|
||||
html.z .mdo strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
html.z .mdo p>em,
|
||||
html.z .mdo li>em,
|
||||
html.z .mdo td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark .mdo h1 {
|
||||
html.z .mdo h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark .mdo h2 {
|
||||
html.bz .mdo h1 {
|
||||
background: #202231;
|
||||
border: 1px solid #2d2f45;
|
||||
border-width: 0 0 .4em 0;
|
||||
}
|
||||
html.z .mdo h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark .mdo td,
|
||||
html.dark .mdo th {
|
||||
html.bz .mdo h2,
|
||||
html.bz .mdo h3 {
|
||||
background: transparent;
|
||||
border-color: #3b3f58;
|
||||
}
|
||||
html.z .mdo td,
|
||||
html.z .mdo th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo blockquote {
|
||||
html.z .mdo blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -6,16 +6,42 @@ if (!window['console'])
|
||||
};
|
||||
|
||||
|
||||
var is_touch = 'ontouchstart' in window,
|
||||
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
|
||||
ANDROID = /android/i.test(navigator.userAgent),
|
||||
var wah = '',
|
||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||
HTTPS = (window.location + '').indexOf('https:') === 0,
|
||||
TOUCH = 'ontouchstart' in window,
|
||||
MOBILE = TOUCH,
|
||||
CHROME = !!window.chrome,
|
||||
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
|
||||
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
|
||||
|
||||
|
||||
try {
|
||||
if (navigator.userAgentData.mobile)
|
||||
MOBILE = true;
|
||||
|
||||
if (navigator.userAgentData.platform == 'Windows')
|
||||
WINDOWS = true;
|
||||
|
||||
if (navigator.userAgentData.brands.some(function (d) { return d.brand == 'Chromium' }))
|
||||
CHROME = true;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
|
||||
var ebi = document.getElementById.bind(document),
|
||||
QS = document.querySelector.bind(document),
|
||||
QSA = document.querySelectorAll.bind(document),
|
||||
mknod = document.createElement.bind(document);
|
||||
XHR = XMLHttpRequest;
|
||||
|
||||
|
||||
function mknod(et, eid) {
|
||||
var ret = document.createElement(et);
|
||||
if (eid)
|
||||
ret.id = eid;
|
||||
|
||||
return ret;
|
||||
}
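These two small utility changes (the `XHR` alias and the optional `eid` argument to `mknod`) explain the mechanical replacements seen throughout the rest of this diff; a before/after sketch:

```js
// with the helpers above in place, call sites shrink from two statements to one:
var xhr = new XHR();                  // same object as `new XMLHttpRequest()`
var box = mknod('div', 'toast');      // was: mknod('div') + box.setAttribute('id', 'toast')
document.body.appendChild(box);
```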
|
||||
|
||||
|
||||
function qsr(sel) {
|
||||
@@ -84,9 +110,18 @@ catch (ex) {
|
||||
}
|
||||
var crashed = false, ignexd = {};
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
if ((msg + '').indexOf('ResizeObserver') !== -1)
|
||||
if ((msg + '').indexOf('ResizeObserver') + 1)
|
||||
return; // chrome issue 809574 (benign, from <video>)
|
||||
|
||||
if ((msg + '').indexOf('l2d.js') + 1)
|
||||
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
|
||||
|
||||
if (!/\.js($|\?)/.exec('' + url))
|
||||
return; // chrome debugger
|
||||
|
||||
if ((url + '').indexOf(' > eval') + 1)
|
||||
return; // md timer
|
||||
|
||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||
if (ignexd[ekey] || crashed)
|
||||
return;
|
||||
@@ -149,8 +184,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
try {
|
||||
var exbox = ebi('exbox');
|
||||
if (!exbox) {
|
||||
exbox = mknod('div');
|
||||
exbox.setAttribute('id', 'exbox');
|
||||
exbox = mknod('div', 'exbox');
|
||||
document.body.appendChild(exbox);
|
||||
|
||||
var s = mknod('style');
|
||||
@@ -181,6 +215,7 @@ function ignex(all) {
|
||||
if (!all)
|
||||
window.onerror = vis_exh;
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
|
||||
function noop() { }
|
||||
@@ -210,6 +245,11 @@ function ev(e) {
|
||||
}
|
||||
|
||||
|
||||
function noope(e) {
|
||||
ev(e);
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith)
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
@@ -219,15 +259,15 @@ if (!String.prototype.endsWith)
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
|
||||
if (!String.startsWith)
|
||||
if (!String.prototype.startsWith)
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
|
||||
if (!String.trimEnd)
|
||||
if (!String.prototype.trimEnd)
|
||||
String.prototype.trimEnd = String.prototype.trimRight = function () {
|
||||
return this.replace(/[ \t\r\n]+$/m, '');
|
||||
return this.replace(/[ \t\r\n]+$/, '');
|
||||
};
|
||||
|
||||
if (!Element.prototype.matches)
|
||||
@@ -246,6 +286,14 @@ if (!Element.prototype.closest)
|
||||
} while (el !== null && el.nodeType === 1);
|
||||
};
|
||||
|
||||
if (!String.prototype.format)
|
||||
String.prototype.format = function () {
|
||||
var args = arguments;
|
||||
return this.replace(/{(\d+)}/g, function (match, number) {
|
||||
return typeof args[number] != 'undefined' ?
|
||||
args[number] : match;
|
||||
});
|
||||
};
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
@@ -286,15 +334,19 @@ function crc32(str) {
|
||||
|
||||
|
||||
function clmod(el, cls, add) {
|
||||
if (!el)
|
||||
return false;
|
||||
|
||||
if (el.classList) {
|
||||
var have = el.classList.contains(cls);
|
||||
if (add == 't')
|
||||
add = !have;
|
||||
|
||||
if (add != have)
|
||||
el.classList[add ? 'add' : 'remove'](cls);
|
||||
if (!add == !have)
|
||||
return false;
|
||||
|
||||
return;
|
||||
el.classList[add ? 'add' : 'remove'](cls);
|
||||
return true;
|
||||
}
|
||||
|
||||
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
|
||||
@@ -305,28 +357,75 @@ function clmod(el, cls, add) {
|
||||
|
||||
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
|
||||
|
||||
if (n1 != n2)
|
||||
el.className = n2;
|
||||
if (n1 == n2)
|
||||
return false;
|
||||
|
||||
el.className = n2;
|
||||
return true;
|
||||
}
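`clmod()` now reports whether the class list actually changed instead of returning nothing, so callers can trigger side effects only on real transitions; the tooltip code further down in this diff relies on exactly that:

```js
// 'show' is only removed if it was present, so the timestamp is only updated
// on a real visible -> hidden transition (taken from the tt.hide hunk below)
if (clmod(r.tt, 'show'))
    r.lvis = Date.now();

// the 't' toggle mode is unchanged: it always flips the class, so it always returns true
clmod(btn, 'active', 't');   // (btn is a hypothetical element)
```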
|
||||
|
||||
|
||||
function clgot(el, cls) {
|
||||
if (!el)
|
||||
return;
|
||||
|
||||
if (el.classList)
|
||||
return el.classList.contains(cls);
|
||||
|
||||
var lst = (el.getAttribute('class') + '').split(/ /g);
|
||||
var lst = (el.className + '').split(/ /g);
|
||||
return has(lst, cls);
|
||||
}
|
||||
|
||||
|
||||
var ANIM = true;
|
||||
if (window.matchMedia) {
|
||||
var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
|
||||
mq.onchange = function () {
|
||||
ANIM = !mq.matches;
|
||||
};
|
||||
ANIM = !mq.matches;
|
||||
}
|
||||
|
||||
|
||||
function showsort(tab) {
|
||||
var v, vn, v1, v2, th = tab.tHead,
|
||||
sopts = jread('fsort', [["href", 1, ""]]);
|
||||
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
|
||||
for (var a = sopts.length - 1; a >= 0; a--) {
|
||||
if (!sopts[a][0])
|
||||
continue;
|
||||
|
||||
v2 = v1;
|
||||
v1 = sopts[a];
|
||||
}
|
||||
|
||||
v = [v1, v2];
|
||||
vn = [v1 ? v1[0] : '', v2 ? v2[0] : ''];
|
||||
|
||||
var ga = QSA('#ghead a[s]');
|
||||
for (var a = 0; a < ga.length; a++)
|
||||
ga[a].className = '';
|
||||
|
||||
for (var a = 0; a < th.length; a++) {
|
||||
var n = vn.indexOf(th[a].getAttribute('name')),
|
||||
cl = n < 0 ? ' ' : ' s' + n + (v[n][1] > 0 ? ' ' : 'r ');
|
||||
|
||||
th[a].className = th[a].className.replace(/ *s[01]r? */, ' ') + cl;
|
||||
if (n + 1) {
|
||||
ga = QS('#ghead a[s="' + vn[n] + '"]');
|
||||
if (ga)
|
||||
ga.className = cl;
|
||||
}
|
||||
}
|
||||
}
|
||||
function sortTable(table, col, cb) {
|
||||
var tb = table.tBodies[0],
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
|
||||
th[col].className += ' sort' + reverse;
|
||||
i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;
|
||||
|
||||
var stype = th[col].getAttribute('sort');
|
||||
try {
|
||||
var nrules = [], rules = jread("fsort", []);
|
||||
@@ -344,6 +443,7 @@ function sortTable(table, col, cb) {
|
||||
break;
|
||||
}
|
||||
jwrite("fsort", nrules);
|
||||
try { showsort(table); } catch (ex) { }
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to persist sort rules, resetting: " + ex);
|
||||
@@ -375,6 +475,16 @@ function sortTable(table, col, cb) {
|
||||
}
|
||||
return reverse * (a.localeCompare(b));
|
||||
});
|
||||
if (sread('dir1st') !== '0') {
|
||||
var r1 = [], r2 = [];
|
||||
for (var i = 0; i < tr.length; i++) {
|
||||
var cell = tr[vl[i][1]].cells[1],
|
||||
href = cell.getAttribute('sortv') || cell.textContent.trim();
|
||||
|
||||
(href.split('?')[0].slice(-1) == '/' ? r1 : r2).push(vl[i]);
|
||||
}
|
||||
vl = r1.concat(r2);
|
||||
}
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
|
||||
if (cb) cb();
|
||||
}
|
||||
@@ -392,7 +502,7 @@ function makeSortable(table, cb) {
|
||||
}
|
||||
|
||||
|
||||
function linksplit(rp) {
|
||||
function linksplit(rp, id) {
|
||||
var ret = [],
|
||||
apath = '/',
|
||||
q = null;
|
||||
@@ -403,7 +513,7 @@ function linksplit(rp) {
|
||||
q = '?' + q[1];
|
||||
}
|
||||
|
||||
if (rp && rp.charAt(0) == '/')
|
||||
if (rp && rp[0] == '/')
|
||||
rp = rp.slice(1);
|
||||
|
||||
while (rp) {
|
||||
@@ -422,8 +532,13 @@ function linksplit(rp) {
|
||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||
}
|
||||
|
||||
if (!rp && q)
|
||||
link += q;
|
||||
if (!rp) {
|
||||
if (q)
|
||||
link += q;
|
||||
|
||||
if (id)
|
||||
link += '" id="' + id;
|
||||
}
|
||||
|
||||
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||
apath += link;
|
||||
@@ -553,7 +668,7 @@ function humansize(b, terse) {
|
||||
|
||||
function humantime(v) {
|
||||
if (v >= 60 * 60 * 24)
|
||||
return v;
|
||||
return shumantime(v);
|
||||
|
||||
try {
|
||||
return /.*(..:..:..).*/.exec(new Date(v * 1000).toUTCString())[1];
|
||||
@@ -564,12 +679,39 @@ function humantime(v) {
|
||||
}
|
||||
|
||||
|
||||
function shumantime(v) {
|
||||
if (v < 10)
|
||||
return f2f(v, 2) + 's';
|
||||
if (v < 60)
|
||||
return f2f(v, 1) + 's';
|
||||
|
||||
v = parseInt(v);
|
||||
var st = [[60 * 60 * 24, 60 * 60, 'd'], [60 * 60, 60, 'h'], [60, 1, 'm']];
|
||||
|
||||
for (var a = 0; a < st.length; a++) {
|
||||
var m1 = st[a][0],
|
||||
m2 = st[a][1],
|
||||
ch = st[a][2];
|
||||
|
||||
if (v < m1)
|
||||
continue;
|
||||
|
||||
var v1 = parseInt(v / m1),
|
||||
v2 = ('0' + parseInt((v % m1) / m2)).slice(-2);
|
||||
|
||||
return v1 + ch + (v1 >= 10 ? '' : v2);
|
||||
}
|
||||
}
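`shumantime()` produces the compact durations that the splash-page tooltip documents ("17d", "1h23", "4m56"); a few sample outputs as I read the code, assuming `f2f` is a fixed-decimals formatter defined elsewhere in util.js:

```js
shumantime(4.56);       // "4.56s" -> under 10s: two decimals
shumantime(45);         // "45.0s" -> under 60s: one decimal (via f2f)
shumantime(296);        // "4m56"  -> 4 minutes 56 seconds
shumantime(5012);       // "1h23"  -> 1 hour 23 minutes
shumantime(17 * 86400); // "17d"   -> the minor unit is dropped once the major part reaches 10
```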
|
||||
|
||||
|
||||
function clamp(v, a, b) {
|
||||
return Math.min(Math.max(v, a), b);
|
||||
}
|
||||
|
||||
|
||||
function has(haystack, needle) {
|
||||
try { return haystack.includes(needle); } catch (ex) { }
|
||||
|
||||
for (var a = 0; a < haystack.length; a++)
|
||||
if (haystack[a] == needle)
|
||||
return true;
|
||||
@@ -742,6 +884,14 @@ function sethash(hv) {
|
||||
}
|
||||
}
|
||||
|
||||
function dl_file(url) {
|
||||
console.log('DL [%s]', url);
|
||||
var o = mknod('a');
|
||||
o.setAttribute('href', url);
|
||||
o.setAttribute('download', '');
|
||||
o.click();
|
||||
}
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
@@ -781,14 +931,17 @@ var timer = (function () {
|
||||
|
||||
var tt = (function () {
|
||||
var r = {
|
||||
"tt": mknod("div"),
|
||||
"tt": mknod("div", 'tt'),
|
||||
"th": mknod("div", 'tth'),
|
||||
"en": true,
|
||||
"el": null,
|
||||
"skip": false
|
||||
"skip": false,
|
||||
"lvis": 0
|
||||
};
|
||||
|
||||
r.tt.setAttribute('id', 'tt');
|
||||
r.th.innerHTML = '?';
|
||||
document.body.appendChild(r.tt);
|
||||
document.body.appendChild(r.th);
|
||||
|
||||
var prev = null;
|
||||
r.cshow = function () {
|
||||
@@ -798,19 +951,42 @@ var tt = (function () {
|
||||
prev = this;
|
||||
};
|
||||
|
||||
r.show = function () {
|
||||
if (r.skip) {
|
||||
r.skip = false;
|
||||
var tev;
|
||||
r.dshow = function (e) {
|
||||
clearTimeout(tev);
|
||||
if (!r.getmsg(this))
|
||||
return;
|
||||
}
|
||||
if (QS('body.bbox-open'))
|
||||
|
||||
if (Date.now() - r.lvis < 400)
|
||||
return r.show.bind(this)();
|
||||
|
||||
tev = setTimeout(r.show.bind(this), 800);
|
||||
if (TOUCH)
|
||||
return;
|
||||
|
||||
this.addEventListener('mousemove', r.move);
|
||||
clmod(r.th, 'act', 1);
|
||||
r.move(e);
|
||||
};
|
||||
|
||||
r.getmsg = function (el) {
|
||||
if (IPHONE && QS('body.bbox-open'))
|
||||
return;
|
||||
|
||||
var cfg = sread('tooltips');
|
||||
if (cfg !== null && cfg != '1')
|
||||
return;
|
||||
|
||||
var msg = this.getAttribute('tt');
|
||||
return el.getAttribute('tt');
|
||||
};
|
||||
|
||||
r.show = function () {
|
||||
clearTimeout(tev);
|
||||
if (r.skip) {
|
||||
r.skip = false;
|
||||
return;
|
||||
}
|
||||
var msg = r.getmsg(this);
|
||||
if (!msg)
|
||||
return;
|
||||
|
||||
@@ -824,6 +1000,7 @@ var tt = (function () {
|
||||
if (dir.indexOf('u') + 1) top = false;
|
||||
if (dir.indexOf('d') + 1) top = true;
|
||||
|
||||
clmod(r.th, 'act');
|
||||
clmod(r.tt, 'b', big);
|
||||
r.tt.style.left = '0';
|
||||
r.tt.style.top = '0';
|
||||
@@ -849,14 +1026,27 @@ var tt = (function () {
|
||||
|
||||
r.hide = function (e) {
|
||||
ev(e);
|
||||
clearTimeout(tev);
|
||||
window.removeEventListener('scroll', r.hide);
|
||||
clmod(r.tt, 'show');
|
||||
|
||||
clmod(r.tt, 'b');
|
||||
clmod(r.th, 'act');
|
||||
if (clmod(r.tt, 'show'))
|
||||
r.lvis = Date.now();
|
||||
|
||||
if (r.el)
|
||||
r.el.removeEventListener('mouseleave', r.hide);
|
||||
|
||||
if (e && e.target)
|
||||
e.target.removeEventListener('mousemove', r.move);
|
||||
};
|
||||
|
||||
if (is_touch && IPHONE) {
|
||||
r.move = function (e) {
|
||||
r.th.style.left = (e.pageX + 12) + 'px';
|
||||
r.th.style.top = (e.pageY + 12) + 'px';
|
||||
};
|
||||
|
||||
if (IPHONE) {
|
||||
var f1 = r.show,
|
||||
f2 = r.hide,
|
||||
q = [];
|
||||
@@ -882,14 +1072,14 @@ var tt = (function () {
|
||||
|
||||
r.att = function (ctr) {
|
||||
var _cshow = r.en ? r.cshow : null,
|
||||
_show = r.en ? r.show : null,
|
||||
_dshow = r.en ? r.dshow : null,
|
||||
_hide = r.en ? r.hide : null,
|
||||
o = ctr.querySelectorAll('*[tt]');
|
||||
|
||||
for (var a = o.length - 1; a >= 0; a--) {
|
||||
o[a].onfocus = _cshow;
|
||||
o[a].onblur = _hide;
|
||||
o[a].onmouseenter = _show;
|
||||
o[a].onmouseenter = _dshow;
|
||||
o[a].onmouseleave = _hide;
|
||||
}
|
||||
r.hide();
|
||||
@@ -918,9 +1108,8 @@ var toast = (function () {
|
||||
var r = {},
|
||||
te = null,
|
||||
scrolling = false,
|
||||
obj = mknod('div');
|
||||
obj = mknod('div', 'toast');
|
||||
|
||||
obj.setAttribute('id', 'toast');
|
||||
document.body.appendChild(obj);
|
||||
r.visible = false;
|
||||
r.txt = null;
|
||||
@@ -1003,8 +1192,7 @@ var modal = (function () {
|
||||
r.busy = false;
|
||||
|
||||
r.show = function (html) {
|
||||
o = mknod('div');
|
||||
o.setAttribute('id', 'modal');
|
||||
o = mknod('div', 'modal');
|
||||
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
|
||||
document.body.appendChild(o);
|
||||
document.addEventListener('keydown', onkey);
|
||||
@@ -1059,7 +1247,8 @@ var modal = (function () {
|
||||
return;
|
||||
|
||||
setTimeout(function () {
|
||||
ebi('modal-ok').focus();
|
||||
if (ctr = ebi('modal-ok'))
|
||||
ctr.focus();
|
||||
}, 20);
|
||||
ev(e);
|
||||
}
|
||||
@@ -1080,6 +1269,9 @@ var modal = (function () {
|
||||
return ok();
|
||||
}
|
||||
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||
|
||||
if (k == 'Escape')
|
||||
return ng();
|
||||
}
|
||||
@@ -1229,6 +1421,49 @@ if (ebi('repl'))
|
||||
ebi('repl').onclick = repl;
|
||||
|
||||
|
||||
var md_plug = {};
|
||||
var md_plug_err = function (ex, js) {
|
||||
if (ex)
|
||||
console.log(ex, js);
|
||||
};
|
||||
function load_md_plug(md_text, plug_type) {
|
||||
if (!have_emp)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
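`load_md_plug()` looks for a fenced block named `copyparty_<plug_type>` whose body is the inside of a JS object literal, strips it from the markdown, and eval()s it once per unique body (and only when `have_emp` is set). A hedged sketch of the input it expects; the plug type "hello" and both functions are invented, and the backticks are split only to keep this example readable:

```js
var fence = '\n``' + '`copyparty_hello\n',   // i.e. a fenced block tagged copyparty_hello
    body  = 'ctor: function () { console.log("runs once on load"); },\n' +
            'greet: function () { return "hi"; }',
    md_text = '# doc\n' + fence + body + '\n``' + '`\n\nregular markdown continues...';

// load_md_plug(md_text, 'hello') returns the markdown with the block removed,
// wraps the body in `const x = { ... }; x;`, eval()s it, calls x.ctor() once,
// and caches the result in md_plug['hello'] so unchanged blocks are not re-evaluated.
var md = load_md_plug(md_text, 'hello');
```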
|
||||
|
||||
|
||||
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
|
||||
|
||||
|
||||
@@ -1254,12 +1489,24 @@ var favico = (function () {
|
||||
var b64;
|
||||
try {
|
||||
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
|
||||
//console.log('f1');
|
||||
}
|
||||
catch (ex) {
|
||||
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||
function x(m, v) { return String.fromCharCode('0x' + v); });
|
||||
|
||||
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||
catch (e1) {
|
||||
try {
|
||||
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||
function x(m, v) { return String.fromCharCode('0x' + v); })));
|
||||
//console.log('f2');
|
||||
}
|
||||
catch (e2) {
|
||||
try {
|
||||
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||
//console.log('f3');
|
||||
}
|
||||
catch (e3) {
|
||||
//console.log('fe');
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
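The nested fallbacks above exist because `btoa()` only accepts Latin-1 input; a quick sketch of why each level is needed:

```js
// why the try/catch chain above is needed:
btoa('ok');                                  // fine: plain ASCII
// btoa('📝');                               // throws InvalidCharacterError (outside Latin-1)
btoa(unescape(encodeURIComponent('📝')));    // works: the UTF-8 bytes are squeezed into Latin-1
```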
|
||||
|
||||
if (!r.tag) {
|
||||
@@ -1272,12 +1519,49 @@ var favico = (function () {
|
||||
|
||||
r.init = function () {
|
||||
clearTimeout(r.to);
|
||||
scfg_bind(r, 'txt', 'icot', '', r.upd);
|
||||
scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
|
||||
scfg_bind(r, 'bg', 'icob', '222', r.upd);
|
||||
var dv = (window.dfavico || '').trim().split(/ +/),
|
||||
fg = dv.length < 2 ? 'fc5' : dv[1].toLowerCase() == 'none' ? '' : dv[1],
|
||||
bg = dv.length < 3 ? '222' : dv[2].toLowerCase() == 'none' ? '' : dv[2];
|
||||
|
||||
scfg_bind(r, 'txt', 'icot', dv[0], r.upd);
|
||||
scfg_bind(r, 'fg', 'icof', fg, r.upd);
|
||||
scfg_bind(r, 'bg', 'icob', bg, r.upd);
|
||||
r.upd();
|
||||
};
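My reading of `r.init()` above: the server template assigns `dfavico` a space-separated `"<text> <fg> <bg>"` string, and `scfg_bind` (presumably defined elsewhere in util.js) makes each part overridable from localStorage. A sketch with hypothetical values:

```js
// hypothetical template output; the splash/md templates set this via dfavico = "{{ favico }}"
window.dfavico = "p fc5 222";
// r.init() then splits it into: txt "p", fg "fc5", bg "222"
// window.dfavico = "p none none"  -> "none" blanks both colours
// window.dfavico = ""             -> no default text; colours fall back to fc5 / 222
```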
|
||||
|
||||
r.to = setTimeout(r.init, 100);
|
||||
return r;
|
||||
})();
|
||||
|
||||
|
||||
var cf_cha_t = 0;
|
||||
function xhrchk(xhr, prefix, e404) {
|
||||
if (xhr.status < 400 && xhr.status >= 200)
|
||||
return true;
|
||||
|
||||
if (xhr.status == 403)
|
||||
return toast.err(0, prefix + (window.L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"));
|
||||
|
||||
if (xhr.status == 404)
|
||||
return toast.err(0, prefix + e404);
|
||||
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
fun = toast.err;
|
||||
|
||||
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
|
||||
var now = Date.now(), td = now - cf_cha_t;
|
||||
if (td < 15000)
|
||||
return;
|
||||
|
||||
cf_cha_t = now;
|
||||
errtxt = 'Clou' + wah + 'dflare protection kicked in\n\n<strong>trying to fix it...</strong>';
|
||||
fun = toast.warn;
|
||||
|
||||
qsr('#cf_frame');
|
||||
var fr = mknod('iframe', 'cf_frame');
|
||||
fr.src = '/?cf_challenge';
|
||||
document.body.appendChild(fr);
|
||||
}
|
||||
|
||||
return fun(0, prefix + xhr.status + ": " + errtxt);
|
||||
}
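A hedged usage sketch for `xhrchk()` (the folder-listing scenario and `render()` are made up): it is meant to be called from an XHR completion handler and returns true for 2xx/3xx responses; otherwise it raises a toast (403/404 get friendly messages, and a Cloudflare-challenge 503 additionally injects the `#cf_frame` iframe).

```js
function ls_cb() {
    if (!xhrchk(this, 'could not list the folder:\n\n', 'the folder does not exist'))
        return;                                  // error already shown as a toast

    render(JSON.parse(this.responseText));       // render() is hypothetical
}

var xhr = new XHR();
xhr.open('GET', '/?ls', true);                   // /?ls also appears in the shell notes below
xhr.onload = xhr.onerror = ls_cb;
xhr.send();
```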
|
||||
|
||||
copyparty/web/w.hash.js (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
"use strict";
|
||||
|
||||
|
||||
function hex2u8(txt) {
|
||||
return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
|
||||
}
|
||||
|
||||
|
||||
var subtle = null;
|
||||
try {
|
||||
subtle = crypto.subtle || crypto.webkitSubtle;
|
||||
subtle.digest('SHA-512', new Uint8Array(1)).then(
|
||||
function (x) { },
|
||||
function (x) { load_fb(); }
|
||||
);
|
||||
}
|
||||
catch (ex) {
|
||||
load_fb();
|
||||
}
|
||||
function load_fb() {
|
||||
subtle = null;
|
||||
importScripts('/.cpr/deps/sha512.hw.js');
|
||||
}
|
||||
|
||||
|
||||
onmessage = (d) => {
|
||||
var [nchunk, fobj, car, cdr] = d.data,
|
||||
t0 = Date.now(),
|
||||
reader = new FileReader();
|
||||
|
||||
reader.onload = function (e) {
|
||||
try {
|
||||
//console.log('[ w] %d HASH bgin', nchunk);
|
||||
postMessage(["read", nchunk, cdr - car, Date.now() - t0]);
|
||||
hash_calc(e.target.result);
|
||||
}
|
||||
catch (ex) {
|
||||
postMessage(["panic", ex + '']);
|
||||
}
|
||||
};
|
||||
reader.onerror = function () {
|
||||
var err = reader.error + '';
|
||||
|
||||
if (err.indexOf('NotReadableError') !== -1 || // win10-chrome defender
|
||||
err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
|
||||
)
|
||||
return postMessage(["fail", 'OS-error', err + ' @ ' + car]);
|
||||
|
||||
postMessage(["ferr", err]);
|
||||
};
|
||||
//console.log('[ w] %d read bgin', nchunk);
|
||||
reader.readAsArrayBuffer(
|
||||
File.prototype.slice.call(fobj, car, cdr));
|
||||
|
||||
|
||||
var hash_calc = function (buf) {
|
||||
var hash_done = function (hashbuf) {
|
||||
try {
|
||||
var hslice = new Uint8Array(hashbuf).subarray(0, 33);
|
||||
//console.log('[ w] %d HASH DONE', nchunk);
|
||||
postMessage(["done", nchunk, hslice, cdr - car]);
|
||||
}
|
||||
catch (ex) {
|
||||
postMessage(["panic", ex + '']);
|
||||
}
|
||||
};
|
||||
|
||||
if (subtle)
|
||||
subtle.digest('SHA-512', buf).then(hash_done);
|
||||
else {
|
||||
var u8buf = new Uint8Array(buf);
|
||||
hashwasm.sha512(u8buf).then(function (v) {
|
||||
hash_done(hex2u8(v))
|
||||
});
|
||||
}
|
||||
};
|
||||
}
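A hedged sketch of driving this hashing worker from the main thread; the worker URL and chunk size are assumptions (other assets in this diff are served from `/.cpr/`), while the message shapes come straight from the code above:

```js
var w = new Worker('/.cpr/w.hash.js'),          // assumed URL for copyparty/web/w.hash.js
    file = fileInput.files[0],                  // a File object (fileInput is hypothetical)
    chunksz = 1024 * 1024;                      // hypothetical 1 MiB chunk

w.onmessage = function (d) {
    var op = d.data[0];
    if (op == "done")
        console.log('chunk', d.data[1], 'sha512 prefix:', d.data[2]);   // 33-byte Uint8Array
    else if (op == "fail" || op == "ferr" || op == "panic")
        console.error('hashing failed:', d.data);
    // "read" messages report how long reading the file slice took
};

// message format expected by the worker's onmessage: [nchunk, file-object, start, end]
w.postMessage([0, file, 0, Math.min(chunksz, file.size)]);
```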
|
||||
@@ -2,29 +2,20 @@
|
||||
|
||||
|
||||
|
||||
# example resource files
|
||||
# utilities
|
||||
|
||||
can be provided to copyparty to tweak things
|
||||
|
||||
|
||||
|
||||
## example `.epilogue.html`
|
||||
save one of these as `.epilogue.html` inside a folder to customize it:
|
||||
|
||||
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
|
||||
|
||||
|
||||
## example browser-css
|
||||
point `--css-browser` to one of these by URL:
|
||||
|
||||
* [`browser.css`](browser.css) changes the background
|
||||
* [`browser-icons.css`](browser-icons.css) adds filetype icons
|
||||
## [`multisearch.html`](multisearch.html)
|
||||
* takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those
|
||||
* use it by putting it somewhere on the server and opening it as an html page
|
||||
* also serves as an extendable template for other specific search behaviors
|
||||
|
||||
|
||||
|
||||
# other stuff
|
||||
|
||||
## [`changelog.md`](changelog.md)
|
||||
* occasionally grabbed from github release notes
|
||||
|
||||
## [`rclone.md`](rclone.md)
|
||||
* notes on using rclone as a fuse client/server
|
||||
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
html {
|
||||
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||
}
|
||||
#files th {
|
||||
background: rgba(32, 32, 32, 0.9) !important;
|
||||
}
|
||||
#ops,
|
||||
#tree,
|
||||
#files td {
|
||||
background: rgba(32, 32, 32, 0.3) !important;
|
||||
}
|
||||
|
||||
|
||||
html.light {
|
||||
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
|
||||
}
|
||||
html.light #files th {
|
||||
background: rgba(255, 255, 255, 0.9) !important;
|
||||
}
|
||||
html.light .logue,
|
||||
html.light #ops,
|
||||
html.light #tree,
|
||||
html.light #files td {
|
||||
background: rgba(248, 248, 248, 0.8) !important;
|
||||
}
|
||||
|
||||
|
||||
#files * {
|
||||
background: transparent !important;
|
||||
}
|
||||
docs/changelog.md (new file, 2840 lines)
File diff suppressed because it is too large
docs/multisearch.html (new file, 124 lines)
@@ -0,0 +1,124 @@
|
||||
<!DOCTYPE html><html lang="en"><head>
|
||||
<meta charset="utf-8">
|
||||
<title>multisearch</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
color: #ddd;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
body {
|
||||
padding: 1em;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
ul {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
code {
|
||||
color: #fc5;
|
||||
border: 1px solid #444;
|
||||
padding: .1em .2em;
|
||||
font-family: sans-serif, sans-serif;
|
||||
}
|
||||
#src {
|
||||
display: block;
|
||||
width: calc(100% - 1em);
|
||||
padding: .5em;
|
||||
margin: 0;
|
||||
}
|
||||
td {
|
||||
padding-left: 1em;
|
||||
}
|
||||
.hit,
|
||||
.miss {
|
||||
font-weight: bold;
|
||||
padding-left: 0;
|
||||
padding-top: 1em;
|
||||
}
|
||||
.hit {color: #af0;}
|
||||
.miss {color: #f0c;}
|
||||
.hit:before {content: '✅';}
|
||||
.miss:before {content: '❌';}
|
||||
|
||||
</style></head><body>
|
||||
<ul>
|
||||
<li>paste a list of filenames (youtube rips) below and hit search</li>
|
||||
<li>it will grab the youtube-id from the filenames and search for each id</li>
|
||||
<li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li>
|
||||
</ul>
|
||||
<textarea id="src"></textarea>
|
||||
<button id="go">search</button>
|
||||
<div id="res"></div>
|
||||
<script>
|
||||
|
||||
var ebi = document.getElementById.bind(document);
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
|
||||
ebi('go').onclick = async function() {
|
||||
var queries = [];
|
||||
for (var ln of ebi('src').value.split(/\n/g)) {
|
||||
// filter the list of input files,
|
||||
// only keeping youtube videos,
|
||||
// meaning the filename ends with either
|
||||
// [YOUTUBEID].EXTENSION or
|
||||
// -YOUTUBEID.EXTENSION
|
||||
var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln);
|
||||
if (!m || !(m = m[1]))
|
||||
continue;
|
||||
|
||||
// create a search query for each line: name like *youtubeid*
|
||||
queries.push([ln, `name like *${m}*`]);
|
||||
}
|
||||
|
||||
var a = 0, html = ['<table>'], hits = [], misses = [];
|
||||
for (var [fn, q] of queries) {
|
||||
var r = await fetch('/?srch', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({'q': q})
|
||||
});
|
||||
r = await r.json();
|
||||
|
||||
var cl, tab2;
|
||||
if (r.hits.length) {
|
||||
tab2 = hits;
|
||||
cl = 'hit';
|
||||
}
|
||||
else {
|
||||
tab2 = misses;
|
||||
cl = 'miss';
|
||||
}
|
||||
var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`;
|
||||
tab2.push(h);
|
||||
html.push(h);
|
||||
for (var h of r.hits) {
|
||||
var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`;
|
||||
html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`);
|
||||
}
|
||||
ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`;
|
||||
}
|
||||
html.push('<tr><td><h1>hits:</h1></td></tr>');
|
||||
html = html.concat(hits);
|
||||
|
||||
html.push('<tr><td><h1>miss:</h1></td></tr>');
|
||||
html = html.concat(misses);
|
||||
|
||||
html.push('</table>');
|
||||
ebi('res').innerHTML = html.join('\n');
|
||||
};
|
||||
|
||||
</script></body></html>
|
||||
docs/notes.bat (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
rem appending a static ip to a dhcp nic on windows 10-1703 or later
|
||||
netsh interface ipv4 show interface
|
||||
netsh interface ipv4 set interface interface="Ethernet 2" dhcpstaticipcoexistence=enabled
|
||||
netsh interface ipv4 add address "Ethernet 2" 10.1.2.4 255.255.255.0
|
||||
docs/notes.md (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
# up2k.js
|
||||
|
||||
## potato detection
|
||||
|
||||
* tsk 0.25/8.4/31.5 bzw 1.27/22.9/18 = 77% (38.4s, 49.7s)
|
||||
* 4c locale #1313, ff-102,deb-11 @ ryzen4500u wifi -> win10
|
||||
* profiling shows 2sec heavy gc every 2sec
|
||||
|
||||
* tsk 0.41/4.1/10 bzw 1.41/9.9/7 = 73% (13.3s, 18.2s)
|
||||
* 4c locale #1313, ch-103,deb-11 @ ryzen4500u wifi -> win10
|
||||
@@ -3,6 +3,12 @@ echo not a script
|
||||
exit 1
|
||||
|
||||
|
||||
##
|
||||
## add index.html banners
|
||||
|
||||
find -name index.html | sed -r 's/index.html$//' | while IFS= read -r dir; do f="$dir/.prologue.html"; [ -e "$f" ] || echo '<h1><a href="index.html">open index.html</a></h1>' >"$f"; done
|
||||
|
||||
|
||||
##
|
||||
## delete all partial uploads
|
||||
## (supports linux/macos, probably windows+msys2)
|
||||
@@ -42,7 +48,10 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
|
||||
## time between first and last upload
|
||||
|
||||
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
|
||||
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
|
||||
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
|
||||
|
||||
# or, if the client you're measuring dies for ~15sec every once in a while and you want to filter those out,
|
||||
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !p{a=t;p=t;r=0;next} t-p>1{printf "%.3f += %.3f - %.3f (%.3f) # %.3f -> %.3f\n",r,p,a,p-a,p,t;r+=p-a;a=t} {p=t} END {print r+p-a}'
|
||||
|
||||
|
||||
##
|
||||
@@ -80,6 +89,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
|
||||
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
|
||||
|
||||
|
||||
##
|
||||
## track an up2k upload and print all chunks in file-order
|
||||
|
||||
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
|
||||
|
||||
|
||||
##
|
||||
## js oneliners
|
||||
|
||||
@@ -89,6 +104,7 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
|
||||
# debug md-editor line tracking
|
||||
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
|
||||
|
||||
|
||||
##
|
||||
## bash oneliners
|
||||
|
||||
@@ -169,7 +185,7 @@ brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
# readme toc
|
||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
|
||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:;,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
|
||||
|
||||
# fix firefox phantom breakpoints,
|
||||
# suggestions from bugtracker, doesn't work (debugger is not attachable)
|
||||
@@ -185,7 +201,16 @@ about:config >> devtools.debugger.prefs-schema-version = -1
|
||||
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
|
||||
|
||||
# download all sfx versions
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
|
||||
|
||||
# convert releasenotes to changelog
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | "▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ \n# \(.created_at) `\(.tag_name)` \(.name)\n\n\(.body)\n\n\n"' | sed -r 's/^# ([0-9]{4}-)([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})Z /# \1\2\3-\4\5 /' > changelog.md
|
||||
|
||||
# push to multiple git remotes
|
||||
git config -l | grep '^remote'
|
||||
git remote add all git@github.com:9001/copyparty.git
|
||||
git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
|
||||
git remote set-url --add --push all git@github.com:9001/copyparty.git
|
||||
|
||||
|
||||
##
|
||||
|
||||
@@ -12,21 +12,18 @@ set -e
|
||||
#
|
||||
# output summary (filesizes and contents):
|
||||
#
|
||||
# 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# 550760 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# `- original unmodified sfx from github
|
||||
#
|
||||
# 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
|
||||
# `- unmodified but recompressed from bzip2 to gzip
|
||||
#
|
||||
# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
|
||||
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
|
||||
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
|
||||
# `- removed iOS ogg/opus/vorbis audio decoder,
|
||||
# removed the audio tray mouse cursor,
|
||||
# "enterprise edition"
|
||||
#
|
||||
# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
|
||||
# `- also removed the codemirror markdown editor
|
||||
@@ -81,7 +78,7 @@ cache="$od/.copyparty-repack.cache"
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
grep -E '(sfx\.(sh|py)|tar\.gz)$' |
|
||||
grep -E '(sfx\.py|tar\.gz)$' |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
@@ -139,11 +136,11 @@ repack() {
|
||||
)
|
||||
}
|
||||
|
||||
repack sfx-full "re gz no-sh"
|
||||
repack sfx-ent "re no-dd no-ogv"
|
||||
repack sfx-ent "re no-dd no-ogv gz no-sh"
|
||||
repack sfx-lite "re no-dd no-ogv no-cm no-hl"
|
||||
repack sfx-lite "re no-dd no-ogv no-cm no-hl gz no-sh"
|
||||
repack sfx-full "re gz"
|
||||
repack sfx-ent "re no-dd"
|
||||
repack sfx-ent "re no-dd gz"
|
||||
repack sfx-lite "re no-dd no-cm no-hl"
|
||||
repack sfx-lite "re no-dd no-cm no-hl gz"
|
||||
|
||||
|
||||
# move fuse and up2k clients into copyparty-extras/,
|
||||
|
||||
@@ -1,21 +1,19 @@
|
||||
FROM alpine:3.14
|
||||
FROM alpine:3.16
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=3.0.4 \
|
||||
ver_ogvjs=1.8.4 \
|
||||
ver_mde=2.15.0 \
|
||||
ver_codemirror=5.62.3 \
|
||||
ver_marked=4.0.18 \
|
||||
ver_mde=2.16.1 \
|
||||
ver_codemirror=5.65.7 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
# the scp url is regular latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
|
||||
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
|
||||
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
|
||||
&& unzip ogvjs.zip \
|
||||
&& (mkdir hash-wasm \
|
||||
&& cd hash-wasm \
|
||||
&& unzip ../hash-wasm.zip) \
|
||||
@@ -35,7 +32,7 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& npm install \
|
||||
&& npm i grunt uglify-js -g ) \
|
||||
&& (tar -xf codemirror.tgz \
|
||||
&& cd CodeMirror-$ver_codemirror \
|
||||
&& cd codemirror5-$ver_codemirror \
|
||||
&& npm install ) \
|
||||
&& (tar -xf mde.tgz \
|
||||
&& cd easy-markdown-editor* \
|
||||
@@ -46,8 +43,6 @@ RUN mkdir -p /z/dist/no-pk \


# todo
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker

@@ -77,21 +72,6 @@ RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js


# build ogvjs
RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist \
&& cp -pv \
ogv-es2017.js /z/dist/ogv.js


# build marked
COPY marked.patch /z/
COPY marked-ln.patch /z/
@@ -100,7 +80,6 @@ RUN cd marked-$ver_marked \
&& patch -p1 < /z/marked.patch \
&& npm run build \
&& cp -pv marked.min.js /z/dist/marked.js \
&& cp -pv lib/marked.js /z/dist/marked.full.js \
&& mkdir -p /z/nodepkgs \
&& ln -s $(pwd) /z/nodepkgs/marked
# && npm run test \
@@ -108,7 +87,7 @@ RUN cd marked-$ver_marked \

# build codemirror
COPY codemirror.patch /z/
RUN cd CodeMirror-$ver_codemirror \
RUN cd codemirror5-$ver_codemirror \
&& patch -p1 < /z/codemirror.patch \
&& sed -ri '/^var urlRE = /d' mode/gfm/gfm.js \
&& npm run build \
@@ -120,9 +99,10 @@ COPY easymde.patch /z/
RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde.patch \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`https://registry.npmjs.org/codemirror/-/codemirror-[0-9\.]+.tgz`file:/z/nodepkgs/codemirror`' package-lock.json \
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
&& sed -ri 's`^var marked = require\(.marked.\).marked;$`var marked = window.marked;`' src/js/easymde.js \
&& npm install

COPY easymde-ln.patch /z/
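The sed calls in this hunk rewire easymde's package.json and package-lock.json so that its marked and codemirror dependencies resolve to the locally built checkouts linked under /z/nodepkgs earlier in the file, using npm's file: protocol instead of registry tarballs. A minimal standalone sketch of the same technique with throwaway paths (recent npm versions install a file: folder dependency as a symlink):

    mkdir -p /tmp/localpkg /tmp/consumer          # stand-ins for /z/nodepkgs/* and the easymde tree
    (cd /tmp/localpkg && npm init -y >/dev/null)
    cd /tmp/consumer && npm init -y >/dev/null
    npm install --no-audit --no-fund file:/tmp/localpkg
    ls -l node_modules                            # localpkg -> /tmp/localpkg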
@@ -136,6 +116,7 @@ RUN cd easy-markdown-editor-$ver_mde \
# build fontawesome and scp
COPY mini-fa.sh /z
COPY mini-fa.css /z
COPY shiftbase.py /z
RUN /bin/ash /z/mini-fa.sh

@@ -23,4 +23,4 @@ purge:

sh:
@printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
docker run --rm -it `docker images -aq | head -n 1` /bin/bash
docker run --rm -it `docker images -aq | head -n 1` /bin/ash

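The only change in this Makefile target is the shell, presumably because busybox ash is guaranteed to exist in any alpine-based image while bash is only present when installed explicitly via apk. A quick check, assuming docker is available locally:

    docker run --rm alpine:3.16 ls -l /bin/ash /bin/bash
    # prints /bin/ash -> /bin/busybox, and 'No such file or directory' for /bin/bash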
@@ -1,6 +1,6 @@
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
diff -wNarU2 codemirror-5.65.1-orig/mode/gfm/gfm.js codemirror-5.65.1/mode/gfm/gfm.js
--- codemirror-5.65.1-orig/mode/gfm/gfm.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/mode/gfm/gfm.js 2022-02-09 22:50:18.145862052 +0100
@@ -97,5 +97,5 @@
}
}
@@ -15,9 +15,9 @@ diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gf
+ }*/
stream.next();
return null;
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
diff -wNarU2 codemirror-5.65.1-orig/mode/meta.js codemirror-5.65.1/mode/meta.js
--- codemirror-5.65.1-orig/mode/meta.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/mode/meta.js 2022-02-09 22:50:18.145862052 +0100
@@ -13,4 +13,5 @@

CodeMirror.modeInfo = [
@@ -62,10 +62,10 @@ diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
+ */
];
// Ensure all modes have a mime property for backwards compatibility
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
@@ -84,29 +84,21 @@
diff -wNarU2 codemirror-5.65.1-orig/src/display/selection.js codemirror-5.65.1/src/display/selection.js
--- codemirror-5.65.1-orig/src/display/selection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/display/selection.js 2022-02-09 22:50:18.145862052 +0100
@@ -96,29 +96,21 @@
let order = getOrder(lineObj, doc.direction)
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
- let ltr = dir == "ltr"
@@ -105,24 +105,24 @@ diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/sr
+ botRight = openEnd && last ? rightSide : toPos.right
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
@@ -399,4 +399,5 @@
diff -wNarU2 codemirror-5.65.1-orig/src/input/ContentEditableInput.js codemirror-5.65.1/src/input/ContentEditableInput.js
--- codemirror-5.65.1-orig/src/input/ContentEditableInput.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/input/ContentEditableInput.js 2022-02-09 22:50:18.145862052 +0100
@@ -400,4 +400,5 @@
let info = mapFromLineView(view, line, pos.line)

+ /*
let order = getOrder(line, cm.doc.direction), side = "left"
if (order) {
@@ -404,4 +405,5 @@
@@ -405,4 +406,5 @@
side = partPos % 2 ? "right" : "left"
}
+ */
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
result.offset = result.collapse == "right" ? result.end : result.start
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/input/movement.js codemirror-5.65.1/src/input/movement.js
--- codemirror-5.65.1-orig/src/input/movement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/input/movement.js 2022-02-09 22:50:18.145862052 +0100
@@ -15,4 +15,5 @@

export function endOfLine(visually, cm, lineObj, lineNo, dir) {
@@ -146,9 +146,16 @@ diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/i
return null
+ */
}
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/line/line_data.js codemirror-5.65.1/src/line/line_data.js
--- codemirror-5.65.1-orig/src/line/line_data.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/line/line_data.js 2022-02-09 22:54:11.542722046 +0100
@@ -3,5 +3,5 @@
import { elt, eltP, joinClasses } from "../util/dom.js"
import { eventMixin, signal } from "../util/event.js"
-import { hasBadBidiRects, zeroWidthElement } from "../util/feature_detection.js"
+import { zeroWidthElement } from "../util/feature_detection.js"
import { lst, spaceStr } from "../util/misc.js"

@@ -79,6 +79,6 @@
// Optionally wire in some hacks into the token-rendering
// algorithm, to deal with browser quirks.
@@ -158,10 +165,10 @@ diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/l
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
builder.map = []
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
@@ -380,5 +380,6 @@
diff -wNarU2 codemirror-5.65.1-orig/src/measurement/position_measurement.js codemirror-5.65.1/src/measurement/position_measurement.js
--- codemirror-5.65.1-orig/src/measurement/position_measurement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/measurement/position_measurement.js 2022-02-09 22:50:18.145862052 +0100
@@ -382,5 +382,6 @@
sticky = "after"
}
- if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
@@ -169,39 +176,39 @@ diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codem
+ /*

function getBidi(ch, partPos, invert) {
@@ -391,4 +392,5 @@
@@ -393,4 +394,5 @@
if (other != null) val.other = getBidi(ch, other, sticky != "before")
return val
+ */
}

@@ -468,4 +470,5 @@
@@ -470,4 +472,5 @@
let begin = 0, end = lineObj.text.length, ltr = true

+ /*
let order = getOrder(lineObj, cm.doc.direction)
// If the line isn't plain left-to-right text, first figure out
@@ -482,4 +485,5 @@
@@ -484,4 +487,5 @@
end = ltr ? part.to : part.from - 1
}
+ */

// A binary search to find the first character whose bounding box
@@ -526,4 +530,5 @@
@@ -528,4 +532,5 @@
}

+/*
function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) {
// Bidi parts are sorted left-to-right, and in a non-line-wrapping
@@ -580,4 +585,5 @@
@@ -582,4 +587,5 @@
return part
}
+*/

let measureText
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/util/bidi.js codemirror-5.65.1/src/util/bidi.js
--- codemirror-5.65.1-orig/src/util/bidi.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/util/bidi.js 2022-02-09 22:50:18.145862052 +0100
@@ -4,5 +4,5 @@

export function iterateBidiSections(order, from, to, f) {
@@ -259,9 +266,9 @@ diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/b
- return order
+ return false;
}
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/util/feature_detection.js codemirror-5.65.1/src/util/feature_detection.js
--- codemirror-5.65.1-orig/src/util/feature_detection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/util/feature_detection.js 2022-02-09 22:50:18.145862052 +0100
@@ -25,4 +25,5 @@
}

12 scripts/deps-docker/easymde-marked6.patch Normal file
@@ -0,0 +1,12 @@
diff --git a/src/js/easymde.js b/src/js/easymde.js
--- a/src/js/easymde.js
+++ b/src/js/easymde.js
@@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) {
marked.setOptions(markedOptions);

// Convert the markdown to HTML
- var htmlText = marked(text);
+ var htmlText = marked.parse(text);

// Sanitize HTML
if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') {
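The one functional change in this new patch is the call style: marked 4.x no longer provides a callable default export, and the bundled build wired in by this Dockerfile appears to expose window.marked as a namespace object, so marked(text) stops working while marked.parse(text) remains the supported entry point. A quick standalone check (assumes node plus network access; the temp path is just an example):

    mkdir -p /tmp/marked-check && cd /tmp/marked-check
    npm init -y >/dev/null && npm install --no-audit --no-fund marked@4 >/dev/null
    node -e "const {marked}=require('marked'); console.log(marked.parse('# hello'))"
    # -> <h1 id="hello">hello</h1>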
@@ -1,52 +1,52 @@
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
diff -wNarU2 easy-markdown-editor-2.16.1-orig/gulpfile.js easy-markdown-editor-2.16.1/gulpfile.js
--- easy-markdown-editor-2.16.1-orig/gulpfile.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/gulpfile.js 2022-02-09 23:06:01.694592535 +0100
@@ -25,5 +25,4 @@
'./node_modules/codemirror/lib/codemirror.css',
'./src/css/*.css',
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
];

diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
@@ -21,5 +21,4 @@
"dependencies": {
"codemirror": "^5.59.2",
diff -wNarU2 easy-markdown-editor-2.16.1-orig/package.json easy-markdown-editor-2.16.1/package.json
--- easy-markdown-editor-2.16.1-orig/package.json 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/package.json 2022-02-09 23:06:24.778501888 +0100
@@ -23,5 +23,4 @@
"@types/marked": "^4.0.1",
"codemirror": "^5.63.1",
- "codemirror-spell-checker": "1.1.2",
"marked": "^2.0.0"
"marked": "^4.0.10"
},
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
diff -wNarU2 easy-markdown-editor-2.16.1-orig/src/js/easymde.js easy-markdown-editor-2.16.1/src/js/easymde.js
--- easy-markdown-editor-2.16.1-orig/src/js/easymde.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/src/js/easymde.js 2022-02-09 23:07:21.203131415 +0100
@@ -12,5 +12,4 @@
require('codemirror/mode/gfm/gfm.js');
require('codemirror/mode/xml/xml.js');
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
var marked = require('marked/lib/marked');
var marked = require('marked').marked;

@@ -1762,9 +1761,4 @@
@@ -1816,9 +1815,4 @@
options.autosave.uniqueId = options.autosave.unique_id;

- // If overlay mode is specified and combine is not provided, default it to true
- if (options.overlayMode && options.overlayMode.combine === undefined) {
- options.overlayMode.combine = true;
- options.overlayMode.combine = true;
- }
-
// Update this options
this.options = options;
@@ -2003,28 +1997,7 @@
@@ -2057,34 +2051,7 @@
var mode, backdrop;

- // CodeMirror overlay mode
- if (options.overlayMode) {
- CodeMirror.defineMode('overlay-mode', function(config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
- CodeMirror.defineMode('overlay-mode', function (config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
-
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
@@ -58,31 +58,35 @@ diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-edi
- backdrop.name = 'gfm';
- backdrop.gitHubSpice = false;
-
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- if (typeof options.spellChecker === 'function') {
- options.spellChecker({
- codeMirrorInstance: CodeMirror,
- });
- } else {
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- }
- }

// eslint-disable-next-line no-unused-vars
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
@@ -160,9 +160,4 @@
diff -wNarU2 easy-markdown-editor-2.16.1-orig/types/easymde.d.ts easy-markdown-editor-2.16.1/types/easymde.d.ts
--- easy-markdown-editor-2.16.1-orig/types/easymde.d.ts 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/types/easymde.d.ts 2022-02-09 23:07:55.427605243 +0100
@@ -167,9 +167,4 @@
}

- interface OverlayModeOptions {
- mode: CodeMirror.Mode<any>
- combine?: boolean
- mode: CodeMirror.Mode<any>;
- combine?: boolean;
- }
-
interface Options {
autoDownloadFontAwesome?: boolean;
@@ -214,7 +209,5 @@
interface SpellCheckerOptions {
codeMirrorInstance: CodeMirror.Editor;
@@ -229,6 +224,4 @@
syncSideBySidePreviewScroll?: boolean;

promptTexts?: PromptTexts;
- syncSideBySidePreviewScroll?: boolean;
- overlayMode?: OverlayModeOptions;
-
- overlayMode?: OverlayModeOptions
+ syncSideBySidePreviewScroll?: boolean
direction?: 'ltr' | 'rtl';
}
}

Some files were not shown because too many files have changed in this diff.