Compare commits
359 Commits
.gitignore (vendored), 1 change:

```
@@ -30,6 +30,7 @@ copyparty/res/COPYING.txt
copyparty/web/deps/
srv/
scripts/docker/i/
scripts/deps-docker/uncomment.py
contrib/package/arch/pkg/
contrib/package/arch/src/
```
.vscode/settings.json (vendored), 24 changes:

```
@@ -22,6 +22,9 @@
"terminal.ansiBrightCyan": "#9cf0ed",
"terminal.ansiBrightWhite": "#ffffff",
},
"python.terminal.activateEnvironment": false,
"python.analysis.enablePytestSupport": false,
"python.analysis.typeCheckingMode": "standard",
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"python.testing.unittestArgs": [
@@ -31,23 +34,8 @@
"-p",
"test_*.py"
],
"python.linting.pylintEnabled": true,
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true,
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
],
"python.linting.banditArgs": [
"--ignore=B104,B110,B112"
],
// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"editor.formatOnSave": true,
// python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
"editor.formatOnSave": false,
"[html]": {
"editor.formatOnSave": false,
"editor.autoIndent": "keep",
@@ -58,6 +46,4 @@
"files.associations": {
"*.makefile": "makefile"
},
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}
```
README.md, 476 changes:
@@ -1,4 +1,6 @@
|
||||
# 💾🎉 copyparty
|
||||
<img src="https://github.com/9001/copyparty/raw/hovudstraum/docs/logo.svg" width="250" align="right"/>
|
||||
|
||||
### 💾🎉 copyparty
|
||||
|
||||
turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser
|
||||
|
||||
@@ -41,8 +43,12 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [unpost](#unpost) - undo/delete accidental uploads
|
||||
* [self-destruct](#self-destruct) - uploads can be given a lifetime
|
||||
* [race the beam](#race-the-beam) - download files while they're still uploading ([demo video](http://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm))
|
||||
* [incoming files](#incoming-files) - the control-panel shows the ETA for all incoming files
|
||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||
* [shares](#shares) - share a file or folder by creating a temporary link
|
||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||
* [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
|
||||
* [recent uploads](#recent-uploads) - list all recent uploads
|
||||
* [media player](#media-player) - plays almost every audio format there is
|
||||
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||
@@ -62,7 +68,8 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [smb server](#smb-server) - unsafe, slow, not recommended for wan
|
||||
* [browser ux](#browser-ux) - tweaking the ui
|
||||
* [opengraph](#opengraph) - discord and social-media embeds
|
||||
* [file indexing](#file-indexing) - enables dedup and music search ++
|
||||
* [file deduplication](#file-deduplication) - enable symlink-based upload deduplication
|
||||
* [file indexing](#file-indexing) - enable music search, upload-undo, and better dedup
|
||||
* [exclude-patterns](#exclude-patterns) - to save some time
|
||||
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
|
||||
* [periodic rescan](#periodic-rescan) - filesystem monitoring
|
||||
@@ -73,18 +80,24 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
|
||||
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
||||
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||
* [zeromq](#zeromq) - event-hooks can send zeromq messages
|
||||
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
||||
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
* [ip auth](#ip-auth) - autologin based on IP range (CIDR)
|
||||
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
|
||||
* [user-changeable passwords](#user-changeable-passwords) - if permitted, users can change their own passwords
|
||||
* [using the cloud as storage](#using-the-cloud-as-storage) - connecting to an aws s3 bucket and similar
|
||||
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
||||
* [hiding from google](#hiding-from-google) - tell search engines you don't wanna be indexed
|
||||
* [themes](#themes)
|
||||
* [complete examples](#complete-examples)
|
||||
* [listen on port 80 and 443](#listen-on-port-80-and-443) - become a *real* webserver
|
||||
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
|
||||
* [real-ip](#real-ip) - teaching copyparty how to see client IPs
|
||||
* [reverse-proxy performance](#reverse-proxy-performance)
|
||||
* [prometheus](#prometheus) - metrics/stats can be enabled
|
||||
* [other extremely specific features](#other-extremely-specific-features) - you'll never find a use for these
|
||||
* [custom mimetypes](#custom-mimetypes) - change the association of a file extension
|
||||
* [feature chickenbits](#feature-chickenbits) - buggy feature? rip it out
|
||||
* [packages](#packages) - the party might be closer than you think
|
||||
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
* [fedora package](#fedora-package) - does not exist yet
|
||||
@@ -107,10 +120,11 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [https](#https) - both HTTP and HTTPS are accepted
|
||||
* [recovering from crashes](#recovering-from-crashes)
|
||||
* [client crashes](#client-crashes)
|
||||
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
|
||||
* [firefox wsod](#firefox-wsod) - firefox 87 can crash during uploads
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](./docs/devnotes.md#http-api)
|
||||
* [dependencies](#dependencies) - mandatory deps
|
||||
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
||||
* [dependency chickenbits](#dependency-chickenbits) - prevent loading an optional dependency
|
||||
* [optional gpl stuff](#optional-gpl-stuff)
|
||||
* [sfx](#sfx) - the self-contained "binary" (recommended!)
|
||||
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
|
||||
@@ -124,10 +138,11 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
|
||||
just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
|
||||
|
||||
* or install through pypi: `python3 -m pip install --user -U copyparty`
|
||||
* or install through [pypi](https://pypi.org/project/copyparty/): `python3 -m pip install --user -U copyparty`
|
||||
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
||||
* or install [on arch](#arch-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
|
||||
* or if you are on android, [install copyparty in termux](#install-on-android)
|
||||
* or maybe you have a [synology nas / dsm](./docs/synology-dsm.md)
|
||||
* or if your computer is messed up and nothing else works, [try the pyz](#zipapp)
|
||||
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
|
||||
* docker has all deps built-in, so skip this step:
|
||||
@@ -194,7 +209,7 @@ firewall-cmd --reload
|
||||
also see [comparison to similar software](./docs/versus.md)
|
||||
|
||||
* backend stuff
|
||||
* ☑ IPv6
|
||||
* ☑ IPv6 + unix-sockets
|
||||
* ☑ [multiprocessing](#performance) (actual multithreading)
|
||||
* ☑ volumes (mountpoints)
|
||||
* ☑ [accounts](#accounts-and-volumes)
|
||||
@@ -209,7 +224,7 @@ also see [comparison to similar software](./docs/versus.md)
|
||||
* upload
|
||||
* ☑ basic: plain multipart, ie6 support
|
||||
* ☑ [up2k](#uploading): js, resumable, multithreaded
|
||||
* unaffected by cloudflare's max-upload-size (100 MiB)
|
||||
* **no filesize limit!** even on Cloudflare
|
||||
* ☑ stash: simple PUT filedropper
|
||||
* ☑ filename randomizer
|
||||
* ☑ write-only folders
|
||||
@@ -225,6 +240,7 @@ also see [comparison to similar software](./docs/versus.md)
|
||||
* ☑ [navpane](#navpane) (directory tree sidebar)
|
||||
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
|
||||
* ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus/mp3 transcoding)
|
||||
* ☑ play video files as audio (converted on server)
|
||||
* ☑ image gallery with webm player
|
||||
* ☑ textfile browser with syntax hilighting
|
||||
* ☑ [thumbnails](#thumbnails)
|
||||
@@ -232,7 +248,7 @@ also see [comparison to similar software](./docs/versus.md)
|
||||
* ☑ ...of videos using FFmpeg
|
||||
* ☑ ...of audio (spectrograms) using FFmpeg
|
||||
* ☑ cache eviction (max-age; maybe max-size eventually)
|
||||
* ☑ multilingual UI (english, norwegian, [add your own](./docs/rice/#translations)))
|
||||
* ☑ multilingual UI (english, norwegian, chinese, [add your own](./docs/rice/#translations)))
|
||||
* ☑ SPA (browse while uploading)
|
||||
* server indexing
|
||||
* ☑ [locate files by contents](#file-search)
|
||||
@@ -327,6 +343,9 @@ same order here too
|
||||
|
||||
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
|
||||
|
||||
* [Chrome issue 383568268](https://issues.chromium.org/issues/383568268) -- filereaders in webworkers can OOM / crash the browser-tab
|
||||
* copyparty has a workaround which seems to work well enough
|
||||
|
||||
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
|
||||
|
||||
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
|
||||
@@ -334,10 +353,19 @@ same order here too
|
||||
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
|
||||
* `AudioContext` will probably never be a viable workaround as apple introduces new issues faster than they fix current ones
|
||||
|
||||
* iPhones: music volume goes on a rollercoaster during song changes
|
||||
* nothing I can do about it because `AudioContext` is still broken in safari
|
||||
|
||||
* iPhones: the preload feature (in the media-player-options tab) can cause a tiny audio glitch 20sec before the end of each song, but disabling it may cause worse iOS bugs to appear instead
|
||||
* just a hunch, but disabling preloading may cause playback to stop entirely, or possibly mess with bluetooth speakers
|
||||
* tried to add a tooltip regarding this but looks like apple broke my tooltips
|
||||
|
||||
* iPhones: preloaded awo files make safari log MEDIA_ERR_NETWORK errors as playback starts, but the song plays just fine so eh whatever
|
||||
* awo, opus-weba, is apple's new take on opus support, replacing opus-caf which was technically limited to cbr opus
|
||||
|
||||
* iPhones: preloading another awo file may cause playback to stop
|
||||
* can be somewhat mitigated with `mp.au.play()` in `mp.onpreload` but that can hit a race condition in safari that starts playing the same audio object twice in parallel...
|
||||
|
||||
* Windows: folders cannot be accessed if the name ends with `.`
|
||||
* python or windows bug
|
||||
|
||||
@@ -416,7 +444,7 @@ configuring accounts/volumes with arguments:

permissions:
* `r` (read): browse folder contents, download files, download as zip/tar, see filekeys/dirkeys
* `w` (write): upload files, move files *into* this folder
* `w` (write): upload files, move/copy files *into* this folder
* `m` (move): move files/folders *from* this folder
* `d` (delete): delete files/folders
* `.` (dots): user can ask to show dotfiles in directory listings
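as a rough example of how these permission letters combine on the commandline (the account name, password and path are made up, and the exact volume syntax should be double-checked against the --help output):

```bash
python copyparty-sfx.py -a ed:wark -v /srv/pub:pub:r:rwmd,ed
```

the intent being: everyone can `r`ead the volume, while the account `ed` also gets `w`, `m` and `d`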
@@ -496,7 +524,8 @@ the browser has the following hotkeys (always qwerty)
|
||||
* `ESC` close various things
|
||||
* `ctrl-K` delete selected files/folders
|
||||
* `ctrl-X` cut selected files/folders
|
||||
* `ctrl-V` paste
|
||||
* `ctrl-C` copy selected files/folders to clipboard
|
||||
* `ctrl-V` paste (move/copy)
|
||||
* `Y` download selected files
|
||||
* `F2` [rename](#batch-rename) selected file/folder
|
||||
* when a file/folder is selected (in not-grid-view):
|
||||
@@ -565,6 +594,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (
|
||||
|
||||
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
|
||||
* can be made default globally with `--grid` or per-volume with volflag `grid`
|
||||
* enable by adding `?imgs` to a link, or disable with `?imgs=0`
|
||||
|
||||

|
||||
|
||||
@@ -572,15 +602,12 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
|
||||
* pyvips is 3x faster than Pillow, Pillow is 3x faster than FFmpeg
|
||||
* disable thumbnails for specific volumes with volflag `dthumb` for all, or `dvthumb` / `dathumb` / `dithumb` for video/audio/images only
|
||||
|
||||
audio files are covnerted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||
|
||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||
* the order is significant, so if both `cover.png` and `folder.jpg` exist in a folder, it will pick the first matching `--th-covers` entry (`folder.jpg`)
|
||||
* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)
|
||||
|
||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||
* indicated by the audio files having the ▶ icon instead of 💾
|
||||
|
||||
enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
|
||||
* `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
|
||||
* the `sel` option can be made default globally with `--gsel` or per-volume with volflag `gsel`
|
||||
@@ -598,8 +625,8 @@ select which type of archive you want in the `[⚙️] config` tab:
| `pax` | `?tar=pax` | pax-format tar, futureproof, not as fast |
| `tgz` | `?tar=gz` | gzip compressed gnu-tar (slow), for `curl \| tar -xvz` |
| `txz` | `?tar=xz` | gnu-tar with xz / lzma compression (v.slow) |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip` | `?zip` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip=dos` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |

* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`

@@ -607,7 +634,7 @@ select which type of archive you want in the `[⚙️] config` tab:
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
* hidden files ([dotfiles](#dotfiles)) are excluded unless account is allowed to list them
* `up2k.db` and `dir.txt` is always excluded
* bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
* bsdtar supports streaming unzipping: `curl foo?zip | bsdtar -xv`
  * good, because copyparty's zip is faster than tar on small files
* `zip_crc` will take longer to download since the server has to read each file twice
  * this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
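tying the table and the compression-level options together, a whole folder can be grabbed as a max-compression tgz with something like this (hostname and path are placeholders):

```bash
curl -o music.tgz 'https://example.com/music/?tar=gz:9'
```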
@@ -631,7 +658,7 @@ dragdrop is the recommended way, but you may also:
|
||||
|
||||
* select some files (not folders) in your file explorer and press CTRL-V inside the browser window
|
||||
* use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy)
|
||||
* upload using [curl or sharex](#client-examples)
|
||||
* upload using [curl, sharex, ishare, ...](#client-examples)
|
||||
|
||||
when uploading files through dragdrop or CTRL-V, this initiates an upload using `up2k`; there are two browser-based uploaders available:
|
||||
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
|
||||
@@ -646,6 +673,7 @@ up2k has several advantages:
|
||||
* uploads resume if you reboot your browser or pc, just upload the same files again
|
||||
* server detects any corruption; the client reuploads affected chunks
|
||||
* the client doesn't upload anything that already exists on the server
|
||||
* no filesize limit, even when a proxy limits the request size (for example Cloudflare)
|
||||
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
|
||||
* the last-modified timestamp of the file is preserved
|
||||
|
||||
@@ -660,7 +688,7 @@ see [up2k](./docs/devnotes.md#up2k) for details on how it works, or watch a [dem
|
||||
|
||||
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion
|
||||
|
||||
the up2k UI is the epitome of polished inutitive experiences:
|
||||
the up2k UI is the epitome of polished intuitive experiences:
|
||||
* "parallel uploads" specifies how many chunks to upload at the same time
|
||||
* `[🏃]` analysis of other files should continue while one is uploading
|
||||
* `[🥔]` shows a simpler UI for faster uploads from slow devices
|
||||
@@ -681,6 +709,8 @@ note that since up2k has to read each file twice, `[🎈] bup` can *theoreticall
|
||||
|
||||
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
|
||||
|
||||
if the server is behind a proxy which imposes a request-size limit, you can configure up2k to sneak below the limit with server-option `--u2sz` (the default is 96 MiB to support Cloudflare)
|
||||
|
||||
|
||||
### file-search
|
||||
|
||||
@@ -700,7 +730,7 @@ files go into `[ok]` if they exist (and you get a link to where it is), otherwis
|
||||
|
||||
### unpost
|
||||
|
||||
undo/delete accidental uploads
|
||||
undo/delete accidental uploads using the `[🧯]` tab in the UI
|
||||
|
||||

|
||||
|
||||
@@ -709,7 +739,7 @@ you can unpost even if you don't have regular move/delete access, however only f
|
||||
|
||||
### self-destruct

uploads can be given a lifetime, afer which they expire / self-destruct
uploads can be given a lifetime, after which they expire / self-destruct

the feature must be enabled per-volume with the `lifetime` [upload rule](#upload-rules) which sets the upper limit for how long a file gets to stay on the server
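as a sketch of what that can look like with the volflag syntax used elsewhere in this README (the path and volume name are made up, and the `86400` assumes the value is given in seconds; verify against the upload-rules docs):

```bash
python copyparty-sfx.py -v /srv/inbox:inbox:w:c,lifetime=86400
```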
@@ -725,11 +755,18 @@ download files while they're still uploading ([demo video](http://a.ocv.me/pub/g
|
||||
requires the file to be uploaded using up2k (which is the default drag-and-drop uploader), alternatively the command-line program
|
||||
|
||||
|
||||
### incoming files
|
||||
|
||||
the control-panel shows the ETA for all incoming files , but only for files being uploaded into volumes where you have read-access
|
||||
|
||||

|
||||
|
||||
|
||||
## file manager
|
||||
|
||||
cut/paste, rename, and delete files/folders (if you have permission)
|
||||
|
||||
file selection: click somewhere on the line (not the link itsef), then:
|
||||
file selection: click somewhere on the line (not the link itself), then:
|
||||
* `space` to toggle
|
||||
* `up/down` to move
|
||||
* `shift-up/down` to move-and-select
|
||||
@@ -737,10 +774,48 @@ file selection: click somewhere on the line (not the link itsef), then:
|
||||
* shift-click another line for range-select
|
||||
|
||||
* cut: select some files and `ctrl-x`
|
||||
* copy: select some files and `ctrl-c`
|
||||
* paste: `ctrl-v` in another folder
|
||||
* rename: `F2`
|
||||
|
||||
you can move files across browser tabs (cut in one tab, paste in another)
|
||||
you can copy/move files across browser tabs (cut/copy in one tab, paste in another)
|
||||
|
||||
|
||||
## shares
|
||||
|
||||
share a file or folder by creating a temporary link
|
||||
|
||||
when enabled in the server settings (`--shr`), click the bottom-right `share` button to share the folder you're currently in, or alternatively:
|
||||
* select a folder first to share that folder instead
|
||||
* select one or more files to share only those files
|
||||
|
||||
this feature was made with [identity providers](#identity-providers) in mind -- configure your reverseproxy to skip the IdP's access-control for a given URL prefix and use that to safely share specific files/folders sans the usual auth checks
|
||||
|
||||
when creating a share, the creator can choose any of the following options:
|
||||
|
||||
* password-protection
|
||||
* expire after a certain time; `0` or blank means infinite
|
||||
* allow visitors to upload (if the user who creates the share has write-access)
|
||||
|
||||
semi-intentional limitations:
|
||||
|
||||
* cleanup of expired shares only works when global option `e2d` is set, and/or at least one volume on the server has volflag `e2d`
|
||||
* only folders from the same volume are shared; if you are sharing a folder which contains other volumes, then the contents of those volumes will not be available
|
||||
* related to [IdP volumes being forgotten on shutdown](https://github.com/9001/copyparty/blob/hovudstraum/docs/idp.md#idp-volumes-are-forgotten-on-shutdown), any shares pointing into a user's IdP volume will be unavailable until that user makes their first request after a restart
|
||||
* no option to "delete after first access" because tricky
|
||||
* when linking something to discord (for example) it'll get accessed by their scraper and that would count as a hit
|
||||
* browsers wouldn't be able to resume a broken download unless the requester's IP gets allowlisted for X minutes (ref. tricky)
|
||||
|
||||
specify `--shr /foobar` to enable this feature; a toplevel virtual folder named `foobar` is then created, and that's where all the shares will be served from
|
||||
|
||||
* you can name it whatever, `foobar` is just an example
|
||||
* if you're using config files, put `shr: /foobar` inside the `[global]` section instead
|
||||
|
||||
users can delete their own shares in the controlpanel, and a list of privileged users (`--shr-adm`) are allowed to see and/or delet any share on the server
|
||||
|
||||
after a share has expired, it remains visible in the controlpanel for `--shr-rt` minutes (default is 1 day), and the owner can revive it by extending the expiration time there
|
||||
|
||||
**security note:** using this feature does not mean that you can skip the [accounts and volumes](#accounts-and-volumes) section -- you still need to restrict access to volumes that you do not intend to share with unauthenticated users! it is not sufficient to use rules in the reverseproxy to restrict access to just the `/share` folder.
|
||||
|
||||
|
||||
## batch rename
|
||||
@@ -790,6 +865,43 @@ or a mix of both:
|
||||
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
|
||||
|
||||
|
||||
## rss feeds

monitor a folder with your RSS reader , optionally recursive

must be enabled per-volume with volflag `rss` or globally with `--rss`

the feed includes itunes metadata for use with podcast readers such as [AntennaPod](https://antennapod.org/)

a feed example: https://cd.ocv.me/a/d2/d22/?rss&fext=mp3

url parameters:

* `pw=hunter2` for password auth
* `recursive` to also include subfolders
* `title=foo` changes the feed title (default: folder name)
* `fext=mp3,opus` only include mp3 and opus files (default: all)
* `nf=30` only show the first 30 results (default: 250)
* `sort=m` sort by mtime (file last-modified), newest first (default)
  * `u` = upload-time; NOTE: non-uploaded files have upload-time `0`
  * `n` = filename
  * `a` = filesize
  * uppercase = reverse-sort; `M` = oldest file first
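combining a few of those parameters, a recursive feed of only mp3/opus files, capped at 50 entries, would look something like this (hypothetical server):

```
https://example.com/music/?rss&recursive&fext=mp3,opus&nf=50
```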
|
||||
|
||||
## recent uploads
|
||||
|
||||
list all recent uploads by clicking "show recent uploads" in the controlpanel
|
||||
|
||||
will show uploader IP and upload-time if the visitor has the admin permission
|
||||
|
||||
* global-option `--ups-when` makes upload-time visible to all users, and not just admins
|
||||
|
||||
* global-option `--ups-who` (volflag `ups_who`) specifies who gets access (0=nobody, 1=admins, 2=everyone), default=2
|
||||
|
||||
note that the [🧯 unpost](#unpost) feature is better suited for viewing *your own* recent uploads, as it includes the option to undo/delete them
|
||||
|
||||
|
||||
## media player
|
||||
|
||||
plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
|
||||
@@ -800,6 +912,7 @@ some hilights:
|
||||
* OS integration; control playback from your phone's lockscreen ([windows](https://user-images.githubusercontent.com/241032/233213022-298a98ba-721a-4cf1-a3d4-f62634bc53d5.png) // [iOS](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) // [android](https://user-images.githubusercontent.com/241032/233212311-a7368590-08c7-4f9f-a1af-48ccf3f36fad.png))
|
||||
* shows the audio waveform in the seekbar
|
||||
* not perfectly gapless but can get really close (see settings + eq below); good enough to enjoy gapless albums as intended
|
||||
* videos can be played as audio, without wasting bandwidth on the video
|
||||
|
||||
click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)
|
||||
|
||||
@@ -825,6 +938,11 @@ open the `[🎺]` media-player-settings tab to configure it,
|
||||
* `[aac]` converts `aac` and `m4a` files into opus (if supported by browser) or mp3
|
||||
* `[oth]` converts all other known formats into opus (if supported by browser) or mp3
|
||||
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
|
||||
* "transcode to":
|
||||
* `[opus]` produces an `opus` whenever transcoding is necessary (the best choice on Android and PCs)
|
||||
* `[awo]` is `opus` in a `weba` file, good for iPhones (iOS 17.5 and newer) but Apple is still fixing some state-confusion bugs as of iOS 18.2.1
|
||||
* `[caf]` is `opus` in a `caf` file, good for iPhones (iOS 11 through 17), technically unsupported by Apple but works for the mos tpart
|
||||
* `[mp3]` -- the myth, the legend, the undying master of mediocre sound quality that definitely works everywhere
|
||||
* "tint" reduces the contrast of the playback bar
|
||||
|
||||
|
||||
@@ -885,6 +1003,8 @@ see [./srv/expand/](./srv/expand/) for usage and examples
|
||||
|
||||
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
|
||||
|
||||
* and `PREADME.md` / `preadme.md` is shown above directory listings unless `--no-readme` or `.prologue.html`
|
||||
|
||||
* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`
|
||||
|
||||
|
||||
@@ -936,7 +1056,11 @@ uses [multicast dns](https://en.wikipedia.org/wiki/Multicast_DNS) to give copypa
|
||||
|
||||
all enabled services ([webdav](#webdav-server), [ftp](#ftp-server), [smb](#smb-server)) will appear in mDNS-aware file managers (KDE, gnome, macOS, ...)
|
||||
|
||||
the domain will be http://partybox.local if the machine's hostname is `partybox` unless `--name` specifies soemthing else
|
||||
the domain will be `partybox.local` if the machine's hostname is `partybox` unless `--name` specifies something else
|
||||
|
||||
and the web-UI will be available at http://partybox.local:3923/
|
||||
|
||||
* if you want to get rid of the `:3923` so you can use http://partybox.local/ instead then see [listen on port 80 and 443](#listen-on-port-80-and-443)
|
||||
|
||||
|
||||
### ssdp
|
||||
@@ -962,7 +1086,7 @@ print a qr-code [(screenshot)](https://user-images.githubusercontent.com/241032/
|
||||
* `--qrz 1` forces 1x zoom instead of autoscaling to fit the terminal size
|
||||
* 1x may render incorrectly on some terminals/fonts, but 2x should always work
|
||||
|
||||
it uses the server hostname if [mdns](#mdns) is enbled, otherwise it'll use your external ip (default route) unless `--qri` specifies a specific ip-prefix or domain
|
||||
it uses the server hostname if [mdns](#mdns) is enabled, otherwise it'll use your external ip (default route) unless `--qri` specifies a specific ip-prefix or domain
|
||||
|
||||
|
||||
## ftp server
|
||||
@@ -987,7 +1111,7 @@ some recommended FTP / FTPS clients; `wark` = example password:
|
||||
|
||||
## webdav server
|
||||
|
||||
with read-write support, supports winXP and later, macos, nautilus/gvfs
|
||||
with read-write support, supports winXP and later, macos, nautilus/gvfs ... a great way to [access copyparty straight from the file explorer in your OS](#mount-as-drive)
|
||||
|
||||
click the [connect](http://127.0.0.1:3923/?hc) button in the control-panel to see connection instructions for windows, linux, macos
|
||||
|
||||
@@ -999,6 +1123,8 @@ on macos, connect from finder:
|
||||
|
||||
in order to grant full write-access to webdav clients, the volflag `daw` must be set and the account must also have delete-access (otherwise the client won't be allowed to replace the contents of existing files, which is how webdav works)
|
||||
|
||||
> note: if you have enabled [IdP authentication](#identity-providers) then that may cause issues for some/most webdav clients; see [the webdav section in the IdP docs](https://github.com/9001/copyparty/blob/hovudstraum/docs/idp.md#connecting-webdav-clients)
|
||||
|
||||
|
||||
### connecting to webdav from windows
|
||||
|
||||
@@ -1007,11 +1133,12 @@ using the GUI (winXP or later):
|
||||
* on winXP only, click the `Sign up for online storage` hyperlink instead and put the URL there
|
||||
* providing your password as the username is recommended; the password field can be anything or empty
|
||||
|
||||
known client bugs:
|
||||
the webdav client that's built into windows has the following list of bugs; you can avoid all of these by connecting with rclone instead:
|
||||
* win7+ doesn't actually send the password to the server when reauthenticating after a reboot unless you first try to login with an incorrect password and then switch to the correct password
|
||||
* or just type your password into the username field instead to get around it entirely
|
||||
* connecting to a folder which allows anonymous read will make writing impossible, as windows has decided it doesn't need to login
|
||||
* workaround: connect twice; first to a folder which requires auth, then to the folder you actually want, and leave both of those mounted
|
||||
* or set the server-option `--dav-auth` to force password-auth for all webdav clients
|
||||
* win7+ may open a new tcp connection for every file and sometimes forgets to close them, eventually needing a reboot
|
||||
* maybe NIC-related (??), happens with win10-ltsc on e1000e but not virtio
|
||||
* windows cannot access folders which contain filenames with invalid unicode or forbidden characters (`<>:"/\|?*`), or names ending with `.`
|
||||
@@ -1064,12 +1191,12 @@ some **BIG WARNINGS** specific to SMB/CIFS, in decreasing importance:
|
||||
* [shadowing](#shadowing) probably works as expected but no guarantees
|
||||
|
||||
and some minor issues,
|
||||
* clients only see the first ~400 files in big folders; [impacket#1433](https://github.com/SecureAuthCorp/impacket/issues/1433)
|
||||
* clients only see the first ~400 files in big folders;
|
||||
* this was originally due to [impacket#1433](https://github.com/SecureAuthCorp/impacket/issues/1433) which was fixed in impacket-0.12, so you can disable the workaround with `--smb-nwa-1` but then you get unacceptably poor performance instead
|
||||
* hot-reload of server config (`/?reload=cfg`) does not include the `[global]` section (commandline args)
|
||||
* listens on the first IPv4 `-i` interface only (default = :: = 0.0.0.0 = all)
|
||||
* login doesn't work on winxp, but anonymous access is ok -- remove all accounts from copyparty config for that to work
|
||||
* win10 onwards does not allow connecting anonymously / without accounts
|
||||
* on windows, creating a new file through rightclick --> new --> textfile throws an error due to impacket limitations -- hit OK and F5 to get your file
|
||||
* python3 only
|
||||
* slow (the builtin webdav support in windows is 5x faster, and rclone-webdav is 30x faster)
|
||||
|
||||
@@ -1091,8 +1218,8 @@ authenticate with one of the following:
|
||||
tweaking the ui
|
||||
|
||||
* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort
|
||||
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Cirle tags/.tn tags/Artist tags/Title`
|
||||
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Cirle,tags/.tn,tags/Artist,tags/Title,href`
|
||||
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Circle tags/.tn tags/Artist tags/Title`
|
||||
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Circle,tags/.tn,tags/Artist,tags/Title,href`
|
||||
* to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`
|
||||
|
||||
see [./docs/rice](./docs/rice) for more, including how to add stuff (css/`<meta>`/...) to the html `<head>` tag, or to add your own translation
|
||||
@@ -1108,14 +1235,48 @@ note that this disables hotlinking because the opengraph spec demands it; to sne
|
||||
|
||||
you can also hotlink files regardless by appending `?raw` to the url
|
||||
|
||||
NOTE: because discord (and maybe others) strip query args such as `?raw` in opengraph tags, any links which require a filekey or dirkey will not work
|
||||
|
||||
if you want to entirely replace the copyparty response with your own jinja2 template, give the template filepath to `--og-tpl` or volflag `og_tpl` (all members of `HttpCli` are available through the `this` object)
|
||||
|
||||
|
||||
## file deduplication
|
||||
|
||||
enable symlink-based upload deduplication globally with `--dedup` or per-volume with volflag `dedup`
|
||||
|
||||
by default, when someone tries to upload a file that already exists on the server, the upload will be politely declined, and the server will copy the existing file over to where the upload would have gone
|
||||
|
||||
if you enable deduplication with `--dedup` then it'll create a symlink instead of a full copy, thus reducing disk space usage
|
||||
|
||||
* on the contrary, if your server is hooked up to s3-glacier or similar storage where reading is expensive, and you cannot use `--safe-dedup=1` because you have other software tampering with your files, so you want to entirely disable detection of duplicate data instead, then you can specify `--no-clone` globally or `noclone` as a volflag
|
||||
|
||||
**warning:** when enabling dedup, you should also:
|
||||
* enable indexing with `-e2dsa` or volflag `e2dsa` (see [file indexing](#file-indexing) section below); strongly recommended
|
||||
* ...and/or `--hardlink-only` to use hardlink-based deduplication instead of symlinks; see explanation below
|
||||
|
||||
it will not be safe to rename/delete files if you only enable dedup and none of the above; if you enable indexing then it is not *necessary* to also do hardlinks (but you may still want to)
|
||||
|
||||
by default, deduplication is done based on symlinks (symbolic links); these are tiny files which are pointers to the nearest full copy of the file
|
||||
|
||||
you can choose to use hardlinks instead of softlinks, globally with `--hardlink-only` or volflag `hardlinkonly`;
|
||||
|
||||
advantages of using hardlinks:
|
||||
* hardlinks are more compatible with other software; they behave entirely like regular files
|
||||
* you can safely move and rename files using other file managers
|
||||
* symlinks need to be managed by copyparty to ensure the destinations remain correct
|
||||
|
||||
advantages of using symlinks (default):
|
||||
* each symlink can have its own last-modified timestamp, but a single timestamp is shared by all hardlinks
|
||||
* symlinks make it more obvious to other software that the file is not a regular file, so this can be less dangerous
|
||||
* hardlinks look like regular files, so other software may assume they are safe to edit without affecting the other copies
|
||||
|
||||
**warning:** if you edit the contents of a deduplicated file, then you will also edit all other copies of that file! This is especially surprising with hardlinks, because they look like regular files, but that same file exists in multiple locations
|
||||
|
||||
global-option `--xlink` / volflag `xlink` additionally enables deduplication across volumes, but this is probably buggy and not recommended
|
||||
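pulling the options above together, a dedup-enabled server could be started like this (sketch only; the path is made up, and `rw` simply grants everyone read+write on that volume):

```bash
python copyparty-sfx.py -e2dsa --dedup --hardlink-only -v /srv/files:files:rw
```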
|
||||
|
||||
|
||||
## file indexing
|
||||
|
||||
enables dedup and music search ++
|
||||
enable music search, upload-undo, and better dedup
|
||||
|
||||
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volflags, or a mix of both.
|
||||
|
||||
@@ -1126,10 +1287,9 @@ through arguments:
|
||||
* `-e2t` enables metadata indexing on upload
|
||||
* `-e2ts` also scans for tags in all files that don't have tags yet
|
||||
* `-e2tsr` also deletes all existing tags, doing a full reindex
|
||||
* `-e2v` verfies file integrity at startup, comparing hashes from the db
|
||||
* `-e2v` verifies file integrity at startup, comparing hashes from the db
|
||||
* `-e2vu` patches the database with the new hashes from the filesystem
|
||||
* `-e2vp` panics and kills copyparty instead
|
||||
* `--xlink` enables deduplication across volumes
|
||||
|
||||
the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
|
||||
* `-v ~/music::r:c,e2ds,e2tsr` does a full reindex of everything on startup
|
||||
@@ -1142,11 +1302,10 @@ note:
|
||||
* upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
|
||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
||||
|
||||
### exclude-patterns
|
||||
|
||||
to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
|
||||
to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash '\.iso$'` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
|
||||
* initial indexing is way faster, especially when the volume is on a network disk
|
||||
* makes it impossible to [file-search](#file-search)
|
||||
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
|
||||
@@ -1157,6 +1316,8 @@ similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noi
|
||||
|
||||
if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
|
||||
|
||||
to exclude certain filepaths from search-results, use `--srch-excl` or volflag `srch_excl` instead of `--no-idx`, for example `--srch-excl 'password|logs/[0-9]'`
|
||||
|
||||
### filesystem guards
|
||||
|
||||
avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, skipping any symlinks or bind-mounts to another HDD for example
|
||||
@@ -1311,6 +1472,25 @@ you can set hooks before and/or after an event happens, and currently you can ho
|
||||
|
||||
there's a bunch of flags and stuff, see `--help-hooks`
|
||||
|
||||
if you want to write your own hooks, see [devnotes](./docs/devnotes.md#event-hooks)
|
||||
|
||||
|
||||
### zeromq

event-hooks can send zeromq messages instead of running programs

to send a 0mq message every time a file is uploaded,

* `--xau zmq:pub:tcp://*:5556` sends a PUB to any/all connected SUB clients
* `--xau t3,zmq:push:tcp://*:5557` sends a PUSH to exactly one connected PULL client
* `--xau t3,j,zmq:req:tcp://localhost:5555` sends a REQ to the connected REP client

the PUSH and REQ examples have `t3` (timeout after 3 seconds) because they block if there's no clients to talk to

* the REQ example does `t3,j` to send extended upload-info as json instead of just the filesystem-path

see [zmq-recv.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/zmq-recv.py) if you need something to receive the messages with
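as a rough sketch of the receiving side (not part of the repo; assumes `pyzmq` is installed and copyparty was started with the PUB example above):

```python
# minimal SUB client for --xau zmq:pub:tcp://*:5556 (hypothetical helper, not zmq-recv.py)
import zmq

ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://localhost:5556")       # the zmq:pub endpoint from the example
sub.setsockopt_string(zmq.SUBSCRIBE, "")  # no topic filter; receive every message
while True:
    print(sub.recv_string())              # one message per upload; just the filesystem-path unless `j` is set
```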
|
||||
|
||||
### upload events
|
||||
|
||||
@@ -1339,18 +1519,59 @@ redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
replace 404 and 403 errors with something completely different (that's it for now)
|
||||
|
||||
|
||||
## ip auth
|
||||
|
||||
autologin based on IP range (CIDR) , using the global-option `--ipu`
|
||||
|
||||
for example, if everyone with an IP that starts with `192.168.123` should automatically log in as the user `spartacus`, then you can either specify `--ipu=192.168.123.0/24=spartacus` as a commandline option, or put this in a config file:
|
||||
|
||||
```yaml
|
||||
[global]
|
||||
ipu: 192.168.123.0/24=spartacus
|
||||
```
|
||||
|
||||
repeat the option to map additional subnets
|
||||
|
||||
**be careful with this one!** if you have a reverseproxy, then you definitely want to make sure you have [real-ip](#real-ip) configured correctly, and it's probably a good idea to nullmap the reverseproxy's IP just in case; so if your reverseproxy is sending requests from `172.24.27.9` then that would be `--ipu=172.24.27.9/32=`
|
||||
|
||||
|
||||
## identity providers
|
||||
|
||||
replace copyparty passwords with oauth and such
|
||||
|
||||
you can disable the built-in password-based login sysem, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions
|
||||
you can disable the built-in password-based login system, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions
|
||||
|
||||
a popular choice is [Authelia](https://www.authelia.com/) (config-file based), another one is [authentik](https://goauthentik.io/) (GUI-based, more complex)
|
||||
* the regular config-defined users will be used as a fallback for requests which don't include a valid (trusted) IdP username header
|
||||
|
||||
some popular identity providers are [Authelia](https://www.authelia.com/) (config-file based) and [authentik](https://goauthentik.io/) (GUI-based, more complex)
|
||||
|
||||
there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik) which is hopefully a good starting point (alternatively see [./docs/idp.md](./docs/idp.md) if you're the DIY type)
|
||||
|
||||
a more complete example of the copyparty configuration options [look like this](./docs/examples/docker/idp/copyparty.conf)
|
||||
|
||||
but if you just want to let users change their own passwords, then you probably want [user-changeable passwords](#user-changeable-passwords) instead
|
||||
|
||||
|
||||
## user-changeable passwords
|
||||
|
||||
if permitted, users can change their own passwords in the control-panel
|
||||
|
||||
* not compatible with [identity providers](#identity-providers)
|
||||
|
||||
* must be enabled with `--chpw` because account-sharing is a popular usecase
|
||||
|
||||
* if you want to enable the feature but deny password-changing for a specific list of accounts, you can do that with `--chpw-no name1,name2,name3,...`
|
||||
|
||||
* to perform a password reset, edit the server config and give the user another password there, then do a [config reload](#server-config) or server restart
|
||||
|
||||
* the custom passwords are kept in a textfile at filesystem-path `--chpw-db`, by default `chpw.json` in the copyparty config folder
|
||||
|
||||
* if you run multiple copyparty instances with different users you *almost definitely* want to specify separate DBs for each instance
|
||||
|
||||
* if [password hashing](#password-hashing) is enabled, the passwords in the db are also hashed
|
||||
|
||||
* ...which means that all user-defined passwords will be forgotten if you change password-hashing settings
|
||||
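putting those flags together (the excluded account names and db path are placeholders):

```bash
python copyparty-sfx.py --chpw --chpw-no guest,demo --chpw-db /cfg/chpw.json
```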
|
||||
|
||||
## using the cloud as storage

@@ -1358,16 +1579,20 @@ connecting to an aws s3 bucket and similar

there is no built-in support for this, but you can use FUSE-software such as [rclone](https://rclone.org/) / [geesefs](https://github.com/yandex-cloud/geesefs) / [JuiceFS](https://juicefs.com/en/) to first mount your cloud storage as a local disk, and then let copyparty use (a folder in) that disk as a volume
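as a sketch of that two-step setup with rclone (the remote name, bucket and paths are all placeholders, and the mount options are just a starting point):

```bash
rclone mount mys3:mybucket /mnt/bucket --vfs-cache-mode writes &
python copyparty-sfx.py -v /mnt/bucket/party:party:rw
```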
you may experience poor upload performance this way, but that can sometimes be fixed by specifying the volflag `sparse` to force the use of sparse files; this has improved the upload speeds from `1.5 MiB/s` to over `80 MiB/s` in one case, but note that you are also more likely to discover funny bugs in your FUSE software this way, so buckle up
|
||||
you will probably get decent speeds with the default config, but you will most likely be restricted to one TCP connection per file, so the upload-client won't be able to send multiple chunks in parallel
|
||||
|
||||
> before [v1.13.5](https://github.com/9001/copyparty/releases/tag/v1.13.5) it was recommended to use the volflag `sparse` to force-allow multiple chunks in parallel; this would improve the upload-speed from `1.5 MiB/s` to over `80 MiB/s` at the risk of provoking latent bugs in S3 or JuiceFS. But v1.13.5 added chunk-stitching, so this is now probably much less important. On the contrary, `nosparse` *may* now increase performance in some cases. Please try all three options (default, `sparse`, `nosparse`) as the optimal choice depends on your network conditions and software stack (both the FUSE-driver and cloud-server)
|
||||
|
||||
someone has also tested geesefs in combination with [gocryptfs](https://nuetzlich.net/gocryptfs/) with surprisingly good results, getting 60 MiB/s upload speeds on a gbit line, but JuiceFS won with 80 MiB/s using its built-in encryption
|
||||
|
||||
you may improve performance by specifying larger values for `--iobuf` / `--s-rd-sz` / `--s-wr-sz`
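as a rough sketch of the whole setup (remote/bucket/path names below are made up, and the rclone syntax is worth double-checking against rclone's own docs):

```bash
# mount the bucket somewhere on the local filesystem
rclone mount mys3:my-bucket /mnt/bucket &

# share a folder from that mount; try the volume with
# default / sparse / nosparse to see what performs best
python3 copyparty-sfx.py \
  --iobuf 1048576 --s-rd-sz 1048576 --s-wr-sz 1048576 \
  -v /mnt/bucket/share:share:rw,ed:c,sparse
```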
|
||||
|
||||
> if you've experimented with this and made interesting observations, please share your findings so we can add a section with specific recommendations :-)
|
||||
|
||||
|
||||
## hiding from google
|
||||
|
||||
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
||||
tell search engines you don't wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
||||
|
||||
* `--no-robots` adds HTTP (`X-Robots-Tag`) and HTML (`<meta>`) headers with `noindex, nofollow` globally
|
||||
* volflag `[...]:c,norobots` does the same thing for that single volume
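for example (volume names below are made up):

```bash
# globally, for every volume
python3 copyparty-sfx.py --no-robots

# or just for one volume
python3 copyparty-sfx.py -v srv/pub:pub:r:c,norobots
```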
|
||||
@@ -1442,6 +1667,33 @@ if you want to change the fonts, see [./docs/rice/](./docs/rice/)
|
||||
`-lo log/cpp-%Y-%m%d-%H%M%S.txt.xz`
|
||||
|
||||
|
||||
## listen on port 80 and 443
|
||||
|
||||
become a *real* webserver which people can access by just going to your IP or domain without specifying a port
|
||||
|
||||
**if you're on windows,** then you just need to add the commandline argument `-p 80,443` and you're done! nice
|
||||
|
||||
**if you're on macos,** sorry, I don't know
|
||||
|
||||
**if you're on Linux,** you have the following 4 options:
|
||||
|
||||
* **option 1:** set up a [reverse-proxy](#reverse-proxy) -- this one makes a lot of sense if you're running on a proper headless server, because that way you get real HTTPS too
|
||||
|
||||
* **option 2:** NAT to port 3923 -- this is cumbersome since you'll need to do it every time you reboot, and the exact command may depend on your linux distribution:
|
||||
```bash
|
||||
iptables -t nat -A PREROUTING -p tcp --dport 80 -j REDIRECT --to-port 3923
|
||||
iptables -t nat -A PREROUTING -p tcp --dport 443 -j REDIRECT --to-port 3923
|
||||
```
|
||||
|
||||
* **option 3:** disable the [security policy](https://www.w3.org/Daemon/User/Installation/PrivilegedPorts.html) which prevents the use of 80 and 443; this is *probably* fine:
|
||||
```
|
||||
setcap CAP_NET_BIND_SERVICE=+eip $(realpath $(which python))
|
||||
python copyparty-sfx.py -p 80,443
|
||||
```
|
||||
|
||||
* **option 4:** run copyparty as root (please don't)
|
||||
|
||||
|
||||
## reverse-proxy
|
||||
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx, caddy, or apache
|
||||
@@ -1451,14 +1703,24 @@ you can either:
|
||||
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
|
||||
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
|
||||
|
||||
when running behind a reverse-proxy (this includes services like cloudflare), it is important to configure real-ip correctly, as many features rely on knowing the client's IP. Look out for red and yellow log messages which explain how to do this. But basically, set `--xff-hdr` to the name of the http header to read the IP from (usually `x-forwarded-for`, but cloudflare uses `cf-connecting-ip`), and then `--xff-src` to the IP of the reverse-proxy so copyparty will trust the xff-hdr. Note that `--rp-loc` in particular will not work at all unless you do this
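for example, if copyparty sits behind cloudflare with a local reverse-proxy in front of it, the gist would be something like this (the proxy IP below is a placeholder; the log messages will tell you the exact value copyparty expects):

```bash
python3 copyparty-sfx.py \
  --xff-hdr cf-connecting-ip \
  --xff-src 127.0.0.1 \
  --rp-loc=/stuff   # only if you're doing location-based proxying
```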
|
||||
|
||||
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which *could* be a nice speed boost, depending on a lot of factors
|
||||
* **warning:** nginx-QUIC (HTTP/3) is still experimental and can make uploads much slower, so HTTP/1.1 is recommended for now
|
||||
* depending on server/client, HTTP/1.1 can also be 5x faster than HTTP/2
|
||||
|
||||
example webserver configs:
|
||||
for improved security (and a 10% performance boost) consider listening on a unix-socket with `-i unix:770:www:/tmp/party.sock` (permission `770` means only members of group `www` can access it)
|
||||
|
||||
* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
|
||||
example webserver / reverse-proxy configs:
|
||||
|
||||
* [apache config](contrib/apache/copyparty.conf)
|
||||
* caddy uds: `caddy reverse-proxy --from :8080 --to unix///dev/shm/party.sock`
|
||||
* caddy tcp: `caddy reverse-proxy --from :8081 --to http://127.0.0.1:3923`
|
||||
* [haproxy config](contrib/haproxy/copyparty.conf)
|
||||
* [lighttpd subdomain](contrib/lighttpd/subdomain.conf) -- entire domain/subdomain
|
||||
* [lighttpd subpath](contrib/lighttpd/subpath.conf) -- location-based (not optimal, but in case you need it)
|
||||
* [nginx config](contrib/nginx/copyparty.conf) -- recommended
|
||||
* [traefik config](contrib/traefik/copyparty.yaml)
|
||||
|
||||
|
||||
### real-ip
|
||||
@@ -1470,6 +1732,38 @@ if you (and maybe everybody else) keep getting a message that says `thank you fo
|
||||
for most common setups, there should be a helpful message in the server-log explaining what to do, but see [docs/xff.md](docs/xff.md) if you want to learn more, including a quick hack to **just make it work** (which is **not** recommended, but hey...)
|
||||
|
||||
|
||||
### reverse-proxy performance
|
||||
|
||||
most reverse-proxies support connecting to copyparty either using uds/unix-sockets (`/dev/shm/party.sock`, faster/recommended) or using tcp (`127.0.0.1`)
|
||||
|
||||
with copyparty listening on a uds / unix-socket / unix-domain-socket and the reverse-proxy connecting to that:
|
||||
|
||||
| index.html | upload | download | software |
|
||||
| ------------ | ----------- | ----------- | -------- |
|
||||
| 28'900 req/s | 6'900 MiB/s | 7'400 MiB/s | no-proxy |
|
||||
| 18'750 req/s | 3'500 MiB/s | 2'370 MiB/s | haproxy |
|
||||
| 9'900 req/s | 3'750 MiB/s | 2'200 MiB/s | caddy |
|
||||
| 18'700 req/s | 2'200 MiB/s | 1'570 MiB/s | nginx |
|
||||
| 9'700 req/s | 1'750 MiB/s | 1'830 MiB/s | apache |
|
||||
| 9'900 req/s | 1'300 MiB/s | 1'470 MiB/s | lighttpd |
|
||||
|
||||
when connecting the reverse-proxy to `127.0.0.1` instead (the basic and/or old-fashioned way), speeds are a bit worse:
|
||||
|
||||
| index.html | upload | download | software |
|
||||
| ------------ | ----------- | ----------- | -------- |
|
||||
| 21'200 req/s | 5'700 MiB/s | 6'700 MiB/s | no-proxy |
|
||||
| 14'500 req/s | 1'700 MiB/s | 2'170 MiB/s | haproxy |
|
||||
| 11'100 req/s | 2'750 MiB/s | 2'000 MiB/s | traefik |
|
||||
| 8'400 req/s | 2'300 MiB/s | 1'950 MiB/s | caddy |
|
||||
| 13'400 req/s | 1'100 MiB/s | 1'480 MiB/s | nginx |
|
||||
| 8'400 req/s | 1'000 MiB/s | 1'000 MiB/s | apache |
|
||||
| 6'500 req/s | 1'270 MiB/s | 1'500 MiB/s | lighttpd |
|
||||
|
||||
in summary, `haproxy > caddy > traefik > nginx > apache > lighttpd`, and use uds when possible (traefik does not support it yet)
|
||||
|
||||
* if these results are bullshit because my config examples are bad, please submit corrections!
|
||||
|
||||
|
||||
## prometheus
|
||||
|
||||
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
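to eyeball the output before pointing prometheus/grafana at it, a plain http request is enough (the `PW` header is only needed if your setup requires auth for that URL):

```bash
curl -s -H 'PW: hunter2' http://127.0.0.1:3923/.cpr/metrics | head -n 20
```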
|
||||
@@ -1493,6 +1787,7 @@ scrape_configs:
|
||||
currently the following metrics are available,
|
||||
* `cpp_uptime_seconds` time since last copyparty restart
|
||||
* `cpp_boot_unixtime_seconds` same but as an absolute timestamp
|
||||
* `cpp_active_dl` number of active downloads
|
||||
* `cpp_http_conns` number of open http(s) connections
|
||||
* `cpp_http_reqs` number of http(s) requests handled
|
||||
* `cpp_sus_reqs` number of 403/422/malicious requests
|
||||
@@ -1555,6 +1850,23 @@ in a config-file, this is the same as:
|
||||
run copyparty with `--mimes` to list all the default mappings
|
||||
|
||||
|
||||
### feature chickenbits
|
||||
|
||||
buggy feature? rip it out by setting any of the following environment variables to disable its associated bell or whistle,
|
||||
|
||||
| env-var | what it does |
|
||||
| -------------------- | ------------ |
|
||||
| `PRTY_NO_IFADDR` | disable ip/nic discovery by poking into your OS with ctypes |
|
||||
| `PRTY_NO_IPV6` | disable some ipv6 support (should not be necessary since windows 2000) |
|
||||
| `PRTY_NO_LZMA` | disable streaming xz compression of incoming uploads |
|
||||
| `PRTY_NO_MP` | disable all use of the python `multiprocessing` module (actual multithreading, cpu-count for parsers/thumbnailers) |
|
||||
| `PRTY_NO_SQLITE` | disable all database-related functionality (file indexing, metadata indexing, most file deduplication logic) |
|
||||
| `PRTY_NO_TLS` | disable native HTTPS support; if you still want to accept HTTPS connections then TLS must now be terminated by a reverse-proxy |
|
||||
| `PRTY_NO_TPOKE` | disable systemd-tmpfilesd avoider |
|
||||
|
||||
example: `PRTY_NO_IFADDR=1 python3 copyparty-sfx.py`
|
||||
|
||||
|
||||
# packages
|
||||
|
||||
the party might be closer than you think
|
||||
@@ -1725,6 +2037,9 @@ quick summary of more eccentric web-browsers trying to view a directory index:
|
||||
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
|
||||
| **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
|
||||
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
|
||||
| **nintendo 3ds** | can browse, upload, view thumbnails (thx bnjmn) |
|
||||
|
||||
<p align="center"><img src="https://github.com/user-attachments/assets/88deab3d-6cad-4017-8841-2f041472b853" /></p>
|
||||
|
||||
|
||||
# client examples
|
||||
@@ -1758,16 +2073,19 @@ interact with copyparty using non-browser clients
|
||||
|
||||
* FUSE: mount a copyparty server as a local filesystem
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
* able to mount nginx and iis directory listings too, not just copyparty
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [partyfuse.py](http://127.0.0.1:3923/.cpr/a/partyfuse.py)
|
||||
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
||||
|
||||
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
|
||||
* sharex (screenshot utility): see [./contrib/sharex.sxcu](./contrib/#sharexsxcu)
|
||||
* and for screenshots on macos, see [./contrib/ishare.iscu](./contrib/#ishareiscu)
|
||||
* and for screenshots on linux, see [./contrib/flameshot.sh](./contrib/flameshot.sh)
|
||||
|
||||
* contextlet (web browser integration); see [contrib contextlet](contrib/#send-to-cppcontextletjson)
|
||||
|
||||
* [igloo irc](https://iglooirc.com/): Method: `post` Host: `https://you.com/up/?want=url&pw=hunter2` Multipart: `yes` File parameter: `f`
|
||||
|
||||
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:
|
||||
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
|
||||
|
||||
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
|
||||
b512 <movie.mkv
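so a full roundtrip could look something like this (server URL and password below are placeholders; assumes you have write-access to the target folder):

```bash
# upload with PUT; the response includes the truncated base64 sha512
curl -T movie.mkv 'https://your.server/inc/movie.mkv?pw=hunter2'

# compute the same checksum locally and compare
b512 <movie.mkv
```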
|
||||
@@ -1796,9 +2114,9 @@ alternatively, some alternatives roughly sorted by speed (unreproducible benchma
|
||||
|
||||
* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE (rclone v1.63 or later)
|
||||
* [rclone-http](./docs/rclone.md) (26s), read-only
|
||||
* [partyfuse.py](./bin/#partyfusepy) (35s), read-only
|
||||
* [partyfuse.py](./bin/#partyfusepy) (26s), read-only
|
||||
* [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
|
||||
* davfs2 (103s), read/WRITE, *very fast* on small files
|
||||
* davfs2 (103s), read/WRITE
|
||||
* [win10-webdav](#webdav-server) (138s), read/WRITE
|
||||
* [win10-smb2](#smb-server) (387s), read/WRITE
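as a very rough sketch of the fastest option above (the full remote options live in [./docs/rclone.md](docs/rclone.md); remote/mountpoint names below are made up):

```bash
rclone config create party webdav url=http://127.0.0.1:3923 vendor=other
rclone mount party: /mnt/party
```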
|
||||
|
||||
@@ -1838,6 +2156,9 @@ below are some tweaks roughly ordered by usefulness:
|
||||
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
||||
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
||||
* and also makes thumbnails load faster, regardless of e2d/e2t
|
||||
* `--dedup` enables deduplication and thus avoids writing to the HDD if someone uploads a dupe
|
||||
* `--safe-dedup 1` makes deduplication much faster during upload by skipping verification of file contents; safe if there is no other software editing/moving the files in the volumes
|
||||
* `--no-dirsz` shows the size of folder inodes instead of the total size of the contents, giving about 30% faster folder listings
|
||||
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
||||
* if your volumes are on a network-disk such as NFS / SMB / s3, specifying larger values for `--iobuf` and/or `--s-rd-sz` and/or `--s-wr-sz` may help; try setting all of them to `524288` or `1048576` or `4194304`
|
||||
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
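for example, a read-heavy setup with its volumes on a NAS and the databases on a local SSD might combine a few of these (paths below are made up; drop whatever doesn't apply to you):

```bash
python3 copyparty-sfx.py -q \
  --hist /ssd/copyparty-hist \
  --dedup --safe-dedup 1 \
  --no-dirsz \
  --no-hash . \
  -e2d -v /mnt/nas/pub:pub:r
```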
|
||||
@@ -1864,7 +2185,7 @@ when uploading files,
|
||||
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
|
||||
* optionally you can switch to the lightweight potato ui by clicking the `[🥔]`
|
||||
* switching to another browser-tab also works, the favicon will update every 10 seconds in that case
|
||||
* unlikely to be a problem, but can happen when uploding many small files, or your internet is too fast, or PC too slow
|
||||
* unlikely to be a problem, but can happen when uploading many small files, or your internet is too fast, or PC too slow
|
||||
|
||||
|
||||
# security
|
||||
@@ -1877,6 +2198,7 @@ some notes on hardening
|
||||
* cors doesn't work right otherwise
|
||||
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml`
|
||||
* this returns html documents as plaintext, and also disables markdown rendering
|
||||
* when running behind a reverse-proxy, listen on a unix-socket for tighter access control (and more performance); see [reverse-proxy](#reverse-proxy) or `--help-bind`
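for example, an anonymous upload-only volume with `nohtml`, with copyparty listening on a unix-socket behind the reverse-proxy (group-name and paths below are made up):

```bash
python3 copyparty-sfx.py \
  -i unix:770:www:/dev/shm/party.sock \
  -v srv/dropbox:dropbox:w:c,nohtml
```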
|
||||
|
||||
safety profiles:
|
||||
|
||||
@@ -1891,7 +2213,7 @@ safety profiles:
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
* however note if you edit one file it will also affect the other copies
|
||||
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--nih` removes the server hostname from directory listings
|
||||
* `-nih` removes the server hostname from directory listings
|
||||
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
* `--no-dav` disables webdav support
|
||||
@@ -1911,7 +2233,7 @@ other misc notes:
|
||||
|
||||
behavior that might be unexpected
|
||||
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `PREADME.md` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users can submit `<script>`s which autorun (in a sandbox) for other visitors in a few ways;
|
||||
* uploading a `README.md` -- avoid with `--no-readme`
|
||||
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
|
||||
@@ -1954,6 +2276,8 @@ volflag `dky` disables the actual key-check, meaning anyone can see the contents
|
||||
|
||||
volflag `dks` lets people enter subfolders as well, and also enables download-as-zip/tar
|
||||
|
||||
if you enable dirkeys, it is probably a good idea to enable filekeys too, otherwise it will be impossible to hotlink files from a folder which was accessed using a dirkey
|
||||
|
||||
dirkeys are generated based on another salt (`--dk-salt`) + filesystem-path and have a few limitations:
|
||||
* the key does not change if the contents of the folder are modified
|
||||
* if you need a new dirkey, either change the salt or rename the folder
|
||||
@@ -1987,13 +2311,13 @@ if [cfssl](https://github.com/cloudflare/cfssl/releases/latest) is installed, co
|
||||
|
||||
## client crashes
|
||||
|
||||
### frefox wsod
|
||||
### firefox wsod
|
||||
|
||||
firefox 87 can crash during uploads -- the entire browser goes, including all other browser tabs, everything turns white
|
||||
|
||||
however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads:
|
||||
|
||||
* get a complete list of all uploads, organized by statuts (ok / no-good / busy / queued):
|
||||
* get a complete list of all uploads, organized by status (ok / no-good / busy / queued):
|
||||
`var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs`
|
||||
|
||||
* list of filenames which failed:
|
||||
@@ -2018,13 +2342,13 @@ mandatory deps:
|
||||
|
||||
install these to enable bonus features
|
||||
|
||||
enable hashed passwords in config: `argon2-cffi`
|
||||
enable [hashed passwords](#password-hashing) in config: `argon2-cffi`
|
||||
|
||||
enable ftp-server:
|
||||
enable [ftp-server](#ftp-server):
|
||||
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
|
||||
* with TLS encryption, `pyftpdlib pyopenssl`
|
||||
|
||||
enable music tags:
|
||||
enable [music tags](#metadata-from-audio-files):
|
||||
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||
|
||||
@@ -2035,12 +2359,44 @@ enable [thumbnails](#thumbnails) of...
|
||||
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
|
||||
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
|
||||
|
||||
enable [smb](#smb-server) support (**not** recommended):
|
||||
* `impacket==0.11.0`
|
||||
enable sending [zeromq messages](#zeromq) from event-hooks: `pyzmq`
|
||||
|
||||
enable [smb](#smb-server) support (**not** recommended): `impacket==0.12.0`
|
||||
|
||||
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
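to grab a typical bundle of the optional deps mentioned above in one go (pick and choose -- these are just the pypi package names listed in this section):

```bash
python3 -m pip install --user -U \
  argon2-cffi pyftpdlib pyopenssl mutagen pillow pyvips pyzmq
```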
|
||||
|
||||
|
||||
### dependency chickenbits
|
||||
|
||||
prevent loading an optional dependency, for example if:
|
||||
|
||||
* you have an incompatible version installed and it causes problems
|
||||
* you just don't want copyparty to use it, maybe to save ram
|
||||
|
||||
set any of the following environment variables to disable its associated optional feature,
|
||||
|
||||
| env-var | what it does |
|
||||
| -------------------- | ------------ |
|
||||
| `PRTY_NO_ARGON2` | disable argon2-cffi password hashing |
|
||||
| `PRTY_NO_CFSSL` | never attempt to generate self-signed certificates using [cfssl](https://github.com/cloudflare/cfssl) |
|
||||
| `PRTY_NO_FFMPEG` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips |
|
||||
| `PRTY_NO_FFPROBE` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips, **metadata-scanning** must be handled by mutagen |
|
||||
| `PRTY_NO_MUTAGEN` | do not use [mutagen](https://pypi.org/project/mutagen/) for reading metadata from media files; will fallback to ffprobe |
|
||||
| `PRTY_NO_PIL` | disable all [Pillow](https://pypi.org/project/pillow/)-based thumbnail support; will fallback to libvips or ffmpeg |
|
||||
| `PRTY_NO_PILF` | disable Pillow `ImageFont` text rendering, used for folder thumbnails |
|
||||
| `PRTY_NO_PIL_AVIF` | disable 3rd-party Pillow plugin for [AVIF support](https://pypi.org/project/pillow-avif-plugin/) |
|
||||
| `PRTY_NO_PIL_HEIF` | disable 3rd-party Pillow plugin for [HEIF support](https://pypi.org/project/pyheif-pillow-opener/) |
|
||||
| `PRTY_NO_PIL_WEBP` | disable use of native webp support in Pillow |
|
||||
| `PRTY_NO_PSUTIL` | do not use [psutil](https://pypi.org/project/psutil/) for reaping stuck hooks and plugins on Windows |
|
||||
| `PRTY_NO_VIPS` | disable all [libvips](https://pypi.org/project/pyvips/)-based thumbnail support; will fallback to Pillow or ffmpeg |
|
||||
|
||||
example: `PRTY_NO_PIL=1 python3 copyparty-sfx.py`
|
||||
|
||||
* `PRTY_NO_PIL` saves ram
|
||||
* `PRTY_NO_VIPS` saves ram and startup time
|
||||
* python2.7 on windows: `PRTY_NO_FFMPEG` + `PRTY_NO_FFPROBE` saves startup time
|
||||
|
||||
|
||||
## optional gpl stuff
|
||||
|
||||
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
|
||||
@@ -2079,7 +2435,7 @@ then again, if you are already into downloading shady binaries from the internet
|
||||
|
||||
## zipapp
|
||||
|
||||
another emergency alternative, [copyparty.pyz](https://github.com/9001/copyparty/releases/latest/download/copyparty.pyz) has less features, requires python 3.7 or newer, worse compression, and more importantly is unable to benefit from more recent versions of jinja2 and such (which makes it less secure)... lots of drawbacks with this one really -- but it *may* just work if the regular sfx fails to start because the computer is messed up in certain funky ways, so it's worth a shot if all else fails
|
||||
another emergency alternative, [copyparty.pyz](https://github.com/9001/copyparty/releases/latest/download/copyparty.pyz) has fewer features, is slow, requires python 3.7 or newer, has worse compression, and more importantly is unable to benefit from more recent versions of jinja2 and such (which makes it less secure)... lots of drawbacks with this one really -- but it does not unpack any temporary files to disk, so it *may* just work if the regular sfx fails to start because the computer is messed up in certain funky ways, making it worth a shot if all else fails
|
||||
|
||||
run it by doubleclicking it, or try typing `python copyparty.pyz` in your terminal/console/commandline/telex if that fails
|
||||
|
||||
|
||||
@@ -15,22 +15,18 @@ produces a chronological list of all uploads by collecting info from up2k databa
|
||||
# [`partyfuse.py`](partyfuse.py)
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||
* **supports Linux** -- expect `117 MiB/s` sequential read
|
||||
* **supports Linux** -- expect `600 MiB/s` sequential read
|
||||
* **supports macos** -- expect `85 MiB/s` sequential read
|
||||
|
||||
filecache is default-on for windows and macos;
|
||||
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
|
||||
* windows readsize varies by software; explorer=1M, pv=32k
|
||||
|
||||
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
|
||||
|
||||
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
|
||||
and consider using [../docs/rclone.md](../docs/rclone.md) instead; usually a bit faster, especially on windows
|
||||
|
||||
|
||||
## to run this on windows:
|
||||
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
|
||||
* [x] add python 3.x to PATH (it asks during install)
|
||||
* `python -m pip install --user fusepy`
|
||||
* `python -m pip install --user fusepy` (or grab a copy of `fuse.py` from the `connect` page on your copyparty, and keep it in the same folder)
|
||||
* `python ./partyfuse.py n: http://192.168.1.69:3923/`
|
||||
|
||||
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
|
||||
|
||||
@@ -2,7 +2,7 @@ standalone programs which are executed by copyparty when an event happens (uploa
|
||||
|
||||
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
||||
|
||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
|
||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbc/xac/xbr/xar/xbd/xad/xban)
|
||||
|
||||
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
||||
|
||||
@@ -13,6 +13,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
|
||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
|
||||
|
||||
|
||||
# upload batches
|
||||
@@ -23,9 +24,11 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin
|
||||
|
||||
# before upload
|
||||
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
||||
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension
|
||||
|
||||
|
||||
# on message
|
||||
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
||||
* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
|
||||
* [usb-eject.py](usb-eject.py) adds web-UI buttons to safe-remove usb flashdrives shared through copyparty
|
||||
* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
|
||||
|
||||
140
bin/hooks/into-the-cache-it-goes.py
Normal file
140
bin/hooks/into-the-cache-it-goes.py
Normal file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import platform
|
||||
import subprocess as sp
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
_ = r"""
|
||||
try to avoid race conditions in caching proxies
|
||||
(primarily cloudflare, but probably others too)
|
||||
by means of the most obvious solution possible:
|
||||
|
||||
just as each file has finished uploading, use
|
||||
the server's external URL to download the file
|
||||
so that it ends up in the cache, warm and snug
|
||||
|
||||
this intentionally delays the upload response
|
||||
as it waits for the file to finish downloading
|
||||
before copyparty is allowed to return the URL
|
||||
|
||||
NOTE: you must edit this script before use,
|
||||
replacing https://example.com with your URL
|
||||
|
||||
NOTE: if the files are only accessible with a
|
||||
password and/or filekey, you must also add
|
||||
a cromulent password in the PASSWORD field
|
||||
|
||||
NOTE: needs either wget, curl, or "requests":
|
||||
python3 -m pip install --user -U requests
|
||||
|
||||
|
||||
example usage as global config:
|
||||
--xau j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
t10 = abort download and continue if it takes longer than 10sec
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xau: j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
"""
|
||||
|
||||
|
||||
# replace this with your site's external URL
|
||||
# (including the :portnumber if necessary)
|
||||
SITE_URL = "https://example.com"
|
||||
|
||||
# if downloading is protected by passwords or filekeys,
|
||||
# specify a valid password between the quotes below:
|
||||
PASSWORD = ""
|
||||
|
||||
# if file is larger than this, skip download
|
||||
MAX_MEGABYTES = 8
|
||||
|
||||
# =============== END OF CONFIG ===============
|
||||
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
|
||||
|
||||
def main():
|
||||
fun = download_with_python
|
||||
if shutil.which("curl"):
|
||||
fun = download_with_curl
|
||||
elif shutil.which("wget"):
|
||||
fun = download_with_wget
|
||||
|
||||
inf = json.loads(sys.argv[1])
|
||||
|
||||
if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
|
||||
print("[into-the-cache] file is too large; will not download")
|
||||
return
|
||||
|
||||
file_url = "/"
|
||||
if inf["vp"]:
|
||||
file_url += inf["vp"] + "/"
|
||||
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
|
||||
file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")
|
||||
|
||||
print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
|
||||
fun(file_url, PASSWORD.strip())
|
||||
|
||||
print("[into-the-cache] Download OK")
|
||||
|
||||
|
||||
def download_with_curl(url, pw):
|
||||
cmd = ["curl"]
|
||||
|
||||
if pw:
|
||||
cmd += ["-HPW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_wget(url, pw):
|
||||
cmd = ["wget", "-O"]
|
||||
|
||||
cmd += ["nul" if WINDOWS else "/dev/null"]
|
||||
|
||||
if pw:
|
||||
cmd += ["--header=PW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_python(url, pw):
|
||||
import requests
|
||||
|
||||
headers = {}
|
||||
if pw:
|
||||
headers["PW"] = pw
|
||||
|
||||
with requests.get(url, headers=headers, stream=True) as r:
|
||||
r.raise_for_status()
|
||||
for _ in r.iter_content(chunk_size=1024 * 256):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -23,17 +23,18 @@ because the keyword "anime" is in the DESTS config below
|
||||
needs python3
|
||||
|
||||
example usage as global config (not a good idea):
|
||||
python copyparty-sfx.py --xm f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
python copyparty-sfx.py --xm aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message (📟)
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
t60 = abort if qbittorrent has to think about it for more than 1 min
|
||||
|
||||
example usage as a volflag (per-volume config, much better):
|
||||
-v srv/qb:qb:A,ed:c,xm=f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-v srv/qb:qb:A,ed:c,xm=aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
|
||||
running this plugin on all messages with the params explained above)
|
||||
@@ -44,7 +45,7 @@ example usage as a volflag in a copyparty config file:
|
||||
accs:
|
||||
A: ed
|
||||
flags:
|
||||
xm: f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
xm: aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
the volflag examples only kick in if you send the torrent magnet
|
||||
while you're in the /qb folder (or any folder below there)
|
||||
|
||||
127
bin/hooks/reloc-by-ext.py
Normal file
127
bin/hooks/reloc-by-ext.py
Normal file
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
relocate/redirect incoming uploads according to file extension or name
|
||||
|
||||
example usage as global config:
|
||||
--xbu j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xbu: j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
note: this could also work as an xau hook (after-upload), but
|
||||
because it doesn't need to read the file contents it's better
|
||||
as xbu (before-upload) since that's safer / less buggy,
|
||||
and only xbu works with up2k (dragdrop into browser)
|
||||
"""
|
||||
|
||||
|
||||
PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
|
||||
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
|
||||
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
vdir, fn = os.path.split(inf["vp"])
|
||||
|
||||
try:
|
||||
fn, ext = fn.rsplit(".", 1)
|
||||
except:
|
||||
# no file extension; pretend it's "bin"
|
||||
ext = "bin"
|
||||
|
||||
ext = ext.lower()
|
||||
|
||||
# this function must end by printing the action to perform;
|
||||
# that's handled by the print(json.dumps(... at the bottom
|
||||
#
|
||||
# the action can contain the following keys:
|
||||
# "vp" is the folder URL to move the upload to,
|
||||
# "ap" is the filesystem-path to move it to (but "vp" is safer),
|
||||
# "fn" overrides the final filename to use
|
||||
|
||||
##
|
||||
## some example actions to take; pick one by
|
||||
## selecting it inside the print at the end:
|
||||
##
|
||||
|
||||
# create a subfolder named after the filetype and move it into there
|
||||
into_subfolder = {"vp": ext}
|
||||
|
||||
# move it into a toplevel folder named after the filetype
|
||||
into_toplevel = {"vp": "/" + ext}
|
||||
|
||||
# move it into a filetype-named folder next to the target folder
|
||||
into_sibling = {"vp": "../" + ext}
|
||||
|
||||
# move images into "/just/pics", vids into "/just/vids",
|
||||
# music into "/just/tunes", and anything else as-is
|
||||
if ext in PICS.split():
|
||||
by_category = {"vp": "/just/pics"}
|
||||
elif ext in VIDS.split():
|
||||
by_category = {"vp": "/just/vids"}
|
||||
elif ext in MUSIC.split():
|
||||
by_category = {"vp": "/just/tunes"}
|
||||
else:
|
||||
by_category = {} # no action
|
||||
|
||||
# now choose the default effect to apply; can be any of these:
|
||||
# into_subfolder into_toplevel into_sibling by_category
|
||||
effect = {"vp": "/junk"}
|
||||
|
||||
##
|
||||
## but we can keep going, adding more specific rules
|
||||
## which can take precedence, replacing the fallback
|
||||
## effect we just specified:
|
||||
##
|
||||
|
||||
fn = fn.lower() # lowercase filename to make this easier
|
||||
|
||||
if "screenshot" in fn:
|
||||
effect = {"vp": "/ss"}
|
||||
if "mpv_" in fn:
|
||||
effect = {"vp": "/anishots"}
|
||||
elif "debian" in fn or "biebian" in fn:
|
||||
effect = {"vp": "/linux-ISOs"}
|
||||
elif re.search(r"ep(isode |\.)?[0-9]", fn):
|
||||
effect = {"vp": "/podcasts"}
|
||||
|
||||
# regex lets you grab a part of the matching
|
||||
# text and use that in the upload path:
|
||||
m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
|
||||
if m:
|
||||
# the regex matched; use "anime-op" or "anime-ed"
|
||||
effect = {"vp": "/anime-" + m[1]}
|
||||
|
||||
# aaand DO IT
|
||||
print(json.dumps({"reloc": effect}))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
54
bin/hooks/usb-eject.js
Normal file
54
bin/hooks/usb-eject.js
Normal file
@@ -0,0 +1,54 @@
|
||||
// see usb-eject.py for usage
|
||||
|
||||
function usbclick() {
|
||||
QS('#treeul a[href="/usb/"]').click();
|
||||
}
|
||||
|
||||
function eject_cb() {
|
||||
var t = this.responseText;
|
||||
if (t.indexOf('can be safely unplugged') < 0 && t.indexOf('Device can be removed') < 0)
|
||||
return toast.err(30, 'usb eject failed:\n\n' + t);
|
||||
|
||||
toast.ok(5, esc(t.replace(/ - /g, '\n\n')));
|
||||
usbclick(); setTimeout(usbclick, 10);
|
||||
};
|
||||
|
||||
function add_eject_2(a) {
|
||||
var aw = a.getAttribute('href').split(/\//g);
|
||||
if (aw.length != 4 || aw[3])
|
||||
return;
|
||||
|
||||
var v = aw[2],
|
||||
k = 'umount_' + v;
|
||||
|
||||
qsr('#' + k);
|
||||
a.appendChild(mknod('span', k, '⏏'), a);
|
||||
|
||||
var o = ebi(k);
|
||||
o.style.cssText = 'position:absolute; right:1em; margin-top:-.2em; font-size:1.3em';
|
||||
o.onclick = function (e) {
|
||||
ev(e);
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', get_evpath(), true);
|
||||
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');
|
||||
xhr.send('msg=' + uricom_enc(':usb-eject:' + v + ':'));
|
||||
xhr.onload = xhr.onerror = eject_cb;
|
||||
toast.inf(10, "ejecting " + v + "...");
|
||||
};
|
||||
};
|
||||
|
||||
function add_eject() {
|
||||
for (var a of QSA('#treeul a[href^="/usb/"]'))
|
||||
add_eject_2(a);
|
||||
};
|
||||
|
||||
(function() {
|
||||
var f0 = treectl.rendertree;
|
||||
treectl.rendertree = function (res, ts, top0, dst, rst) {
|
||||
var ret = f0(res, ts, top0, dst, rst);
|
||||
add_eject();
|
||||
return ret;
|
||||
};
|
||||
})();
|
||||
|
||||
setTimeout(add_eject, 50);
|
||||
49
bin/hooks/usb-eject.py
Normal file
49
bin/hooks/usb-eject.py
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import stat
|
||||
import subprocess as sp
|
||||
import sys
|
||||
|
||||
|
||||
"""
|
||||
if you've found yourself using copyparty to serve flashdrives on a LAN
|
||||
and your only wish is that the web-UI had a button to unmount / safely
|
||||
remove those flashdrives, then boy howdy are you in the right place :D
|
||||
|
||||
put usb-eject.js in the webroot (or somewhere else http-accessible)
|
||||
then run copyparty with these args:
|
||||
|
||||
-v /run/media/ed:/usb:A:c,hist=/tmp/junk
|
||||
--xm=c1,bin/hooks/usb-eject.py
|
||||
--js-browser=/usb-eject.js
|
||||
|
||||
which does the following respectively,
|
||||
|
||||
* share all of /run/media/ed as /usb with admin for everyone
|
||||
and put the histpath somewhere it won't cause trouble
|
||||
* run the usb-eject hook with stdout redirect to the web-ui
|
||||
* add the complementary usb-eject.js to the browser
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
label = sys.argv[1].split(":usb-eject:")[1].split(":")[0]
|
||||
mp = "/run/media/ed/" + label
|
||||
# print("ejecting [%s]... " % (mp,), end="")
|
||||
mp = os.path.abspath(os.path.realpath(mp.encode("utf-8")))
|
||||
st = os.lstat(mp)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise Exception("not a regular directory")
|
||||
|
||||
cmd = [b"gio", b"mount", b"-e", mp]
|
||||
print(sp.check_output(cmd).decode("utf-8", "replace").strip())
|
||||
|
||||
except Exception as ex:
|
||||
print("unmount failed: %r" % (ex,))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -12,18 +12,19 @@ application/x-www-form-urlencoded (for example using the
|
||||
📟 message-to-server-log in the web-ui)
|
||||
|
||||
example usage as global config:
|
||||
--xm f,j,t3600,bin/hooks/wget.py
|
||||
--xm aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message-to-server-log
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
c3 = mute all output
|
||||
t3600 = timeout and abort download after 1 hour
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=aw,f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
@@ -36,7 +37,7 @@ example usage as a volflag in a copyparty config file:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xm: f,j,t3600,bin/hooks/wget.py
|
||||
xm: aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
the volflag examples only kick in if you send the message
|
||||
while you're in the /inc folder (or any folder below there)
|
||||
|
||||
@@ -31,6 +31,9 @@ plugins in this section should only be used with appropriate precautions:
|
||||
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
||||
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
||||
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
||||
* [kamelåså](https://github.com/steinuil/kameloso) is a much better (and MUCH safer) alternative to this plugin
|
||||
* powered by [chicken-curry-banana-pineapple-peanut pizza](https://a.ocv.me/pub/g/i/2025/01/298437ce-8351-4c8c-861c-fa131d217999.jpg?cache) so you know it's good
|
||||
* and, unlike this plugin, kamelåså even has windows support (nice)
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
@@ -6,6 +6,11 @@ WARNING -- DANGEROUS PLUGIN --
|
||||
running this plugin, they can execute malware on your machine
|
||||
so please keep this on a LAN and protect it with a password
|
||||
|
||||
here is a MUCH BETTER ALTERNATIVE (which also works on Windows):
|
||||
https://github.com/steinuil/kameloso
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
|
||||
676
bin/partyfuse.py
676
bin/partyfuse.py
File diff suppressed because it is too large
905
bin/u2c.py
905
bin/u2c.py
File diff suppressed because it is too large
76
bin/zmq-recv.py
Executable file
76
bin/zmq-recv.py
Executable file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import zmq
|
||||
|
||||
"""
|
||||
zmq-recv.py: demo zmq receiver
|
||||
2025-01-22, v1.0, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/zmq-recv.py
|
||||
|
||||
basic zmq-server to receive events from copyparty; try one of
|
||||
the below and then "send a message to serverlog" in the web-ui:
|
||||
|
||||
1) dumb fire-and-forget to any and all listeners;
|
||||
run this script with "sub" and run copyparty with this:
|
||||
--xm zmq:pub:tcp://*:5556
|
||||
|
||||
2) one lucky listener gets the message, blocks if no listeners:
|
||||
run this script with "pull" and run copyparty with this:
|
||||
--xm t3,zmq:push:tcp://*:5557
|
||||
|
||||
3) blocking syn/ack mode, client must ack each message;
|
||||
run this script with "rep" and run copyparty with this:
|
||||
--xm t3,zmq:req:tcp://localhost:5555
|
||||
|
||||
note: to conditionally block uploads based on message contents,
|
||||
use rep_server to answer with "return 1" and run copyparty with
|
||||
--xau t3,c,zmq:req:tcp://localhost:5555
|
||||
"""
|
||||
|
||||
|
||||
ctx = zmq.Context()
|
||||
|
||||
|
||||
def sub_server():
|
||||
# PUB/SUB allows any number of servers/clients, and
|
||||
# messages are fire-and-forget
|
||||
sck = ctx.socket(zmq.SUB)
|
||||
sck.connect("tcp://localhost:5556")
|
||||
sck.setsockopt_string(zmq.SUBSCRIBE, "")
|
||||
while True:
|
||||
print("copyparty says %r" % (sck.recv_string(),))
|
||||
|
||||
|
||||
def pull_server():
|
||||
# PUSH/PULL allows any number of servers/clients, and
|
||||
# each message is sent to exactly one PULL client
|
||||
sck = ctx.socket(zmq.PULL)
|
||||
sck.connect("tcp://localhost:5557")
|
||||
while True:
|
||||
print("copyparty says %r" % (sck.recv_string(),))
|
||||
|
||||
|
||||
def rep_server():
|
||||
# REP/REQ is a server/client pair where each message must be
|
||||
# acked by the other before another message can be sent, so
|
||||
# copyparty will do a blocking-wait for the ack
|
||||
sck = ctx.socket(zmq.REP)
|
||||
sck.bind("tcp://*:5555")
|
||||
while True:
|
||||
print("copyparty says %r" % (sck.recv_string(),))
|
||||
reply = b"thx"
|
||||
# reply = b"return 1" # non-zero to block an upload
|
||||
sck.send(reply)
|
||||
|
||||
|
||||
mode = sys.argv[1].lower() if len(sys.argv) > 1 else ""
|
||||
|
||||
if mode == "sub":
|
||||
sub_server()
|
||||
elif mode == "pull":
|
||||
pull_server()
|
||||
elif mode == "rep":
|
||||
rep_server()
|
||||
else:
|
||||
print("specify mode as first argument: SUB | PULL | REP")
|
||||
@@ -12,13 +12,21 @@
|
||||
* assumes the webserver and copyparty are running on the same server/IP
|
||||
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
||||
|
||||
### [`sharex.sxcu`](sharex.sxcu)
|
||||
* sharex config file to upload screenshots and grab the URL
|
||||
### [`sharex.sxcu`](sharex.sxcu) - Windows screenshot uploader
|
||||
* [sharex](https://getsharex.com/) config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
* the `act:bput` thing is optional since copyparty v1.9.29
|
||||
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
||||
|
||||
### [`ishare.iscu`](ishare.iscu) - MacOS screenshot uploader
|
||||
* [ishare](https://isharemac.app/) config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
|
||||
### [`flameshot.sh`](flameshot.sh) - Linux screenshot uploader
|
||||
* takes a screenshot with [flameshot](https://flameshot.org/) on Linux, uploads it, and writes the URL to clipboard
|
||||
|
||||
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
||||
* browser integration, kind of? custom rightclick actions and stuff
|
||||
* rightclick a pic and send it to copyparty straight from your browser
|
||||
@@ -50,5 +58,10 @@ init-scripts to start copyparty as a service
|
||||
* [`openrc/copyparty`](openrc/copyparty)
|
||||
|
||||
# Reverse-proxy
|
||||
copyparty has basic support for running behind another webserver
|
||||
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
|
||||
copyparty supports running behind another webserver
|
||||
* [`apache/copyparty.conf`](apache/copyparty.conf)
|
||||
* [`haproxy/copyparty.conf`](haproxy/copyparty.conf)
|
||||
* [`lighttpd/subdomain.conf`](lighttpd/subdomain.conf)
|
||||
* [`lighttpd/subpath.conf`](lighttpd/subpath.conf)
|
||||
* [`nginx/copyparty.conf`](nginx/copyparty.conf) -- recommended
|
||||
* [`traefik/copyparty.yaml`](traefik/copyparty.yaml)
|
||||
|
||||
@@ -1,14 +1,29 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
# if you would like to use unix-sockets (recommended),
|
||||
# you must run copyparty with one of the following:
|
||||
#
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# if you are doing location-based proxying (such as `/stuff` below)
|
||||
# you must run copyparty with --rp-loc=stuff
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
|
||||
LoadModule proxy_module modules/mod_proxy.so
|
||||
ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
||||
# do not specify ProxyPassReverse
|
||||
|
||||
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||
# NOTE: do not specify ProxyPassReverse
|
||||
|
||||
|
||||
##
|
||||
## then, enable one of the below:
|
||||
|
||||
# use subdomain proxying to unix-socket (best)
|
||||
ProxyPass "/" "unix:///dev/shm/party.sock|http://whatever/"
|
||||
|
||||
# use subdomain proxying to 127.0.0.1 (slower)
|
||||
#ProxyPass "/" "http://127.0.0.1:3923/"
|
||||
|
||||
# use subpath proxying to 127.0.0.1 (slow and maybe buggy)
|
||||
#ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
||||
|
||||
14
contrib/flameshot.sh
Executable file
14
contrib/flameshot.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# take a screenshot with flameshot and send it to copyparty;
|
||||
# the image url will be placed on your clipboard
|
||||
|
||||
password=wark
|
||||
url=https://a.ocv.me/up/
|
||||
filename=$(date +%Y-%m%d-%H%M%S).png
|
||||
|
||||
flameshot gui -s -r |
|
||||
curl -T- $url$filename?pw=$password |
|
||||
tail -n 1 |
|
||||
xsel -ib
|
||||
24
contrib/haproxy/copyparty.conf
Normal file
24
contrib/haproxy/copyparty.conf
Normal file
@@ -0,0 +1,24 @@
|
||||
# this config is essentially two separate examples;
|
||||
#
|
||||
# foo1 connects to copyparty using tcp, and
|
||||
# foo2 uses unix-sockets for 27% higher performance
|
||||
#
|
||||
# to use foo2 you must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
|
||||
defaults
|
||||
mode http
|
||||
option forwardfor
|
||||
timeout connect 1s
|
||||
timeout client 610s
|
||||
timeout server 610s
|
||||
|
||||
listen foo1
|
||||
bind *:8081
|
||||
server srv1 127.0.0.1:3923 maxconn 512
|
||||
|
||||
listen foo2
|
||||
bind *:8082
|
||||
server srv1 /dev/shm/party.sock maxconn 512
|
||||
10
contrib/ishare.iscu
Normal file
10
contrib/ishare.iscu
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"Name": "copyparty",
|
||||
"RequestURL": "http://127.0.0.1:3923/screenshots/",
|
||||
"Headers": {
|
||||
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE",
|
||||
"accept": "json"
|
||||
},
|
||||
"FileFormName": "f",
|
||||
"ResponseURL": "{{fileurl}}"
|
||||
}
|
||||
24
contrib/lighttpd/subdomain.conf
Normal file
24
contrib/lighttpd/subdomain.conf
Normal file
@@ -0,0 +1,24 @@
|
||||
# example usage for benchmarking:
|
||||
#
|
||||
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subdomain.conf
|
||||
#
|
||||
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||
# or a unix-socket, but unix-sockets are 37% faster because
|
||||
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||
#
|
||||
# this means we must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
server.port = 80
|
||||
server.document-root = "/var/empty"
|
||||
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||
server.modules = ( "mod_proxy" )
|
||||
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||
|
||||
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
||||
31
contrib/lighttpd/subpath.conf
Normal file
31
contrib/lighttpd/subpath.conf
Normal file
@@ -0,0 +1,31 @@
|
||||
# example usage for benchmarking:
|
||||
#
|
||||
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subpath.conf
|
||||
#
|
||||
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||
# or a unix-socket, but unix-sockets are 37% faster because
|
||||
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||
#
|
||||
# this means we must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# also since this example proxies a subpath instead of the
|
||||
# recommended subdomain-proxying, we must also specify this:
|
||||
#
|
||||
# --rp-loc files
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
server.port = 80
|
||||
server.document-root = "/var/empty"
|
||||
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||
server.modules = ( "mod_proxy" )
|
||||
$HTTP["url"] =~ "^/files" {
|
||||
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||
|
||||
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
||||
}
|
||||
@@ -1,14 +1,10 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# -nc must match or exceed the webserver's max number of concurrent clients;
|
||||
# copyparty default is 1024 if OS permits it (see "max clients:" on startup),
|
||||
# look for "max clients:" when starting copyparty, as nginx should
|
||||
# not accept more consecutive clients than what copyparty is able to;
|
||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||
#
|
||||
# you may also consider adding -j0 for CPU-intensive configurations
|
||||
# (5'000 requests per second, or 20gbps upload/download in parallel)
|
||||
# rarely, in some extreme usecases, it can be good to add -j0
|
||||
# (40'000 requests per second, or 20gbps upload/download in parallel)
|
||||
# but this is usually counterproductive and slightly buggy
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
#
|
||||
@@ -20,10 +16,33 @@
|
||||
#
|
||||
# and then enable it below by uncommenting the cloudflare-only.conf line
|
||||
|
||||
upstream cpp {
|
||||
|
||||
upstream cpp_tcp {
|
||||
# alternative 1: connect to copyparty using tcp;
|
||||
# cpp_uds is slightly faster and more secure, but
|
||||
# cpp_tcp is easier to setup and "just works"
|
||||
# ...you should however restrict copyparty to only
|
||||
# accept connections from nginx by adding these args:
|
||||
# -i 127.0.0.1
|
||||
|
||||
server 127.0.0.1:3923 fail_timeout=1s;
|
||||
keepalive 1;
|
||||
}
|
||||
|
||||
|
||||
upstream cpp_uds {
|
||||
# alternative 2: unix-socket, aka. "unix domain socket";
|
||||
# 5-10% faster, and better isolation from other software,
|
||||
# but there must be at least one unix-group which both
|
||||
# nginx and copyparty is a member of; if that group is
|
||||
# "www" then run copyparty with the following args:
|
||||
# -i unix:770:www:/dev/shm/party.sock
|
||||
|
||||
server unix:/dev/shm/party.sock fail_timeout=1s;
|
||||
keepalive 1;
|
||||
}
|
||||
|
||||
|
||||
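If the cpp_uds upstream refuses connections, the usual culprit is the socket's mode or group; a small sketch for checking it, assuming copyparty was started with -i unix:770:www:/dev/shm/party.sock as in the comment above:

import grp, os, stat
st = os.stat("/dev/shm/party.sock")
print("mode: ", oct(stat.S_IMODE(st.st_mode)))    # expect 0o770
print("group:", grp.getgrgid(st.st_gid).gr_name)  # expect "www"; the nginx user must be a member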
server {
listen 443 ssl;
listen [::]:443 ssl;
@@ -34,13 +53,18 @@ server {
#include /etc/nginx/cloudflare-only.conf;

location / {
proxy_pass http://cpp;
# recommendation: replace cpp_tcp with cpp_uds below
proxy_pass http://cpp_tcp;
proxy_redirect off;
# disable buffering (next 4 lines)
proxy_http_version 1.1;
client_max_body_size 0;
proxy_buffering off;
proxy_request_buffering off;
# improve download speed from 600 to 1500 MiB/s
proxy_buffers 32 8k;
proxy_buffer_size 16k;
proxy_busy_buffers_size 24k;

proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
@@ -52,6 +76,7 @@ server {
}
}

# default client_max_body_size (1M) blocks uploads larger than 256 MiB
client_max_body_size 1024M;
client_header_timeout 610m;
@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.13.3"
pkgver="1.16.10"
pkgrel=1
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
arch=("any")
@@ -16,12 +16,13 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
"libkeyfinder-git: detection of musical keys"
"qm-vamp-plugins: BPM detection"
"python-pyopenssl: ftps functionality"
"python-argon2_cffi: hashed passwords in config"
"python-pyzmq: send zeromq messages from event-hooks"
"python-argon2-cffi: hashed passwords in config"
"python-impacket-git: smb support (bad idea)"
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("35845d6335fba4a13d153d7062f365dad529202bc865b93267d899e19a0a6da3")
sha256sums=("a33f5df985f5c6e8da717e913eb070eb488263759ee1a2668b3b26837e29a944")

build() {
cd "${srcdir}/${pkgname}-${pkgver}"
@@ -1,4 +1,4 @@
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, pyzmq, ffmpeg, mutagen,

# use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true,
@@ -21,6 +21,9 @@ withMediaProcessing ? true,
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
withBasicAudioMetadata ? false,

# send ZeroMQ messages from event-hooks
withZeroMQ ? true,

# enable FTPS support in the FTP server
withFTPS ? false,

@@ -43,6 +46,7 @@ let
++ lib.optional withMediaProcessing ffmpeg
++ lib.optional withBasicAudioMetadata mutagen
++ lib.optional withHashedPasswords argon2-cffi
++ lib.optional withZeroMQ pyzmq
);
in stdenv.mkDerivation {
pname = "copyparty";
@@ -1,5 +1,5 @@
{
"url": "https://github.com/9001/copyparty/releases/download/v1.13.3/copyparty-sfx.py",
"version": "1.13.3",
"hash": "sha256-LtbdioAYtWGC4+5frzUjXwm0thubkyMhc86YU/rXIuo="
"url": "https://github.com/9001/copyparty/releases/download/v1.16.10/copyparty-sfx.py",
"version": "1.16.10",
"hash": "sha256-4CK/491U/fdor+McO94nYsL39g73WQ7i8loUYVrbiZA="
}
@@ -20,6 +20,13 @@ point `--js-browser` to one of these by URL:


## example any-js
point `--js-browser` and/or `--js-other` to one of these by URL:

* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)


## example browser-css
point `--css-browser` to one of these by URL:

93
contrib/plugins/banner.js
Normal file
@@ -0,0 +1,93 @@
(function() {

// usage: copy this to '.banner.js' in your webroot,
// and run copyparty with the following arguments:
// --js-browser /.banner.js --js-other /.banner.js


// had to pick the most chuuni one as the default
var bannertext = '' +
'<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
'<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
'<ul>' +
'<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
'<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
'<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
'<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
'<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
'</ul>';


// fancy div to insert into pages
function bannerdiv(border) {
var ret = mknod('div', null, bannertext);
if (border)
ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
return ret;
}


// keep all of these false and then selectively enable them in the if-blocks below
var show_msgbox = false,
login_top = false,
top = false,
bottom = false,
top_bordered = false,
bottom_bordered = false;

if (QS("h1#cc") && QS("a#k")) {
// this is the controlpanel
// (you probably want to keep just one of these enabled)
show_msgbox = true;
login_top = true;
bottom = true;
}
else if (ebi("swin") && ebi("smac")) {
// this is the connect-page, same deal here
show_msgbox = true;
top_bordered = true;
bottom_bordered = true;
}
else if (ebi("op_cfg") || ebi("div#mw") ) {
// we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
// fragile pages which break if you do something too fancy
show_msgbox = true;
}


// shows a fullscreen messagebox; works on all pages
if (show_msgbox) {
var now = Math.floor(Date.now() / 1000),
last_shown = sread("bannerts") || 0;

// 60 * 60 * 17 = 17 hour cooldown
if (now - last_shown > 60 * 60 * 17) {
swrite("bannerts", now);
modal.confirm(bannertext, null, function () {
location = 'https://this-page-intentionally-left-blank.org/';
});
}
}

// show a message at the top of the page content; only works on the connect-page
if (top || top_bordered) {
var dst = ebi('wrap');
dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
}

// show a message on the page footer; only works on the controlpanel and connect-page
if (bottom || bottom_bordered) {
ebi('wrap').appendChild(bannerdiv(bottom_bordered));
}

// show a message on the top of the page; only works on the controlpanel
if (login_top) {
var dst = QS('h1');
dst.parentNode.insertBefore(bannerdiv(false), dst);
}

})();
116
contrib/themes/bsod.css
Normal file
@@ -0,0 +1,116 @@
|
||||
/* copy bsod.* into a folder named ".themes" in your webroot and then
|
||||
--themes=10 --theme=9 --css-browser=/.themes/bsod.css
|
||||
*/
|
||||
|
||||
html.ey {
|
||||
--w2: #3d7bbc;
|
||||
--w3: #5fcbec;
|
||||
|
||||
--fg: #fff;
|
||||
--fg-max: #fff;
|
||||
--fg-weak: var(--w3);
|
||||
|
||||
--bg: #2067b2;
|
||||
--bg-d3: var(--bg);
|
||||
--bg-d2: var(--w2);
|
||||
--bg-d1: var(--fg-weak);
|
||||
--bg-u2: var(--bg);
|
||||
--bg-u3: var(--bg);
|
||||
--bg-u5: var(--w2);
|
||||
|
||||
--tab-alt: var(--fg-weak);
|
||||
--row-alt: var(--w2);
|
||||
|
||||
--scroll: var(--w3);
|
||||
|
||||
--a: #fff;
|
||||
--a-b: #fff;
|
||||
--a-hil: #fff;
|
||||
--a-h-bg: var(--fg-weak);
|
||||
--a-dark: var(--a);
|
||||
--a-gray: var(--fg-weak);
|
||||
|
||||
--btn-fg: var(--a);
|
||||
--btn-bg: var(--w2);
|
||||
--btn-h-fg: var(--w2);
|
||||
--btn-1-fg: var(--bg);
|
||||
--btn-1-bg: var(--a);
|
||||
--txt-sh: a;
|
||||
--txt-bg: var(--w2);
|
||||
|
||||
--u2-b1-bg: var(--w2);
|
||||
--u2-b2-bg: var(--w2);
|
||||
--u2-txt-bg: var(--w2);
|
||||
--u2-tab-bg: a;
|
||||
--u2-tab-1-bg: var(--w2);
|
||||
|
||||
--sort-1: var(--a);
|
||||
--sort-1: var(--fg-weak);
|
||||
|
||||
--tree-bg: var(--bg);
|
||||
|
||||
--g-b1: a;
|
||||
--g-b2: a;
|
||||
--g-f-bg: var(--w2);
|
||||
|
||||
--f-sh1: 0.1;
|
||||
--f-sh2: 0.02;
|
||||
--f-sh3: 0.1;
|
||||
--f-h-b1: a;
|
||||
|
||||
--srv-1: var(--a);
|
||||
--srv-3: var(--a);
|
||||
|
||||
--mp-sh: a;
|
||||
}
|
||||
|
||||
html.ey {
|
||||
background: url('bsod.png') top 5em right 4.5em no-repeat fixed var(--bg);
|
||||
}
|
||||
html.ey body#b {
|
||||
background: var(--bg); /*sandbox*/
|
||||
}
|
||||
html.ey #ops {
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
border-radius: .3em;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
html.ey #ops a {
|
||||
text-shadow: 1px 1px 0 rgba(0,0,0,0.5);
|
||||
}
|
||||
html.ey .opbox {
|
||||
margin: 1.5em 0 0 0;
|
||||
}
|
||||
html.ey #tree {
|
||||
box-shadow: none;
|
||||
}
|
||||
html.ey #tt {
|
||||
border-color: var(--w2);
|
||||
background: var(--w2);
|
||||
}
|
||||
html.ey .mdo a {
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
}
|
||||
html.ey .mdo pre,
|
||||
html.ey .mdo code {
|
||||
color: #fff;
|
||||
background: var(--w2);
|
||||
border: none;
|
||||
}
|
||||
html.ey .mdo h1,
|
||||
html.ey .mdo h2 {
|
||||
background: none;
|
||||
border-color: var(--w2);
|
||||
}
|
||||
html.ey .mdo ul ul,
|
||||
html.ey .mdo ul ol,
|
||||
html.ey .mdo ol ul,
|
||||
html.ey .mdo ol ol {
|
||||
border-color: var(--w2);
|
||||
}
|
||||
html.ey .mdo p>em,
|
||||
html.ey .mdo li>em,
|
||||
html.ey .mdo td>em {
|
||||
color: #fd0;
|
||||
}
|
||||
BIN
contrib/themes/bsod.png
Normal file
Binary file not shown.
12
contrib/traefik/copyparty.yaml
Normal file
@@ -0,0 +1,12 @@
# ./traefik --experimental.fastproxy=true --entrypoints.web.address=:8080 --providers.file.filename=copyparty.yaml

http:
  services:
    service-cpp:
      loadBalancer:
        servers:
          - url: "http://127.0.0.1:3923/"
  routers:
    my-router:
      rule: "PathPrefix(`/`)"
      service: service-cpp
@@ -16,9 +16,10 @@ except:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if True:
|
||||
from typing import Any, Callable
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
PY2 = sys.version_info < (3,)
|
||||
PY36 = sys.version_info > (3, 6)
|
||||
if not PY2:
|
||||
unicode: Callable[[Any], str] = str
|
||||
else:
|
||||
@@ -50,6 +51,64 @@ try:
|
||||
except:
|
||||
CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
||||
|
||||
# all embedded resources to be retrievable over http
|
||||
zs = """
|
||||
web/a/partyfuse.py
|
||||
web/a/u2c.py
|
||||
web/a/webdav-cfg.bat
|
||||
web/baguettebox.js
|
||||
web/browser.css
|
||||
web/browser.html
|
||||
web/browser.js
|
||||
web/browser2.html
|
||||
web/cf.html
|
||||
web/copyparty.gif
|
||||
web/dd/2.png
|
||||
web/dd/3.png
|
||||
web/dd/4.png
|
||||
web/dd/5.png
|
||||
web/deps/busy.mp3
|
||||
web/deps/easymde.css
|
||||
web/deps/easymde.js
|
||||
web/deps/marked.js
|
||||
web/deps/fuse.py
|
||||
web/deps/mini-fa.css
|
||||
web/deps/mini-fa.woff
|
||||
web/deps/prism.css
|
||||
web/deps/prism.js
|
||||
web/deps/prismd.css
|
||||
web/deps/scp.woff2
|
||||
web/deps/sha512.ac.js
|
||||
web/deps/sha512.hw.js
|
||||
web/iiam.gif
|
||||
web/md.css
|
||||
web/md.html
|
||||
web/md.js
|
||||
web/md2.css
|
||||
web/md2.js
|
||||
web/mde.css
|
||||
web/mde.html
|
||||
web/mde.js
|
||||
web/msg.css
|
||||
web/msg.html
|
||||
web/rups.css
|
||||
web/rups.html
|
||||
web/rups.js
|
||||
web/shares.css
|
||||
web/shares.html
|
||||
web/shares.js
|
||||
web/splash.css
|
||||
web/splash.html
|
||||
web/splash.js
|
||||
web/svcs.html
|
||||
web/svcs.js
|
||||
web/ui.css
|
||||
web/up2k.js
|
||||
web/util.js
|
||||
web/w.hash.js
|
||||
"""
|
||||
RES = set(zs.strip().split("\n"))
|
||||
|
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self) -> None:
|
||||
|
||||
@@ -27,6 +27,7 @@ from .__init__ import (
|
||||
EXE,
|
||||
MACOS,
|
||||
PY2,
|
||||
PY36,
|
||||
VT100,
|
||||
WINDOWS,
|
||||
E,
|
||||
@@ -49,12 +50,19 @@ from .util import (
|
||||
PARTFTPY_VER,
|
||||
PY_DESC,
|
||||
PYFTPD_VER,
|
||||
RAM_AVAIL,
|
||||
RAM_TOTAL,
|
||||
SQLITE_VER,
|
||||
UNPLICATIONS,
|
||||
URL_BUG,
|
||||
URL_PRJ,
|
||||
Daemon,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
b64enc,
|
||||
dedent,
|
||||
has_resource,
|
||||
load_resource,
|
||||
min_ex,
|
||||
pybin,
|
||||
termsize,
|
||||
@@ -67,7 +75,13 @@ if True: # pylint: disable=using-constant-test
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_TLS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
@@ -198,7 +212,7 @@ def init_E(EE: EnvParams) -> None:
|
||||
errs.append("Using [%s] instead" % (p,))
|
||||
|
||||
if errs:
|
||||
print("WARNING: " + ". ".join(errs))
|
||||
warn(". ".join(errs))
|
||||
|
||||
return p # type: ignore
|
||||
except Exception as ex:
|
||||
@@ -228,7 +242,7 @@ def init_E(EE: EnvParams) -> None:
|
||||
raise
|
||||
|
||||
|
||||
def get_srvname() -> str:
|
||||
def get_srvname(verbose) -> str:
|
||||
try:
|
||||
ret: str = unicode(socket.gethostname()).split(".")[0]
|
||||
except:
|
||||
@@ -238,7 +252,8 @@ def get_srvname() -> str:
|
||||
return ret
|
||||
|
||||
fp = os.path.join(E.cfg, "name.txt")
|
||||
lprint("using hostname from {}\n".format(fp))
|
||||
if verbose:
|
||||
lprint("using hostname from {}\n".format(fp))
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().decode("utf-8", "replace").strip()
|
||||
@@ -260,7 +275,7 @@ def get_fk_salt() -> str:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
ret = b64enc(os.urandom(18))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
@@ -273,7 +288,7 @@ def get_dk_salt() -> str:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
ret = base64.b64encode(os.urandom(30))
|
||||
ret = b64enc(os.urandom(30))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
@@ -286,7 +301,7 @@ def get_ah_salt() -> str:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
ret = b64enc(os.urandom(18))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
@@ -316,21 +331,19 @@ def ensure_locale() -> None:
|
||||
|
||||
|
||||
def ensure_webdeps() -> None:
|
||||
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
|
||||
if os.path.exists(ap):
|
||||
if has_resource(E, "web/deps/mini-fa.woff"):
|
||||
return
|
||||
|
||||
warn(
|
||||
"""could not find webdeps;
|
||||
t = """could not find webdeps;
|
||||
if you are running the sfx, or exe, or pypi package, or docker image,
|
||||
then this is a bug! Please let me know so I can fix it, thanks :-)
|
||||
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
|
||||
%s
|
||||
|
||||
however, if you are a dev, or running copyparty from source, and you want
|
||||
full client functionality, you will need to build or obtain the webdeps:
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
|
||||
%s/blob/hovudstraum/docs/devnotes.md#building
|
||||
"""
|
||||
)
|
||||
warn(t % (URL_BUG, URL_PRJ))
|
||||
|
||||
|
||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
@@ -344,7 +357,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
assert ssl
|
||||
assert ssl # type: ignore # !rm
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
@@ -378,7 +391,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
assert ssl
|
||||
assert ssl # type: ignore # !rm
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -491,27 +504,75 @@ def disable_quickedit() -> None:
|
||||
|
||||
|
||||
def sfx_tpoke(top: str):
|
||||
files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
|
||||
if os.environ.get("PRTY_NO_TPOKE"):
|
||||
return
|
||||
|
||||
files = [top] + [
|
||||
os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df
|
||||
]
|
||||
while True:
|
||||
t = int(time.time())
|
||||
for f in [top] + files:
|
||||
os.utime(f, (t, t))
|
||||
for f in list(files):
|
||||
try:
|
||||
os.utime(f, (t, t))
|
||||
except Exception as ex:
|
||||
lprint("<TPOKE> [%s] %r" % (f, ex))
|
||||
files.remove(f)
|
||||
|
||||
time.sleep(78123)
|
||||
|
||||
|
||||
def showlic() -> None:
|
||||
p = os.path.join(E.mod, "res", "COPYING.txt")
|
||||
if not os.path.exists(p):
|
||||
try:
|
||||
with load_resource(E, "res/COPYING.txt") as f:
|
||||
buf = f.read()
|
||||
except:
|
||||
buf = b""
|
||||
|
||||
if buf:
|
||||
print(buf.decode("utf-8", "replace"))
|
||||
else:
|
||||
print("no relevant license info to display")
|
||||
return
|
||||
|
||||
with open(p, "rb") as f:
|
||||
print(f.read().decode("utf-8", "replace"))
|
||||
|
||||
|
||||
def get_sects():
|
||||
return [
|
||||
[
|
||||
"bind",
|
||||
"configure listening",
|
||||
dedent(
|
||||
"""
|
||||
\033[33m-i\033[0m takes a comma-separated list of interfaces to listen on;
|
||||
IP-addresses and/or unix-sockets (Unix Domain Sockets)
|
||||
|
||||
the default (\033[32m-i ::\033[0m) means all IPv4 and IPv6 addresses
|
||||
|
||||
\033[32m-i 0.0.0.0\033[0m listens on all IPv4 NICs/subnets
|
||||
\033[32m-i 127.0.0.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1,192.168.123.1\033[0m = IPv4 localhost and 192.168.123.1
|
||||
|
||||
\033[33m-p\033[0m takes a comma-separated list of tcp ports to listen on;
|
||||
the default is \033[32m-p 3923\033[0m but as root you can \033[32m-p 80,443,3923\033[0m
|
||||
|
||||
when running behind a reverse-proxy, it's recommended to
|
||||
use unix-sockets for improved performance and security;
|
||||
|
||||
\033[32m-i unix:770:www:\033[33m/tmp/a.sock\033[0m listens on \033[33m/tmp/a.sock\033[0m with
|
||||
permissions \033[33m0770\033[0m; only accessible to members of the \033[33mwww\033[0m
|
||||
group. This is the best approach. Alternatively,
|
||||
|
||||
\033[32m-i unix:777:\033[33m/tmp/a.sock\033[0m sets perms \033[33m0777\033[0m so anyone can
|
||||
access it; bad unless it's inside a restricted folder
|
||||
|
||||
\033[32m-i unix:\033[33m/tmp/a.sock\033[0m keeps umask-defined permissions
|
||||
(usually \033[33m0600\033[0m) and the same user/group as copyparty
|
||||
|
||||
\033[33m-p\033[0m (tcp ports) is ignored for unix sockets
|
||||
"""
|
||||
),
|
||||
],
|
||||
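As a side note to the unix-socket examples above, a throwaway sketch for talking to such a listener from a script; the /tmp/a.sock path is just the hypothetical one from the help text, and the stdlib http.client is simply pointed at an AF_UNIX socket:

import http.client, socket

class UnixHTTPConnection(http.client.HTTPConnection):
    # minimal adapter: ignore host/port and dial a unix socket instead
    def __init__(self, sock_path):
        super().__init__("localhost")
        self._sock_path = sock_path
    def connect(self):
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock.connect(self._sock_path)

conn = UnixHTTPConnection("/tmp/a.sock")
conn.request("GET", "/")
print(conn.getresponse().status)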
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
@@ -626,6 +687,8 @@ def get_sects():
|
||||
\033[36mxbu\033[35m executes CMD before a file upload starts
|
||||
\033[36mxau\033[35m executes CMD after a file upload finishes
|
||||
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
|
||||
\033[36mxbc\033[35m executes CMD before a file copy
|
||||
\033[36mxac\033[35m executes CMD after a file copy
|
||||
\033[36mxbr\033[35m executes CMD before a file rename/move
|
||||
\033[36mxar\033[35m executes CMD after a file rename/move
|
||||
\033[36mxbd\033[35m executes CMD before a file delete
|
||||
@@ -677,6 +740,10 @@ def get_sects():
|
||||
the \033[33m,,\033[35m stops copyparty from reading the rest as flags and
|
||||
the \033[33m--\033[35m stops notify-send from reading the message as args
|
||||
and the alert will be "hey" followed by the messagetext
|
||||
|
||||
\033[36m--xau zmq:pub:tcp://*:5556\033[35m announces uploads on zeromq;
|
||||
\033[36m--xau t3,zmq:push:tcp://*:5557\033[35m also works, and you can
|
||||
\033[36m--xau t3,j,zmq:req:tcp://localhost:5555\033[35m too for example
|
||||
\033[0m
|
||||
each hook is executed once for each event, except for \033[36mxiu\033[0m
|
||||
which builds up a backlog of uploads, running the hook just once
|
||||
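For the zeromq examples above, the receiving side is only a few lines with pyzmq; a sketch assuming the zmq:pub:tcp://*:5556 form (the exact payload text depends on the hook configuration):

import zmq

ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://127.0.0.1:5556")
sub.setsockopt_string(zmq.SUBSCRIBE, "")  # no topic filter, receive everything
while True:
    print("upload event:", sub.recv_string())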
@@ -689,6 +756,11 @@ def get_sects():
|
||||
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
||||
if the program returns 0 (true/OK) then the ban will NOT happen
|
||||
|
||||
effects can be used to redirect uploads into other
|
||||
locations, and to delete or index other files based
|
||||
on new uploads, but with certain limitations. See
|
||||
bin/hooks/reloc* and docs/devnotes.md#hook-effects
|
||||
|
||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||
@@ -703,11 +775,22 @@ def get_sects():
|
||||
values for --urlform:
|
||||
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
||||
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
||||
\033[36mprint,get\033[35m prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)\033[0m
|
||||
\033[36mprint \033[35m prints the data to log and returns an error
|
||||
\033[36mprint,xm \033[35m prints the data to log and returns --xm output
|
||||
\033[36mprint,get\033[35m prints the data to log and returns GET\033[0m
|
||||
|
||||
note that the \033[35m--xm\033[0m hook will only run if \033[35m--urlform\033[0m
|
||||
is either \033[36mprint\033[0m or the default \033[36mprint,get\033[0m
|
||||
note that the \033[35m--xm\033[0m hook will only run if \033[35m--urlform\033[0m is
|
||||
either \033[36mprint\033[0m or \033[36mprint,get\033[0m or the default \033[36mprint,xm\033[0m
|
||||
|
||||
if an \033[35m--xm\033[0m hook returns text, then
|
||||
the response code will be HTTP 202;
|
||||
http/get responses will be HTTP 200
|
||||
|
||||
if there are multiple \033[35m--xm\033[0m hooks defined, then
|
||||
the first hook that produced output is returned
|
||||
|
||||
if there are no \033[35m--xm\033[0m hooks defined, then the default
|
||||
\033[36mprint,xm\033[0m behaves like \033[36mprint,get\033[0m (returning html)
|
||||
"""
|
||||
),
|
||||
],
|
||||
@@ -717,7 +800,7 @@ def get_sects():
|
||||
dedent(
|
||||
"""
|
||||
specify --exp or the "exp" volflag to enable placeholder expansions
|
||||
in README.md / .prologue.html / .epilogue.html
|
||||
in README.md / PREADME.md / .prologue.html / .epilogue.html
|
||||
|
||||
--exp-md (volflag exp_md) holds the list of placeholders which can be
|
||||
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
|
||||
@@ -811,8 +894,9 @@ def get_sects():
|
||||
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
|
||||
|
||||
\033[36m--ah-alg scrypt\033[0m # which is the same as:
|
||||
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
|
||||
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
|
||||
\033[36m--ah-alg scrypt,13,2,8,4,32\033[0m
|
||||
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads,
|
||||
and allow using up to 32 MiB RAM (ram=cost*blksz roughly)
|
||||
|
||||
\033[36m--ah-alg sha2\033[0m # which is the same as:
|
||||
\033[36m--ah-alg sha2,424242\033[0m
|
||||
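To put the scrypt numbers above in perspective: with cost 2**13 and blocksize 8, the standard scrypt mixing buffer is 128 * N * r bytes, about 8 MiB, which is what the RAM cap has to cover. A stdlib sketch with the same parameters; it mirrors --ah-alg scrypt,13,2,8,4,32 for illustration only, is not how copyparty hashes internally, and the "2 iterations" part has no hashlib equivalent:

import hashlib

n, r, p = 2 ** 13, 8, 4
print("mixing buffer:", 128 * n * r // (1024 * 1024), "MiB")  # 8 MiB
dk = hashlib.scrypt(b"hunter2", salt=b"example-salt", n=n, r=r, p=p,
                    maxmem=64 * 1024 * 1024, dklen=24)
print(dk.hex())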
@@ -833,7 +917,7 @@ def get_sects():
|
||||
dedent(
|
||||
"""
|
||||
the mDNS protocol is multicast-based, which means there are thousands
|
||||
of fun and intersesting ways for it to break unexpectedly
|
||||
of fun and interesting ways for it to break unexpectedly
|
||||
|
||||
things to check if it does not work at all:
|
||||
|
||||
@@ -887,7 +971,7 @@ def add_general(ap, nc, srvname):
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m], see --help-accounts")
|
||||
ap2.add_argument("--grp", metavar="G:N,N", type=u, action="append", help="add group, \033[33mNAME\033[0m:\033[33mUSER1\033[0m,\033[33mUSER2\033[0m,\033[33m...\033[0m; example [\033[32madmins:ed,foo,bar\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,xm", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||
ap2.add_argument("--mime", metavar="EXT=MIME", type=u, action="append", help="map file \033[33mEXT\033[0mension to \033[33mMIME\033[0mtype, for example [\033[32mjpg=image/jpeg\033[0m]")
|
||||
@@ -917,6 +1001,16 @@ def add_fs(ap):
|
||||
ap2.add_argument("--mtab-age", metavar="SEC", type=int, default=60, help="rebuild mountpoint cache every \033[33mSEC\033[0m to keep track of sparse-files support; keep low on servers with removable media")
|
||||
|
||||
|
||||
def add_share(ap):
|
||||
db_path = os.path.join(E.cfg, "shares.db")
|
||||
ap2 = ap.add_argument_group('share-url options')
|
||||
ap2.add_argument("--shr", metavar="DIR", type=u, default="", help="toplevel virtual folder for shared files/folders, for example [\033[32m/share\033[0m]")
|
||||
ap2.add_argument("--shr-db", metavar="FILE", type=u, default=db_path, help="database to store shares in")
|
||||
ap2.add_argument("--shr-adm", metavar="U,U", type=u, default="", help="comma-separated list of users allowed to view/delete any share")
|
||||
ap2.add_argument("--shr-rt", metavar="MIN", type=int, default=1440, help="shares can be revived by their owner if they expired less than MIN minutes ago; [\033[32m60\033[0m]=hour, [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week")
|
||||
ap2.add_argument("--shr-v", action="store_true", help="debug")
|
||||
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||
@@ -927,10 +1021,12 @@ def add_upload(ap):
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
||||
ap2.add_argument("--dedup", action="store_true", help="enable symlink-based upload deduplication (volflag=dedup)")
|
||||
ap2.add_argument("--safe-dedup", metavar="N", type=int, default=50, help="how careful to be when deduplicating files; [\033[32m1\033[0m] = just verify the filesize, [\033[32m50\033[0m] = verify file contents have not been altered (volflag=safededup)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="enable hardlink-based dedup; will fallback on symlinks when that is impossible (across filesystems) (volflag=hardlink)")
|
||||
ap2.add_argument("--hardlink-only", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=hardlinkonly)")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-clone", action="store_true", help="do not use existing data on disk to satisfy dupe uploads; reduces server HDD reads in exchange for much more network load (volflag=noclone)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440.0, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||
@@ -942,14 +1038,15 @@ def add_upload(ap):
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
|
||||
ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
|
||||
def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="IPs and/or unix-sockets to listen on (see \033[33m--help-bind\033[0m). Default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to listen on (comma/range); ignored for unix-sockets")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||
@@ -961,7 +1058,7 @@ def add_network(ap):
|
||||
else:
|
||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0.0, help="debug: socket write delay in seconds")
|
||||
@@ -1000,13 +1097,28 @@ def add_cert(ap, cert_path):
|
||||
|
||||
|
||||
def add_auth(ap):
|
||||
ses_db = os.path.join(E.cfg, "sessions.db")
|
||||
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks if the request-header \033[33mHN\033[0m contains a username to associate the request with (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||
ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
|
||||
ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
|
||||
ap2.add_argument("--no-bauth", action="store_true", help="disable basic-authentication support; do not accept passwords from the 'Authenticate' header at all. NOTE: This breaks support for the android app")
|
||||
ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
|
||||
ap2.add_argument("--ses-db", metavar="PATH", type=u, default=ses_db, help="where to store the sessions database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||
ap2.add_argument("--ses-len", metavar="CHARS", type=int, default=20, help="session key length; default is 120 bits ((20//4)*4*6)")
|
||||
ap2.add_argument("--no-ses", action="store_true", help="disable sessions; use plaintext passwords in cookies")
|
||||
ap2.add_argument("--ipu", metavar="CIDR=USR", type=u, action="append", help="users with IP matching \033[33mCIDR\033[0m are auto-authenticated as username \033[33mUSR\033[0m; example: [\033[32m172.16.24.0/24=dave]")
|
||||
|
||||
|
||||
def add_chpw(ap):
|
||||
db_path = os.path.join(E.cfg, "chpw.json")
|
||||
ap2 = ap.add_argument_group('user-changeable passwords options')
|
||||
ap2.add_argument("--chpw", action="store_true", help="allow users to change their own passwords")
|
||||
ap2.add_argument("--chpw-no", metavar="U,U,U", type=u, action="append", help="do not allow password-changes for this comma-separated list of usernames")
|
||||
ap2.add_argument("--chpw-db", metavar="PATH", type=u, default=db_path, help="where to store the passwords database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||
ap2.add_argument("--chpw-len", metavar="N", type=int, default=8, help="minimum password length")
|
||||
ap2.add_argument("--chpw-v", metavar="LVL", type=int, default=2, help="verbosity of summary on config load [\033[32m0\033[0m] = nothing at all, [\033[32m1\033[0m] = number of users, [\033[32m2\033[0m] = list users with default-pw, [\033[32m3\033[0m] = list all users")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
@@ -1028,6 +1140,8 @@ def add_zc_mdns(ap):
|
||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
||||
ap2.add_argument("--zm-no-pe", action="store_true", help="mute parser errors (invalid incoming MDNS packets)")
|
||||
ap2.add_argument("--zm-nwa-1", action="store_true", help="disable workaround for avahi-bug #379 (corruption in Avahi's mDNS reflection feature)")
|
||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
||||
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
||||
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
||||
@@ -1090,7 +1204,7 @@ def add_smb(ap):
|
||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
||||
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
||||
ap2.add_argument("--smb-nwa-1", action="store_true", help="disable impacket#1433 workaround (truncate directory listings to 64kB)")
|
||||
ap2.add_argument("--smb-nwa-1", action="store_true", help="truncate directory listings to 64kB (~400 files); avoids impacket-0.11 bug, fixes impacket-0.12 performance")
|
||||
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
|
||||
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
|
||||
ap2.add_argument("--smbv", action="store_true", help="verbose")
|
||||
@@ -1110,12 +1224,15 @@ def add_hooks(ap):
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbc", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file copy")
|
||||
ap2.add_argument("--xac", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file copy")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||
ap2.add_argument("--hook-v", action="store_true", help="verbose hooks")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
@@ -1141,6 +1258,7 @@ def add_optouts(ap):
|
||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("--no-cp", action="store_true", help="disable copy operations")
|
||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
@@ -1163,7 +1281,7 @@ def add_safety(ap):
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme/preadme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||
@@ -1208,12 +1326,14 @@ def add_logging(ap):
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-utc", action="store_true", help="do not use local timezone; assume the TZ env-var is UTC (tiny bit faster)")
|
||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
||||
ap2.add_argument("--ohead", metavar="HEADER", type=u, action='append', help="print response \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|[?&]th=[wjp]|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
||||
|
||||
|
||||
def add_admin(ap):
|
||||
@@ -1221,9 +1341,16 @@ def add_admin(ap):
|
||||
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
ap2.add_argument("--no-ups-page", action="store_true", help="disable ?ru (list of recent uploads)")
|
||||
ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
|
||||
ap2.add_argument("--dl-list", metavar="LVL", type=int, default=2, help="who can see active downloads in the controlpanel? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone")
|
||||
ap2.add_argument("--ups-who", metavar="LVL", type=int, default=2, help="who can see recent uploads on the ?ru page? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone (volflag=ups_who)")
|
||||
ap2.add_argument("--ups-when", action="store_true", help="let everyone see upload timestamps on the ?ru page, not just admins")
|
||||
|
||||
|
||||
def add_thumbnail(ap):
|
||||
th_ram = (RAM_AVAIL or RAM_TOTAL or 9) * 0.6
|
||||
th_ram = int(max(min(th_ram, 6), 1) * 10) / 10
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||
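The th_ram default derived above is just the available RAM in GiB times 0.6, clamped to the 1.0-6.0 range and truncated to one decimal; worked through for a few machine sizes (plain arithmetic on the formula in this hunk):

for avail_gib in (32, 8, 1):
    v = int(max(min(avail_gib * 0.6, 6), 1) * 10) / 10
    print(avail_gib, "GiB avail ->", v, "GiB thumbnailer cap")  # 6.0, 4.8, 1.0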
@@ -1231,7 +1358,7 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60.0, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=6.0, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
||||
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=th_ram, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
||||
ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32my\033[0m]=crop, [\033[32mn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
|
||||
ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32my\033[0m]=yes, [\033[32mn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
@@ -1246,27 +1373,37 @@ def add_thumbnail(ap):
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,cbz,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,cbz,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
|
||||
ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz", help="audio formats to decompress before passing to ffmpeg")
|
||||
ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz, cbz=jpg.cbz", help="audio/image formats to decompress before passing to ffmpeg")
|
||||
|
||||
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--q-opus", metavar="KBPS", type=int, default=128, help="target bitrate for transcoding to opus; set 0 to disable")
|
||||
ap2.add_argument("--q-mp3", metavar="QUALITY", type=u, default="q2", help="target quality for transcoding to mp3, for example [\033[32m192k\033[0m] (CBR) or [\033[32mq0\033[0m] (CQ/CRF, q0=maxquality, q9=smallest); set 0 to disable")
|
||||
ap2.add_argument("--no-caf", action="store_true", help="disable transcoding to caf-opus (affects iOS v12~v17), will use mp3 instead")
|
||||
ap2.add_argument("--no-owa", action="store_true", help="disable transcoding to webm-opus (iOS v18 and later), will use mp3 instead")
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||
|
||||
|
||||
def add_rss(ap):
|
||||
ap2 = ap.add_argument_group('RSS options')
|
||||
ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental)")
|
||||
ap2.add_argument("--rss-nf", metavar="HITS", type=int, default=250, help="default number of files to return (url-param 'nf')")
|
||||
ap2.add_argument("--rss-fext", metavar="E,E", type=u, default="", help="default list of file extensions to include (url-param 'fext'); blank=all")
|
||||
ap2.add_argument("--rss-sort", metavar="ORD", type=u, default="m", help="default sort order (url-param 'sort'); [\033[32mm\033[0m]=last-modified [\033[32mu\033[0m]=upload-time [\033[32mn\033[0m]=filename [\033[32ms\033[0m]=filesize; Uppercase=oldest-first. Note that upload-time is 0 for non-uploaded files")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
noidx = APPLESAN_TXT if MACOS else ""
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database; this enables file search, upload-undo, improves deduplication")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
@@ -1275,16 +1412,19 @@ def add_db_general(ap, hcores):
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dirsz", action="store_true", help="do not show total recursive size of folders in listings, show inode size instead; slightly faster (volflag=nodirsz)")
|
||||
ap2.add_argument("--re-dirsz", action="store_true", help="if the directory-sizes in the UI are bonkers, use this along with \033[33m-e2dsa\033[0m to rebuild the index from scratch")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="force a cache rebuild on startup; enable this once if it gets out of sync (should never be necessary)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (probably buggy, not recommended) (volflag=xlink)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10.0, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
ap2.add_argument("--srch-excl", metavar="PTN", type=u, default="", help="regex: exclude files from search results if the file-URL matches \033[33mPTN\033[0m (case-sensitive). Example: [\033[32mpassword|logs/[0-9]\033[0m] any URL containing 'password' or 'logs/DIGIT' (volflag=srch_excl)")
|
||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
@@ -1336,28 +1476,34 @@ def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--gsel", action="store_true", help="select files in grid by ctrl-click (volflag=gsel)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor chi\033[0m")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
|
||||
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||
ap2.add_argument("--nsort", action="store_true", help="default-enable natural sort of filenames with leading numbers (volflag=nsort)")
|
||||
ap2.add_argument("--hsortn", metavar="N", type=int, default=2, help="number of sorting rules to include in media URLs by default (volflag=hsortn)")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages; can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-other", metavar="L", type=u, default="", help="URL to additional JS to include in all other pages")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages (except for basic-browser); can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default=URL_PRJ, help="powered-by link; disable with \033[33m-np\033[0m")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--md-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for README.md docs, for example [\033[32mfullscreen\033[0m] (volflag=md_sba)")
|
||||
ap2.add_argument("--lg-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for prologue/epilogue docs (volflag=lg_sba); see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy#iframes")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
|
||||
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
@@ -1365,12 +1511,14 @@ def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--deps", action="store_true", help="list information about detected optional dependencies")
|
||||
if hasattr(select, "poll"):
|
||||
ap2.add_argument("--no-poll", action="store_true", help="kernel-bug workaround: disable poll; use select instead (limits max num clients to ~700)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--rm-sck", action="store_true", help="when listening on unix-sockets, do a basic delete+bind instead of the default atomic bind")
|
||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, default="", help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
@@ -1379,16 +1527,19 @@ def add_debug(ap):
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
|
||||
ap2.add_argument("--bf-log", metavar="PATH", type=u, default="", help="bak-flips: log corruption info to a textfile at \033[33mPATH\033[0m")
|
||||
ap2.add_argument("--no-cfg-cmt-warn", action="store_true", help=argparse.SUPPRESS)
# fmt: on
def run_argparse(
|
||||
argv: list[str], formatter: Any, retry: bool, nc: int
|
||||
argv: list[str], formatter: Any, retry: bool, nc: int, verbose=True
|
||||
) -> argparse.Namespace:
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
usage=argparse.SUPPRESS,
|
||||
prog="copyparty",
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
)
|
||||
@@ -1406,23 +1557,26 @@ def run_argparse(
|
||||
|
||||
tty = os.environ.get("TERM", "").lower() == "linux"
|
||||
|
||||
srvname = get_srvname()
|
||||
srvname = get_srvname(verbose)
|
||||
|
||||
add_general(ap, nc, srvname)
|
||||
add_network(ap)
|
||||
add_tls(ap, cert_path)
|
||||
add_cert(ap, cert_path)
|
||||
add_auth(ap)
|
||||
add_chpw(ap)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
add_zc_ssdp(ap)
|
||||
add_fs(ap)
|
||||
add_share(ap)
|
||||
add_upload(ap)
|
||||
add_db_general(ap, hcores)
|
||||
add_db_metadata(ap)
|
||||
add_thumbnail(ap)
|
||||
add_transcoding(ap)
|
||||
add_rss(ap)
|
||||
add_ftp(ap)
|
||||
add_webdav(ap)
|
||||
add_tftp(ap)
|
||||
@@ -1471,16 +1625,13 @@ def run_argparse(
|
||||
return ret
|
||||
|
||||
|
||||
def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
def main(argv: Optional[list[str]] = None) -> None:
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
init_E(E)
|
||||
|
||||
if rsrc: # pyz
|
||||
E.mod = rsrc
|
||||
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
@@ -1537,6 +1688,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
("--hdr-au-usr", "--idp-h-usr"),
("--idp-h-sep", "--idp-gsep"),
("--th-no-crop", "--th-crop=n"),
("--never-symlink", "--hardlink-only"),
]
for dk, nk in deprecated:
idx = -1
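The deprecated list above maps retired flags to their replacements; the loop that follows (cut off by the hunk boundary) rewrites argv accordingly. A rough approximation of that rewrite, assuming exact-token matches and a printed warning:

    # approximation only; the in-tree loop is truncated here, so warning text
    # and matching details may differ
    def rewrite_deprecated(argv, deprecated):
        for dk, nk in deprecated:
            while dk in argv:
                idx = argv.index(dk)
                argv[idx] = nk
                print("warning: %s is deprecated; use %s" % (dk, nk))
        return argv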
@@ -1561,7 +1713,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
argv.extend(["--qr"])
|
||||
if ANYWIN or not os.geteuid():
|
||||
# win10 allows symlinks if admin; can be unexpected
|
||||
argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
|
||||
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -1583,7 +1735,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
||||
try:
|
||||
al = run_argparse(argv, fmtr, retry, nc)
|
||||
dal = run_argparse([], fmtr, retry, nc)
|
||||
dal = run_argparse([], fmtr, retry, nc, False)
|
||||
break
|
||||
except SystemExit:
|
||||
raise
|
||||
@@ -1609,7 +1761,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
except:
|
||||
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||
|
||||
if al.ansi:
|
||||
if not al.ansi:
|
||||
al.wintitle = ""
|
||||
|
||||
# propagate implications
|
||||
@@ -1647,6 +1799,9 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
if al.ihead:
|
||||
al.ihead = [x.lower() for x in al.ihead]
|
||||
|
||||
if al.ohead:
|
||||
al.ohead = [x.lower() for x in al.ohead]
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
@@ -1667,7 +1822,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
||||
print("error: python2 cannot --smb")
|
||||
return
|
||||
|
||||
if sys.version_info < (3, 6):
|
||||
if not PY36:
|
||||
al.no_scandir = True
|
||||
|
||||
if not hasattr(os, "sendfile"):
|
||||
|
||||
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 13, 4)
CODENAME = "race the beam"
BUILD_DT = (2024, 7, 16)
VERSION = (1, 16, 11)
CODENAME = "COPYparty"
BUILD_DT = (2025, 1, 27)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
File diff suppressed because it is too large
@@ -9,14 +9,14 @@ import queue
|
||||
|
||||
from .__init__ import CORES, TYPE_CHECKING
|
||||
from .broker_mpw import MpWorker
|
||||
from .broker_util import ExceptionalQueue, try_exec
|
||||
from .broker_util import ExceptionalQueue, NotExQueue, try_exec
|
||||
from .util import Daemon, mp
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any
|
||||
from typing import Any, Union
|
||||
|
||||
|
||||
class MProcess(mp.Process):
|
||||
@@ -43,6 +43,9 @@ class BrokerMp(object):
|
||||
self.procs = []
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
self.retpend: dict[int, Any] = {}
|
||||
self.retpend_mutex = threading.Lock()
|
||||
|
||||
self.num_workers = self.args.j or CORES
|
||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||
for n in range(1, self.num_workers + 1):
|
||||
@@ -54,6 +57,8 @@ class BrokerMp(object):
|
||||
self.procs.append(proc)
|
||||
proc.start()
|
||||
|
||||
Daemon(self.periodic, "mp-periodic")
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.log("broker", "shutting down")
|
||||
for n, proc in enumerate(self.procs):
|
||||
@@ -76,6 +81,10 @@ class BrokerMp(object):
|
||||
for _, proc in enumerate(self.procs):
|
||||
proc.q_pend.put((0, "reload", []))
|
||||
|
||||
def reload_sessions(self) -> None:
|
||||
for _, proc in enumerate(self.procs):
|
||||
proc.q_pend.put((0, "reload_sessions", []))
|
||||
|
||||
def collector(self, proc: MProcess) -> None:
|
||||
"""receive message from hub in other process"""
|
||||
while True:
|
||||
@@ -86,8 +95,10 @@ class BrokerMp(object):
|
||||
self.log(*args)
|
||||
|
||||
elif dest == "retq":
|
||||
# response from previous ipc call
|
||||
raise Exception("invalid broker_mp usage")
|
||||
with self.retpend_mutex:
|
||||
retq = self.retpend.pop(retq_id)
|
||||
|
||||
retq.put(args[0])
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
@@ -104,8 +115,7 @@ class BrokerMp(object):
|
||||
if retq_id:
|
||||
proc.q_pend.put((retq_id, "retq", rv))
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||
# new non-ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
@@ -117,17 +127,30 @@ class BrokerMp(object):
|
||||
retq.put(rv)
|
||||
return retq
|
||||
|
||||
def wask(self, dest: str, *args: Any) -> list[Union[ExceptionalQueue, NotExQueue]]:
|
||||
# call from hub to workers
|
||||
ret = []
|
||||
for p in self.procs:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
self.retpend[retq_id] = retq
|
||||
|
||||
p.q_pend.put((retq_id, dest, list(args)))
|
||||
ret.append(retq)
|
||||
return ret
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
"""
|
||||
send message to non-hub component in other process,
|
||||
returns a Queue object which eventually contains the response if want_retval
|
||||
(not-impl here since nothing uses it yet)
|
||||
"""
|
||||
if dest == "listen":
|
||||
if dest == "httpsrv.listen":
|
||||
for p in self.procs:
|
||||
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
|
||||
|
||||
elif dest == "set_netdevs":
|
||||
elif dest == "httpsrv.set_netdevs":
|
||||
for p in self.procs:
|
||||
p.q_pend.put((0, dest, list(args)))
|
||||
|
||||
@@ -136,3 +159,19 @@ class BrokerMp(object):
|
||||
|
||||
else:
|
||||
raise Exception("what is " + str(dest))
|
||||
|
||||
def periodic(self) -> None:
while True:
time.sleep(1)

tdli = {}
tdls = {}
qs = self.wask("httpsrv.read_dls")
for q in qs:
qr = q.get()
dli, dls = qr
tdli.update(dli)
tdls.update(dls)
tdl = (tdli, tdls)
for p in self.procs:
p.q_pend.put((0, "httpsrv.write_dls", tdl))
@@ -11,7 +11,7 @@ import queue
|
||||
|
||||
from .__init__ import ANYWIN
|
||||
from .authsrv import AuthSrv
|
||||
from .broker_util import BrokerCli, ExceptionalQueue
|
||||
from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
|
||||
from .httpsrv import HttpSrv
|
||||
from .util import FAKE_MP, Daemon, HMaccas
|
||||
|
||||
@@ -82,35 +82,40 @@ class MpWorker(BrokerCli):
|
||||
while True:
|
||||
retq_id, dest, args = self.q_pend.get()
|
||||
|
||||
# self.logw("work: [{}]".format(d[0]))
|
||||
if dest == "retq":
|
||||
# response from previous ipc call
|
||||
with self.retpend_mutex:
|
||||
retq = self.retpend.pop(retq_id)
|
||||
|
||||
retq.put(args)
|
||||
continue
|
||||
|
||||
if dest == "shutdown":
|
||||
self.httpsrv.shutdown()
|
||||
self.logw("ok bye")
|
||||
sys.exit(0)
|
||||
return
|
||||
|
||||
elif dest == "reload":
|
||||
if dest == "reload":
|
||||
self.logw("mpw.asrv reloading")
|
||||
self.asrv.reload()
|
||||
self.logw("mpw.asrv reloaded")
|
||||
continue
|
||||
|
||||
elif dest == "listen":
|
||||
self.httpsrv.listen(args[0], args[1])
|
||||
if dest == "reload_sessions":
|
||||
with self.asrv.mutex:
|
||||
self.asrv.load_sessions()
|
||||
continue
|
||||
|
||||
elif dest == "set_netdevs":
|
||||
self.httpsrv.set_netdevs(args[0])
|
||||
obj = self
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
elif dest == "retq":
|
||||
# response from previous ipc call
|
||||
with self.retpend_mutex:
|
||||
retq = self.retpend.pop(retq_id)
|
||||
rv = obj(*args) # type: ignore
|
||||
if retq_id:
|
||||
self.say("retq", rv, retq_id=retq_id)
|
||||
|
||||
retq.put(args)
|
||||
|
||||
else:
|
||||
raise Exception("what is " + str(dest))
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
@@ -119,5 +124,5 @@ class MpWorker(BrokerCli):
|
||||
self.q_yield.put((retq_id, dest, list(args)))
|
||||
return retq
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
self.q_yield.put((0, dest, list(args)))
|
||||
def say(self, dest: str, *args: Any, retq_id=0) -> None:
|
||||
self.q_yield.put((retq_id, dest, list(args)))
|
||||
|
||||
@@ -5,7 +5,7 @@ import os
|
||||
import threading
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
|
||||
from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
|
||||
from .httpsrv import HttpSrv
|
||||
from .util import HMaccas
|
||||
|
||||
@@ -13,7 +13,7 @@ if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any
|
||||
from typing import Any, Union
|
||||
|
||||
|
||||
class BrokerThr(BrokerCli):
|
||||
@@ -34,6 +34,7 @@ class BrokerThr(BrokerCli):
|
||||
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
|
||||
self.httpsrv = HttpSrv(self, None)
|
||||
self.reload = self.noop
|
||||
self.reload_sessions = self.noop
|
||||
|
||||
def shutdown(self) -> None:
|
||||
# self.log("broker", "shutting down")
|
||||
@@ -42,26 +43,21 @@ class BrokerThr(BrokerCli):
|
||||
def noop(self) -> None:
|
||||
pass
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
rv = try_exec(True, obj, *args)
|
||||
|
||||
# pretend we're broker_mp
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
|
||||
return NotExQueue(obj(*args)) # type: ignore
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
if dest == "listen":
|
||||
if dest == "httpsrv.listen":
|
||||
self.httpsrv.listen(args[0], 1)
|
||||
return
|
||||
|
||||
if dest == "set_netdevs":
|
||||
if dest == "httpsrv.set_netdevs":
|
||||
self.httpsrv.set_netdevs(args[0])
|
||||
return
|
||||
|
||||
@@ -70,4 +66,4 @@ class BrokerThr(BrokerCli):
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
try_exec(False, obj, *args)
|
||||
obj(*args) # type: ignore
|
||||
|
||||
@@ -28,11 +28,23 @@ class ExceptionalQueue(Queue, object):
if rv[1] == "pebkac":
raise Pebkac(*rv[2:])
else:
raise Exception(rv[2])
raise rv[2]

return rv


class NotExQueue(object):
"""
BrokerThr uses this instead of ExceptionalQueue; 7x faster
"""

def __init__(self, rv: Any) -> None:
self.rv = rv

def get(self) -> Any:
return self.rv
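NotExQueue keeps the queue-style get() interface but just hands back a value that was computed synchronously, so the single-process broker skips the locking and allocation of a real Queue. Callers stay oblivious to which flavor they got; a sketch:

    # caller-side sketch: the same code works under both brokers
    retq = broker.ask("up2k.get_state", True, "")  # NotExQueue (BrokerThr) or
    state = retq.get()                             # ExceptionalQueue (BrokerMp)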
class BrokerCli(object):
|
||||
"""
|
||||
helps mypy understand httpsrv.broker but still fails a few levels deeper,
|
||||
@@ -48,7 +60,7 @@ class BrokerCli(object):
|
||||
def __init__(self) -> None:
|
||||
pass
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||
return ExceptionalQueue(1)
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
@@ -65,8 +77,8 @@ def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:

return ["exception", "pebkac", ex.code, str(ex)]

except:
except Exception as ex:
if not want_retval:
raise

return ["exception", "stack", traceback.format_exc()]
return ["exception", "stack", ex]
@@ -7,9 +7,9 @@ import shutil
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN
|
||||
from .util import Netdev, runcmd, wrename, wunlink
|
||||
from .util import Netdev, load_resource, runcmd, wrename, wunlink
|
||||
|
||||
HAVE_CFSSL = True
|
||||
HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from .util import NamedLogger, RootLogger
|
||||
@@ -29,13 +29,15 @@ def ensure_cert(log: "RootLogger", args) -> None:
|
||||
|
||||
i feel awful about this and so should they
|
||||
"""
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
with load_resource(args.E, "res/insecure.pem") as f:
|
||||
cert_insec = f.read()
|
||||
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
||||
if not os.path.isfile(args.cert):
|
||||
if cert_appdata != args.cert:
|
||||
raise Exception("certificate file does not exist: " + args.cert)
|
||||
|
||||
shutil.copy(cert_insec, args.cert)
|
||||
with open(args.cert, "wb") as f:
|
||||
f.write(cert_insec)
|
||||
|
||||
with open(args.cert, "rb") as f:
|
||||
buf = f.read()
|
||||
@@ -50,7 +52,9 @@ def ensure_cert(log: "RootLogger", args) -> None:
|
||||
raise Exception(m + "private key must appear before server certificate")
|
||||
|
||||
try:
|
||||
if filecmp.cmp(args.cert, cert_insec):
|
||||
with open(args.cert, "rb") as f:
|
||||
active_cert = f.read()
|
||||
if active_cert == cert_insec:
|
||||
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
||||
log("cert", t.format(args.cert), 3)
|
||||
except:
|
||||
@@ -151,14 +155,22 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
raise Exception("no useable cert found")
|
||||
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
if expired:
|
||||
raise Exception("old server-cert has expired")
|
||||
|
||||
for n in names:
|
||||
if n not in inf["sans"]:
|
||||
raise Exception("does not have {}".format(n))
|
||||
if expired:
|
||||
raise Exception("old server-cert has expired")
|
||||
if not filecmp.cmp(args.cert, cert_insec):
|
||||
|
||||
with load_resource(args.E, "res/insecure.pem") as f:
|
||||
cert_insec = f.read()
|
||||
|
||||
with open(args.cert, "rb") as f:
|
||||
active_cert = f.read()
|
||||
|
||||
if active_cert and active_cert != cert_insec:
|
||||
return
|
||||
|
||||
except Exception as ex:
|
||||
log("cert", "will create new server-cert; {}".format(ex))
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
|
||||
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
|
||||
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nth nw p q s ss sss v z zv"
|
||||
onedash = set(zs.split())
|
||||
|
||||
|
||||
@@ -12,8 +12,9 @@ def vf_bmap() -> dict[str, str]:
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
"ed": "dots",
|
||||
"never_symlink": "neversymlink",
|
||||
"no_dedup": "copydupes",
|
||||
"hardlink_only": "hardlinkonly",
|
||||
"no_clone": "noclone",
|
||||
"no_dirsz": "nodirsz",
|
||||
"no_dupe": "nodupe",
|
||||
"no_forget": "noforget",
|
||||
"no_pipe": "nopipe",
|
||||
@@ -23,6 +24,7 @@ def vf_bmap() -> dict[str, str]:
|
||||
"no_athumb": "dathumb",
|
||||
}
|
||||
for k in (
|
||||
"dedup",
|
||||
"dotsrch",
|
||||
"e2d",
|
||||
"e2ds",
|
||||
@@ -40,10 +42,12 @@ def vf_bmap() -> dict[str, str]:
|
||||
"magic",
|
||||
"no_sb_md",
|
||||
"no_sb_lg",
|
||||
"nsort",
|
||||
"og",
|
||||
"og_no_head",
|
||||
"og_s_title",
|
||||
"rand",
|
||||
"rss",
|
||||
"xdev",
|
||||
"xlink",
|
||||
"xvol",
|
||||
@@ -58,6 +62,7 @@ def vf_vmap() -> dict[str, str]:
|
||||
"no_hash": "nohash",
|
||||
"no_idx": "noidx",
|
||||
"re_maxage": "scan",
|
||||
"safe_dedup": "safededup",
|
||||
"th_convt": "convt",
|
||||
"th_size": "thsize",
|
||||
"th_crop": "crop",
|
||||
@@ -65,9 +70,12 @@ def vf_vmap() -> dict[str, str]:
|
||||
}
|
||||
for k in (
|
||||
"dbd",
|
||||
"hsortn",
|
||||
"html_head",
|
||||
"lg_sbf",
|
||||
"md_sbf",
|
||||
"lg_sba",
|
||||
"md_sba",
|
||||
"nrand",
|
||||
"og_desc",
|
||||
"og_site",
|
||||
@@ -85,6 +93,7 @@ def vf_vmap() -> dict[str, str]:
|
||||
"unlist",
|
||||
"u2abort",
|
||||
"u2ts",
|
||||
"ups_who",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
|
||||
@@ -99,10 +108,12 @@ def vf_cmap() -> dict[str, str]:
|
||||
"mte",
|
||||
"mth",
|
||||
"mtp",
|
||||
"xac",
|
||||
"xad",
|
||||
"xar",
|
||||
"xau",
|
||||
"xban",
|
||||
"xbc",
|
||||
"xbd",
|
||||
"xbr",
|
||||
"xbu",
|
||||
@@ -129,11 +140,14 @@ permdescs = {
|
||||
|
||||
flagcats = {
|
||||
"uploads, general": {
|
||||
"nodupe": "rejects existing files (instead of symlinking them)",
|
||||
"hardlink": "does dedup with hardlinks instead of symlinks",
|
||||
"neversymlink": "disables symlink fallback; full copy instead",
|
||||
"copydupes": "disables dedup, always saves full copies of dupes",
|
||||
"dedup": "enable symlink-based file deduplication",
|
||||
"hardlink": "enable hardlink-based file deduplication,\nwith fallback on symlinks when that is impossible",
|
||||
"hardlinkonly": "dedup with hardlink only, never symlink;\nmake a full copy if hardlink is impossible",
|
||||
"safededup": "verify on-disk data before using it for dedup",
|
||||
"noclone": "take dupe data from clients, even if available on HDD",
|
||||
"nodupe": "rejects existing files (instead of linking/cloning them)",
|
||||
"sparse": "force use of sparse files, mainly for s3-backed storage",
|
||||
"nosparse": "deny use of sparse files, mainly for slow storage",
|
||||
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
||||
"nosub": "forces all uploads into the top folder of the vfs",
|
||||
"magic": "enables filetype detection for nameless uploads",
|
||||
@@ -159,7 +173,7 @@ flagcats = {
|
||||
"lifetime=3600": "uploads are deleted after 1 hour",
|
||||
},
|
||||
"database, general": {
|
||||
"e2d": "enable database; makes files searchable + enables upload dedup",
|
||||
"e2d": "enable database; makes files searchable + enables upload-undo",
|
||||
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
|
||||
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
|
||||
"e2t": "enable multimedia indexing; makes it possible to search for tags",
|
||||
@@ -177,11 +191,12 @@ flagcats = {
|
||||
"noforget": "don't forget files when deleted from disk",
|
||||
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||
"xlink": "cross-volume dupe detection / linking",
|
||||
"xlink": "cross-volume dupe detection / linking (dangerous)",
|
||||
"xdev": "do not descend into other filesystems",
|
||||
"xvol": "do not follow symlinks leaving the volume root",
|
||||
"dotsrch": "show dotfiles in search results",
|
||||
"nodotsrch": "hide dotfiles in search results (default)",
|
||||
"srch_excl": "exclude search results with URL matching this regex",
|
||||
},
|
||||
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
|
||||
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
|
||||
@@ -206,6 +221,8 @@ flagcats = {
|
||||
"xbu=CMD": "execute CMD before a file upload starts",
|
||||
"xau=CMD": "execute CMD after a file upload finishes",
|
||||
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
|
||||
"xbc=CMD": "execute CMD before a file copy",
|
||||
"xac=CMD": "execute CMD after a file copy",
|
||||
"xbr=CMD": "execute CMD before a file rename/move",
|
||||
"xar=CMD": "execute CMD after a file rename/move",
|
||||
"xbd=CMD": "execute CMD before a file delete",
|
||||
@@ -227,6 +244,8 @@ flagcats = {
|
||||
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||
"md_sba": "value of iframe allow-prop for markdown-sandbox",
|
||||
"lg_sba": "value of iframe allow-prop for *logue-sandbox",
|
||||
"nohtml": "return html and markdown as text/html",
|
||||
},
|
||||
"others": {
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import importlib
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
@@ -8,6 +11,10 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
class BadXML(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def get_ET() -> ET.XMLParser:
|
||||
pn = "xml.etree.ElementTree"
|
||||
cn = "_elementtree"
|
||||
@@ -34,7 +41,7 @@ def get_ET() -> ET.XMLParser:
|
||||
XMLParser: ET.XMLParser = get_ET()
|
||||
|
||||
|
||||
class DXMLParser(XMLParser): # type: ignore
|
||||
class _DXMLParser(XMLParser): # type: ignore
|
||||
def __init__(self) -> None:
|
||||
tb = ET.TreeBuilder()
|
||||
super(DXMLParser, self).__init__(target=tb)
|
||||
@@ -49,8 +56,12 @@ class DXMLParser(XMLParser): # type: ignore
|
||||
raise BadXML("{}, {}".format(a, ka))
|
||||
|
||||
|
||||
class BadXML(Exception):
|
||||
pass
|
||||
class _NG(XMLParser): # type: ignore
|
||||
def __int__(self) -> None:
|
||||
raise BadXML("dxml selftest failed")
|
||||
|
||||
|
||||
DXMLParser = _DXMLParser
|
||||
|
||||
|
||||
def parse_xml(txt: str) -> ET.Element:
|
||||
@@ -59,6 +70,40 @@ def parse_xml(txt: str) -> ET.Element:
|
||||
return parser.close() # type: ignore
|
||||
|
||||
|
||||
def selftest() -> bool:
qbe = r"""<!DOCTYPE d [
<!ENTITY a "nice_bakuretsu">
]>
<root>&a;&a;&a;</root>"""

emb = r"""<!DOCTYPE d [
<!ENTITY a SYSTEM "file:///etc/hostname">
]>
<root>&a;</root>"""

# future-proofing; there's never been any known vulns
# regarding DTDs and ET.XMLParser, but might as well
# block them since webdav-clients don't use them
dtd = r"""<!DOCTYPE d SYSTEM "a.dtd">
<root>a</root>"""

for txt in (qbe, emb, dtd):
try:
parse_xml(txt)
t = "WARNING: dxml selftest failed:\n%s\n"
print(t % (txt,), file=sys.stderr)
return False
except BadXML:
pass

return True


DXML_OK = selftest()
if not DXML_OK:
DXMLParser = _NG
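The selftest feeds three known-hostile documents (entity expansion, external entity, external DTD) to parse_xml and expects BadXML for each; if any of them parses, DXMLParser is swapped for _NG so later parsing fails loudly instead of silently accepting such input. From a caller's point of view, roughly:

    # sketch: what a (hypothetical) webdav request handler sees afterwards
    try:
        root = parse_xml("<propfind xmlns='DAV:'/>")  # plain XML is fine
    except BadXML:
        pass  # doctype/entity tricks land here (or everything does, if the selftest failed)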
def mktnod(name: str, text: str) -> ET.Element:
|
||||
el = ET.Element(name)
|
||||
el.text = text
|
||||
|
||||
@@ -9,12 +9,12 @@ import time
|
||||
from .__init__ import ANYWIN, MACOS
|
||||
from .authsrv import AXS, VFS
|
||||
from .bos import bos
|
||||
from .util import chkcmd, min_ex
|
||||
from .util import chkcmd, min_ex, undot
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
from .util import RootLogger, undot
|
||||
|
||||
|
||||
class Fstab(object):
|
||||
@@ -42,17 +42,17 @@ class Fstab(object):
|
||||
self.cache = {}
|
||||
|
||||
fs = "ext4"
|
||||
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
|
||||
msg = "failed to determine filesystem at %r; assuming %s\n%s"
|
||||
|
||||
if ANYWIN:
|
||||
fs = "vfat"
|
||||
try:
|
||||
path = self._winpath(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
self.log(msg % (path, fs, min_ex()), 3)
|
||||
return fs
|
||||
|
||||
path = path.lstrip("/")
|
||||
path = undot(path)
|
||||
try:
|
||||
return self.cache[path]
|
||||
except:
|
||||
@@ -61,11 +61,11 @@ class Fstab(object):
|
||||
try:
|
||||
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
self.log(msg % (path, fs, min_ex()), 3)
|
||||
|
||||
fs = fs.lower()
|
||||
self.cache[path] = fs
|
||||
self.log("found {} at {}".format(fs, path))
|
||||
self.log("found %s at %r" % (fs, path))
|
||||
return fs
|
||||
|
||||
def _winpath(self, path: str) -> str:
|
||||
@@ -119,12 +119,12 @@ class Fstab(object):
|
||||
self.srctab = srctab
|
||||
|
||||
def relabel(self, path: str, nval: str) -> None:
|
||||
assert self.tab
|
||||
assert self.tab # !rm
|
||||
self.cache = {}
|
||||
if ANYWIN:
|
||||
path = self._winpath(path)
|
||||
|
||||
path = path.lstrip("/")
|
||||
path = undot(path)
|
||||
ptn = re.compile(r"^[^\\/]*")
|
||||
vn, rem = self.tab._find(path)
|
||||
if not self.trusted:
|
||||
@@ -156,7 +156,7 @@ class Fstab(object):
|
||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
assert self.tab # !rm
|
||||
ret = self.tab._find(path)[0]
|
||||
if self.trusted or path == ret.vpath:
|
||||
return ret.realpath.split("/")[0]
|
||||
@@ -167,6 +167,6 @@ class Fstab(object):
|
||||
if not self.tab:
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
assert self.tab # !rm
|
||||
ret = self.tab._find(path)[0]
|
||||
return ret.realpath
|
||||
|
||||
@@ -41,6 +41,9 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class FSE(FilesystemError):
|
||||
def __init__(self, msg: str, severity: int = 0) -> None:
|
||||
@@ -73,6 +76,7 @@ class FtpAuth(DummyAuthorizer):
|
||||
else:
|
||||
raise AuthenticationFailed("banned")
|
||||
|
||||
args = self.hub.args
|
||||
asrv = self.hub.asrv
|
||||
uname = "*"
|
||||
if username != "anonymous":
|
||||
@@ -83,6 +87,9 @@ class FtpAuth(DummyAuthorizer):
|
||||
uname = zs
|
||||
break
|
||||
|
||||
if args.ipu and uname == "*":
|
||||
uname = args.ipu_iu[args.ipu_nm.map(ip)]
|
||||
|
||||
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
||||
g = self.hub.gpwd
|
||||
if g.lim:
|
||||
@@ -160,7 +167,7 @@ class FtpFs(AbstractedFS):
|
||||
t = "Unsupported characters in [{}]"
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
||||
fn = sanitize_fn(fn or "", "")
|
||||
vpath = vjoin(rd, fn)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||
if not vfs.realpath:
|
||||
@@ -289,6 +296,7 @@ class FtpFs(AbstractedFS):
|
||||
self.uname,
|
||||
not self.args.no_scandir,
|
||||
[[True, False], [False, True]],
|
||||
throw=True,
|
||||
)
|
||||
vfs_ls = [x[0] for x in vfs_ls1]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
@@ -350,7 +358,7 @@ class FtpFs(AbstractedFS):
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||
self.hub.up2k.handle_mv(self.uname, self.h.cli_ip, svp, dvp)
|
||||
except Exception as ex:
|
||||
raise FSE(str(ex))
|
||||
|
||||
@@ -468,6 +476,9 @@ class FtpHandler(FTPHandler):
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
None,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.ftpd",
|
||||
xbu,
|
||||
ap,
|
||||
vp,
|
||||
@@ -477,7 +488,7 @@ class FtpHandler(FTPHandler):
|
||||
0,
|
||||
0,
|
||||
self.cli_ip,
|
||||
0,
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
raise FSE("Upload blocked by xbu server config")
|
||||
@@ -580,9 +591,15 @@ class Ftpd(object):
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
ips = [x for x in ips if "unix:" not in x]
|
||||
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
if not ips:
|
||||
lgr.fatal("cannot start ftp-server; no compatible IPs in -i")
|
||||
return
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
ioloop = IOLoop()
|
||||
|
||||
copyparty/httpcli.py (2408): File diff suppressed because it is too large
@@ -9,6 +9,9 @@ import threading # typechk
|
||||
import time
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_TLS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
@@ -56,6 +59,8 @@ class HttpConn(object):
|
||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||
self.pipes: Util.CachedDict = hsrv.pipes # mypy404
|
||||
self.ipu_iu: Optional[dict[str, str]] = hsrv.ipu_iu
|
||||
self.ipu_nm: Optional[NetMap] = hsrv.ipu_nm
|
||||
self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
|
||||
self.xff_nm: Optional[NetMap] = hsrv.xff_nm
|
||||
self.xff_lan: NetMap = hsrv.xff_lan # type: ignore
|
||||
@@ -100,9 +105,6 @@ class HttpConn(object):
|
||||
self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name: str) -> str:
|
||||
return os.path.join(self.E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
@@ -162,6 +164,7 @@ class HttpConn(object):
|
||||
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
assert ssl # type: ignore # !rm
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.args.cert)
|
||||
if self.args.ssl_ver:
|
||||
@@ -187,7 +190,7 @@ class HttpConn(object):
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
ciphers = self.s.shared_ciphers()
|
||||
assert ciphers
|
||||
assert ciphers # !rm
|
||||
overlap = [str(y[::-1]) for y in ciphers]
|
||||
self.log("TLS cipher overlap:" + "\n".join(overlap))
|
||||
for k, v in [
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
@@ -12,7 +12,7 @@ import time
|
||||
|
||||
import queue
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, PY2, TYPE_CHECKING, EnvParams, unicode
|
||||
|
||||
try:
|
||||
MNFE = ModuleNotFoundError
|
||||
@@ -67,23 +67,38 @@ from .util import (
|
||||
Magician,
|
||||
Netdev,
|
||||
NetMap,
|
||||
absreal,
|
||||
build_netmap,
|
||||
has_resource,
|
||||
ipnorm,
|
||||
load_ipu,
|
||||
load_resource,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
spack,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
ub64enc,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .authsrv import VFS
|
||||
from .broker_util import BrokerCli
|
||||
from .ssdp import SSDPr
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
setattr(socket, "AF_UNIX", -9001)
|
||||
|
||||
|
||||
def load_jinja2_resource(E: EnvParams, name: str):
|
||||
with load_resource(E, "web/" + name, "r") as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
class HttpSrv(object):
|
||||
"""
|
||||
@@ -116,6 +131,12 @@ class HttpSrv(object):
|
||||
self.bans: dict[str, int] = {}
|
||||
self.aclose: dict[str, int] = {}
|
||||
|
||||
dli: dict[str, tuple[float, int, "VFS", str, str]] = {} # info
|
||||
dls: dict[str, tuple[float, int]] = {} # state
|
||||
self.dli = self.tdli = dli
|
||||
self.dls = self.tdls = dls
|
||||
self.iiam = '<img src="%s.cpr/iiam.gif?cache=i" />' % (self.args.SRS,)
|
||||
|
||||
self.bound: set[tuple[str, int]] = set()
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
@@ -131,6 +152,7 @@ class HttpSrv(object):
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.u2sc: dict[str, tuple[int, "hashlib._Hash"]] = {}
|
||||
self.pipes = CachedDict(0.2)
|
||||
self.metrics = Metrics(self)
|
||||
self.nreq = 0
|
||||
@@ -146,23 +168,33 @@ class HttpSrv(object):
|
||||
self.u2idx_free: dict[str, U2idx] = {}
|
||||
self.u2idx_n = 0
|
||||
|
||||
assert jinja2 # type: ignore # !rm
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
env.loader = jinja2.FunctionLoader(lambda f: load_jinja2_resource(self.E, f))
|
||||
jn = [
|
||||
"browser",
|
||||
"browser2",
|
||||
"cf",
|
||||
"md",
|
||||
"mde",
|
||||
"msg",
|
||||
"rups",
|
||||
"shares",
|
||||
"splash",
|
||||
"svcs",
|
||||
]
|
||||
self.j2 = {x: env.get_template(x + ".html") for x in jn}
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
self.prism = has_resource(self.E, "web/deps/prism.js.gz")
|
||||
|
||||
if self.args.ipu:
|
||||
self.ipu_iu, self.ipu_nm = load_ipu(self.log, self.args.ipu)
|
||||
else:
|
||||
self.ipu_iu = self.ipu_nm = None
|
||||
|
||||
self.ipa_nm = build_netmap(self.args.ipa)
|
||||
self.xff_nm = build_netmap(self.args.xff_src)
|
||||
self.xff_lan = build_netmap("lan")
|
||||
|
||||
self.statics: set[str] = set()
|
||||
self._build_statics()
|
||||
|
||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
|
||||
|
||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||
if not self.args.no_dav:
|
||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||
@@ -177,6 +209,9 @@ class HttpSrv(object):
|
||||
self.start_threads(4)
|
||||
|
||||
if nid:
|
||||
self.tdli = {}
|
||||
self.tdls = {}
|
||||
|
||||
if self.args.stackmon:
|
||||
start_stackmon(self.args.stackmon, nid)
|
||||
|
||||
@@ -193,14 +228,6 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def _build_statics(self) -> None:
|
||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
||||
for fn in df:
|
||||
ap = absreal(os.path.join(dp, fn))
|
||||
self.statics.add(ap)
|
||||
if ap.endswith(".gz"):
|
||||
self.statics.add(ap[:-3])
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
ips = set()
|
||||
for ip, _ in self.bound:
|
||||
@@ -221,7 +248,7 @@ class HttpSrv(object):
|
||||
if self.args.log_htp:
|
||||
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
||||
|
||||
assert self.tp_q
|
||||
assert self.tp_q # !rm
|
||||
for _ in range(n):
|
||||
self.tp_q.put(None)
|
||||
|
||||
@@ -240,15 +267,24 @@ class HttpSrv(object):
|
||||
return
|
||||
|
||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
tcp = sck.family != socket.AF_UNIX

if self.args.j != 1:
# lost in the pickle; redefine
if not ANYWIN or self.args.reuseaddr:
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if tcp:
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

sck.settimeout(None) # < does not inherit, ^ opts above do

ip, port = sck.getsockname()[:2]
if tcp:
ip, port = sck.getsockname()[:2]
else:
ip = re.sub(r"\.[0-9]+$", "", sck.getsockname().split("/")[-1])
port = 0

self.srvs.append(sck)
self.bound.add((ip, port))
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
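For unix sockets there is no (ip, port) pair, so the listener derives a pseudo-address from the socket path: the trailing numeric suffix is stripped and port 0 marks it as non-tcp. Assuming the suffix is always ".<digits>" (as the regex implies), the transformation looks like:

    # illustration with a made-up socket path
    import re
    name = "/dev/shm/party.sock.3"                       # hypothetical getsockname()
    ip = re.sub(r"\.[0-9]+$", "", name.split("/")[-1])   # "party.sock"
    port = 0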
@@ -260,10 +296,19 @@ class HttpSrv(object):
|
||||
|
||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||
"""listens on a shared tcp server"""
|
||||
ip, port = srv_sck.getsockname()[:2]
|
||||
fno = srv_sck.fileno()
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
if srv_sck.family == socket.AF_UNIX:
|
||||
ip = re.sub(r"\.[0-9]+$", "", srv_sck.getsockname())
|
||||
msg = "subscribed @ %s f%d p%d" % (ip, fno, os.getpid())
|
||||
ip = ip.split("/")[-1]
|
||||
port = 0
|
||||
tcp = False
|
||||
else:
|
||||
tcp = True
|
||||
ip, port = srv_sck.getsockname()[:2]
|
||||
hip = "[%s]" % (ip,) if ":" in ip else ip
|
||||
msg = "subscribed @ %s:%d f%d p%d" % (hip, port, fno, os.getpid())
|
||||
|
||||
self.log(self.name, msg)
|
||||
|
||||
Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
|
||||
@@ -335,11 +380,13 @@ class HttpSrv(object):
|
||||
|
||||
try:
|
||||
sck, saddr = srv_sck.accept()
|
||||
cip = unicode(saddr[0])
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
|
||||
addr = (cip, saddr[1])
|
||||
if tcp:
|
||||
cip = unicode(saddr[0])
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
addr = (cip, saddr[1])
|
||||
else:
|
||||
addr = ("127.8.3.7", sck.fileno())
|
||||
except (OSError, socket.error) as ex:
|
||||
if self.stopping:
|
||||
break
|
||||
@@ -395,7 +442,7 @@ class HttpSrv(object):
|
||||
)
|
||||
|
||||
def thr_poolw(self) -> None:
|
||||
assert self.tp_q
|
||||
assert self.tp_q # !rm
|
||||
while True:
|
||||
task = self.tp_q.get()
|
||||
if not task:
|
||||
@@ -507,8 +554,8 @@ class HttpSrv(object):
except:
pass

v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
self.cb_v = v.decode("ascii")[-4:]
# spack gives 4 lsb, take 3 lsb, get 4 ch
self.cb_v = ub64enc(spack(b">L", int(v))[1:]).decode("ascii")
self.cb_ts = time.time()
return self.cb_v
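The comment explains the trick: pack the version as 4 big-endian bytes, drop the most significant one, and urlsafe-base64 the remaining 3 bytes, which always yields exactly 4 characters with no padding. A standard-library equivalent (struct/base64 standing in for copyparty's spack/ub64enc):

    import base64
    import struct

    v = 1737936000  # hypothetical version value
    tag = base64.urlsafe_b64encode(struct.pack(">L", v)[1:]).decode("ascii")
    assert len(tag) == 4 and "=" not in tag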
@@ -539,3 +586,32 @@ class HttpSrv(object):
|
||||
ident += "a"
|
||||
|
||||
self.u2idx_free[ident] = u2idx
|
||||
|
||||
def read_dls(
|
||||
self,
|
||||
) -> tuple[
|
||||
dict[str, tuple[float, int, str, str, str]], dict[str, tuple[float, int]]
|
||||
]:
|
||||
"""
|
||||
mp-broker asking for local dl-info + dl-state;
|
||||
reduce overhead by sending just the vfs vpath
|
||||
"""
|
||||
dli = {k: (a, b, c.vpath, d, e) for k, (a, b, c, d, e) in self.dli.items()}
|
||||
return (dli, self.dls)
|
||||
|
||||
def write_dls(
|
||||
self,
|
||||
sdli: dict[str, tuple[float, int, str, str, str]],
|
||||
dls: dict[str, tuple[float, int]],
|
||||
) -> None:
|
||||
"""
|
||||
mp-broker pushing total dl-info + dl-state;
|
||||
swap out the vfs vpath with the vfs node
|
||||
"""
|
||||
dli: dict[str, tuple[float, int, "VFS", str, str]] = {}
|
||||
for k, (a, b, c, d, e) in sdli.items():
|
||||
vn = self.asrv.vfs.all_nodes[c]
|
||||
dli[k] = (a, b, vn, d, e)
|
||||
|
||||
self.tdli = dli
|
||||
self.tdls = dls
|
||||
|
||||
@@ -74,7 +74,7 @@ class Ico(object):
|
||||
try:
|
||||
_, _, tw, th = pb.textbbox((0, 0), ext)
|
||||
except:
|
||||
tw, th = pb.textsize(ext)
|
||||
tw, th = pb.textsize(ext) # type: ignore
|
||||
|
||||
tw += len(ext)
|
||||
cw = tw // len(ext)
|
||||
|
||||
@@ -25,6 +25,7 @@ from .stolen.dnslib import (
|
||||
DNSHeader,
|
||||
DNSQuestion,
|
||||
DNSRecord,
|
||||
set_avahi_379,
|
||||
)
|
||||
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
||||
|
||||
@@ -72,6 +73,9 @@ class MDNS(MCast):
|
||||
self.ngen = ngen
|
||||
self.ttl = 300
|
||||
|
||||
if not self.args.zm_nwa_1:
|
||||
set_avahi_379()
|
||||
|
||||
zs = self.args.name + ".local."
|
||||
zs = zs.encode("ascii", "replace").decode("ascii", "replace")
|
||||
self.hn = "-".join(x for x in zs.split("?") if x) or (
|
||||
@@ -336,6 +340,9 @@ class MDNS(MCast):
|
||||
self.log("stopped", 2)
|
||||
return
|
||||
|
||||
if self.args.zm_no_pe:
|
||||
continue
|
||||
|
||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
||||
)
|
||||
|
||||
@@ -72,6 +72,9 @@ class Metrics(object):
|
||||
v = "{:.3f}".format(self.hsrv.t0)
|
||||
addug("cpp_boot_unixtime", "seconds", v, t)
|
||||
|
||||
t = "number of active downloads"
|
||||
addg("cpp_active_dl", str(len(self.hsrv.tdls)), t)
|
||||
|
||||
t = "number of open http(s) client connections"
|
||||
addg("cpp_http_conns", str(self.hsrv.ncli), t)
|
||||
|
||||
@@ -88,7 +91,7 @@ class Metrics(object):
|
||||
addg("cpp_total_bans", str(self.hsrv.nban), t)
|
||||
|
||||
if not args.nos_vst:
|
||||
x = self.hsrv.broker.ask("up2k.get_state")
|
||||
x = self.hsrv.broker.ask("up2k.get_state", True, "")
|
||||
vs = json.loads(x.get())
|
||||
|
||||
nvidle = 0
|
||||
@@ -128,7 +131,7 @@ class Metrics(object):
|
||||
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
||||
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
||||
for vpath, vol in allvols:
|
||||
free, total = get_df(vol.realpath)
|
||||
free, total, _ = get_df(vol.realpath, False)
|
||||
if free is None or total is None:
|
||||
continue
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
@@ -32,6 +33,17 @@ if True: # pylint: disable=using-constant-test
|
||||
from .util import NamedLogger, RootLogger
|
||||
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_MUTAGEN"):
|
||||
raise Exception()
|
||||
|
||||
from mutagen import version # noqa: F401
|
||||
|
||||
HAVE_MUTAGEN = True
|
||||
except:
|
||||
HAVE_MUTAGEN = False
|
||||
|
||||
|
||||
def have_ff(scmd: str) -> bool:
|
||||
if ANYWIN:
|
||||
scmd += ".exe"
|
||||
@@ -48,8 +60,11 @@ def have_ff(scmd: str) -> bool:
|
||||
return bool(shutil.which(scmd))
|
||||
|
||||
|
||||
HAVE_FFMPEG = have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = have_ff("ffprobe")
|
||||
HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")
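Several optional integrations can now be force-disabled through PRTY_NO_* environment variables (PRTY_NO_FFMPEG and PRTY_NO_FFPROBE here; PRTY_NO_MUTAGEN, PRTY_NO_CFSSL and PRTY_NO_TLS elsewhere in this set of changes). For example, to exercise the mutagen-only code path in a test harness (sketch; the values just need to be non-empty):

    import os

    os.environ["PRTY_NO_FFMPEG"] = "1"   # must be set before this module is imported
    os.environ["PRTY_NO_FFPROBE"] = "1"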

CBZ_PICS = set("png jpg jpeg gif bmp tga tif tiff webp avif".split())
CBZ_01 = re.compile(r"(^|[^0-9v])0+[01]\b")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
@@ -115,6 +130,7 @@ def au_unpk(
log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
) -> str:
ret = ""
maxsz = 1024 * 1024 * 64
try:
ext = abspath.split(".")[-1].lower()
au, pk = fmt_map[ext].split(".")
@@ -137,24 +153,48 @@ def au_unpk(
zf = zipfile.ZipFile(abspath, "r")
zil = zf.infolist()
zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
if not zil:
raise Exception("no audio inside zip")
fi = zf.open(zil[0])

elif pk == "cbz":
import zipfile

zf = zipfile.ZipFile(abspath, "r")
znil = [(x.filename.lower(), x) for x in zf.infolist()]
nf = len(znil)
znil = [x for x in znil if x[0].split(".")[-1] in CBZ_PICS]
znil = [x for x in znil if "cover" in x[0]] or znil
znil = [x for x in znil if CBZ_01.search(x[0])] or znil
t = "cbz: %d files, %d hits" % (nf, len(znil))
if znil:
t += ", using " + znil[0][1].filename
log(t)
if not znil:
raise Exception("no images inside cbz")
fi = zf.open(znil[0][1])

else:
raise Exception("unknown compression %s" % (pk,))

fsz = 0
with os.fdopen(fd, "wb") as fo:
while True:
buf = fi.read(32768)
if not buf:
break

fsz += len(buf)
if fsz > maxsz:
raise Exception("zipbomb defused")

fo.write(buf)

return ret

except Exception as ex:
if ret:
t = "failed to decompress audio file [%s]: %r"
t = "failed to decompress audio file %r: %r"
log(t % (abspath, ex))
wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
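The copy loop above caps extraction at 64 MiB so a zipbomb cannot fill the temp filesystem. A self-contained sketch of the same bounded-copy idea (the 64 MiB cap and 32 KiB read size mirror the hunk; the rest is illustrative):

    import os, tempfile, zipfile

    MAXSZ = 1024 * 1024 * 64

    def bounded_unpack(zpath, member):
        fd, tmp = tempfile.mkstemp(suffix=".bin")
        n = 0
        with zipfile.ZipFile(zpath) as zf, zf.open(member) as fi, os.fdopen(fd, "wb") as fo:
            while True:
                buf = fi.read(32768)
                if not buf:
                    return tmp
                n += len(buf)
                if n > MAXSZ:
                    fo.close()
                    os.unlink(tmp)
                    raise Exception("zipbomb defused")
                fo.write(buf)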
@@ -336,9 +376,7 @@ class MTag(object):

if self.backend == "mutagen":
self._get = self.get_mutagen
try:
from mutagen import version # noqa: F401
except:
if not HAVE_MUTAGEN:
self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe"

@@ -464,7 +502,7 @@ class MTag(object):
sv = str(zv).split("/")[0].strip().lstrip("0")
ret[sk] = sv or 0

# normalize key notation to rkeobo
# normalize key notation to rekobo
okey = ret.get("key")
if okey:
key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m")
@@ -544,7 +582,7 @@ class MTag(object):
raise Exception()
except Exception as ex:
if self.args.mtag_v:
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
self.log("mutagen-err [%s] @ %r" % (ex, abspath), "90")

return self.get_ffprobe(abspath) if self.can_ffprobe else {}

@@ -578,7 +616,7 @@ class MTag(object):
continue

if k == ".aq":
v /= 1000
v /= 1000 # type: ignore

if k == "ac" and v.startswith("mp4a.40."):
v = "aac"
@@ -661,8 +699,8 @@ class MTag(object):
ret[tag] = zj[tag]
except:
if self.args.mtag_v:
t = "mtag error: tagname {}, parser {}, file {} => {}"
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
t = "mtag error: tagname %r, parser %r, file %r => %r"
self.log(t % (tagname, parser.bin, abspath, min_ex()), 6)

if ap != abspath:
wunlink(self.log, ap, VF_CAREFUL)
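The corrected comment ("rekobo") refers to normalizing musical key tags into a compact notation; the replace-chain in the hunk is the entire trick. A tiny worked example of that one line:

    okey = "A min"
    key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m")
    print(key)  # "Am"; likewise "G maj" becomes "G"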
@@ -4,27 +4,33 @@ from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import os
import sys
import threading

from .__init__ import unicode

try:
if os.environ.get("PRTY_NO_ARGON2"):
raise Exception()

HAVE_ARGON2 = True
from argon2 import __version__ as argon2ver
except:
HAVE_ARGON2 = False

class PWHash(object):
def __init__(self, args: argparse.Namespace):
self.args = args

try:
alg, ac = args.ah_alg.split(",")
except:
alg = args.ah_alg
ac = {}

zsl = args.ah_alg.split(",")
alg = zsl[0]
if alg == "none":
alg = ""

self.alg = alg
self.ac = ac
self.ac = zsl[1:]
if not alg:
self.on = False
self.hash = unicode
@@ -80,17 +86,23 @@ class PWHash(object):
its = 2
blksz = 8
para = 4
ramcap = 0 # openssl 1.1 = 32 MiB
try:
cost = 2 << int(self.ac[0])
its = int(self.ac[1])
blksz = int(self.ac[2])
para = int(self.ac[3])
ramcap = int(self.ac[4]) * 1024 * 1024
except:
pass

cfg = {"salt": self.salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
if ramcap:
cfg["maxmem"] = ramcap

ret = plain.encode("utf-8")
for _ in range(its):
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
ret = hashlib.scrypt(ret, **cfg)

return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
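The scrypt branch now collects its parameters in a cfg dict so an optional memory cap (maxmem) can be forwarded to hashlib. A standalone sketch with made-up password, salt and cost values; only hashlib.scrypt's keyword arguments are taken as given:

    import base64, hashlib, os

    salt = os.urandom(16)
    cfg = {"salt": salt, "n": 2 << 13, "r": 8, "p": 4, "dklen": 24}
    cfg["maxmem"] = 32 * 1024 * 1024  # roughly the openssl 1.1 default cap

    buf = b"hunter2"
    for _ in range(2):  # the "its" loop in the hunk
        buf = hashlib.scrypt(buf, **cfg)
    print("+" + base64.urlsafe_b64encode(buf).decode("utf-8"))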
@@ -12,7 +12,7 @@ from types import SimpleNamespace
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
from .authsrv import LEELOO_DALLAS, VFS
from .bos import bos
from .util import Daemon, min_ex, pybin, runhook
from .util import Daemon, absreal, min_ex, pybin, runhook, vjoin

if True: # pylint: disable=using-constant-test
from typing import Any, Union
@@ -151,6 +151,8 @@ class SMB(object):
def _uname(self) -> str:
if self.noacc:
return LEELOO_DALLAS
if not self.asrv.acct:
return "*"

try:
# you found it! my single worst bit of code so far
@@ -187,7 +189,9 @@ class SMB(object):

debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
return vfs, vfs.canonical(rem)
if not vfs.realpath:
raise Exception("unmapped vfs")
return vfs, vjoin(vfs.realpath, rem)

def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
vpath = vpath.replace("\\", "/").lstrip("/")
@@ -195,6 +199,8 @@ class SMB(object):
uname = self._uname()
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
if not vfs.realpath:
raise Exception("unmapped vfs")
_, vfs_ls, vfs_virt = vfs.ls(
rem, uname, not self.args.no_scandir, [[False, False]]
)
@@ -209,7 +215,7 @@ class SMB(object):
sz = 112 * 2 # ['.', '..']
for n, fn in enumerate(ls):
if sz >= 64000:
t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
t = "listing only %d of %d files (%d byte) in /%s for performance; see --smb-nwa-1"
warning(t, n, len(ls), sz, vpath)
break

@@ -238,11 +244,26 @@ class SMB(object):
t = "blocked write (no-write-acc %s): /%s @%s"
yeet(t % (vfs.axs.uwrite, vpath, uname))

ap = absreal(ap)
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "1.7.6.2", 0, ""
self.nlog,
None,
self.hub.up2k,
"xbu.smb",
xbu,
ap,
vpath,
"",
"",
"",
0,
0,
"1.7.6.2",
time.time(),
"",
):
yeet("blocked by xbu server config: " + vpath)
yeet("blocked by xbu server config: %r" % (vpath,))

ret = bos.open(ap, flags, *a, mode=chmod, **ka)
if wr:
@@ -297,7 +318,7 @@ class SMB(object):
t = "blocked rename (no-move-acc %s): /%s @%s"
yeet(t % (vfs1.axs.umove, vp1, uname))

self.hub.up2k.handle_mv(uname, vp1, vp2)
self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
try:
bos.makedirs(ap2)
except:
@@ -5,11 +5,11 @@ import errno
import re
import select
import socket
from email.utils import formatdate
import time

from .__init__ import TYPE_CHECKING
from .multicast import MC_Sck, MCast
from .util import CachedSet, html_escape, min_ex
from .util import CachedSet, formatdate, html_escape, min_ex

if TYPE_CHECKING:
from .broker_util import BrokerCli
@@ -84,7 +84,7 @@ class SSDPr(object):
name = self.args.doctitle
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
hc.reply(zs.encode("utf-8", "replace"))
return False # close connectino
return False # close connection

class SSDPd(MCast):
@@ -229,7 +229,7 @@ CONFIGID.UPNP.ORG: 1

"""
v4 = srv.ip.replace("::ffff:", "")
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
zs = zs.format(formatdate(), v4, srv.hport, self.args.zsid)
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
srv.sck.sendto(zb, addr[:2])
@@ -8,10 +8,16 @@ from itertools import chain
from .bimap import Bimap, BimapError
from .bit import get_bits, set_bits
from .buffer import BufferError
from .label import DNSBuffer, DNSLabel
from .label import DNSBuffer, DNSLabel, set_avahi_379
from .ranges import IP4, IP6, H, I, check_bytes

try:
range = xrange
except:
pass

class DNSError(Exception):
pass

@@ -420,7 +426,7 @@ class RR(object):
if rdlength:
rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
else:
rdata = ""
rdata = RD(b"a")
return cls(rname, rtype, rclass, ttl, rdata)
except (BufferError, BimapError) as e:
raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
@@ -11,6 +11,23 @@ LDH = set(range(33, 127))
ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")

avahi_379 = 0

def set_avahi_379():
global avahi_379
avahi_379 = 1

def log_avahi_379(args):
global avahi_379
if avahi_379 == 2:
return
avahi_379 = 2
t = "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d];\n\033[35m NOTE: this is probably avahi-bug #379, packet corruption in Avahi's mDNS-reflection feature. Copyparty has a workaround and is OK, but other devices need either --zm4 or --zm6"
raise BufferError(t % args)

class DNSLabelError(Exception):
pass

@@ -96,8 +113,11 @@ class DNSBuffer(Buffer):
)
if pointer < self.offset:
self.offset = pointer
elif avahi_379:
log_avahi_379((self.offset, pointer, len(self.data)))
label.extend(b"a")
break
else:

raise BufferError(
"Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
% (self.offset, pointer, len(self.data))
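For context on the pointer check above: DNS name compression (RFC 1035) encodes a 2-byte pointer whose two high bits are set, the remaining 14 bits being an offset earlier in the packet; avahi bug #379 emits pointers that point forward, which the workaround now tolerates instead of rejecting the whole packet. A small decoding sketch, plain RFC behaviour and not copyparty-specific:

    def decode_pointer(b1, b2):
        # a length octet of the form 0b11xxxxxx starts a compression pointer
        if b1 & 0xC0 != 0xC0:
            raise ValueError("not a compression pointer")
        return ((b1 & 0x3F) << 8) | b2

    print(decode_pointer(0xC0, 0x0C))  # 12, i.e. right after the DNS header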
@@ -11,7 +11,21 @@ import os

from ._shared import IP, Adapter

if os.name == "nt":

def nope(include_unconfigured=False):
return []

try:
S390X = os.uname().machine == "s390x"
except:
S390X = False

if os.environ.get("PRTY_NO_IFADDR") or S390X:
# s390x deadlocks at libc.getifaddrs
get_adapters = nope
elif os.name == "nt":
from ._win32 import get_adapters
elif os.name == "posix":
from ._posix import get_adapters
@@ -17,6 +17,7 @@ if not PY2:
U: Callable[[str], str] = str
else:
U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
range = xrange # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable

class Adapter(object):
@@ -16,6 +16,11 @@ if True: # pylint: disable=using-constant-test

from typing import Callable, List, Optional, Tuple, Union

try:
range = xrange
except:
pass

def num_char_count_bits(ver: int) -> int:
return 16 if (ver + 7) // 17 else 8
@@ -589,3 +594,20 @@ def _get_bit(x: int, i: int) -> bool:

class DataTooLongError(ValueError):
pass

def qr2svg(qr: QrCode, border: int) -> str:
parts: list[str] = []
for y in range(qr.size):
sy = border + y
for x in range(qr.size):
if qr.modules[y][x]:
parts.append("M%d,%dh1v1h-1z" % (border + x, sy))
t = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 {0} {0}" stroke="none">
<rect width="100%" height="100%" fill="#F7F7F7"/>
<path d="{1}" fill="#111111"/>
</svg>
"""
return t.format(qr.size + border * 2, " ".join(parts))
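qr2svg simply emits one 1x1 path square per dark module inside a quiet-zone border. A hedged usage sketch, assuming the vendored QrCode class keeps the qrcodegen-style encode_text constructor (if it does not, only the first line changes):

    qr = QrCode.encode_text("https://example.com/", QrCode.Ecc.MEDIUM)
    svg = qr2svg(qr, 2)  # 2-module border
    with open("qr.svg", "w") as f:
        f.write(svg)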
@@ -6,7 +6,7 @@ import tempfile
from datetime import datetime

from .__init__ import CORES
from .authsrv import AuthSrv, VFS
from .authsrv import VFS, AuthSrv
from .bos import bos
from .th_cli import ThumbCli
from .util import UTC, vjoin, vol_san
@@ -110,7 +110,7 @@ def errdesc(
report = ["copyparty failed to add the following files to the archive:", ""]

for fn, err in errors:
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
report.extend([" file: %r" % (fn,), "error: %s" % (err,), ""])

btxt = "\r\n".join(report).encode("utf-8", "replace")
btxt = vol_san(list(vfs.all_vols.values()), btxt)
@@ -2,8 +2,6 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import calendar
|
||||
import errno
|
||||
import gzip
|
||||
import logging
|
||||
@@ -16,7 +14,7 @@ import string
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
|
||||
# from inspect import currentframe
|
||||
# print(currentframe().f_lineno)
|
||||
@@ -28,18 +26,32 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, E, EnvParams, unicode
|
||||
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
||||
from .authsrv import BAD_CFG, AuthSrv
|
||||
from .cert import ensure_cert
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
||||
from .pwhash import HAVE_ARGON2
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .th_srv import (
|
||||
HAVE_AVIF,
|
||||
HAVE_FFMPEG,
|
||||
HAVE_FFPROBE,
|
||||
HAVE_HEIF,
|
||||
HAVE_PIL,
|
||||
HAVE_VIPS,
|
||||
HAVE_WEBP,
|
||||
ThumbSrv,
|
||||
)
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
FFMPEG_URL,
|
||||
HAVE_PSUTIL,
|
||||
HAVE_SQLITE3,
|
||||
HAVE_ZMQ,
|
||||
URL_BUG,
|
||||
UTC,
|
||||
VERSIONS,
|
||||
Daemon,
|
||||
@@ -50,12 +62,15 @@ from .util import (
|
||||
alltrace,
|
||||
ansi_re,
|
||||
build_netmap,
|
||||
expat_ver,
|
||||
load_ipu,
|
||||
min_ex,
|
||||
mp,
|
||||
odfusion,
|
||||
pybin,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
ub64enc,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -65,6 +80,9 @@ if TYPE_CHECKING:
|
||||
except:
|
||||
pass
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
"""
|
||||
@@ -89,20 +107,23 @@ class SvcHub(object):
|
||||
self.argv = argv
|
||||
self.E: EnvParams = args.E
|
||||
self.no_ansi = args.no_ansi
|
||||
self.tz = UTC if args.log_utc else None
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.is_dut = False # running in unittest; always False
|
||||
self.stop_req = False
|
||||
self.stopping = False
|
||||
self.stopped = False
|
||||
self.reload_req = False
|
||||
self.reloading = 0
|
||||
self.reload_mutex = threading.Lock()
|
||||
self.stop_cond = threading.Condition()
|
||||
self.nsigs = 3
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.log_mutex = threading.Lock()
|
||||
self.next_day = 0
|
||||
self.cday = 0
|
||||
self.cmon = 0
|
||||
self.tstack = 0.0
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
@@ -193,6 +214,38 @@ class SvcHub(object):
|
||||
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
||||
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
||||
|
||||
zs = ""
|
||||
if args.th_ram_max < 0.22:
|
||||
zs = "generate thumbnails"
|
||||
elif args.th_ram_max < 1:
|
||||
zs = "generate audio waveforms or spectrograms"
|
||||
if zs:
|
||||
t = "WARNING: --th-ram-max is very small (%.2f GiB); will not be able to %s"
|
||||
self.log("root", t % (args.th_ram_max, zs), 3)
|
||||
|
||||
if args.chpw and args.idp_h_usr:
|
||||
t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
|
||||
self.log("root", t, 1)
|
||||
raise Exception(t)
|
||||
|
||||
noch = set()
|
||||
for zs in args.chpw_no or []:
|
||||
zsl = [x.strip() for x in zs.split(",")]
|
||||
noch.update([x for x in zsl if x])
|
||||
args.chpw_no = noch
|
||||
|
||||
if args.ipu:
|
||||
iu, nm = load_ipu(self.log, args.ipu, True)
|
||||
setattr(args, "ipu_iu", iu)
|
||||
setattr(args, "ipu_nm", nm)
|
||||
|
||||
if not self.args.no_ses:
|
||||
self.setup_session_db()
|
||||
|
||||
args.shr1 = ""
|
||||
if args.shr:
|
||||
self.setup_share_db()
|
||||
|
||||
bri = "zy"[args.theme % 2 :][:1]
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
@@ -232,6 +285,8 @@ class SvcHub(object):
|
||||
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
self._feature_test()
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
if not HAVE_VIPS:
|
||||
decs.pop("vips", None)
|
||||
@@ -336,6 +391,160 @@ class SvcHub(object):

self.broker = Broker(self)

# create netmaps early to avoid firewall gaps,
# but the mutex blocks multiprocessing startup
for zs in "ipu_iu ftp_ipa_nm tftp_ipa_nm".split():
try:
getattr(args, zs).mutex = threading.Lock()
except:
pass

def setup_session_db(self) -> None:
if not HAVE_SQLITE3:
self.args.no_ses = True
t = "WARNING: sqlite3 not available; disabling sessions, will use plaintext passwords in cookies"
self.log("root", t, 3)
return

import sqlite3

create = True
db_path = self.args.ses_db
self.log("root", "opening sessions-db %s" % (db_path,))
for n in range(2):
try:
db = sqlite3.connect(db_path)
cur = db.cursor()
try:
cur.execute("select count(*) from us").fetchone()
create = False
break
except:
pass
except Exception as ex:
if n:
raise
t = "sessions-db corrupt; deleting and recreating: %r"
self.log("root", t % (ex,), 3)
try:
cur.close() # type: ignore
except:
pass
try:
db.close() # type: ignore
except:
pass
os.unlink(db_path)

sch = [
r"create table kv (k text, v int)",
r"create table us (un text, si text, t0 int)",
# username, session-id, creation-time
r"create index us_un on us(un)",
r"create index us_si on us(si)",
r"create index us_t0 on us(t0)",
r"insert into kv values ('sver', 1)",
]

assert db # type: ignore # !rm
assert cur # type: ignore # !rm
if create:
for cmd in sch:
cur.execute(cmd)
self.log("root", "created new sessions-db")
db.commit()

cur.close()
db.close()
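The sessions-db is a plain sqlite3 file with one row per active login in the us table (username, session-id, creation time). A read-only inspection sketch; the filename is whatever --ses-db points at, shown here with a made-up path:

    import sqlite3

    db = sqlite3.connect("/var/lib/copyparty/ses.db")
    for un, si, t0 in db.execute("select un, si, t0 from us"):
        print("%s  %s...  %d" % (un, si[:8], t0))
    db.close()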
def setup_share_db(self) -> None:
|
||||
al = self.args
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("root", "sqlite3 not available; disabling --shr", 1)
|
||||
al.shr = ""
|
||||
return
|
||||
|
||||
import sqlite3
|
||||
|
||||
al.shr = al.shr.strip("/")
|
||||
if "/" in al.shr or not al.shr:
|
||||
t = "config error: --shr must be the name of a virtual toplevel directory to put shares inside"
|
||||
self.log("root", t, 1)
|
||||
raise Exception(t)
|
||||
|
||||
al.shr = "/%s/" % (al.shr,)
|
||||
al.shr1 = al.shr[1:]
|
||||
|
||||
create = True
|
||||
modified = False
|
||||
db_path = self.args.shr_db
|
||||
self.log("root", "opening shares-db %s" % (db_path,))
|
||||
for n in range(2):
|
||||
try:
|
||||
db = sqlite3.connect(db_path)
|
||||
cur = db.cursor()
|
||||
try:
|
||||
cur.execute("select count(*) from sh").fetchone()
|
||||
create = False
|
||||
break
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
if n:
|
||||
raise
|
||||
t = "shares-db corrupt; deleting and recreating: %r"
|
||||
self.log("root", t % (ex,), 3)
|
||||
try:
|
||||
cur.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
db.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
os.unlink(db_path)
|
||||
|
||||
sch1 = [
|
||||
r"create table kv (k text, v int)",
|
||||
r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)",
|
||||
# sharekey, password, src, perms, numFiles, owner, created, expires
|
||||
]
|
||||
sch2 = [
|
||||
r"create table sf (k text, vp text)",
|
||||
r"create index sf_k on sf(k)",
|
||||
r"create index sh_k on sh(k)",
|
||||
r"create index sh_t1 on sh(t1)",
|
||||
]
|
||||
|
||||
assert db # type: ignore # !rm
|
||||
assert cur # type: ignore # !rm
|
||||
if create:
|
||||
dver = 2
|
||||
modified = True
|
||||
for cmd in sch1 + sch2:
|
||||
cur.execute(cmd)
|
||||
self.log("root", "created new shares-db")
|
||||
else:
|
||||
(dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]
|
||||
|
||||
if dver == 1:
|
||||
modified = True
|
||||
for cmd in sch2:
|
||||
cur.execute(cmd)
|
||||
cur.execute("update sh set st = 0")
|
||||
self.log("root", "shares-db schema upgrade ok")
|
||||
|
||||
if modified:
|
||||
for cmd in [
|
||||
r"delete from kv where k = 'sver'",
|
||||
r"insert into kv values ('sver', %d)" % (2,),
|
||||
]:
|
||||
cur.execute(cmd)
|
||||
db.commit()
|
||||
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
|
||||
@@ -420,6 +629,59 @@ class SvcHub(object):
|
||||
|
||||
Daemon(self.sd_notify, "sd-notify")
|
||||
|
||||
def _feature_test(self) -> None:
|
||||
fok = []
|
||||
fng = []
|
||||
t_ff = "transcode audio, create spectrograms, video thumbnails"
|
||||
to_check = [
|
||||
(HAVE_SQLITE3, "sqlite", "sessions and file/media indexing"),
|
||||
(HAVE_PIL, "pillow", "image thumbnails (plenty fast)"),
|
||||
(HAVE_VIPS, "vips", "image thumbnails (faster, eats more ram)"),
|
||||
(HAVE_WEBP, "pillow-webp", "create thumbnails as webp files"),
|
||||
(HAVE_FFMPEG, "ffmpeg", t_ff + ", good-but-slow image thumbnails"),
|
||||
(HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
|
||||
(HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
|
||||
(HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
|
||||
(HAVE_ZMQ, "pyzmq", "send zeromq messages from event-hooks"),
|
||||
(HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
|
||||
(HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
|
||||
]
|
||||
if ANYWIN:
|
||||
to_check += [
|
||||
(HAVE_PSUTIL, "psutil", "improved plugin cleanup (rarely useful)")
|
||||
]
|
||||
|
||||
verbose = self.args.deps
|
||||
if verbose:
|
||||
self.log("dependencies", "")
|
||||
|
||||
for have, feat, what in to_check:
|
||||
lst = fok if have else fng
|
||||
lst.append((feat, what))
|
||||
if verbose:
|
||||
zi = 2 if have else 5
|
||||
sgot = "found" if have else "missing"
|
||||
t = "%7s: %s \033[36m(%s)"
|
||||
self.log("dependencies", t % (sgot, feat, what), zi)
|
||||
|
||||
if verbose:
|
||||
self.log("dependencies", "")
|
||||
return
|
||||
|
||||
sok = ", ".join(x[0] for x in fok)
|
||||
sng = ", ".join(x[0] for x in fng)
|
||||
|
||||
t = ""
|
||||
if sok:
|
||||
t += "OK: \033[32m" + sok
|
||||
if sng:
|
||||
if t:
|
||||
t += ", "
|
||||
t += "\033[0mNG: \033[35m" + sng
|
||||
|
||||
t += "\033[0m, see --deps"
|
||||
self.log("dependencies", t, 6)
|
||||
|
||||
def _check_env(self) -> None:
|
||||
try:
|
||||
files = os.listdir(E.cfg)
|
||||
@@ -437,6 +699,15 @@ class SvcHub(object):
|
||||
if self.args.bauth_last:
|
||||
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
||||
|
||||
if not self.args.no_dav:
|
||||
from .dxml import DXML_OK
|
||||
|
||||
if not DXML_OK:
|
||||
if not self.args.no_dav:
|
||||
self.args.no_dav = True
|
||||
t = "WARNING:\nDisabling WebDAV support because dxml selftest failed. Please report this bug;\n%s\n...and include the following information in the bug-report:\n%s | expat %s\n"
|
||||
self.log("root", t % (URL_BUG, VERSIONS, expat_ver()), 1)
|
||||
|
||||
def _process_config(self) -> bool:
|
||||
al = self.args
|
||||
|
||||
@@ -522,8 +793,8 @@ class SvcHub(object):
|
||||
al.idp_h_grp = al.idp_h_grp.lower()
|
||||
al.idp_h_key = al.idp_h_key.lower()
|
||||
|
||||
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
|
||||
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
|
||||
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa, True)
|
||||
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa, True)
|
||||
|
||||
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||
al.mte = odfusion(mte, al.mte)
|
||||
@@ -535,7 +806,7 @@ class SvcHub(object):
|
||||
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||
|
||||
for k in ["no_hash", "no_idx", "og_ua"]:
|
||||
for k in ["no_hash", "no_idx", "og_ua", "srch_excl"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if ptn:
|
||||
setattr(self.args, k, re.compile(ptn))
|
||||
@@ -570,6 +841,24 @@ class SvcHub(object):
if len(al.tcolor) == 3: # fc5 => ffcc55
al.tcolor = "".join([x * 2 for x in al.tcolor])

zs = al.u2sz
zsl = zs.split(",")
if len(zsl) not in (1, 3):
t = "invalid --u2sz; must be either one number, or a comma-separated list of three numbers (min,default,max)"
raise Exception(t)
if len(zsl) < 3:
zsl = ["1", zs, zs]
zi2 = 1
for zs in zsl:
zi = int(zs)
# arbitrary constraint (anything above 2 GiB is probably unintended)
if zi < 1 or zi > 2047:
raise Exception("invalid --u2sz; minimum is 1, max is 2047")
if zi < zi2:
raise Exception("invalid --u2sz; values must be equal or ascending")
zi2 = zi
al.u2sz = ",".join(zsl)

return True
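So --u2sz takes either one number or "min,default,max", validated to be ascending and within 1..2047. A standalone re-run of that validation on a sample value (the sample itself is made up):

    zs = "1,64,96"  # e.g. what might be passed as --u2sz
    zsl = zs.split(",")
    if len(zsl) not in (1, 3):
        raise Exception("must be one number or min,default,max")
    if len(zsl) < 3:
        zsl = ["1", zs, zs]
    lo = 0
    for s in zsl:
        n = int(s)
        assert 1 <= n <= 2047 and n >= lo, "invalid --u2sz"
        lo = n
    print(",".join(zsl))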
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||
@@ -620,7 +909,7 @@ class SvcHub(object):
|
||||
self.args.nc = min(self.args.nc, soft // 2)
|
||||
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.now(UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
@@ -737,38 +1026,23 @@ class SvcHub(object):
|
||||
except:
|
||||
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
||||
|
||||
def reload(self) -> str:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
self.reloading = 1
|
||||
|
||||
Daemon(self._reload, "reloading")
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self, rescan_all_vols: bool = True) -> None:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading != 1:
|
||||
return
|
||||
self.reloading = 2
|
||||
def reload(self, rescan_all_vols: bool, up2k: bool) -> str:
|
||||
t = "config has been reloaded"
|
||||
with self.reload_mutex:
|
||||
self.log("root", "reloading config")
|
||||
self.asrv.reload()
|
||||
self.up2k.reload(rescan_all_vols)
|
||||
self.asrv.reload(9 if up2k else 4)
|
||||
if up2k:
|
||||
self.up2k.reload(rescan_all_vols)
|
||||
t += "; volumes are now reinitializing"
|
||||
else:
|
||||
self.log("root", "reload done")
|
||||
self.broker.reload()
|
||||
self.reloading = 0
|
||||
return t
|
||||
|
||||
def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
|
||||
while True:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading < 2:
|
||||
self.reloading = 1
|
||||
break
|
||||
time.sleep(0.05)
|
||||
|
||||
# try to handle multiple pending IdP reloads at once:
|
||||
time.sleep(0.2)
|
||||
|
||||
self._reload(rescan_all_vols=rescan_all_vols)
|
||||
def _reload_sessions(self) -> None:
|
||||
with self.asrv.mutex:
|
||||
self.asrv.load_sessions(True)
|
||||
self.broker.reload_sessions()
|
||||
|
||||
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
@@ -777,7 +1051,7 @@ class SvcHub(object):
|
||||
|
||||
if self.reload_req:
|
||||
self.reload_req = False
|
||||
self.reload()
|
||||
self.reload(True, True)
|
||||
|
||||
self.shutdown()
|
||||
|
||||
@@ -890,12 +1164,12 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
zd = datetime.now(UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
ts = self.log_dfmt % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
dt.year,
|
||||
dt.month * 100 + dt.day,
|
||||
(dt.hour * 100 + dt.minute) * 100 + dt.second,
|
||||
dt.microsecond // self.log_div,
|
||||
)
|
||||
|
||||
if c and not self.args.no_ansi:
|
||||
@@ -916,41 +1190,26 @@ class SvcHub(object):
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
now = time.time()
|
||||
if int(now) >= self.next_day:
|
||||
self._set_next_day()
|
||||
if dt.day != self.cday or dt.month != self.cmon:
|
||||
self._set_next_day(dt)
|
||||
|
||||
def _set_next_day(self) -> None:
|
||||
if self.next_day and self.logf and self.logf_base_fn != self._logname():
|
||||
def _set_next_day(self, dt: datetime) -> None:
|
||||
if self.cday and self.logf and self.logf_base_fn != self._logname():
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.now(UTC)
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
while dt.day == day_now:
|
||||
dt += timedelta(hours=12)
|
||||
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
try:
|
||||
tt = dt.utctimetuple()
|
||||
except:
|
||||
# still makes me hella uncomfortable
|
||||
tt = dt.timetuple()
|
||||
|
||||
self.next_day = calendar.timegm(tt)
|
||||
self.cday = dt.day
|
||||
self.cmon = dt.month
|
||||
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if int(now) >= self.next_day:
|
||||
dt = datetime.fromtimestamp(now, UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
if dt.day != self.cday or dt.month != self.cmon:
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
self._set_next_day()
|
||||
self._set_next_day(dt)
|
||||
if self.logf:
|
||||
self.logf.write(zs)
|
||||
|
||||
@@ -969,12 +1228,11 @@ class SvcHub(object):
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
zd = datetime.fromtimestamp(now, UTC)
|
||||
ts = self.log_efmt % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
dt.hour,
|
||||
dt.minute,
|
||||
dt.second,
|
||||
dt.microsecond // self.log_div,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
@@ -1072,5 +1330,5 @@ class SvcHub(object):
|
||||
zs = "{}\n{}".format(VERSIONS, alltrace())
|
||||
zb = zs.encode("utf-8", "replace")
|
||||
zb = gzip.compress(zb)
|
||||
zs = base64.b64encode(zb).decode("ascii")
|
||||
zs = ub64enc(zb).decode("ascii")
|
||||
self.log("stacks", zs)
|
||||
|
||||
@@ -37,9 +37,7 @@ def dostime2unix(buf: bytes) -> int:
|
||||
|
||||
|
||||
def unixtime2dos(ts: int) -> bytes:
|
||||
tt = time.gmtime(ts + 1)
|
||||
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
||||
|
||||
dy, dm, dd, th, tm, ts, _, _, _ = time.gmtime(ts + 1)
|
||||
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||
bt = (th << 11) + (tm << 5) + ts // 2
|
||||
try:
|
||||
@@ -102,12 +100,12 @@ def gen_hdr(
|
||||
|
||||
# spec says to put zeros when !crc if bit3 (streaming)
|
||||
# however infozip does actual sz and it even works on winxp
|
||||
# (same reasning for z64 extradata later)
|
||||
# (same reasoning for z64 extradata later)
|
||||
vsz = 0xFFFFFFFF if z64 else sz
|
||||
ret += spack(b"<LL", vsz, vsz)
|
||||
|
||||
# windows support (the "?" replace below too)
|
||||
fn = sanitize_fn(fn, "/", [])
|
||||
fn = sanitize_fn(fn, "/")
|
||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||
|
||||
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
|
||||
|
||||
@@ -17,18 +17,23 @@ from .util import (
|
||||
E_UNREACH,
|
||||
HAVE_IPV6,
|
||||
IP6ALL,
|
||||
VF_CAREFUL,
|
||||
Netdev,
|
||||
atomic_move,
|
||||
min_ex,
|
||||
sunpack,
|
||||
termsize,
|
||||
)
|
||||
|
||||
if True:
|
||||
from typing import Generator
|
||||
from typing import Generator, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
setattr(socket, "AF_UNIX", -9001)
|
||||
|
||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||
setattr(socket, "IPPROTO_IPV6", 41)
|
||||
|
||||
@@ -90,7 +95,7 @@ class TcpSrv(object):
|
||||
continue
|
||||
|
||||
# binding 0.0.0.0 after :: fails on dualstack
|
||||
# but is necessary on non-dualstakc
|
||||
# but is necessary on non-dualstack
|
||||
if successful_binds:
|
||||
continue
|
||||
|
||||
@@ -217,14 +222,41 @@ class TcpSrv(object):
if self.args.qr or self.args.qrs:
self.qr = self._qr(qr1, qr2)

def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
self.log("tcpsrv", msg, c)

def _listen(self, ip: str, port: int) -> None:
ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
uds_perm = uds_gid = -1
if "unix:" in ip:
tcp = False
ipv = socket.AF_UNIX
uds = ip.split(":")
ip = uds[-1]
if len(uds) > 2:
uds_perm = int(uds[1], 8)
if len(uds) > 3:
try:
uds_gid = int(uds[2])
except:
import grp

uds_gid = grp.getgrnam(uds[2]).gr_gid

elif ":" in ip:
tcp = True
ipv = socket.AF_INET6
else:
tcp = True
ipv = socket.AF_INET

srv = socket.socket(ipv, socket.SOCK_STREAM)

if not ANYWIN or self.args.reuseaddr:
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if tcp:
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

srv.settimeout(None) # < does not inherit, ^ opts above do

try:
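Reading the parsing above, a unix-socket listen spec looks like "unix:<octal-perms>:<group>:<path>", with the mode and group optional. A hedged client-side sketch that talks plain HTTP over such a socket using only the stdlib (the socket path is made up and assumes copyparty was started with a matching -i value):

    import socket

    sck = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sck.connect("/dev/shm/party.sock")
    sck.sendall(b"GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n")
    print(sck.recv(4096).decode("utf-8", "replace"))
    sck.close()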
@@ -236,8 +268,25 @@ class TcpSrv(object):
|
||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
sport = srv.getsockname()[1]
|
||||
if tcp:
|
||||
srv.bind((ip, port))
|
||||
else:
|
||||
if ANYWIN or self.args.rm_sck:
|
||||
if os.path.exists(ip):
|
||||
os.unlink(ip)
|
||||
srv.bind(ip)
|
||||
else:
|
||||
tf = "%s.%d" % (ip, os.getpid())
|
||||
if os.path.exists(tf):
|
||||
os.unlink(tf)
|
||||
srv.bind(tf)
|
||||
if uds_gid != -1:
|
||||
os.chown(tf, -1, uds_gid)
|
||||
if uds_perm != -1:
|
||||
os.chmod(tf, uds_perm)
|
||||
atomic_move(self.nlog, tf, ip, VF_CAREFUL)
|
||||
|
||||
sport = srv.getsockname()[1] if tcp else port
|
||||
if port != sport:
|
||||
# linux 6.0.16 lets you bind a port which is in use
|
||||
# except it just gives you a random port instead
|
||||
@@ -249,12 +298,23 @@ class TcpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
e = ""
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
if not tcp:
|
||||
e = "\033[1;31munix-socket {} is busy\033[0m".format(ip)
|
||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
|
||||
if not e:
|
||||
if not tcp:
|
||||
t = "\n\n\n NOTE: this crash may be due to a unix-socket bug; try --rm-sck\n"
|
||||
self.log("tcpsrv", t, 2)
|
||||
raise
|
||||
|
||||
if not tcp and not self.args.rm_sck:
|
||||
e += "; maybe this is a bug? try --rm-sck"
|
||||
|
||||
raise Exception(e)
|
||||
|
||||
def run(self) -> None:
|
||||
@@ -262,7 +322,14 @@ class TcpSrv(object):
|
||||
bound: list[tuple[str, int]] = []
|
||||
srvs: list[socket.socket] = []
|
||||
for srv in self.srv:
|
||||
ip, port = srv.getsockname()[:2]
|
||||
if srv.family == socket.AF_UNIX:
|
||||
tcp = False
|
||||
ip = re.sub(r"\.[0-9]+$", "", srv.getsockname())
|
||||
port = 0
|
||||
else:
|
||||
tcp = True
|
||||
ip, port = srv.getsockname()[:2]
|
||||
|
||||
if ip == IP6ALL:
|
||||
ip = "::" # jython
|
||||
|
||||
@@ -294,13 +361,17 @@ class TcpSrv(object):
|
||||
bound.append((ip, port))
|
||||
srvs.append(srv)
|
||||
fno = srv.fileno()
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
if tcp:
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
else:
|
||||
msg = "listening @ {} f{} p{}".format(ip, fno, os.getpid())
|
||||
|
||||
self.log("tcpsrv", msg)
|
||||
if self.args.q:
|
||||
print(msg)
|
||||
|
||||
self.hub.broker.say("listen", srv)
|
||||
self.hub.broker.say("httpsrv.listen", srv)
|
||||
|
||||
self.srv = srvs
|
||||
self.bound = bound
|
||||
@@ -308,7 +379,7 @@ class TcpSrv(object):
|
||||
self._distribute_netdevs()
|
||||
|
||||
def _distribute_netdevs(self):
|
||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
||||
self.hub.broker.say("httpsrv.set_netdevs", self.netdevs)
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
self.hub.restart_ftpd()
|
||||
@@ -331,23 +402,25 @@ class TcpSrv(object):
|
||||
if not netdevs:
|
||||
continue
|
||||
|
||||
added = "nothing"
|
||||
removed = "nothing"
|
||||
add = []
|
||||
rem = []
|
||||
for k, v in netdevs.items():
|
||||
if k not in self.netdevs:
|
||||
added = "{} = {}".format(k, v)
|
||||
add.append("\n\033[32m added %s = %s" % (k, v))
|
||||
for k, v in self.netdevs.items():
|
||||
if k not in netdevs:
|
||||
removed = "{} = {}".format(k, v)
|
||||
rem.append("\n\033[33mremoved %s = %s" % (k, v))
|
||||
|
||||
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
|
||||
self.log("tcpsrv", t.format(added, removed), 3)
|
||||
t = "network change detected:%s%s"
|
||||
self.log("tcpsrv", t % ("".join(add), "".join(rem)), 3)
|
||||
self.netdevs = netdevs
|
||||
self._distribute_netdevs()
|
||||
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||
from .stolen.ifaddr import get_adapters
|
||||
|
||||
listen_ips = [x for x in listen_ips if "unix:" not in x]
|
||||
|
||||
nics = get_adapters(True)
|
||||
eps: dict[str, Netdev] = {}
|
||||
for nic in nics:
|
||||
|
||||
@@ -36,7 +36,7 @@ from partftpy.TftpShared import TftpException
|
||||
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
@@ -44,6 +44,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
lg = logging.getLogger("tftp")
|
||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||
@@ -163,9 +166,16 @@ class Tftpd(object):
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
ips = [x for x in ips if "unix:" not in x]
|
||||
|
||||
if self.args.tftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
if not ips:
|
||||
t = "cannot start tftp-server; no compatible IPs in -i"
|
||||
self.nlog(t, 1)
|
||||
return
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
for ip in ips:
|
||||
@@ -241,6 +251,8 @@ class Tftpd(object):
|
||||
|
||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||
if not vfs.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||
@@ -257,12 +269,13 @@ class Tftpd(object):
|
||||
"*",
|
||||
not self.args.no_scandir,
|
||||
[[True, False]],
|
||||
throw=True,
|
||||
)
|
||||
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||
real1 = dirs1 + fils1
|
||||
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
||||
realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
|
||||
reals = [
|
||||
(
|
||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||
@@ -328,15 +341,29 @@ class Tftpd(object):
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "8.3.8.7", 0, ""
|
||||
self.nlog,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.tftpd",
|
||||
xbu,
|
||||
ap,
|
||||
vpath,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"8.3.8.7",
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
yeet("blocked by xbu server config: %r" % (vpath,))
|
||||
|
||||
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||
return self._ls(vpath, "", 0, True)
|
||||
|
||||
if not a:
|
||||
a = [self.args.iobuf]
|
||||
a = (self.args.iobuf,)
|
||||
|
||||
return open(ap, mode, *a, **ka)
|
||||
|
||||
@@ -377,7 +404,7 @@ class Tftpd(object):
|
||||
bos.stat(ap)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
return vpath == "/"
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
@@ -385,7 +412,7 @@ class Tftpd(object):
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
return ret
|
||||
except:
|
||||
return False
|
||||
return vpath == "/"
|
||||
|
||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||
src = inspect.currentframe().f_back.f_code.co_name
|
||||
|
||||
@@ -6,7 +6,7 @@ import os
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .th_srv import HAVE_WEBP, thumb_path
|
||||
from .th_srv import EXTS_AC, HAVE_WEBP, thumb_path
|
||||
from .util import Cooldown
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
@@ -57,12 +57,17 @@ class ThumbCli(object):
|
||||
if is_vid and "dvthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
want_opus = fmt in ("opus", "caf", "mp3")
|
||||
want_opus = fmt in EXTS_AC
|
||||
is_au = ext in self.fmt_ffa
|
||||
if is_au:
|
||||
is_vau = want_opus and ext in self.fmt_ffv
|
||||
if is_au or is_vau:
|
||||
if want_opus:
|
||||
if self.args.no_acode:
|
||||
return None
|
||||
elif fmt == "caf" and self.args.no_caf:
|
||||
fmt = "mp3"
|
||||
elif fmt == "owa" and self.args.no_owa:
|
||||
fmt = "mp3"
|
||||
else:
|
||||
if "dathumb" in dbv.flags:
|
||||
return None
|
||||
@@ -107,14 +112,14 @@ class ThumbCli(object):
|
||||
|
||||
fmt = sfmt
|
||||
|
||||
elif fmt[:1] == "p" and not is_au:
|
||||
t = "cannot thumbnail [%s]: png only allowed for waveforms"
|
||||
self.log(t % (rem), 6)
|
||||
elif fmt[:1] == "p" and not is_au and not is_vid:
|
||||
t = "cannot thumbnail %r: png only allowed for waveforms"
|
||||
self.log(t % (rem,), 6)
|
||||
return None
|
||||
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
self.log("no histpath for %r" % (ptop,))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
@@ -12,7 +11,7 @@ import time
|
||||
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
||||
@@ -21,46 +20,64 @@ from .util import (
|
||||
FFMPEG_URL,
|
||||
Cooldown,
|
||||
Daemon,
|
||||
Pebkac,
|
||||
afsenc,
|
||||
fsenc,
|
||||
min_ex,
|
||||
runcmd,
|
||||
statdir,
|
||||
ub64enc,
|
||||
vsplit,
|
||||
wrename,
|
||||
wunlink,
|
||||
)
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Optional, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_PILF = False
|
||||
HAVE_HEIF = False
|
||||
HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
EXTS_TH = set(["jpg", "webp", "png"])
|
||||
EXTS_AC = set(["opus", "owa", "caf", "mp3"])
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL"):
|
||||
raise Exception()
|
||||
|
||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PILF"):
|
||||
raise Exception()
|
||||
|
||||
ImageFont.load_default(size=16)
|
||||
HAVE_PILF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_WEBP"):
|
||||
raise Exception()
|
||||
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_HEIF"):
|
||||
raise Exception()
|
||||
|
||||
from pyheif_pillow_opener import register_heif_opener
|
||||
|
||||
register_heif_opener()
|
||||
@@ -69,6 +86,9 @@ try:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_AVIF"):
|
||||
raise Exception()
|
||||
|
||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||
|
||||
HAVE_AVIF = True
|
||||
@@ -80,6 +100,9 @@ except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_VIPS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
|
||||
@@ -88,6 +111,9 @@ except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
|
||||
th_dir_cache = {}
|
||||
|
||||
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
@@ -101,16 +127,22 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -
if ext in ffa and fmt[:2] in ("wf", "jf"):
fmt = fmt.replace("f", "")

rd += "\n" + fmt
h = hashlib.sha512(afsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
dcache = th_dir_cache
rd_key = rd + "\n" + fmt
rd = dcache.get(rd_key)
if not rd:
h = hashlib.sha512(afsenc(rd_key)).digest()
b64 = ub64enc(h).decode("ascii")[:24]
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
if len(dcache) > 9001:
dcache.clear()
dcache[rd_key] = rd

# could keep original filenames but this is safer re pathlen
h = hashlib.sha512(afsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
fn = ub64enc(h).decode("ascii")[:24]

if fmt in ("opus", "caf", "mp3"):
if fmt in EXTS_AC:
cat = "ac"
else:
fc = fmt[:1]
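So a thumbnail path is derived from sha512 hashes of the directory (plus thumbnail format) and of the filename, urlsafe-base64, truncated to 24 chars, with a two-level fan-out prefix; th_dir_cache only memoizes the directory part. A standalone sketch of the path shape (ub64enc and afsenc are assumed here to behave like urlsafe b64 and utf-8 encoding; sample names are made up, and the real path is additionally prefixed with the volume histpath and given an extension):

    import base64, hashlib

    def h24(s):
        h = hashlib.sha512(s.encode("utf-8")).digest()
        return base64.urlsafe_b64encode(h).decode("ascii")[:24]

    rd = h24("music/album" + "\n" + "j")   # directory + format
    rd = ("%s/%s/" % (rd[:2], rd[2:4])).lower() + rd
    fn = h24("01 track.flac")              # filename
    print(rd + "/" + fn)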
@@ -134,6 +166,7 @@ class ThumbSrv(object):
|
||||
self.ram: dict[str, float] = {}
|
||||
self.memcond = threading.Condition(self.mutex)
|
||||
self.stopping = False
|
||||
self.rm_nullthumbs = True # forget failed conversions on startup
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||
@@ -209,7 +242,7 @@ class ThumbSrv(object):
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
self.log("no histpath for %r" % (ptop,))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
@@ -219,7 +252,7 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
try:
|
||||
self.busy[tpath].append(cond)
|
||||
self.log("joined waiting room for %s" % (tpath,))
|
||||
self.log("joined waiting room for %r" % (tpath,))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
bos.makedirs(os.path.join(thdir, "w"))
|
||||
@@ -236,11 +269,11 @@ class ThumbSrv(object):
|
||||
allvols = list(self.asrv.vfs.all_vols.values())
|
||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||
if not vn:
|
||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
||||
self.log("ptop %r not in %s" % (ptop, allvols), 3)
|
||||
vn = self.asrv.vfs.all_aps[0][1]
|
||||
|
||||
self.q.put((abspath, tpath, fmt, vn))
|
||||
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
||||
self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)
|
||||
|
||||
while not self.stopping:
|
||||
with self.mutex:
|
||||
@@ -304,23 +337,32 @@ class ThumbSrv(object):
|
||||
ap_unpk = abspath
|
||||
|
||||
if not bos.path.exists(tpath):
|
||||
tex = tpath.rsplit(".", 1)[-1]
|
||||
want_mp3 = tex == "mp3"
|
||||
want_opus = tex in ("opus", "owa", "caf")
|
||||
want_png = tex == "png"
|
||||
want_au = want_mp3 or want_opus
|
||||
for lib in self.args.th_dec:
|
||||
can_au = lib == "ff" and (
|
||||
ext in self.fmt_ffa or ext in self.fmt_ffv
|
||||
)
|
||||
|
||||
if lib == "pil" and ext in self.fmt_pil:
|
||||
funs.append(self.conv_pil)
|
||||
elif lib == "vips" and ext in self.fmt_vips:
|
||||
funs.append(self.conv_vips)
|
||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
||||
funs.append(self.conv_ffmpeg)
|
||||
elif lib == "ff" and ext in self.fmt_ffa:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
elif can_au and (want_png or want_au):
|
||||
if want_opus:
|
||||
funs.append(self.conv_opus)
|
||||
elif tpath.endswith(".mp3"):
|
||||
elif want_mp3:
|
||||
funs.append(self.conv_mp3)
|
||||
elif tpath.endswith(".png"):
|
||||
elif want_png:
|
||||
funs.append(self.conv_waves)
|
||||
png_ok = True
|
||||
else:
|
||||
funs.append(self.conv_spec)
|
||||
elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
|
||||
funs.append(self.conv_ffmpeg)
|
||||
elif lib == "ff" and ext in self.fmt_ffa and not want_au:
|
||||
funs.append(self.conv_spec)
|
||||
|
||||
tdir, tfn = os.path.split(tpath)
|
||||
ttpath = os.path.join(tdir, "w", tfn)
|
||||
@@ -337,8 +379,8 @@ class ThumbSrv(object):
|
||||
fun(ap_unpk, ttpath, fmt, vn)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
||||
msg = "%s could not create thumbnail of %r\n%s"
|
||||
msg = msg % (fun.__name__, abspath, min_ex())
|
||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
||||
self.log(msg, c)
|
||||
if getattr(ex, "returncode", 0) != 321:
|
||||
@@ -450,7 +492,7 @@ class ThumbSrv(object):
|
||||
if c == crops[-1]:
|
||||
raise
|
||||
|
||||
assert img # type: ignore
|
||||
assert img # type: ignore # !rm
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
@@ -716,47 +758,102 @@ class ThumbSrv(object):
|
||||
if "ac" not in tags:
|
||||
raise Exception("not audio")
|
||||
|
||||
sq = "%dk" % (self.args.q_opus,)
|
||||
bq = sq.encode("ascii")
|
||||
if tags["ac"][1] == "opus":
|
||||
enc = "-c:a copy"
|
||||
else:
|
||||
enc = "-c:a libopus -b:a " + sq
|
||||
|
||||
fun = self._conv_caf if fmt == "caf" else self._conv_owa
|
||||
|
||||
fun(abspath, tpath, tags, rawtags, enc, bq, vn)
|
||||
|
||||
def _conv_owa(
|
||||
self,
|
||||
abspath: str,
|
||||
tpath: str,
|
||||
tags: dict[str, tuple[int, Any]],
|
||||
rawtags: dict[str, list[Any]],
|
||||
enc: str,
|
||||
bq: bytes,
|
||||
vn: VFS,
|
||||
) -> None:
|
||||
if tpath.endswith(".owa"):
|
||||
container = b"webm"
|
||||
tagset = [b"-map_metadata", b"-1"]
|
||||
else:
|
||||
container = b"opus"
|
||||
tagset = self.big_tags(rawtags)
|
||||
|
||||
self.log("conv2 %s [%s]" % (container, enc), 6)
|
||||
benc = enc.encode("ascii").split(b" ")
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
] + tagset + [
|
||||
b"-map", b"0:a:0",
|
||||
] + benc + [
|
||||
b"-f", container,
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
def _conv_caf(
self,
abspath: str,
tpath: str,
tags: dict[str, tuple[int, Any]],
rawtags: dict[str, list[Any]],
enc: str,
bq: bytes,
vn: VFS,
) -> None:
tmp_opus = tpath + ".opus"
try:
wunlink(self.log, tmp_opus, vn.flags)
except:
pass

try:
dur = tags[".dur"][1]
except:
dur = 0

src_opus = abspath.lower().endswith(".opus") or tags["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath + ".opus"
try:
wunlink(self.log, tmp_opus, vn.flags)
except:
pass
self.log("conv2 caf-tmp [%s]" % (enc,), 6)
benc = enc.encode("ascii").split(b" ")

caf_src = abspath if src_opus else tmp_opus
bq = ("%dk" % (self.args.q_opus,)).encode("ascii")

if not want_caf or not src_opus:
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
] + self.big_tags(rawtags) + [
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", bq,
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd, vn, oom=300)
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
] + benc + [
b"-f", b"opus",
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd, vn, oom=300)

# iOS fails to play some "insufficiently complex" files
# (average file shorter than 8 seconds), so of course we
# fix that by mixing in some inaudible pink noise :^)
# 6.3 sec seems like the cutoff so lets do 7, and
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
sz = bos.path.getsize(tmp_opus)
if dur < 20 or sz < 256 * 1024:
zs = bq.decode("ascii")
self.log("conv2 caf-transcode; dur=%d sz=%d q=%s" % (dur, sz, zs), 6)
# fmt: off
cmd = [
b"ffmpeg",
@@ -775,15 +872,16 @@ class ThumbSrv(object):
# fmt: on
self._run_ff(cmd, vn, oom=300)

elif want_caf:
else:
# simple remux should be safe
self.log("conv2 caf-remux; dur=%d sz=%d" % (dur, sz), 6)
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath if src_opus else tmp_opus),
b"-i", fsenc(tmp_opus),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"copy",
@@ -793,11 +891,10 @@ class ThumbSrv(object):
# fmt: on
self._run_ff(cmd, vn, oom=300)

if tmp_opus != tpath:
try:
wunlink(self.log, tmp_opus, vn.flags)
except:
pass
try:
wunlink(self.log, tmp_opus, vn.flags)
except:
pass

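The caf-transcode command in the hunk above is cut short by the diff marker, so the exact flags are not visible here. Purely as a hypothetical sketch (not necessarily the filters or flags copyparty actually uses), blending a near-silent pink-noise bed under the padded source with ffmpeg's apad, anoisesrc and amix filters could look roughly like this:

import subprocess

def caf_with_noise(src: str, dst: str, bitrate: str = "128k") -> None:
    # hypothetical sketch: pad the source out to ~7 sec and mix in
    # barely-audible pink noise so iOS accepts the resulting CAF;
    # note that amix averages its inputs by default, so levels drop a bit
    flt = (
        "[0:a:0]apad=pad_dur=7[a];"
        "anoisesrc=colour=pink:amplitude=0.0001[n];"
        "[a][n]amix=inputs=2:duration=first[o]"
    )
    cmd = [
        "ffmpeg", "-nostdin", "-v", "error", "-hide_banner",
        "-i", src,
        "-filter_complex", flt,
        "-map", "[o]",
        "-c:a", "libopus", "-b:a", bitrate,
        "-f", "caf", dst,
    ]
    subprocess.run(cmd, check=True)
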
def big_tags(self, raw_tags: dict[str, list[str]]) -> list[bytes]:
ret = []
@@ -824,7 +921,6 @@ class ThumbSrv(object):
def cleaner(self) -> None:
interval = self.args.th_clean
while True:
time.sleep(interval)
ndirs = 0
for vol, histpath in self.asrv.vfs.histtab.items():
if histpath.startswith(vol):
@@ -838,6 +934,8 @@ class ThumbSrv(object):
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)

self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
self.rm_nullthumbs = False
time.sleep(interval)

def clean(self, histpath: str) -> int:
ret = 0
@@ -852,13 +950,15 @@ class ThumbSrv(object):

def _clean(self, cat: str, thumbpath: str) -> int:
# self.log("cln {}".format(thumbpath))
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf", "mp3"]
exts = EXTS_TH if cat == "th" else EXTS_AC
maxage = getattr(self.args, cat + "_maxage")
now = time.time()
prev_b64 = None
prev_fp = ""
try:
t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
t1 = statdir(
self.log_func, not self.args.no_scandir, False, thumbpath, False
)
ents = sorted(list(t1))
except:
return 0
@@ -899,6 +999,10 @@ class ThumbSrv(object):

continue

if self.rm_nullthumbs and not inf.st_size:
bos.unlink(fp)
continue

if b64 == prev_b64:
self.log("rm replaced [{}]".format(fp))
bos.unlink(prev_fp)

@@ -8,7 +8,7 @@ import threading
import time
from operator import itemgetter

from .__init__ import ANYWIN, TYPE_CHECKING, unicode
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
from .authsrv import LEELOO_DALLAS, VFS
from .bos import bos
from .up2k import up2k_wark_from_hashlist
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
if TYPE_CHECKING:
from .httpsrv import HttpSrv

if PY2:
range = xrange # type: ignore


class U2idx(object):
def __init__(self, hsrv: "HttpSrv") -> None:
@@ -50,12 +53,16 @@ class U2idx(object):
self.log("your python does not have sqlite3; searching will be disabled")
return

assert sqlite3 # type: ignore # !rm

self.active_id = ""
self.active_cur: Optional["sqlite3.Cursor"] = None
self.cur: dict[str, "sqlite3.Cursor"] = {}
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
self.mem_cur.execute(r"create table a (b text)")

self.sh_cur: Optional["sqlite3.Cursor"] = None

self.p_end = 0.0
self.p_dur = 0.0

@@ -63,6 +70,9 @@ class U2idx(object):
self.log_func("u2idx", msg, c)

def shutdown(self) -> None:
if not HAVE_SQLITE3:
return

for cur in self.cur.values():
db = cur.connection
try:
@@ -73,6 +83,12 @@ class U2idx(object):
cur.close()
db.close()

for cur in (self.mem_cur, self.sh_cur):
if cur:
db = cur.connection
cur.close()
db.close()

def fsearch(
self, uname: str, vols: list[VFS], body: dict[str, Any]
) -> list[dict[str, Any]]:
@@ -88,25 +104,39 @@ class U2idx(object):
uv: list[Union[str, int]] = [wark[:16], wark]

try:
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
except:
raise Pebkac(500, min_ex())

def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
if not HAVE_SQLITE3:
def get_shr(self) -> Optional["sqlite3.Cursor"]:
if self.sh_cur:
return self.sh_cur

if not HAVE_SQLITE3 or not self.args.shr:
return None

assert sqlite3 # type: ignore # !rm

db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
cur = db.cursor()
cur.execute('pragma table_info("sh")').fetchall()
self.sh_cur = cur
return cur

def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
cur = self.cur.get(vn.realpath)
if cur:
return cur

if "e2d" not in vn.flags:
if not HAVE_SQLITE3 or "e2d" not in vn.flags:
return None

assert sqlite3 # type: ignore # !rm

ptop = vn.realpath
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
self.log("no histpath for %r" % (ptop,))
return None

db_path = os.path.join(histpath, "up2k.db")
@@ -121,7 +151,7 @@ class U2idx(object):
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
cur = db.cursor()
cur.execute('pragma table_info("up")').fetchone()
self.log("ro: {}".format(db_path))
self.log("ro: %r" % (db_path,))
except:
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
# may not fail until the pragma so unset it
@@ -131,7 +161,7 @@ class U2idx(object):
# on windows, this steals the write-lock from up2k.deferred_init --
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
self.log("opened {}".format(db_path))
self.log("opened %r" % (db_path,))

self.cur[ptop] = cur
return cur
@@ -280,7 +310,7 @@ class U2idx(object):
q += " lower({}) {} ? ) ".format(field, oper)

try:
return self.run_query(uname, vols, q, va, have_mt, lim)
return self.run_query(uname, vols, q, va, have_mt, True, lim)
except Exception as ex:
raise Pebkac(500, repr(ex))

@@ -291,9 +321,11 @@ class U2idx(object):
uq: str,
uv: list[Union[str, int]],
have_mt: bool,
sort: bool,
lim: int,
) -> tuple[list[dict[str, Any]], list[str], bool]:
if self.args.srch_dbg:
dbg = self.args.srch_dbg
if dbg:
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
self.log(t % (len(vols), zs), 5)
@@ -336,14 +368,14 @@ class U2idx(object):
if not cur:
continue

excl = []
for vp2 in self.asrv.vfs.all_vols.keys():
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
excl.append(vp2[len(vtop) :].lstrip("/"))
dots = flags.get("dotsrch") and uname in vol.axs.udot
zs = "srch_re_dots" if dots else "srch_re_nodot"
rex: re.Pattern = flags.get(zs) # type: ignore

if self.args.srch_dbg:
t = "searching in volume /%s (%s), excludelist %s"
self.log(t % (vtop, ptop, excl), 5)
if dbg:
t = "searching in volume /%s (%s), excluding %s"
self.log(t % (vtop, ptop, rex.pattern), 5)
rex_cfg: Optional[re.Pattern] = flags.get("srch_excl")

self.active_cur = cur

@@ -356,7 +388,6 @@ class U2idx(object):

sret = []
fk = flags.get("fk")
dots = flags.get("dotsrch") and uname in vol.axs.udot
fk_alg = 2 if "fka" in flags else 1
c = cur.execute(uq, tuple(vuv))
for hit in c:
@@ -365,20 +396,23 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)

if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
if self.args.srch_dbg:
zs = vjoin(vjoin(vtop, rd), fn)
t = "database inconsistency in volume '/%s'; ignoring: %s"
self.log(t % (vtop, zs), 1)
vp = vjoin(vjoin(vtop, rd), fn)

if vp in seen_rps:
continue

rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
if not dots and "/." in ("/" + rp):
continue

if rp in seen_rps:
if rex.search(vp):
if dbg:
if rex_cfg and rex_cfg.search(vp): # type: ignore
self.log("filtered by srch_excl: %s" % (vp,), 6)
elif not dots and "/." in ("/" + vp):
pass
else:
t = "database inconsistency in volume '/%s'; ignoring: %s"
self.log(t % (vtop, vp), 1)
continue

rp = quotep(vp)
if not fk:
suf = ""
else:
@@ -400,7 +434,7 @@ class U2idx(object):
if lim < 0:
break

if self.args.srch_dbg:
if dbg:
t = "in volume '/%s': hit: %s"
self.log(t % (vtop, rp), 5)

@@ -430,14 +464,15 @@ class U2idx(object):
ret.extend(sret)
# print("[{}] {}".format(ptop, sret))

if self.args.srch_dbg:
if dbg:
t = "in volume '/%s': got %d hits, %d total so far"
self.log(t % (vtop, len(sret), len(ret)), 5)

done_flag.append(True)
self.active_id = ""

ret.sort(key=itemgetter("rp"))
if sort:
ret.sort(key=itemgetter("rp"))

return ret, list(taglist.keys()), lim < 0 and not clamped

@@ -448,5 +483,5 @@ class U2idx(object):
return

if identifier == self.active_id:
assert self.active_cur
assert self.active_cur # !rm
self.active_cur.connection.interrupt()

1773 copyparty/up2k.py
File diff suppressed because it is too large
@@ -29,9 +29,10 @@ window.baguetteBox = (function () {
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
scrollCSS = ['', ''],
scrollTimer = 0,
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
re_v = /^[^?]+\.(webm|mkv|mp4|m4v|mov)(\?|$)/i,
anims = ['slideIn', 'fadeIn', 'none'],
data = {}, // all galleries
imagesElements = [],
@@ -567,6 +568,12 @@ window.baguetteBox = (function () {

function showOverlay(chosenImageIndex) {
if (options.noScrollbars) {
var a = document.documentElement.style.overflowY,
b = document.body.style.overflowY;

if (a != 'hidden' || b != 'scroll')
scrollCSS = [a, b];

document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
@@ -615,8 +622,8 @@ window.baguetteBox = (function () {
playvid(false);
removeFromCache('#files');
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
document.documentElement.style.overflowY = scrollCSS[0];
document.body.style.overflowY = scrollCSS[1];
}

try {
@@ -1060,7 +1067,7 @@ window.baguetteBox = (function () {
return showNextImage();

show_buttons('t');

if (Date.now() - ctime <= 500 && !IPHONE)
tglfull();

@@ -10,7 +10,6 @@
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #bbb;
|
||||
|
||||
--bg-u7: #555;
|
||||
--bg-u6: #4c4c4c;
|
||||
--bg-u5: #444;
|
||||
--bg-u4: #383838;
|
||||
@@ -43,8 +42,14 @@
|
||||
--btn-h-bg: #805;
|
||||
--btn-1-fg: #400;
|
||||
--btn-1-bg: var(--a);
|
||||
--btn-h-bs: var(--btn-bs);
|
||||
--btn-h-bb: var(--btn-bb);
|
||||
--btn-1-bs: var(--btn-bs);
|
||||
--btn-1-bb: var(--btn-bb);
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #fe8;
|
||||
--btn-1h-bs: var(--btn-1-bs);
|
||||
--btn-1h-bb: var(--btn-1-bb);
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
@@ -59,7 +64,7 @@
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||
--u2-tab-1-fg: #fd7;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, #353, var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
--u2-tab-1-b2: #583;
|
||||
--u2-tab-1-sh: #280;
|
||||
@@ -183,7 +188,6 @@ html.y {
|
||||
--srv-1: #555;
|
||||
--srv-2: #c83;
|
||||
--srv-3: #c0a;
|
||||
--srv-3b: rgba(255,68,204,0.6);
|
||||
|
||||
--tree-bg: #fff;
|
||||
|
||||
@@ -212,22 +216,19 @@ html.y {
|
||||
html.a {
|
||||
--op-aa-sh: 0 0 .2em var(--bg-d3) inset;
|
||||
|
||||
--u2-o-bg: #603;
|
||||
--u2-o-b1: #a16;
|
||||
--u2-o-sh: #a00;
|
||||
--u2-o-h-bg: var(--u2-o-bg);
|
||||
--u2-o-h-b1: #fb0;
|
||||
--u2-o-h-sh: #fb0;
|
||||
--u2-o-1-bg: #6a1;
|
||||
--u2-o-1-b1: #efa;
|
||||
--u2-o-1-sh: #0c0;
|
||||
--u2-o-1h-bg: var(--u2-o-1-bg);
|
||||
--btn-bs: 0 0 .2em var(--bg-d3);
|
||||
}
|
||||
html.az {
|
||||
--btn-1-bs: 0 0 .1em var(--fg) inset;
|
||||
}
|
||||
html.ay {
|
||||
--op-aa-sh: 0 .1em .2em #ccc;
|
||||
--op-aa-bg: var(--bg-max);
|
||||
}
|
||||
html.b {
|
||||
--btn-bs: 0 .05em 0 var(--bg-d3) inset;
|
||||
--btn-1-bs: 0 .05em 0 var(--btn-1h-bg) inset;
|
||||
|
||||
--tree-bg: var(--bg);
|
||||
|
||||
--g-bg: var(--bg);
|
||||
@@ -244,17 +245,13 @@ html.b {
|
||||
--u2-b1-bg: rgba(128,128,128,0.15);
|
||||
--u2-b2-bg: var(--u2-b1-bg);
|
||||
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-h-bg: var(--btn-h-bg);
|
||||
--u2-o-1-bg: var(--a);
|
||||
--u2-o-1h-bg: var(--a-hil);
|
||||
|
||||
--f-sh1: 0.1;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.bz {
|
||||
--fg: #cce;
|
||||
--fg-weak: #bbd;
|
||||
|
||||
--bg-u5: #3b3f58;
|
||||
--bg-u4: #1e2130;
|
||||
--bg-u3: #1e2130;
|
||||
@@ -266,12 +263,14 @@ html.bz {
|
||||
|
||||
--row-alt: #181a27;
|
||||
|
||||
--a-b: #fb4;
|
||||
|
||||
--btn-bg: #202231;
|
||||
--btn-h-bg: #2d2f45;
|
||||
--btn-1-bg: #ba2959;
|
||||
--btn-1-is: #f59;
|
||||
--btn-1-fg: #fff;
|
||||
--btn-1-bg: #eb6;
|
||||
--btn-1-fg: #000;
|
||||
--btn-1h-fg: #000;
|
||||
--btn-1h-bg: #ff9;
|
||||
--txt-sh: a;
|
||||
|
||||
--u2-tab-b1: var(--bg-u5);
|
||||
@@ -286,6 +285,7 @@ html.bz {
|
||||
--f-h-b1: #34384e;
|
||||
--mp-sh: #11121d;
|
||||
/*--mp-b-bg: #2c3044;*/
|
||||
--f-play-bg: var(--btn-1-bg);
|
||||
}
|
||||
html.by {
|
||||
--bg: #f2f2f2;
|
||||
@@ -306,6 +306,7 @@ html.by {
|
||||
}
|
||||
html.c {
|
||||
font-weight: bold;
|
||||
|
||||
--fg: #fff;
|
||||
--fg-weak: #cef;
|
||||
--bg-u5: #409;
|
||||
@@ -326,17 +327,25 @@ html.c {
|
||||
--chk-fg: #d90;
|
||||
|
||||
--op-aa-bg: #f9dd22;
|
||||
--u2-o-1-bg: #4cf;
|
||||
|
||||
--srv-1: #ea0;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.cz {
|
||||
--bgg: var(--bg-u2);
|
||||
|
||||
--sel-bg: var(--bg-u5);
|
||||
--sel-fg: var(--fg);
|
||||
|
||||
--btn-bb: .2em solid #709;
|
||||
--btn-bs: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
--btn-1-bb: .2em solid #e90;
|
||||
--btn-1-bs: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
|
||||
--srv-3: #fff;
|
||||
|
||||
--u2-tab-b1: var(--bg-d3);
|
||||
--u2-tab-1-bg: a;
|
||||
}
|
||||
html.cy {
|
||||
--fg: #fff;
|
||||
@@ -363,24 +372,25 @@ html.cy {
|
||||
--btn-h-fg: #fff;
|
||||
--btn-1-bg: #ff0;
|
||||
--btn-1-fg: #000;
|
||||
--btn-bs: 0 .25em 0 #f00;
|
||||
--chk-fg: #fd0;
|
||||
|
||||
--txt-bg: #000;
|
||||
--srv-1: #f00;
|
||||
--srv-3: #fff;
|
||||
--op-aa-bg: #fff;
|
||||
|
||||
--u2-b1-bg: #f00;
|
||||
--u2-b2-bg: #f00;
|
||||
--u2-o-bg: #ff0;
|
||||
--u2-o-1-bg: #f00;
|
||||
|
||||
--g-sel-fg: #fff;
|
||||
--g-sel-bg: #aaa;
|
||||
--g-fsel-bg: #aaa;
|
||||
}
|
||||
html.dz {
|
||||
--fg: #4d4;
|
||||
--fg-max: #fff;
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #2a2;
|
||||
|
||||
--bg-u7: #020;
|
||||
--bg-u6: #020;
|
||||
--bg-u5: #050;
|
||||
--bg-u4: #020;
|
||||
@@ -388,11 +398,9 @@ html.dz {
|
||||
--bg-u2: #020;
|
||||
--bg-u1: #020;
|
||||
--bg: #010;
|
||||
--bgg: var(--bg);
|
||||
--bg-d1: #000;
|
||||
--bg-d2: #020;
|
||||
--bg-d3: #000;
|
||||
--bg-max: #000;
|
||||
|
||||
--tab-alt: #6f6;
|
||||
--row-alt: #030;
|
||||
@@ -405,48 +413,21 @@ html.dz {
|
||||
--a-dark: #afa;
|
||||
--a-gray: #2a2;
|
||||
|
||||
--btn-fg: var(--a);
|
||||
--btn-bg: rgba(64,128,64,0.15);
|
||||
--btn-h-fg: var(--a-hil);
|
||||
--btn-h-bg: #050;
|
||||
--btn-1-fg: #000;
|
||||
--btn-1-bg: #4f4;
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #3f3;
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
|
||||
--op-aa-fg: var(--a);
|
||||
--op-aa-bg: var(--bg-d2);
|
||||
--op-a-sh: rgba(0,0,0,0.5);
|
||||
--btn-bs: 0 0 0 .1em #080 inset;
|
||||
--btn-1-bs: a;
|
||||
|
||||
--u2-btn-b1: var(--fg-weak);
|
||||
--u2-sbtn-b1: var(--fg-weak);
|
||||
--u2-txt-bg: var(--bg-u5);
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: var(--fg-weak);
|
||||
--u2-tab-1-fg: #fff;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
--u2-tab-1-b2: #583;
|
||||
--u2-tab-1-sh: #280;
|
||||
--u2-b-fg: #fff;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, #151, var(--bg) 80%);
|
||||
--u2-b1-bg: #3a3;
|
||||
--u2-b2-bg: #3a3;
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-b1: var(--bg-u5);
|
||||
--u2-o-h-bg: var(--fg-weak);
|
||||
--u2-o-1-bg: var(--fg-weak);
|
||||
--u2-o-1-b1: var(--a);
|
||||
--u2-o-1h-bg: var(--a);
|
||||
--u2-inf-bg: #07a;
|
||||
--u2-inf-b1: #0be;
|
||||
--u2-ok-bg: #380;
|
||||
--u2-ok-b1: #8e4;
|
||||
--u2-err-bg: #900;
|
||||
--u2-err-b1: #d06;
|
||||
--ud-b1: #888;
|
||||
|
||||
--sort-1: #fff;
|
||||
--sort-2: #3f3;
|
||||
@@ -458,47 +439,12 @@ html.dz {
|
||||
|
||||
--tree-bg: #010;
|
||||
|
||||
--g-play-bg: #750;
|
||||
--g-play-b1: #c90;
|
||||
--g-play-b2: #da4;
|
||||
--g-play-sh: #b83;
|
||||
|
||||
--g-sel-fg: #fff;
|
||||
--g-sel-bg: #925;
|
||||
--g-sel-b1: #c37;
|
||||
--g-sel-sh: #b36;
|
||||
--g-fsel-bg: #d39;
|
||||
--g-fsel-b1: #d48;
|
||||
--g-fsel-ts: #804;
|
||||
--g-fg: var(--a-hil);
|
||||
--g-bg: var(--bg-u2);
|
||||
--g-b1: var(--bg-u4);
|
||||
--g-b2: var(--bg-u5);
|
||||
--g-g1: var(--bg-u2);
|
||||
--g-g2: var(--bg-u5);
|
||||
--g-f-bg: var(--bg-u4);
|
||||
--g-f-b1: var(--bg-u5);
|
||||
--g-f-fg: var(--a-hil);
|
||||
--g-sh: rgba(0,0,0,0.3);
|
||||
|
||||
--f-sh1: 0.33;
|
||||
--f-sh2: 0.02;
|
||||
--f-sh3: 0.2;
|
||||
--f-h-b1: #3b3;
|
||||
|
||||
--f-play-bg: #fc5;
|
||||
--f-play-fg: #000;
|
||||
--f-sel-sh: #fc0;
|
||||
--f-gray: #999;
|
||||
|
||||
--fm-off: #f6c;
|
||||
--mp-sh: var(--bg-d3);
|
||||
|
||||
--err-fg: #fff;
|
||||
--err-bg: #a20;
|
||||
--err-b1: #f00;
|
||||
--err-ts: #500;
|
||||
|
||||
text-shadow: none;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
@@ -551,10 +497,6 @@ html.dy {
|
||||
--u2-tab-1-bg: a;
|
||||
--u2-b1-bg: #000;
|
||||
--u2-b2-bg: #000;
|
||||
--u2-o-h-bg: #999;
|
||||
--u2-o-1h-bg: #999;
|
||||
--u2-o-bg: #eee;
|
||||
--u2-o-1-bg: #000;
|
||||
|
||||
--ud-b1: a;
|
||||
|
||||
@@ -599,7 +541,7 @@ html.dy {
|
||||
background: var(--sel-bg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html,body,tr,th,td,#files,a {
|
||||
html,body,tr,th,td,#files,a,#blogout {
|
||||
color: inherit;
|
||||
background: none;
|
||||
font-weight: inherit;
|
||||
@@ -627,6 +569,7 @@ pre, code, tt, #doc, #doc>code {
|
||||
overflow: hidden;
|
||||
width: 0;
|
||||
height: 0;
|
||||
color: var(--bg);
|
||||
}
|
||||
html .ayjump:focus {
|
||||
z-index: 80386;
|
||||
@@ -681,11 +624,15 @@ html.y #path {
|
||||
#files tbody div a {
|
||||
color: var(--tab-alt);
|
||||
}
|
||||
a, #files tbody div a:last-child {
|
||||
a, #blogout, #files tbody div a:last-child {
|
||||
color: var(--a);
|
||||
padding: .2em;
|
||||
text-decoration: none;
|
||||
}
|
||||
#blogout {
|
||||
margin: -.2em;
|
||||
}
|
||||
#blogout:hover,
|
||||
a:hover {
|
||||
color: var(--a-hil);
|
||||
background: var(--a-h-bg);
|
||||
@@ -886,7 +833,7 @@ html.y #path a:hover {
|
||||
max-width: 52em;
|
||||
}
|
||||
.mdo.sb,
|
||||
#epi.logue.mdo>iframe {
|
||||
.logue.mdo>iframe {
|
||||
max-width: 54em;
|
||||
}
|
||||
.mdo,
|
||||
@@ -929,6 +876,9 @@ html.y #path a:hover {
|
||||
color: var(--srv-3);
|
||||
border-bottom: 1px solid var(--srv-3b);
|
||||
}
|
||||
#flogout {
|
||||
display: inline;
|
||||
}
|
||||
#goh+span {
|
||||
color: var(--bg-u5);
|
||||
padding-left: .5em;
|
||||
@@ -963,6 +913,8 @@ html.y #path a:hover {
|
||||
#files tbody tr.play a:hover {
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#ggrid {
|
||||
margin: -.2em -.5em;
|
||||
@@ -971,6 +923,7 @@ html.y #path a:hover {
|
||||
overflow: hidden;
|
||||
display: block;
|
||||
display: -webkit-box;
|
||||
line-clamp: var(--grid-ln);
|
||||
-webkit-line-clamp: var(--grid-ln);
|
||||
-webkit-box-orient: vertical;
|
||||
padding-top: .3em;
|
||||
@@ -1017,9 +970,6 @@ html.y #path a:hover {
|
||||
color: var(--g-dfg);
|
||||
}
|
||||
#ggrid>a.au:before {
|
||||
content: '💾';
|
||||
}
|
||||
html.np_open #ggrid>a.au:before {
|
||||
content: '▶';
|
||||
}
|
||||
#ggrid>a:before {
|
||||
@@ -1148,6 +1098,7 @@ html.y #widget.open {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#fshr,
|
||||
#wtgrid,
|
||||
#wtico {
|
||||
position: relative;
|
||||
@@ -1334,6 +1285,7 @@ html.y #widget.open {
|
||||
#widget.cmp #wtoggle {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
#widget.cmp #fshr,
|
||||
#widget.cmp #wtgrid {
|
||||
display: none;
|
||||
}
|
||||
@@ -1347,6 +1299,7 @@ html.y #widget.open {
|
||||
}
|
||||
#widget.cmp #barpos,
|
||||
#widget.cmp #barbuf {
|
||||
height: 1.6em;
|
||||
width: calc(100% - 11em);
|
||||
border-radius: 0;
|
||||
left: 5em;
|
||||
@@ -1434,7 +1387,11 @@ input[type="checkbox"]+label {
|
||||
input[type="radio"]:checked+label,
|
||||
input[type="checkbox"]:checked+label {
|
||||
color: #0e0;
|
||||
color: var(--a);
|
||||
color: var(--btn-1-bg);
|
||||
}
|
||||
input[type="checkbox"]:checked+label {
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
html.dz input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
@@ -1612,6 +1569,8 @@ html {
|
||||
color: var(--btn-fg);
|
||||
background: #eee;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
border-radius: .3em;
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
@@ -1625,20 +1584,14 @@ html.c .btn,
|
||||
html.a .btn {
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.cz .btn {
|
||||
box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
border-bottom: .2em solid #709;
|
||||
}
|
||||
html.dz .btn {
|
||||
font-size: 1em;
|
||||
box-shadow: 0 0 0 .1em #080 inset;
|
||||
}
|
||||
html.dz .tgl.btn.on {
|
||||
box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
|
||||
}
|
||||
.btn:hover {
|
||||
color: var(--btn-h-fg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
.tgl.btn.on {
|
||||
background: #000;
|
||||
@@ -1646,14 +1599,14 @@ html.dz .tgl.btn.on {
|
||||
color: #fff;
|
||||
color: var(--btn-1-fg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html.cz .tgl.btn.on {
|
||||
box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
border-bottom: .2em solid #e90;
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
.tgl.btn.on:hover {
|
||||
background: var(--btn-1h-bg);
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
@@ -1694,6 +1647,18 @@ html.cz .tgl.btn.on {
|
||||
background: var(--btn-1-bg);
|
||||
text-shadow: none;
|
||||
}
|
||||
#tree ul a.ld::before {
|
||||
font-weight: bold;
|
||||
font-family: sans-serif;
|
||||
display: inline-block;
|
||||
text-align: center;
|
||||
width: 1em;
|
||||
margin: 0 .3em 0 -1.3em;
|
||||
color: var(--fg-max);
|
||||
opacity: 0;
|
||||
content: '◠';
|
||||
animation: .5s linear infinite forwards spin, ease .25s 1 forwards fadein;
|
||||
}
|
||||
#tree ul a.par {
|
||||
color: var(--fg-max);
|
||||
}
|
||||
@@ -1858,6 +1823,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
#unpost td:nth-child(4) {
|
||||
text-align: right;
|
||||
}
|
||||
#shui,
|
||||
#rui {
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
@@ -1873,13 +1839,25 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 1em;
|
||||
z-index: 765;
|
||||
}
|
||||
#shui div+div,
|
||||
#rui div+div {
|
||||
margin-top: 1em;
|
||||
}
|
||||
#shui table,
|
||||
#rui table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#shui button {
|
||||
margin: 0 1em 0 0;
|
||||
}
|
||||
#shui .btn {
|
||||
font-size: 1em;
|
||||
}
|
||||
#shui td {
|
||||
padding: .8em 0;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td {
|
||||
padding: .2em 0 .2em .5em;
|
||||
}
|
||||
@@ -1887,21 +1865,25 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td,
|
||||
#shui td input[type="text"],
|
||||
#rui td input[type="text"] {
|
||||
width: 100%;
|
||||
}
|
||||
#shui td.exs input[type="text"] {
|
||||
width: 3em;
|
||||
}
|
||||
#rn_f.m td:first-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
#rn_f.m td+td {
|
||||
width: 50%;
|
||||
}
|
||||
#rn_f .err td {
|
||||
background: var(--err-bg);
|
||||
color: var(--fg-max);
|
||||
}
|
||||
#rn_f .err input[readonly] {
|
||||
#rn_f .err td,
|
||||
#rn_f .err input[readonly],
|
||||
#rui .ng input[readonly] {
|
||||
color: var(--err-fg);
|
||||
background: var(--err-bg);
|
||||
}
|
||||
#rui input[readonly] {
|
||||
@@ -2685,23 +2667,25 @@ html.b #u2conf a.b:hover {
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
cursor: pointer;
|
||||
background: var(--u2-o-bg);
|
||||
border-bottom: .2em solid var(--u2-o-b1);
|
||||
box-shadow: 0 .1em .3em var(--u2-o-sh) inset;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: var(--u2-o-1-bg);
|
||||
border-bottom: .2em solid var(--u2-o-1-b1);
|
||||
box-shadow: 0 .1em .5em var(--u2-o-1-sh);
|
||||
background: var(--btn-1-bg);
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em var(--u2-o-h-sh);
|
||||
border-color: var(--u2-o-h-b1);
|
||||
background: var(--u2-o-h-bg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||
background: var(--u2-o-1h-bg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
@@ -2801,6 +2785,7 @@ html.b #u2conf a.b:hover {
|
||||
padding-left: .2em;
|
||||
}
|
||||
.fsearch_explain {
|
||||
color: var(--a-dark);
|
||||
padding-left: .7em;
|
||||
font-size: 1.1em;
|
||||
line-height: 0;
|
||||
@@ -3061,14 +3046,6 @@ html.b #ggrid>a {
|
||||
html.b .btn {
|
||||
top: -.1em;
|
||||
}
|
||||
html.b .btn,
|
||||
html.b #u2conf a.b,
|
||||
html.b #u2conf input[type="checkbox"]:not(:checked)+label {
|
||||
box-shadow: 0 .05em 0 var(--bg-d3) inset;
|
||||
}
|
||||
html.b .tgl.btn.on {
|
||||
box-shadow: 0 .05em 0 var(--btn-1-is) inset;
|
||||
}
|
||||
html.b #op_up2k.srch sup {
|
||||
color: #fc0;
|
||||
}
|
||||
@@ -3098,18 +3075,30 @@ html.by #u2cards a.act {
|
||||
|
||||
|
||||
|
||||
html.cy #wrap {
|
||||
color: #000;
|
||||
}
|
||||
html.cy .mdo a {
|
||||
background: #f00;
|
||||
}
|
||||
html.cy #wrap,
|
||||
html.cy #acc_info a,
|
||||
html.cy #op_up2k,
|
||||
html.cy #files,
|
||||
html.cy #files a,
|
||||
html.cy #files tbody div a:last-child {
|
||||
color: #000;
|
||||
}
|
||||
html.cy #u2tab a,
|
||||
html.cy #u2cards a {
|
||||
color: #f00;
|
||||
}
|
||||
html.cy #unpost a {
|
||||
color: #ff0;
|
||||
}
|
||||
html.cy #barbuf {
|
||||
filter: hue-rotate(267deg) brightness(0.8) contrast(4);
|
||||
}
|
||||
html.cy #pvol {
|
||||
filter: hue-rotate(4deg) contrast(2.2);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -67,14 +67,14 @@
|
||||
<div id="op_up2k" class="opview"></div>
|
||||
|
||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
|
||||
<div id="tree"></div>
|
||||
|
||||
<div id="wrap">
|
||||
@@ -108,21 +108,18 @@
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||
{%- if f.tags is defined %}
|
||||
{%- for k in taglist %}
|
||||
<td>{{ f.tags[k] }}</td>
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- if f.tags is defined %}
|
||||
{%- for k in taglist %}<td>{{ f.tags[k] }}</td>{%- endfor %}
|
||||
{%- endif %}<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||
|
||||
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
</div>
|
||||
@@ -134,17 +131,16 @@
|
||||
<div id="widget"></div>
|
||||
|
||||
<script>
|
||||
var SR = {{ r|tojson }},
|
||||
var SR = "{{ r }}",
|
||||
CGV1 = {{ cgv1 }},
|
||||
CGV = {{ cgv|tojson }},
|
||||
TS = "{{ ts }}",
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
s_name = "{{ s_name }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx }},
|
||||
sb_lg = "{{ sb_lg }}",
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -11,7 +11,6 @@
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
a{display:block}
|
||||
</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -52,11 +51,11 @@
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
{%- if logues[1] %}
|
||||
<div>{{ logues[1] }}</div><br />
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||
|
||||
</body>
|
||||
|
||||
BIN copyparty/web/iiam.gif (new file, 230 B; binary file not shown)
@@ -49,7 +49,7 @@
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea autocomplete="off">
|
||||
@@ -125,12 +125,12 @@ write markdown (most html is 🙆 too)
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
var SR = "{{ r }}",
|
||||
last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
have_emp = {{ "true" if have_emp else "false" }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
@@ -159,5 +159,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
||||
{%- if edit %}
|
||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -17,14 +17,13 @@ var chromedbg = function () { console.log(arguments); }
|
||||
var dbg = function () { };
|
||||
|
||||
// replace dbg with the real deal here or in the console:
|
||||
// dbg = chromedbg
|
||||
// dbg = console.log
|
||||
// dbg = chromedbg;
|
||||
// dbg = console.log;
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
||||
if (UA.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(UA)) {
|
||||
// necessary on ff-68.7 at least
|
||||
var s = mknod('style');
|
||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||
|
||||
@@ -450,7 +450,7 @@ function savechk_cb() {
|
||||
|
||||
// firefox bug: initial selection offset isn't cleared properly through js
|
||||
var ff_clearsel = (function () {
|
||||
if (navigator.userAgent.indexOf(') Gecko/') === -1)
|
||||
if (UA.indexOf(') Gecko/') === -1)
|
||||
return function () { }
|
||||
|
||||
return function () {
|
||||
|
||||
@@ -26,9 +26,9 @@
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
var SR = "{{ r }}",
|
||||
last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
have_emp = {{ "true" if have_emp else "false" }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
@@ -53,5 +53,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -46,6 +46,9 @@
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
107 copyparty/web/rups.css (new file)
@@ -0,0 +1,107 @@
|
||||
html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
margin: 2em auto;
|
||||
padding: 0 1em 3em 1em;
|
||||
line-height: 2.3em;
|
||||
}
|
||||
a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .6em;
|
||||
margin: 0 .3em;
|
||||
}
|
||||
#wrap td a {
|
||||
margin: 0;
|
||||
line-height: 1em;
|
||||
display: inline-block;
|
||||
white-space: initial;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
left: .2em;
|
||||
}
|
||||
#wrap table {
|
||||
border-collapse: collapse;
|
||||
position: relative;
|
||||
margin-top: 2em;
|
||||
}
|
||||
#wrap th {
|
||||
top: -1px;
|
||||
position: sticky;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
#wrap td {
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
white-space: pre; /*date*/
|
||||
overflow: hidden; /*ipv6*/
|
||||
}
|
||||
#wrap th:first-child,
|
||||
#wrap td:first-child {
|
||||
text-align: right;
|
||||
}
|
||||
#wrap td,
|
||||
#wrap th {
|
||||
text-align: left;
|
||||
padding: .3em .6em;
|
||||
max-width: 30vw;
|
||||
}
|
||||
#wrap tr:hover td {
|
||||
background: #ddd;
|
||||
box-shadow: 0 -1px 0 rgba(128, 128, 128, 0.5) inset;
|
||||
}
|
||||
#wrap th:first-child,
|
||||
#wrap td:first-child {
|
||||
border-radius: .5em 0 0 .5em;
|
||||
}
|
||||
#wrap th:last-child,
|
||||
#wrap td:last-child {
|
||||
border-radius: 0 .5em .5em 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.z {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.bz {
|
||||
background: #11121d;
|
||||
color: #bbd;
|
||||
}
|
||||
html.z a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.z input[type=text] {
|
||||
color: #ddd;
|
||||
background: #223;
|
||||
border: none;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
html.z #wrap th {
|
||||
background: #222;
|
||||
}
|
||||
html.bz #wrap th {
|
||||
background: #223;
|
||||
}
|
||||
html.z #wrap tr:hover td {
|
||||
background: #000;
|
||||
}
|
||||
50 copyparty/web/rups.html (new file)
@@ -0,0 +1,50 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="robots" content="noindex, nofollow">
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/rups.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a href="#" id="re">refresh</a>
|
||||
<a href="{{ r }}/?h">control-panel</a>
|
||||
Filter: <input type="text" id="filter" size="20" placeholder="documents/passwords" />
|
||||
<span id="hits"></span>
|
||||
<table id="tab"><thead><tr>
|
||||
<th>size</th>
|
||||
<th>who</th>
|
||||
<th>when</th>
|
||||
<th>age</th>
|
||||
<th>dir</th>
|
||||
<th>file</th>
|
||||
</tr></thead><tbody id="tb"></tbody></table>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var SR="{{ r }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script>var V={{ v }};</script>
|
||||
<script src="{{ r }}/.cpr/rups.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
66 copyparty/web/rups.js (new file)
@@ -0,0 +1,66 @@
|
||||
function render() {
|
||||
var ups = V.ups, now = V.now, html = [];
|
||||
ebi('filter').value = V.filter;
|
||||
ebi('hits').innerHTML = 'showing ' + ups.length + ' files';
|
||||
|
||||
for (var a = 0; a < ups.length; a++) {
|
||||
var f = ups[a],
|
||||
vsp = vsplit(f.vp.split('?')[0]),
|
||||
dn = esc(uricom_dec(vsp[0])),
|
||||
fn = esc(uricom_dec(vsp[1])),
|
||||
at = f.at,
|
||||
td = now - f.at,
|
||||
ts = !at ? '(?)' : unix2iso(at),
|
||||
sa = !at ? '(?)' : td > 60 ? shumantime(td) : (td + 's'),
|
||||
sz = ('' + f.sz).replace(/\B(?=(\d{3})+(?!\d))/g, " ");
|
||||
|
||||
html.push('<tr><td>' + sz +
|
||||
'</td><td>' + f.ip +
|
||||
'</td><td>' + ts +
|
||||
'</td><td>' + sa +
|
||||
'</td><td><a href="' + vsp[0] + '">' + dn +
|
||||
'</a></td><td><a href="' + f.vp + '">' + fn +
|
||||
'</a></td></tr>');
|
||||
}
|
||||
if (!ups.length) {
|
||||
var t = V.filter ? ' matching the filter' : '';
|
||||
html = ['<tr><td colspan="6">there are no uploads' + t + '</td></tr>'];
|
||||
}
|
||||
ebi('tb').innerHTML = html.join('');
|
||||
}
|
||||
render();
|
||||
|
||||
var ti;
|
||||
function ask(e) {
|
||||
ev(e);
|
||||
clearTimeout(ti);
|
||||
ebi('hits').innerHTML = 'Loading...';
|
||||
|
||||
var xhr = new XHR(),
|
||||
filter = unsmart(ebi('filter').value);
|
||||
|
||||
hist_replace(get_evpath().split('?')[0] + '?ru&filter=' + uricom_enc(filter));
|
||||
|
||||
xhr.onload = xhr.onerror = function () {
|
||||
try {
|
||||
V = JSON.parse(this.responseText)
|
||||
}
|
||||
catch (ex) {
|
||||
ebi('tb').innerHTML = '<tr><td colspan="6">failed to decode server response as json: <pre>' + esc(this.responseText) + '</pre></td></tr>';
|
||||
return;
|
||||
}
|
||||
render();
|
||||
};
|
||||
xhr.open('GET', SR + '/?ru&j&filter=' + uricom_enc(filter), true);
|
||||
xhr.send();
|
||||
}
|
||||
ebi('re').onclick = ask;
|
||||
ebi('filter').oninput = function () {
|
||||
clearTimeout(ti);
|
||||
ti = setTimeout(ask, 500);
|
||||
ebi('hits').innerHTML = '...';
|
||||
};
|
||||
ebi('filter').onkeydown = function (e) {
|
||||
if (('' + e.key).endsWith('Enter'))
|
||||
ask();
|
||||
};
|
||||
95 copyparty/web/shares.css (new file)
@@ -0,0 +1,95 @@
|
||||
html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
margin: 2em auto;
|
||||
padding: 0 1em 3em 1em;
|
||||
line-height: 2.3em;
|
||||
}
|
||||
#wrap>span {
|
||||
margin: 0 0 0 1em;
|
||||
border-bottom: 1px solid #999;
|
||||
}
|
||||
li {
|
||||
margin: 1em 0;
|
||||
}
|
||||
a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .6em;
|
||||
margin: 0 .3em;
|
||||
}
|
||||
#wrap td a {
|
||||
margin: 0;
|
||||
}
|
||||
#w {
|
||||
color: #fff;
|
||||
background: #940;
|
||||
border-color: #b70;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
left: .2em;
|
||||
}
|
||||
#wrap table {
|
||||
border-collapse: collapse;
|
||||
position: relative;
|
||||
margin-top: 2em;
|
||||
}
|
||||
th {
|
||||
top: -1px;
|
||||
position: sticky;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
#wrap td,
|
||||
#wrap th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#wrap td+td+td+td+td+td+td+td {
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
}
|
||||
#wrap th:first-child,
|
||||
#wrap td:first-child {
|
||||
border-radius: .5em 0 0 .5em;
|
||||
}
|
||||
#wrap th:last-child,
|
||||
#wrap td:last-child {
|
||||
border-radius: 0 .5em .5em 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.z {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.z a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.z th {
|
||||
background: #222;
|
||||
}
|
||||
html.bz {
|
||||
color: #bbd;
|
||||
background: #11121d;
|
||||
}
|
||||
html.bz th {
|
||||
background: #223;
|
||||
}
|
||||
82 copyparty/web/shares.html (new file)
@@ -0,0 +1,82 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="robots" content="noindex, nofollow">
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a href="{{ r }}/?shares">refresh</a>
|
||||
<a href="{{ r }}/?h">control-panel</a>
|
||||
|
||||
<span>axs = perms (read,write,move,delet)</span>
|
||||
<span>nf = numFiles (0=dir)</span>
|
||||
<span>min/hrs = time left</span>
|
||||
|
||||
<table id="tab"><thead><tr>
|
||||
<th>sharekey</th>
|
||||
<th>delete</th>
|
||||
<th>pw</th>
|
||||
<th>source</th>
|
||||
<th>axs</th>
|
||||
<th>nf</th>
|
||||
<th>user</th>
|
||||
<th>created</th>
|
||||
<th>expires</th>
|
||||
<th>min</th>
|
||||
<th>hrs</th>
|
||||
<th>add time</th>
|
||||
</tr></thead><tbody>
|
||||
{% for k, pw, vp, pr, st, un, t0, t1 in rows %}
|
||||
<tr>
|
||||
<td>
|
||||
<a href="{{ r }}{{ shr }}{{ k }}?qr">qr</a>
|
||||
<a href="{{ r }}{{ shr }}{{ k }}">{{ k }}</a>
|
||||
</td>
|
||||
<td><a href="#" k="{{ k }}">delete</a></td>
|
||||
<td>{{ "yes" if pw else "--" }}</td>
|
||||
<td><a href="{{ r }}/{{ vp|e }}">/{{ vp|e }}</a></td>
|
||||
<td>{{ pr }}</td>
|
||||
<td>{{ st }}</td>
|
||||
<td>{{ un|e }}</td>
|
||||
<td>{{ t0 }}</td>
|
||||
<td>{{ t1 }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 60) | round(1) }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 3600) | round(1) }}</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody></table>
|
||||
{% if not rows %}
|
||||
(you don't have any active shares btw)
|
||||
{% endif %}
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var SR="{{ r }}",
|
||||
shr="{{ shr }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/shares.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
78 copyparty/web/shares.js (new file)
@@ -0,0 +1,78 @@
|
||||
var t = QSA('a[k]');
|
||||
for (var a = 0; a < t.length; a++)
|
||||
t[a].onclick = rm;
|
||||
|
||||
function rm() {
|
||||
var u = SR + shr + uricom_enc(this.getAttribute('k')) + '?eshare=rm',
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function bump() {
|
||||
var k = this.closest('tr').getElementsByTagName('a')[2].getAttribute('k'),
|
||||
u = SR + shr + uricom_enc(k) + '?eshare=' + this.value,
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function cb() {
|
||||
if (this.status !== 200)
|
||||
return modal.alert('<h6>server error</h6>' + esc(unpre(this.responseText)));
|
||||
|
||||
document.location = '?shares';
|
||||
}
|
||||
|
||||
function qr(e) {
|
||||
ev(e);
|
||||
var href = this.href,
|
||||
pw = this.closest('tr').cells[2].textContent;
|
||||
|
||||
if (pw.indexOf('yes') < 0)
|
||||
return showqr(href);
|
||||
|
||||
modal.prompt("if you want to bypass the password protection by\nembedding the password into the qr-code, then\ntype the password now, otherwise leave this empty", "", function (v) {
|
||||
if (v)
|
||||
href += "&pw=" + v;
|
||||
showqr(href);
|
||||
});
|
||||
}
|
||||
|
||||
function showqr(href) {
|
||||
var vhref = href.replace('?qr&', '?').replace('?qr', '');
|
||||
modal.alert(esc(vhref) + '<img class="b64" width="100" height="100" src="' + href + '" />');
|
||||
}
|
||||
|
||||
(function() {
|
||||
var tab = ebi('tab').tBodies[0],
|
||||
tr = Array.prototype.slice.call(tab.rows, 0);
|
||||
|
||||
var buf = [];
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
tr[a].cells[0].getElementsByTagName('a')[0].onclick = qr;
|
||||
for (var b = 7; b < 9; b++)
|
||||
buf.push(parseInt(tr[a].cells[b].innerHTML));
|
||||
}
|
||||
|
||||
var ibuf = 0;
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
for (var b = 7; b < 9; b++) {
|
||||
var v = buf[ibuf++];
|
||||
tr[a].cells[b].innerHTML =
|
||||
v ? unix2iso(v).replace(' ', ', ') : 'never';
|
||||
}
|
||||
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
tr[a].cells[11].innerHTML =
|
||||
'<button value="1">1min</button> ' +
|
||||
'<button value="60">1h</button>';
|
||||
|
||||
var btns = QSA('td button'), aa = btns.length;
|
||||
for (var a = 0; a < aa; a++)
|
||||
btns[a].onclick = bump;
|
||||
})();
|
||||
@@ -53,7 +53,7 @@ a.r {
|
||||
border-color: #c7a;
|
||||
}
|
||||
a.g {
|
||||
color: #2b0;
|
||||
color: #0a0;
|
||||
border-color: #3a0;
|
||||
box-shadow: 0 .3em 1em #4c0;
|
||||
}
|
||||
@@ -90,6 +90,13 @@ table {
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.vols td:empty,
|
||||
.vols th:empty {
|
||||
padding: 0;
|
||||
}
|
||||
.vols img {
|
||||
margin: -4px 0;
|
||||
}
|
||||
.num {
|
||||
border-right: 1px solid #bbb;
|
||||
}
|
||||
@@ -152,11 +159,13 @@ pre b,
|
||||
code b {
|
||||
color: #000;
|
||||
font-weight: normal;
|
||||
text-shadow: 0 0 .2em #0f0;
|
||||
text-shadow: 0 0 .2em #3f3;
|
||||
border-bottom: 1px solid #090;
|
||||
}
|
||||
html.z pre b,
|
||||
html.z code b {
|
||||
color: #fff;
|
||||
border-bottom: 1px solid #9f9;
|
||||
}
|
||||
|
||||
|
||||
@@ -182,13 +191,18 @@ html.z a.g {
|
||||
border-color: #af4;
|
||||
box-shadow: 0 .3em 1em #7d0;
|
||||
}
|
||||
form {
|
||||
line-height: 2.5em;
|
||||
}
|
||||
#x,
|
||||
input {
|
||||
color: #a50;
|
||||
background: #fff;
|
||||
border: 1px solid #a50;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
border-radius: .3em;
|
||||
padding: .25em .6em;
|
||||
margin: 0 .3em 0 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
input::placeholder {
|
||||
font-size: 1.2em;
|
||||
@@ -197,6 +211,7 @@ input::placeholder {
|
||||
opacity: 0.64;
|
||||
color: #930;
|
||||
}
|
||||
#x,
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
@@ -214,3 +229,6 @@ html.bz {
|
||||
color: #bbd;
|
||||
background: #11121d;
|
||||
}
|
||||
html.bz .vols img {
|
||||
filter: sepia(0.8) hue-rotate(180deg);
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
{%- if not in_shr %}
|
||||
<a id="a" href="{{ r }}/?h" class="af">refresh</a>
|
||||
<a id="v" href="{{ r }}/?hc" class="af">connect</a>
|
||||
|
||||
@@ -21,7 +22,8 @@
|
||||
<p id="b">howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname|e }}</strong></p>
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
@@ -30,6 +32,30 @@
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if ups %}
|
||||
<h1 id="aa">incoming files:</h1>
|
||||
<table class="vols">
|
||||
<thead><tr><th>%</th><th>speed</th><th>eta</th><th>idle</th><th>dir</th><th>file</th></tr></thead>
|
||||
<tbody>
|
||||
{% for u in ups %}
|
||||
<tr><td>{{ u[0] }}</td><td>{{ u[1] }}</td><td>{{ u[2] }}</td><td>{{ u[3] }}</td><td><a href="{{ u[4] }}">{{ u[5]|e }}</a></td><td>{{ u[6]|e }}</td></tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
{%- endif %}
|
||||
|
||||
{%- if dls %}
|
||||
<h1 id="ae">active downloads:</h1>
|
||||
<table class="vols">
|
||||
<thead><tr><th>%</th><th>sent</th><th>speed</th><th>eta</th><th>idle</th><th></th><th>dir</th><th>file</th></tr></thead>
|
||||
<tbody>
|
||||
{% for u in dls %}
|
||||
<tr><td>{{ u[0] }}</td><td>{{ u[1] }}</td><td>{{ u[2] }}</td><td>{{ u[3] }}</td><td>{{ u[4] }}</td><td>{{ u[5] }}</td><td><a href="{{ u[6] }}">{{ u[7]|e }}</a></td><td>{{ u[8] }}</td></tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
{%- endif %}
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
<table><tr><td> <!-- hehehe -->
|
||||
@@ -76,32 +102,65 @@
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">client config:</h1>
|
||||
<ul>
|
||||
{% if k304 or k304vis %}
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
{% endif %}
|
||||
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
{%- if in_shr %}
|
||||
<h1 id="z">unlock this share:</h1>
|
||||
<div>
|
||||
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" placeholder=" password" />
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" value="Login" />
|
||||
<input type="submit" id="ls" value="Unlock" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- else %}
|
||||
<h1 id="l">login for more:</h1>
|
||||
<div>
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" id="ls" value="Login" />
|
||||
{% if chpw %}
|
||||
<a id="x" href="#">change password</a>
|
||||
{% endif %}
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">other stuff:</h1>
|
||||
<ul>
|
||||
{%- if this.uname != '*' and this.args.shr %}
|
||||
<li><a id="y" href="{{ r }}/?shares">edit shares</a></li>
|
||||
{% endif %}
|
||||
|
||||
{% if k304 or k304vis %}
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="{{ r }}/?cc&setck=k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="{{ r }}/?cc&setck=k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling k304 will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
{% endif %}
|
||||
|
||||
{% if no304 or no304vis %}
|
||||
{% if no304 %}
|
||||
<li><a id="ab" href="{{ r }}/?cc&setck=no304=n">disable no304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="ac" href="{{ r }}/?cc&setck=no304=y" class="r">enable no304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="ad">enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!</blockquote></li>
|
||||
{% endif %}
|
||||
|
||||
<li><a id="af" href="{{ r }}/?ru">show recent uploads</a></li>
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
{%- if not this.args.nb %}
|
||||
@@ -109,7 +168,7 @@
|
||||
{%- endif %}
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
var SR="{{ r }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
@@ -119,6 +178,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ var Ls = {
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
|
||||
"f1": "du kan betrakte:",
|
||||
"g1": "du kan laste opp til:",
|
||||
"cc1": "klient-konfigurasjon",
|
||||
"cc1": "brytere og sånt:",
|
||||
"h1": "skru av k304",
|
||||
"i1": "skru på k304",
|
||||
"j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper mot visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
|
||||
@@ -17,31 +17,87 @@ var Ls = {
|
||||
"l1": "logg inn:",
|
||||
"m1": "velkommen tilbake,",
|
||||
"n1": "404: filen finnes ikke ┐( ´ -`)┌",
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"p1": "403: tilgang nektet ~┻━┻",
|
||||
"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"q1": 'prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"r1": "gå hjem",
|
||||
".s1": "kartlegg",
|
||||
"t1": "handling",
|
||||
"u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
|
||||
"v1": "koble til",
|
||||
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
|
||||
"v2": "bruk denne serveren som en lokal harddisk",
|
||||
"w1": "bytt til https",
|
||||
"x1": "bytt passord",
|
||||
"y1": "dine delinger",
|
||||
"z1": "lås opp område:",
|
||||
"ta1": "du må skrive et nytt passord først",
|
||||
"ta2": "gjenta for å bekrefte nytt passord:",
|
||||
"ta3": "fant en skrivefeil; vennligst prøv igjen",
|
||||
"aa1": "innkommende:",
|
||||
"ab1": "skru av no304",
|
||||
"ac1": "skru på no304",
|
||||
"ad1": "no304 stopper all bruk av cache. Hvis ikke k304 var nok, prøv denne. Vil mangedoble dataforbruk!",
|
||||
"ae1": "utgående:",
|
||||
"af1": "vis nylig opplastede filer",
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
|
||||
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
|
||||
"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
|
||||
"v2": "use this server as a local HDD",
|
||||
"ta1": "fill in your new password first",
|
||||
"ta2": "repeat to confirm new password:",
|
||||
"ta3": "found a typo; please try again",
|
||||
},
|
||||
|
||||
"chi": {
|
||||
"a1": "更新",
|
||||
"b1": "你好 <small>(你尚未登录)</small>",
|
||||
"c1": "登出",
|
||||
"d1": "状态",
|
||||
"d2": "显示所有活动线程的状态",
|
||||
"e1": "重新加载配置",
|
||||
"e2": "重新加载配置文件(账户/卷/卷标),$N并重新扫描所有 e2ds 卷$N$N注意:任何全局设置的更改$N都需要完全重启才能生效",
|
||||
"f1": "你可以查看:",
|
||||
"g1": "你可以上传到:",
|
||||
"cc1": "开关等",
|
||||
"h1": "关闭 k304",
|
||||
"i1": "开启 k304",
|
||||
"j1": "k304 会在每个 HTTP 304 时断开连接。这有助于避免某些代理服务器卡住或突然停止加载页面,但也会显著降低性能。",
|
||||
"k1": "重置设置",
|
||||
"l1": "登录:",
|
||||
"m1": "欢迎回来,",
|
||||
"n1": "404: 文件不存在 ┐( ´ -`)┌",
|
||||
"o1": '或者你可能没有权限?尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"p1": "403: 访问被拒绝 ~┻━┻",
|
||||
"q1": '尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"r1": "回家",
|
||||
".s1": "映射",
|
||||
"t1": "操作",
|
||||
"u2": "自上次服务器写入的时间$N( 上传 / 重命名 / ... )$N$N17d = 17 天$N1h23 = 1 小时 23 分钟$N4m56 = 4 分钟 56 秒",
|
||||
"v1": "连接",
|
||||
"v2": "将此服务器用作本地硬盘",
|
||||
"w1": "切换到 https",
|
||||
"x1": "更改密码",
|
||||
"y1": "你的分享",
|
||||
"z1": "解锁区域",
|
||||
"ta1": "请先输入新密码",
|
||||
"ta2": "重复以确认新密码:",
|
||||
"ta3": "发现拼写错误;请重试",
|
||||
"aa1": "正在接收的文件:", //m
|
||||
"ab1": "关闭 k304",
|
||||
"ac1": "开启 k304",
|
||||
"ad1": "启用 no304 将禁用所有缓存;如果 k304 不够,可以尝试此选项。这将消耗大量的网络流量!", //m
|
||||
"ae1": "正在下载:", //m
|
||||
"af1": "显示最近上传的文件", //m
|
||||
}
|
||||
};
|
||||
|
||||
var LANGS = ["eng", "nor"];
|
||||
|
||||
if (window.langmod)
|
||||
langmod();
|
||||
|
||||
var d = Ls[sread("cpp_lang", LANGS) || lang] || Ls.eng || Ls.nor;
|
||||
var d = Ls[sread("cpp_lang", Object.keys(Ls)) || lang] ||
|
||||
Ls.eng || Ls.nor || Ls.chi;
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
@@ -74,3 +130,42 @@ if (o && /[0-9]+$/.exec(o.innerHTML))
|
||||
o.innerHTML = shumantime(o.innerHTML);
|
||||
|
||||
ebi('uhash').value = '' + location.hash;
|
||||
|
||||
(function() {
|
||||
if (!ebi('x'))
|
||||
return;
|
||||
|
||||
var pwi = ebi('lp');
|
||||
|
||||
function redo(msg) {
|
||||
modal.alert(msg, function() {
|
||||
pwi.value = '';
|
||||
pwi.focus();
|
||||
});
|
||||
}
|
||||
function mok(v) {
|
||||
if (v !== pwi.value)
|
||||
return redo(d.ta3);
|
||||
|
||||
pwi.setAttribute('name', 'pw');
|
||||
ebi('la').value = 'chpw';
|
||||
ebi('lf').submit();
|
||||
}
|
||||
function stars() {
|
||||
var m = ebi('modali');
|
||||
function enstars(n) {
|
||||
setTimeout(function() { m.value = ''; }, n);
|
||||
}
|
||||
m.setAttribute('type', 'password');
|
||||
enstars(17);
|
||||
enstars(32);
|
||||
enstars(69);
|
||||
}
|
||||
ebi('x').onclick = function (e) {
|
||||
ev(e);
|
||||
if (!pwi.value)
|
||||
return redo(d.ta1);
|
||||
|
||||
modal.prompt(d.ta2, "y", mok, null, stars);
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
|
||||
<style>ul{padding-left:1.3em}li{margin:.4em 0}.txa{float:right;margin:0 0 0 1em}</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
@@ -31,15 +31,22 @@
|
||||
<br />
|
||||
<span class="os win lin mac">placeholders:</span>
|
||||
<span class="os win">
|
||||
{% if accs %}<code><b>{{ pw }}</b></code>=password, {% endif %}<code><b>W:</b></code>=mountpoint
|
||||
{% if accs %}<code><b id="pw0">{{ pw }}</b></code>=password, {% endif %}<code><b>W:</b></code>=mountpoint
|
||||
</span>
|
||||
<span class="os lin mac">
|
||||
{% if accs %}<code><b>{{ pw }}</b></code>=password, {% endif %}<code><b>mp</b></code>=mountpoint
|
||||
{% if accs %}<code><b id="pw0">{{ pw }}</b></code>=password, {% endif %}<code><b>mp</b></code>=mountpoint
|
||||
</span>
|
||||
<a href="#" id="setpw">use real password</a>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
{% if args.idp_h_usr %}
|
||||
<p style="line-height:2em"><b>WARNING:</b> this server is using IdP-based authentication, so this stuff may not work as advertised. Depending on server config, these commands can probably only be used to access areas which don't require authentication, unless you auth using any non-IdP accounts defined in the copyparty config. Please see <a href="https://github.com/9001/copyparty/blob/hovudstraum/docs/idp.md#connecting-webdav-clients">the IdP docs</a></p>
|
||||
{% endif %}
|
||||
|
||||
|
||||
|
||||
{% if not args.no_dav %}
|
||||
<h1>WebDAV</h1>
|
||||
|
||||
@@ -53,10 +60,9 @@
|
||||
{% if s %}
|
||||
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
|
||||
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<pre>
|
||||
net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
|
||||
@@ -64,16 +70,7 @@
|
||||
</div>
|
||||
|
||||
<div class="os lin">
|
||||
<pre>
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>make it automount on boot:</p>
|
||||
<pre>
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
|
||||
</pre>
|
||||
<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
|
||||
<p>rclone (v1.63 or later) is recommended:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
@@ -85,6 +82,16 @@
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>alternatively use davfs2 (requires root, is slower, forgets lastmodified-timestamp on upload):</p>
|
||||
<pre>
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>make davfs2 automount on boot:</p>
|
||||
<pre>
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
|
||||
</pre>
|
||||
<p>or the emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -104,7 +111,7 @@
|
||||
<pre>
|
||||
http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
|
||||
</pre>
|
||||
|
||||
|
||||
{% if s %}
|
||||
<p><em>replace <code>https</code> with <code>http</code> if it doesn't work</em></p>
|
||||
{% endif %}
|
||||
@@ -136,7 +143,6 @@
|
||||
{% if args.ftps %}
|
||||
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||
@@ -191,6 +197,7 @@
|
||||
<h1>partyfuse</h1>
|
||||
<p>
|
||||
<a href="{{ r }}/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
|
||||
needs <a href="{{ r }}/.cpr/deps/fuse.py">fuse.py</a> in the same folder,
|
||||
<span class="os win">needs <a href="https://winfsp.dev/rel/">winfsp</a></span>
|
||||
<span class="os lin">doesn't need root</span>
|
||||
</p>
|
||||
@@ -207,7 +214,6 @@
|
||||
|
||||
{% if args.smb %}
|
||||
<h1>SMB / CIFS</h1>
|
||||
<em><a href="https://github.com/SecureAuthCorp/impacket/issues/1433">bug:</a> max ~300 files in each folder</em>
|
||||
|
||||
<div class="os win">
|
||||
<pre>
|
||||
@@ -230,11 +236,65 @@
|
||||
|
||||
|
||||
|
||||
<div class="os win">
|
||||
<h1>ShareX</h1>
|
||||
|
||||
<p>to upload screenshots using ShareX <a href="https://github.com/ShareX/ShareX/releases/tag/v12.1.1">v12</a> or <a href="https://getsharex.com/">v15+</a>, save this as <code>copyparty.sxcu</code> and run it:</p>
|
||||
|
||||
<pre class="dl" name="copyparty.sxcu">
|
||||
{ "Name": "copyparty",
|
||||
"RequestURL": "http{{ s }}://{{ ep }}/{{ rvp }}",
|
||||
"Headers": {
|
||||
{% if accs %}"pw": "<b>{{ pw }}</b>",{% endif %}
|
||||
"accept": "url"
|
||||
},
|
||||
"DestinationType": "ImageUploader, TextUploader, FileUploader",
|
||||
"FileFormName": "f" }
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
<div class="os mac">
|
||||
<h1>ishare</h1>
|
||||
|
||||
<p>to upload screenshots using <a href="https://isharemac.app/">ishare</a>, save this as <code>copyparty.iscu</code> and run it:</p>
|
||||
|
||||
<pre class="dl" name="copyparty.iscu">
|
||||
{ "Name": "copyparty",
|
||||
"RequestURL": "http{{ s }}://{{ ep }}/{{ rvp }}",
|
||||
"Headers": {
|
||||
{% if accs %}"pw": "<b>{{ pw }}</b>",{% endif %}
|
||||
"accept": "json"
|
||||
},
|
||||
"ResponseURL": "{{ '{{fileurl}}' }}",
|
||||
"FileFormName": "f" }
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
<div class="os lin">
|
||||
<h1>flameshot</h1>
|
||||
|
||||
<p>to upload screenshots using <a href="https://flameshot.org/">flameshot</a>, save this as <code>flameshot.sh</code> and run it:</p>
|
||||
|
||||
<pre class="dl" name="flameshot.sh">
|
||||
#!/bin/bash
|
||||
pw="<b>{{ pw }}</b>"
|
||||
url="http{{ s }}://{{ ep }}/{{ rvp }}"
|
||||
filename="$(date +%Y-%m%d-%H%M%S).png"
|
||||
flameshot gui -s -r | curl -sT- "$url$filename?want=url&pw=$pw" | xsel -ib
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
var SR="{{ r }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
@@ -244,6 +304,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -1,11 +1,3 @@
|
||||
function QSA(x) {
|
||||
return document.querySelectorAll(x);
|
||||
}
|
||||
var LINUX = /Linux/.test(navigator.userAgent),
|
||||
MACOS = /[^a-z]mac ?os/i.test(navigator.userAgent),
|
||||
WINDOWS = /Windows/.test(navigator.userAgent);
|
||||
|
||||
|
||||
var oa = QSA('pre');
|
||||
for (var a = 0; a < oa.length; a++) {
|
||||
var html = oa[a].innerHTML,
|
||||
@@ -15,6 +7,21 @@ for (var a = 0; a < oa.length; a++) {
|
||||
oa[a].innerHTML = html.replace(rd, '$1').replace(/[ \r\n]+$/, '').replace(/\r?\n/g, '<br />');
|
||||
}
|
||||
|
||||
function add_dls() {
|
||||
oa = QSA('pre.dl');
|
||||
for (var a = 0; a < oa.length; a++) {
|
||||
var an = 'ta' + a,
|
||||
o = ebi(an) || mknod('a', an, 'download');
|
||||
|
||||
oa[a].setAttribute('id', 'tx' + a);
|
||||
oa[a].parentNode.insertBefore(o, oa[a]);
|
||||
o.setAttribute('download', oa[a].getAttribute('name'));
|
||||
o.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(oa[a].innerText));
|
||||
clmod(o, 'txa', 1);
|
||||
}
|
||||
}
|
||||
add_dls();
|
||||
|
||||
|
||||
oa = QSA('.ossel a');
|
||||
for (var a = 0; a < oa.length; a++)
|
||||
@@ -40,3 +47,21 @@ function setos(os) {
|
||||
}
|
||||
|
||||
setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : 'idk');
|
||||
|
||||
|
||||
ebi('setpw').onclick = function (e) {
|
||||
ev(e);
|
||||
modal.prompt('password:', '', function (v) {
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
var pw0 = ebi('pw0').innerHTML,
|
||||
oa = QSA('b');
|
||||
|
||||
for (var a = 0; a < oa.length; a++)
|
||||
if (oa[a].innerHTML == pw0)
|
||||
oa[a].textContent = v;
|
||||
|
||||
add_dls();
|
||||
});
|
||||
}
|
||||
|
||||
@@ -69,6 +69,18 @@ html {
|
||||
top: 2em;
|
||||
bottom: unset;
|
||||
}
|
||||
#toastt {
|
||||
position: absolute;
|
||||
height: 1px;
|
||||
top: 1px;
|
||||
right: 1px;
|
||||
left: 1px;
|
||||
animation: toastt var(--tmtime) 0.07s steps(var(--tmstep)) forwards;
|
||||
transform-origin: right;
|
||||
}
|
||||
@keyframes toastt {
|
||||
to {transform: scaleX(0)}
|
||||
}
|
||||
#toast a {
|
||||
color: inherit;
|
||||
text-shadow: inherit;
|
||||
@@ -130,6 +142,9 @@ html {
|
||||
#toast.inf #toastc {
|
||||
background: #0be;
|
||||
}
|
||||
#toast.inf #toastt {
|
||||
background: #8ef;
|
||||
}
|
||||
#toast.ok {
|
||||
background: #380;
|
||||
border-color: #8e4;
|
||||
@@ -137,6 +152,9 @@ html {
|
||||
#toast.ok #toastc {
|
||||
background: #8e4;
|
||||
}
|
||||
#toast.ok #toastt {
|
||||
background: #cf9;
|
||||
}
|
||||
#toast.warn {
|
||||
background: #960;
|
||||
border-color: #fc0;
|
||||
@@ -144,6 +162,9 @@ html {
|
||||
#toast.warn #toastc {
|
||||
background: #fc0;
|
||||
}
|
||||
#toast.warn #toastt {
|
||||
background: #fe9;
|
||||
}
|
||||
#toast.err {
|
||||
background: #900;
|
||||
border-color: #d06;
|
||||
@@ -151,6 +172,9 @@ html {
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#toast.err #toastt {
|
||||
background: #f9c;
|
||||
}
|
||||
#toast code {
|
||||
padding: 0 .2em;
|
||||
background: rgba(0,0,0,0.2);
|
||||
@@ -265,7 +289,11 @@ html.y #tth {
|
||||
box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
|
||||
max-width: 50em;
|
||||
max-height: 30em;
|
||||
overflow: auto;
|
||||
overflow-x: auto;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
#modalc.yk {
|
||||
overflow-y: auto;
|
||||
}
|
||||
#modalc td {
|
||||
text-align: unset;
|
||||
@@ -289,6 +317,14 @@ html.y #tth {
|
||||
#modalc a {
|
||||
color: #07b;
|
||||
}
|
||||
#modalc .b64 {
|
||||
display: block;
|
||||
margin: .1em auto;
|
||||
width: 60%;
|
||||
height: 60%;
|
||||
background: #999;
|
||||
background: rgba(128,128,128,0.2);
|
||||
}
|
||||
#modalb {
|
||||
position: sticky;
|
||||
text-align: right;
|
||||
@@ -381,6 +417,7 @@ html.y textarea:focus {
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo code[class*="language-"],
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
|
||||
@@ -17,10 +17,14 @@ function goto_up2k() {
|
||||
var up2k = null,
|
||||
up2k_hooks = [],
|
||||
hws = [],
|
||||
hws_ok = 0,
|
||||
hws_ng = false,
|
||||
sha_js = WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
||||
|
||||
try {
|
||||
if (sread('nosubtle') || window.nosubtle)
|
||||
throw 'chickenbit';
|
||||
var cf = crypto.subtle || crypto.webkitSubtle;
|
||||
cf.digest('SHA-512', new Uint8Array(1)).then(
|
||||
function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },
|
||||
@@ -152,12 +156,13 @@ function U2pvis(act, btns, uc, st) {
|
||||
r.mod0 = null;
|
||||
|
||||
var markup = {
|
||||
'404': '<span class="err">404</span>',
|
||||
'ERROR': '<span class="err">ERROR</span>',
|
||||
'OS-error': '<span class="err">OS-error</span>',
|
||||
'found': '<span class="inf">found</span>',
|
||||
'YOLO': '<span class="inf">YOLO</span>',
|
||||
'done': '<span class="ok">done</span>',
|
||||
'404': '<span class="err">' + L.utl_404 + '</span>',
|
||||
'ERROR': '<span class="err">' + L.utl_err + '</span>',
|
||||
'OS-error': '<span class="err">' + L.utl_oserr + '</span>',
|
||||
'found': '<span class="inf">' + L.utl_found + '</span>',
|
||||
'defer': '<span class="inf">' + L.utl_defer + '</span>',
|
||||
'YOLO': '<span class="inf">' + L.utl_yolo + '</span>',
|
||||
'done': '<span class="ok">' + L.utl_done + '</span>',
|
||||
};
|
||||
|
||||
r.addfile = function (entry, sz, draw) {
|
||||
@@ -241,7 +246,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
p = bd * 100.0 / sz,
|
||||
nb = bd - bd0,
|
||||
spd = nb / (td / 1000),
|
||||
eta = (sz - bd) / spd;
|
||||
eta = spd ? (sz - bd) / spd : 3599;
|
||||
|
||||
return [p, s2ms(eta), spd / (1024 * 1024)];
|
||||
};
|
||||
@@ -445,9 +450,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
return;
|
||||
|
||||
r.npotato = 0;
|
||||
var html = [
|
||||
"<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>".format(
|
||||
r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
|
||||
var html = [L.u_pott.format(r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
|
||||
|
||||
while (r.head < r.tab.length && has(["ok", "ng"], r.tab[r.head].in))
|
||||
r.head++;
|
||||
@@ -602,7 +605,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
if (nf < 9000)
|
||||
return go();
|
||||
|
||||
modal.confirm('about to show ' + nf + ' files\n\nthis may crash your browser, are you sure?', go, null);
|
||||
modal.confirm(L.u_bigtab.format(nf), go, null);
|
||||
};
|
||||
}
|
||||
|
||||
@@ -658,7 +661,9 @@ function Donut(uc, st) {
|
||||
}
|
||||
|
||||
function pos() {
|
||||
return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
|
||||
return uc.fsearch ?
|
||||
Math.max(st.bytes.hashed, st.bytes.finished) :
|
||||
st.bytes.inflight + st.bytes.finished;
|
||||
}
|
||||
|
||||
r.on = function (ya) {
|
||||
@@ -690,8 +695,9 @@ function Donut(uc, st) {
|
||||
}
|
||||
|
||||
if (++r.tc >= 10) {
|
||||
var s = r.eta === null ? 'paused' : r.eta > 60 ? shumantime(r.eta) : (r.eta + 's');
|
||||
wintitle("{0}%, {1}, #{2}, ".format(
|
||||
f2f(v * 100 / t, 1), shumantime(r.eta), st.files.length - st.nfile.upload), true);
|
||||
f2f(v * 100 / t, 1), s, st.files.length - st.nfile.upload), true);
|
||||
r.tc = 0;
|
||||
}
|
||||
|
||||
@@ -852,7 +858,13 @@ function up2k_init(subtle) {
|
||||
|
||||
setmsg(suggest_up2k, 'msg');
|
||||
|
||||
var u2szs = u2sz.split(','),
|
||||
u2sz_min = parseInt(u2szs[0]),
|
||||
u2sz_tgt = parseInt(u2szs[1]),
|
||||
u2sz_max = parseInt(u2szs[2]);
|
||||
|
||||
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||
stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz_tgt),
|
||||
uc = {},
|
||||
fdom_ctr = 0,
|
||||
biggest_file = 0;
|
||||
@@ -869,7 +881,7 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
|
||||
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
|
||||
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
||||
bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||
bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && !(CHROME && MOBILE) && (!subtle || !CHROME), set_hashw);
|
||||
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
||||
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
||||
|
||||
@@ -957,7 +969,7 @@ function up2k_init(subtle) {
|
||||
ud = function () { ebi('dir' + fdom_ctr).click(); };
|
||||
|
||||
// too buggy on chrome <= 72
|
||||
var m = / Chrome\/([0-9]+)\./.exec(navigator.userAgent);
|
||||
var m = / Chrome\/([0-9]+)\./.exec(UA);
|
||||
if (m && parseInt(m[1]) < 73)
|
||||
return uf();
|
||||
|
||||
@@ -1034,7 +1046,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) {
|
||||
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
|
||||
return modal.alert('your browser does not support drag-and-drop uploading');
|
||||
return modal.alert(L.u_nodrop);
|
||||
}
|
||||
if (btn)
|
||||
return;
|
||||
@@ -1101,7 +1113,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
if (!good_files.length && bad_files.length)
|
||||
return toast.err(30, "that's not a folder!\n\nyour browser is too old,\nplease try dragdrop instead");
|
||||
return toast.err(30, L.u_notdir);
|
||||
|
||||
return read_dirs(null, [], [], good_files, nil_files, bad_files);
|
||||
}
|
||||
@@ -1119,7 +1131,7 @@ function up2k_init(subtle) {
|
||||
if (err)
|
||||
return modal.alert('sorry, ' + err);
|
||||
|
||||
toast.inf(0, 'Scanning files...');
|
||||
toast.inf(0, L.u_scan);
|
||||
|
||||
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
|
||||
tgl_fsearch();
|
||||
@@ -1207,7 +1219,7 @@ function up2k_init(subtle) {
|
||||
match = false;
|
||||
|
||||
if (match) {
|
||||
var msg = ['directory iterator got stuck on the following {0} items; good chance your browser is about to spinlock:<ul>'.format(missing.length)];
|
||||
var msg = [L.u_dirstuck.format(missing.length) + '<ul>'];
|
||||
for (var a = 0; a < Math.min(20, missing.length); a++)
|
||||
msg.push('<li>' + esc(missing[a]) + '</li>');
|
||||
|
||||
@@ -1278,7 +1290,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function gotallfiles(good_files, nil_files, bad_files) {
|
||||
if (toast.txt == 'Scanning files...')
|
||||
if (toast.txt == L.u_scan)
|
||||
toast.hide();
|
||||
|
||||
if (uc.fsearch && !uc.turbo)
|
||||
@@ -1348,9 +1360,21 @@ function up2k_init(subtle) {
|
||||
draw_each = good_files.length < 50;
|
||||
|
||||
if (WebAssembly && !hws.length) {
|
||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||
var nw = Math.min(navigator.hardwareConcurrency || 4, 16);
|
||||
|
||||
if (CHROME) {
|
||||
// chrome-bug 383568268 // #124
|
||||
nw = Math.max(1, (nw > 4 ? 4 : (nw - 1)));
|
||||
nw = (subtle && !MOBILE && nw > 2) ? 2 : nw;
|
||||
}
|
||||
|
||||
for (var a = 0; a < nw; a++)
|
||||
hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));
|
||||
|
||||
if (!subtle)
|
||||
for (var a = 0; a < hws.length; a++)
|
||||
hws[a].postMessage('nosubtle');
|
||||
|
||||
console.log(hws.length + " hashers");
|
||||
}
|
||||
|
||||
@@ -1434,7 +1458,7 @@ function up2k_init(subtle) {
|
||||
if (!actx || actx.state != 'suspended' || toast.visible)
|
||||
return;
|
||||
|
||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
|
||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">" + L.u_actx + "</div>");
|
||||
}, 500);
|
||||
}
|
||||
|
||||
@@ -1476,7 +1500,7 @@ function up2k_init(subtle) {
|
||||
ev(e);
|
||||
var txt = linklist();
|
||||
cliptxt(txt + '\n', function () {
|
||||
toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
|
||||
toast.inf(5, un_clip.format(txt.split('\n').length));
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1541,8 +1565,10 @@ function up2k_init(subtle) {
|
||||
if (nhash) {
|
||||
st.time.hashing += td;
|
||||
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||
if (uc.fsearch)
|
||||
if (uc.fsearch) {
|
||||
st.time.busy += td;
|
||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||
}
|
||||
}
|
||||
|
||||
var b_up = st.bytes.inflight + st.bytes.uploaded,
|
||||
@@ -1736,16 +1762,13 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
|
||||
var mou_ikkai = false;
|
||||
|
||||
if (st.busy.handshake.length &&
|
||||
st.busy.handshake[0].t_busied < now - 30 * 1000
|
||||
) {
|
||||
console.log("retrying stuck handshake");
|
||||
var t = st.busy.handshake.shift();
|
||||
st.todo.handshake.unshift(t);
|
||||
if (st.bytes.inflight && (st.bytes.inflight < 0 || !st.busy.upload.length)) {
|
||||
console.log('insane inflight ' + st.bytes.inflight);
|
||||
st.bytes.inflight = 0;
|
||||
}
|
||||
|
||||
var mou_ikkai = false;
|
||||
|
||||
var nprev = -1;
|
||||
for (var a = 0; a < st.todo.upload.length; a++) {
|
||||
var nf = st.todo.upload[a].nfile;
|
||||
@@ -1864,10 +1887,12 @@ function up2k_init(subtle) {
|
||||
|
||||
function chill(t) {
|
||||
var now = Date.now();
|
||||
if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
|
||||
if ((t.coolmul || 0) < 5 || now - t.cooldown < t.coolmul * 700)
|
||||
t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);
|
||||
|
||||
t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
|
||||
var cd = now + 1000 * (t.coolmul + Math.random() * 4 + 2);
|
||||
t.cooldown = Math.floor(Math.max(cd, t.cooldown || 1));
|
||||
return t;
|
||||
}
|
||||
|
||||
/////
|
||||
@@ -1947,32 +1972,84 @@ function up2k_init(subtle) {
|
||||
nchunk = 0,
|
||||
chunksize = get_chunksize(t.size),
|
||||
nchunks = Math.ceil(t.size / chunksize),
|
||||
csz_mib = chunksize / 1048576,
|
||||
tread = t.t_hashing,
|
||||
cache_buf = null,
|
||||
cache_car = 0,
|
||||
cache_cdr = 0,
|
||||
hashers = 0,
|
||||
hashtab = {};
|
||||
|
||||
// resolving subtle.digest w/o worker takes 1sec on blur if the actx hack breaks
|
||||
var use_workers = hws.length && !hws_ng && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'),
|
||||
hash_par = (!subtle && !use_workers) ? 0 : csz_mib < 48 ? 2 : csz_mib < 96 ? 1 : 0;
|
||||
|
||||
pvis.setab(t.n, nchunks);
|
||||
pvis.move(t.n, 'bz');
|
||||
|
||||
if (hws.length && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
|
||||
// resolving subtle.digest w/o worker takes 1sec on blur if the actx hack breaks
|
||||
if (use_workers)
|
||||
return wexec_hash(t, chunksize, nchunks);
|
||||
|
||||
var segm_next = function () {
|
||||
if (nchunk >= nchunks || bpend)
|
||||
return false;
|
||||
|
||||
var reader = new FileReader(),
|
||||
nch = nchunk++,
|
||||
var nch = nchunk++,
|
||||
car = nch * chunksize,
|
||||
cdr = Math.min(chunksize + car, t.size);
|
||||
|
||||
st.bytes.hashed += cdr - car;
|
||||
st.etac.h++;
|
||||
|
||||
var orz = function (e) {
|
||||
bpend--;
|
||||
segm_next();
|
||||
hash_calc(nch, e.target.result);
|
||||
if (MOBILE && CHROME && st.slow_io === null && nch == 1 && cdr - car >= 1024 * 512) {
|
||||
var spd = Math.floor((cdr - car) / (Date.now() + 1 - tread));
|
||||
st.slow_io = spd < 40 * 1024;
|
||||
console.log('spd {0}, slow: {1}'.format(spd, st.slow_io));
|
||||
}
|
||||
|
||||
if (cdr <= cache_cdr && car >= cache_car) {
|
||||
try {
|
||||
var ofs = car - cache_car,
|
||||
ofs2 = ofs + (cdr - car),
|
||||
buf = cache_buf.subarray(ofs, ofs2);
|
||||
|
||||
hash_calc(nch, buf);
|
||||
}
|
||||
catch (ex) {
|
||||
vis_exh(ex + '', 'up2k.js', '', '', ex);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
var reader = new FileReader(),
|
||||
fr_cdr = cdr;
|
||||
|
||||
if (st.slow_io) {
|
||||
var step = cdr - car,
|
||||
tgt = 48 * 1048576;
|
||||
|
||||
while (step && fr_cdr - car < tgt)
|
||||
fr_cdr += step;
|
||||
if (fr_cdr - car > tgt && fr_cdr > cdr)
|
||||
fr_cdr -= step;
|
||||
if (fr_cdr > t.size)
|
||||
fr_cdr = t.size;
|
||||
}
|
||||
|
||||
var orz = function (e) {
|
||||
bpend = 0;
|
||||
var buf = e.target.result;
|
||||
if (fr_cdr > cdr) {
|
||||
cache_buf = new Uint8Array(buf);
|
||||
cache_car = car;
|
||||
cache_cdr = fr_cdr;
|
||||
buf = cache_buf.subarray(0, cdr - car);
|
||||
}
|
||||
if (hashers < hash_par)
|
||||
segm_next();
|
||||
|
||||
hash_calc(nch, buf);
|
||||
};
|
||||
reader.onload = function (e) {
|
||||
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
};
|
||||
@@ -1999,17 +2076,20 @@ function up2k_init(subtle) {
|
||||
|
||||
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
|
||||
};
|
||||
bpend++;
|
||||
reader.readAsArrayBuffer(t.fobj.slice(car, cdr));
|
||||
bpend = 1;
|
||||
tread = Date.now();
|
||||
reader.readAsArrayBuffer(t.fobj.slice(car, fr_cdr));
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
var hash_calc = function (nch, buf) {
|
||||
hashers++;
|
||||
var orz = function (hashbuf) {
|
||||
var hslice = new Uint8Array(hashbuf).subarray(0, 33),
|
||||
b64str = buf2b64(hslice);
|
||||
|
||||
hashers--;
|
||||
hashtab[nch] = b64str;
|
||||
t.hash.push(nch);
|
||||
pvis.hashed(t);
|
||||
@@ -2061,16 +2141,27 @@ function up2k_init(subtle) {
|
||||
free = [],
|
||||
busy = {},
|
||||
nbusy = 0,
|
||||
init = 0,
|
||||
hashtab = {},
|
||||
mem = (MOBILE ? 128 : 256) * 1024 * 1024;
|
||||
|
||||
if (!hws_ok)
|
||||
init = setTimeout(function() {
|
||||
hws_ng = true;
|
||||
toast.warn(30, 'webworkers failed to start\n\nwill be a bit slower due to\nhashing on main-thread');
|
||||
apop(st.busy.hash, t);
|
||||
st.todo.hash.unshift(t);
|
||||
exec_hash();
|
||||
}, 5000);
|
||||
|
||||
for (var a = 0; a < hws.length; a++) {
|
||||
var w = hws[a];
|
||||
free.push(w);
|
||||
w.onmessage = onmsg;
|
||||
if (init)
|
||||
w.postMessage('ping');
|
||||
if (mem > 0)
|
||||
free.push(w);
|
||||
mem -= chunksize;
|
||||
if (mem <= 0)
|
||||
break;
|
||||
}
|
||||
|
||||
function go_next() {
|
||||
@@ -2100,6 +2191,12 @@ function up2k_init(subtle) {
|
||||
d = d.data;
|
||||
var k = d[0];
|
||||
|
||||
if (k == "pong")
|
||||
if (++hws_ok == hws.length) {
|
||||
clearTimeout(init);
|
||||
go_next();
|
||||
}
|
||||
|
||||
if (k == "panic")
|
||||
return vis_exh(d[1], 'up2k.js', '', '', d[1]);
|
||||
|
||||
@@ -2162,7 +2259,8 @@ function up2k_init(subtle) {
|
||||
tasker();
|
||||
}
|
||||
}
|
||||
go_next();
|
||||
if (!init)
|
||||
go_next();
|
||||
}
|
||||
|
||||
/////
|
||||
@@ -2178,7 +2276,7 @@ function up2k_init(subtle) {
|
||||
st.busy.head.push(t);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
xhr.onerror = xhr.ontimeout = function () {
|
||||
console.log('head onerror, retrying', t.name, t);
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_enethd + "\n\nfile: " + t.name, t);
|
||||
@@ -2222,6 +2320,7 @@ function up2k_init(subtle) {
|
||||
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
};
|
||||
|
||||
xhr.timeout = 34000;
|
||||
xhr.open('HEAD', t.purl + uricom_enc(t.name), true);
|
||||
xhr.send();
|
||||
}
|
||||
@@ -2246,8 +2345,11 @@ function up2k_init(subtle) {
|
||||
if (keepalive)
|
||||
console.log("sending keepalive handshake", t.name, t);
|
||||
|
||||
if (!t.srch && !t.t_handshake)
|
||||
pvis.seth(t.n, 2, L.u_hs);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
xhr.onerror = xhr.ontimeout = function () {
|
||||
if (t.t_busied != me) // t.done ok
|
||||
return console.log('zombie handshake onerror', t.name, t);
|
||||
|
||||
@@ -2256,8 +2358,7 @@ function up2k_init(subtle) {
|
||||
|
||||
console.log('handshake onerror, retrying', t.name, t);
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
st.todo.handshake.unshift(chill(t));
|
||||
t.keepalive = keepalive;
|
||||
};
|
||||
var orz = function (e) {
|
||||
@@ -2270,9 +2371,9 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) {
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
return toast.err(0, 'Handshake error; will retry...\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
|
||||
st.todo.handshake.unshift(chill(t));
|
||||
var txt = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
|
||||
return toast.err(0, txt + '\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
|
||||
}
|
||||
|
||||
t.t_handshake = Date.now();
|
||||
@@ -2371,14 +2472,45 @@ function up2k_init(subtle) {
|
||||
msg = 'done';
|
||||
|
||||
if (t.postlist.length) {
|
||||
if (t.rechecks && QS('#opa_del.act'))
|
||||
toast.inf(30, L.u_started, L.u_unpt);
|
||||
|
||||
var arr = st.todo.upload,
|
||||
sort = arr.length && arr[arr.length - 1].nfile > t.n;
|
||||
|
||||
for (var a = 0; a < t.postlist.length; a++)
|
||||
if (!t.stitch_sz) {
|
||||
// keep all connections busy
|
||||
var bpc = (st.bytes.total - st.bytes.finished) / (parallel_uploads || 1),
|
||||
ocs = 1024 * 1024,
|
||||
stp = 1024 * 512,
|
||||
ccs = ocs;
|
||||
while (ccs < bpc) {
|
||||
ocs = ccs;
|
||||
ccs += stp; if (ccs < bpc) ocs = ccs;
|
||||
ccs += stp; stp *= 2;
|
||||
}
|
||||
ocs = Math.floor(ocs / 1024 / 1024);
|
||||
t.stitch_sz = Math.min(ocs, stitch_tgt);
|
||||
}
|
||||
|
||||
for (var a = 0; a < t.postlist.length; a++) {
|
||||
var nparts = [], tbytes = 0, stitch = t.stitch_sz;
|
||||
if (t.nojoin && t.nojoin - t.postlist.length < 6)
|
||||
stitch = 1;
|
||||
|
||||
--a;
|
||||
for (var b = 0; b < stitch; b++) {
|
||||
nparts.push(t.postlist[++a]);
|
||||
tbytes += chunksize;
|
||||
if (tbytes + chunksize > stitch * 1024 * 1024 || t.postlist[a + 1] - t.postlist[a] !== 1)
|
||||
break;
|
||||
}
|
||||
arr.push({
|
||||
'nfile': t.n,
|
||||
'npart': t.postlist[a]
|
||||
'nparts': nparts
|
||||
});
|
||||
}
|
||||
t.nojoin = 0;
|
||||
|
||||
msg = null;
|
||||
done = false;
|
||||
@@ -2387,7 +2519,7 @@ function up2k_init(subtle) {
|
||||
arr.sort(function (a, b) {
|
||||
return a.nfile < b.nfile ? -1 :
|
||||
/* */ a.nfile > b.nfile ? 1 :
|
||||
a.npart < b.npart ? -1 : 1;
|
||||
/* */ a.nparts[0] < b.nparts[0] ? -1 : 1;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2421,8 +2553,10 @@ function up2k_init(subtle) {
|
||||
else {
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||
apop(st.busy.handshake, t);
|
||||
|
||||
var err = "",
|
||||
cls = "ERROR",
|
||||
rsp = unpre(xhr.responseText),
|
||||
ofs = rsp.lastIndexOf('\nURL: ');
|
||||
|
||||
@@ -2433,7 +2567,6 @@ function up2k_init(subtle) {
|
||||
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
||||
console.log("rate-limit: " + penalty);
|
||||
t.cooldown = Date.now() + parseFloat(penalty) * 1000;
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
return;
|
||||
}
|
||||
@@ -2452,23 +2585,33 @@ function up2k_init(subtle) {
|
||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||
t.rechecks = 0;
|
||||
t.want_recheck = true;
|
||||
if (st.busy.upload.length || st.busy.handshake.length || st.bytes.uploaded) {
|
||||
err = L.u_dupdefer;
|
||||
cls = 'defer';
|
||||
}
|
||||
}
|
||||
if (err_pend) {
|
||||
err += ' <a href="#" onclick="toast.inf(60, L.ue_ab);" class="fsearch_explain">(' + L.u_expl + ')</a>';
|
||||
}
|
||||
}
|
||||
if (rsp.indexOf('server HDD is full') + 1)
|
||||
return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
|
||||
|
||||
if (err != "") {
|
||||
if (!t.t_uploading)
|
||||
st.bytes.finished += t.size;
|
||||
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 1, cls);
|
||||
pvis.seth(t.n, 2, err);
|
||||
pvis.move(t.n, 'ng');
|
||||
|
||||
apop(st.busy.handshake, t);
|
||||
tasker();
|
||||
return;
|
||||
}
|
||||
|
||||
st.todo.handshake.unshift(chill(t));
|
||||
|
||||
if (rsp.indexOf('server HDD is full') + 1)
|
||||
return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
|
||||
|
||||
err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
|
||||
xhrchk(xhr, err + "\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found", "warn", t);
|
||||
}
|
||||
@@ -2493,6 +2636,8 @@ function up2k_init(subtle) {
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.timeout = 42000 + (t.srch || t.t_uploaded ? 0 :
|
||||
(t.size / (1048 * 20))); // safededup 20M/s hdd
|
||||
xhr.send(JSON.stringify(req));
|
||||
}
|
||||
|
||||
@@ -2534,8 +2679,10 @@ function up2k_init(subtle) {
|
||||
function exec_upload() {
|
||||
var upt = st.todo.upload.shift(),
|
||||
t = st.files[upt.nfile],
|
||||
npart = upt.npart,
|
||||
tries = 0;
|
||||
nparts = upt.nparts,
|
||||
pcar = nparts[0],
|
||||
pcdr = nparts[nparts.length - 1],
|
||||
maxsz = (u2sz_max > 1 ? u2sz_max : 2040) * 1024 * 1024;
|
||||
|
||||
if (t.done)
|
||||
return console.log('done; skip chunk', t.name, t);
|
||||
@@ -2549,45 +2696,82 @@ function up2k_init(subtle) {
|
||||
pvis.seth(t.n, 1, "🚀 send");
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
car = npart * chunksize,
|
||||
cdr = car + chunksize;
|
||||
car = pcar * chunksize,
|
||||
cdr = (pcdr + 1) * chunksize;
|
||||
|
||||
if (cdr >= t.size)
|
||||
cdr = t.size;
|
||||
|
||||
if (cdr - car <= maxsz)
|
||||
return upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car, []);
|
||||
|
||||
var car0 = car, subs = [];
|
||||
while (car < cdr) {
|
||||
subs.push([car, Math.min(cdr, car + maxsz)]);
|
||||
car += maxsz;
|
||||
}
|
||||
upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);
|
||||
}
|
||||
|
||||
function upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car0, subs) {
|
||||
var nparts = upt.nparts,
|
||||
is_sub = subs.length;
|
||||
|
||||
if (is_sub) {
|
||||
var x = subs.shift();
|
||||
car = x[0];
|
||||
cdr = x[1];
|
||||
}
|
||||
|
||||
var snpart = is_sub ? ('' + pcar + '(' + (car-car0) +'+'+ (cdr-car)) :
|
||||
pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr);
|
||||
|
||||
var orz = function (xhr) {
|
||||
st.bytes.inflight -= xhr.bsent;
|
||||
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||
if (txt.indexOf('upload blocked by x') + 1) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
for (var a = pcar; a <= pcdr; a++)
|
||||
apop(t.postlist, a);
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 2, txt.split(/\n/)[0]);
|
||||
pvis.move(t.n, 'ng');
|
||||
return;
|
||||
}
|
||||
if (xhr.status == 200) {
|
||||
pvis.prog(t, npart, cdr - car);
|
||||
car = car0;
|
||||
if (subs.length)
|
||||
return upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);
|
||||
|
||||
var bdone = cdr - car;
|
||||
for (var a = pcar; a <= pcdr; a++) {
|
||||
pvis.prog(t, a, Math.min(bdone, chunksize));
|
||||
bdone -= chunksize;
|
||||
}
|
||||
st.bytes.finished += cdr - car;
|
||||
st.bytes.uploaded += cdr - car;
|
||||
t.bytes_uploaded += cdr - car;
|
||||
t.cooldown = t.coolmul = 0;
|
||||
st.etac.u++;
|
||||
st.etac.t++;
|
||||
}
|
||||
else if (txt.indexOf('already got that') + 1 ||
|
||||
txt.indexOf('already being written') + 1) {
|
||||
console.log("ignoring dupe-segment error", t.name, t);
|
||||
t.nojoin = t.nojoin || t.postlist.length;
|
||||
console.log("ignoring dupe-segment with backoff", t.nojoin, t.name, t);
|
||||
if (!toast.visible && st.todo.upload.length < 4)
|
||||
toast.inf(10, L.u_cbusy);
|
||||
}
|
||||
else {
|
||||
xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
||||
|
||||
xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
||||
chill(t);
|
||||
}
|
||||
orz2(xhr);
|
||||
}
|
||||
var orz2 = function (xhr) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
for (var a = pcar; a <= pcdr; a++)
|
||||
apop(t.postlist, a);
|
||||
if (!t.postlist.length) {
|
||||
t.t_uploaded = Date.now();
|
||||
pvis.seth(t.n, 1, 'verifying');
|
||||
@@ -2601,37 +2785,63 @@ function up2k_init(subtle) {
|
||||
btot = Math.floor(st.bytes.total / 1024 / 1024);
|
||||
|
||||
xhr.upload.onprogress = function (xev) {
|
||||
var nb = xev.loaded;
|
||||
st.bytes.inflight += nb - xhr.bsent;
|
||||
var nb = xev.loaded,
|
||||
db = nb - xhr.bsent;
|
||||
|
||||
if (!db)
|
||||
return;
|
||||
|
||||
st.bytes.inflight += db;
|
||||
xhr.bsent = nb;
|
||||
pvis.prog(t, npart, nb);
|
||||
xhr.timeout = 64000 + Date.now() - xhr.t0;
|
||||
pvis.prog(t, pcar, nb);
|
||||
};
|
||||
xhr.onload = function (xev) {
|
||||
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
};
|
||||
xhr.onerror = function (xev) {
|
||||
xhr.onerror = xhr.ontimeout = function (xev) {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
st.bytes.inflight -= (xhr.bsent || 0);
|
||||
xhr.bsent = 0;
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
|
||||
console.log('chunkpit onerror,', ++tries, t.name, t);
|
||||
t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
|
||||
console.log('chunkpit onerror,', t.name, t);
|
||||
orz2(xhr);
|
||||
};
|
||||
|
||||
var chashes = [],
|
||||
ctxt = t.hash[pcar],
|
||||
plen = Math.floor(192 / nparts.length);
|
||||
|
||||
plen = plen > 9 ? 9 : plen < 2 ? 2 : plen;
|
||||
for (var a = pcar + 1; a <= pcdr; a++)
|
||||
chashes.push(t.hash[a].slice(0, plen));
|
||||
|
||||
if (chashes.length)
|
||||
ctxt += ',' + plen + ',' + chashes.join('');
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
|
||||
xhr.setRequestHeader("X-Up2k-Hash", ctxt);
|
||||
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
|
||||
if (is_sub)
|
||||
xhr.setRequestHeader("X-Up2k-Subc", car - car0);
|
||||
|
||||
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
|
||||
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
|
||||
st.eta.t.split(' ').pop()));
|
||||
st.eta.t.indexOf('/s, ')+1 ? st.eta.t.split(' ').pop() : 'x'));
|
||||
|
||||
xhr.setRequestHeader('Content-Type', 'application/octet-stream');
|
||||
if (xhr.overrideMimeType)
|
||||
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
|
||||
|
||||
xhr.bsent = 0;
|
||||
xhr.t0 = Date.now();
|
||||
xhr.timeout = 42000;
|
||||
xhr.responseType = 'text';
|
||||
xhr.send(t.fobj.slice(car, cdr));
|
||||
}
|
||||
@@ -2732,13 +2942,34 @@ function up2k_init(subtle) {
|
||||
if (parallel_uploads > 16)
|
||||
parallel_uploads = 16;
|
||||
|
||||
if (parallel_uploads > 7)
|
||||
if (parallel_uploads > 6)
|
||||
toast.warn(10, L.u_maxconn);
|
||||
else if (toast.txt == L.u_maxconn)
|
||||
toast.hide();
|
||||
|
||||
obj.value = parallel_uploads;
|
||||
bumpthread({ "target": 1 });
|
||||
}
|
||||
|
||||
var read_u2sz = function () {
|
||||
var el = ebi('u2szg'), n = parseInt(el.value);
|
||||
stitch_tgt = n = (
|
||||
isNaN(n) ? u2sz_tgt :
|
||||
n < u2sz_min ? u2sz_min :
|
||||
n > u2sz_max ? u2sz_max : n
|
||||
);
|
||||
if (n == u2sz_tgt) sdrop('u2sz'); else swrite('u2sz', n);
|
||||
if (el.value != n) el.value = n;
|
||||
};
|
||||
ebi('u2szg').addEventListener('blur', read_u2sz);
|
||||
ebi('u2szg').onkeydown = function (e) {
|
||||
if (anymod(e)) return;
|
||||
var n = e.code == 'ArrowUp' ? 1 : e.code == 'ArrowDown' ? -1 : 0;
|
||||
if (!n) return;
|
||||
this.value = parseInt(this.value) + n;
|
||||
read_u2sz();
|
||||
}
|
||||
|
||||
function tgl_fsearch() {
|
||||
set_fsearch(!uc.fsearch);
|
||||
}
|
||||
@@ -2868,7 +3099,7 @@ function up2k_init(subtle) {
|
||||
new_state = false;
|
||||
fixed = true;
|
||||
}
|
||||
if (new_state === undefined)
|
||||
if (new_state === undefined && preferred === undefined)
|
||||
new_state = can_write ? false : have_up2k_idx ? true : undefined;
|
||||
}
|
||||
|
||||
|
||||
@@ -5,10 +5,17 @@ if (!window.console || !console.log)
|
||||
"log": function (msg) { }
|
||||
};
|
||||
|
||||
if (!Object.assign)
|
||||
Object.assign = function (a, b) {
|
||||
for (var k in b)
|
||||
a[k] = b[k];
|
||||
};
|
||||
|
||||
if (window.CGV1)
|
||||
Object.assign(window, window.CGV1);
|
||||
|
||||
if (window.CGV)
|
||||
for (var k in CGV)
|
||||
window[k] = CGV[k];
|
||||
Object.assign(window, window.CGV);
|
||||
|
||||
|
||||
var wah = '',
|
||||
@@ -22,14 +29,15 @@ var wah = '',
|
||||
HTTPS = ('' + location).indexOf('https:') === 0,
|
||||
TOUCH = 'ontouchstart' in window,
|
||||
MOBILE = TOUCH,
|
||||
CHROME = !!window.chrome,
|
||||
CHROME = !!window.chrome, // safari=false
|
||||
VCHROME = CHROME ? 1 : 0,
|
||||
IE = /Trident\//.test(navigator.userAgent),
|
||||
FIREFOX = ('netscape' in window) && / rv:/.test(navigator.userAgent),
|
||||
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
|
||||
LINUX = /Linux/.test(navigator.userAgent),
|
||||
MACOS = /[^a-z]mac ?os/i.test(navigator.userAgent),
|
||||
WINDOWS = /Windows/.test(navigator.userAgent);
|
||||
UA = '' + navigator.userAgent,
|
||||
IE = /Trident\//.test(UA),
|
||||
FIREFOX = ('netscape' in window) && / rv:/.test(UA),
|
||||
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(UA),
|
||||
LINUX = /Linux/.test(UA),
|
||||
MACOS = /Macintosh/.test(UA),
|
||||
WINDOWS = /Windows/.test(UA);
|
||||
|
||||
if (!window.WebAssembly || !WebAssembly.Memory)
|
||||
window.WebAssembly = false;
|
||||
@@ -127,13 +135,13 @@ if ((document.location + '').indexOf(',rej,') + 1)
|
||||
|
||||
try {
|
||||
console.hist = [];
|
||||
var CMAXHIST = 1000;
|
||||
var CMAXHIST = MOBILE ? 9000 : 44000;
|
||||
var hook = function (t) {
|
||||
var orig = console[t].bind(console),
|
||||
cfun = function () {
|
||||
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
|
||||
if (console.hist.length > CMAXHIST)
|
||||
console.hist = console.hist.slice(CMAXHIST / 2);
|
||||
console.hist = console.hist.slice(CMAXHIST / 4);
|
||||
|
||||
orig.apply(console, arguments);
|
||||
};
|
||||
@@ -189,7 +197,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
'<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(msg).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
||||
'<p><b>UA:</b> ' + esc(UA)
|
||||
];
|
||||
|
||||
try {
|
||||
@@ -473,6 +481,24 @@ function crc32(str) {
|
||||
}
|
||||
|
||||
|
||||
function randstr(len) {
|
||||
var ret = '';
|
||||
try {
|
||||
var ar = new Uint32Array(Math.floor((len + 3) / 4));
|
||||
crypto.getRandomValues(ar);
|
||||
for (var a = 0; a < ar.length; a++)
|
||||
ret += ('000' + ar[a].toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log('using unsafe randstr because ' + ex);
|
||||
while (ret.length < len)
|
||||
ret += ('000' + Math.floor(Math.random() * 1679616).toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clmod(el, cls, add) {
|
||||
if (!el)
|
||||
return false;
|
||||
@@ -517,6 +543,14 @@ function clgot(el, cls) {
|
||||
}
|
||||
|
||||
|
||||
function setcvar(k, v) {
|
||||
try {
|
||||
document.documentElement.style.setProperty(k, v);
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
|
||||
|
||||
var ANIM = true;
|
||||
try {
|
||||
var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
|
||||
@@ -545,7 +579,9 @@ function yscroll() {
|
||||
|
||||
function showsort(tab) {
|
||||
var v, vn, v1, v2, th = tab.tHead,
|
||||
sopts = jread('fsort', jcp(dsort));
|
||||
sopts = jread('fsort');
|
||||
|
||||
sopts = sopts && sopts.length ? sopts : dsort;
|
||||
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
|
||||
@@ -582,10 +618,13 @@ function sortTable(table, col, cb) {
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;
|
||||
|
||||
var stype = th[col].getAttribute('sort');
|
||||
var kname = th[col].getAttribute('name'),
|
||||
stype = th[col].getAttribute('sort');
|
||||
try {
|
||||
var nrules = [], rules = jread("fsort", []);
|
||||
rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
|
||||
var nrules = [],
|
||||
rules = kname == 'href' ? [] : jread("fsort", []);
|
||||
|
||||
rules.unshift([kname, reverse, stype || '']);
|
||||
for (var a = 0; a < rules.length; a++) {
|
||||
var add = true;
|
||||
for (var b = 0; b < a; b++)
|
||||
@@ -848,6 +887,11 @@ if (window.Number && Number.isFinite)
|
||||
|
||||
function f2f(val, nd) {
|
||||
// 10.toFixed(1) returns 10.00 for certain values of 10
|
||||
if (!isNum(val)) {
|
||||
val = parseFloat(val);
|
||||
if (!isNum(val))
|
||||
val = 999;
|
||||
}
|
||||
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
|
||||
return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
|
||||
}
|
||||
@@ -904,15 +948,18 @@ function shumantime(v, long) {
|
||||
|
||||
|
||||
function lhumantime(v) {
|
||||
var t = shumantime(v, 1),
|
||||
tp = t.replace(/([a-z])/g, " $1 ").split(/ /g).slice(0, -1);
|
||||
var t = shumantime(v, 1);
|
||||
if (/[0-9]$/.exec(t))
|
||||
t += 's';
|
||||
|
||||
var tp = t.replace(/([a-z])/g, " $1 ").split(/ /g).slice(0, -1);
|
||||
|
||||
if (!L || tp.length < 2 || tp[1].indexOf('$') + 1)
|
||||
return t;
|
||||
|
||||
var ret = '';
|
||||
for (var a = 0; a < tp.length; a += 2)
|
||||
ret += tp[a] + ' ' + L['ht_' + tp[a + 1]].replace(tp[a] == 1 ? /!.*/ : /!/, '') + L.ht_and;
|
||||
ret += tp[a] + ' ' + L['ht_' + tp[a + 1] + (tp[a]==1?1:2)] + L.ht_and;
|
||||
|
||||
return ret.slice(0, -L.ht_and.length);
|
||||
}
|
||||
@@ -941,11 +988,33 @@ function apop(arr, v) {
|
||||
}
|
||||
|
||||
|
||||
function jcp(obj) {
|
||||
function jcp1(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
function jcp2(src) {
|
||||
if (Array.isArray(src)) {
|
||||
var ret = [];
|
||||
for (var a = 0; a < src.length; ++a) {
|
||||
var sub = src[a];
|
||||
ret.push((sub === null) ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub);
|
||||
}
|
||||
} else {
|
||||
var ret = {};
|
||||
for (var key in src) {
|
||||
var sub = src[key];
|
||||
ret[key] = sub === null ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
|
||||
// jcp1 50% faster on android-chrome, jcp2 7x everywhere else
|
||||
var jcp = MOBILE && CHROME ? jcp1 : jcp2;
|
||||
|
||||
|
||||
function sdrop(key) {
|
||||
try {
|
||||
STG.removeItem(key);
|
||||
@@ -1396,10 +1465,10 @@ var tt = (function () {
|
||||
o = ctr.querySelectorAll('*[tt]');
|
||||
|
||||
for (var a = o.length - 1; a >= 0; a--) {
|
||||
o[a].onfocus = _cshow;
|
||||
o[a].onblur = _hide;
|
||||
o[a].onmouseenter = _dshow;
|
||||
o[a].onmouseleave = _hide;
|
||||
o[a].addEventListener('focus', _cshow);
|
||||
o[a].addEventListener('blur', _hide);
|
||||
o[a].addEventListener('mouseenter', _dshow);
|
||||
o[a].addEventListener('mouseleave', _hide);
|
||||
}
|
||||
r.hide();
|
||||
}
|
||||
@@ -1498,13 +1567,26 @@ var toast = (function () {
|
||||
if (sec)
|
||||
te = setTimeout(r.hide, sec * 1000);
|
||||
|
||||
if (same && delta < 1000)
|
||||
if (same && delta < 1000) {
|
||||
var tb = ebi('toastt');
|
||||
if (tb) {
|
||||
tb.style.animation = 'none';
|
||||
tb.offsetHeight;
|
||||
tb.style.animation = null;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (txt.indexOf('<body>') + 1)
|
||||
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||
|
||||
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
||||
var html = '';
|
||||
if (sec) {
|
||||
setcvar('--tmtime', (sec - 0.15) + 's');
|
||||
setcvar('--tmstep', Math.floor(sec * 20));
|
||||
html += '<div id="toastt"></div>';
|
||||
}
|
||||
obj.innerHTML = html + '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
||||
obj.className = cl;
|
||||
sec += obj.offsetWidth;
|
||||
obj.className += ' vis';
|
||||
@@ -1536,6 +1618,7 @@ var modal = (function () {
|
||||
var r = {},
|
||||
q = [],
|
||||
o = null,
|
||||
scrolling = null,
|
||||
cb_up = null,
|
||||
cb_ok = null,
|
||||
cb_ng = null,
|
||||
@@ -1556,6 +1639,7 @@ var modal = (function () {
|
||||
r.nofocus = 0;
|
||||
|
||||
r.show = function (html) {
|
||||
tt.hide();
|
||||
o = mknod('div', 'modal');
|
||||
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
|
||||
document.body.appendChild(o);
|
||||
@@ -1579,6 +1663,7 @@ var modal = (function () {
|
||||
|
||||
document.addEventListener('focus', onfocus);
|
||||
document.addEventListener('selectionchange', onselch);
|
||||
timer.add(scrollchk, 1);
|
||||
timer.add(onfocus);
|
||||
if (cb_up)
|
||||
setTimeout(cb_up, 1);
|
||||
@@ -1586,6 +1671,8 @@ var modal = (function () {
|
||||
|
||||
r.hide = function () {
|
||||
timer.rm(onfocus);
|
||||
timer.rm(scrollchk);
|
||||
scrolling = null;
|
||||
try {
|
||||
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||
}
|
||||
@@ -1604,13 +1691,28 @@ var modal = (function () {
|
||||
r.hide();
|
||||
if (cb_ok)
|
||||
cb_ok(v);
|
||||
}
|
||||
};
|
||||
var ng = function (e) {
|
||||
ev(e);
|
||||
r.hide();
|
||||
if (cb_ng)
|
||||
cb_ng(null);
|
||||
}
|
||||
};
|
||||
|
||||
var scrollchk = function () {
|
||||
if (scrolling === true)
|
||||
return;
|
||||
|
||||
var o = ebi('modalc'),
|
||||
vis = o.offsetHeight,
|
||||
all = o.scrollHeight,
|
||||
nsc = 8 + vis < all;
|
||||
|
||||
if (scrolling !== nsc)
|
||||
clmod(o, 'yk', !nsc);
|
||||
|
||||
scrolling = nsc;
|
||||
};
|
||||
|
||||
var onselch = function () {
|
||||
try {
|
||||
|
||||
@@ -20,6 +20,7 @@ catch (ex) {
|
||||
function load_fb() {
|
||||
subtle = null;
|
||||
importScripts('deps/sha512.hw.js');
|
||||
console.log('using fallback hasher');
|
||||
}
|
||||
|
||||
|
||||
@@ -29,6 +30,12 @@ var reader = null,
|
||||
|
||||
|
||||
onmessage = (d) => {
|
||||
if (d.data == 'nosubtle')
|
||||
return load_fb();
|
||||
|
||||
if (d.data == 'ping')
|
||||
return postMessage(['pong']);
|
||||
|
||||
if (busy)
|
||||
return postMessage(["panic", 'worker got another task while busy']);
|
||||
|
||||
|
||||
@@ -25,6 +25,9 @@
## [`changelog.md`](changelog.md)
* occasionally grabbed from github release notes

## [`synology-dsm.md`](synology-dsm.md)
* running copyparty on a synology nas

## [`devnotes.md`](devnotes.md)
* technical stuff
(File diff suppressed because it is too large)

docs/chunksizes.py (new executable file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env python3

# there's far better ways to do this but its 4am and i dont wanna think

# just pypy it my dude

import math

def humansize(sz, terse=False):
    for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
        if sz < 1024:
            break

        sz /= 1024.0

    ret = " ".join([str(sz)[:4].rstrip("."), unit])

    if not terse:
        return ret

    return ret.replace("iB", "").replace(" ", "")


def up2k_chunksize(filesize):
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            if nchunks <= 256 or (chunksize >= 32 * 1024 * 1024 and nchunks <= 4096):
                return chunksize

            chunksize += stepsize
            stepsize *= mul


def main():
    prev = 1048576
    n = n0 = 524288
    while True:
        csz = up2k_chunksize(n)
        if csz > prev:
            print(f"| {n-n0:>18_} | {humansize(n-n0):>8} | {prev:>13_} | {humansize(prev):>8} |".replace("_", " "))
            prev = csz
        n += n0


main()
docs/devnotes.md (114 changed lines)
@@ -1,17 +1,20 @@
## devnotes toc

* top
* [future plans](#future-plans) - some improvement ideas
* [future ideas](#future-ideas) - list of dreams which will probably never happen
* [design](#design)
* [up2k](#up2k) - quick outline of the up2k protocol
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [list of chunk-sizes](#list-of-chunk-sizes) - specific chunksizes are enforced
* [hashed passwords](#hashed-passwords) - regarding the curious decisions
* [http api](#http-api)
* [read](#read)
* [write](#write)
* [admin](#admin)
* [general](#general)
* [event hooks](#event-hooks) - on writing your own [hooks](../README.md#event-hooks)
* [hook effects](#hook-effects) - hooks can cause intentional side-effects
* [assumptions](#assumptions)
* [mdns](#mdns)
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
@@ -25,9 +28,9 @@
* [discarded ideas](#discarded-ideas)


# future plans
# future ideas

some improvement ideas
list of dreams which will probably never happen

* the JS is a mess -- a ~~preact~~ rewrite would be nice
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
@@ -55,8 +58,8 @@ quick outline of the up2k protocol, see [uploading](https://github.com/9001/cop
* server creates the `wark`, an identifier for this upload
* `sha512( salt + filesize + chunk_hashes )`
* and a sparse file is created for the chunks to drop into
* client uploads each chunk
* header entries for the chunk-hash and wark
* client sends a series of POSTs, with one or more consecutive chunks in each
* header entries for the chunk-hashes (comma-separated) and wark
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
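as a rough illustration of the outline above, here is a minimal sketch of how a client could derive the chunk hashes and the `wark`; the exact salt value, digest truncation and encoding used by copyparty are not specified in this document, so treat everything except the overall structure as an assumption.

```python
# hypothetical client-side sketch; real up2k clients may truncate/encode
# the digests differently (assumption, not the reference implementation)
import hashlib
import os


def chunk_hashes(path, chunksize):
    """sha512 of every chunksize-sized piece of the file, in order"""
    hashes = []
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunksize)
            if not buf:
                break
            hashes.append(hashlib.sha512(buf).hexdigest())
    return hashes


def wark(salt, path, chunksize):
    """wark = sha512( salt + filesize + chunk_hashes ), per the outline above"""
    filesize = os.path.getsize(path)
    blob = salt + str(filesize) + "".join(chunk_hashes(path, chunksize))
    return hashlib.sha512(blob.encode("utf-8")).hexdigest()
```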

@@ -93,6 +96,44 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x

* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids

### list of chunk-sizes

specific chunksizes are enforced depending on total filesize

each pair of filesize/chunksize is the largest filesize which will use its listed chunksize; a 512 MiB file will use chunksize 2 MiB, but if the file is one byte larger than 512 MiB then it becomes 3 MiB

for the purpose of performance (or dodging arbitrary proxy limitations), it is possible to upload combined and/or partial chunks using stitching and/or subchunks respectively

| filesize (bytes) | filesize | chunksize (bytes) | chunksize |
| -----------------: | -------: | ------------: | ------: |
| 268 435 456 | 256 MiB | 1 048 576 | 1.0 MiB |
| 402 653 184 | 384 MiB | 1 572 864 | 1.5 MiB |
| 536 870 912 | 512 MiB | 2 097 152 | 2.0 MiB |
| 805 306 368 | 768 MiB | 3 145 728 | 3.0 MiB |
| 1 073 741 824 | 1.0 GiB | 4 194 304 | 4.0 MiB |
| 1 610 612 736 | 1.5 GiB | 6 291 456 | 6.0 MiB |
| 2 147 483 648 | 2.0 GiB | 8 388 608 | 8.0 MiB |
| 3 221 225 472 | 3.0 GiB | 12 582 912 | 12 MiB |
| 4 294 967 296 | 4.0 GiB | 16 777 216 | 16 MiB |
| 6 442 450 944 | 6.0 GiB | 25 165 824 | 24 MiB |
| 137 438 953 472 | 128 GiB | 33 554 432 | 32 MiB |
| 206 158 430 208 | 192 GiB | 50 331 648 | 48 MiB |
| 274 877 906 944 | 256 GiB | 67 108 864 | 64 MiB |
| 412 316 860 416 | 384 GiB | 100 663 296 | 96 MiB |
| 549 755 813 888 | 512 GiB | 134 217 728 | 128 MiB |
| 824 633 720 832 | 768 GiB | 201 326 592 | 192 MiB |
| 1 099 511 627 776 | 1.0 TiB | 268 435 456 | 256 MiB |
| 1 649 267 441 664 | 1.5 TiB | 402 653 184 | 384 MiB |
| 2 199 023 255 552 | 2.0 TiB | 536 870 912 | 512 MiB |
| 3 298 534 883 328 | 3.0 TiB | 805 306 368 | 768 MiB |
| 4 398 046 511 104 | 4.0 TiB | 1 073 741 824 | 1.0 GiB |
| 6 597 069 766 656 | 6.0 TiB | 1 610 612 736 | 1.5 GiB |
| 8 796 093 022 208 | 8.0 TiB | 2 147 483 648 | 2.0 GiB |
| 13 194 139 533 312 | 12.0 TiB | 3 221 225 472 | 3.0 GiB |
| 17 592 186 044 416 | 16.0 TiB | 4 294 967 296 | 4.0 GiB |
| 26 388 279 066 624 | 24.0 TiB | 6 442 450 944 | 6.0 GiB |
| 35 184 372 088 832 | 32.0 TiB | 8 589 934 592 | 8.0 GiB |
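
the 512 MiB boundary mentioned above can be double-checked with the `up2k_chunksize` helper from `docs/chunksizes.py` shown earlier (assuming it is imported or pasted into a REPL):

```python
MiB = 1024 * 1024

# 512 MiB fits exactly 256 chunks of 2 MiB...
assert up2k_chunksize(512 * MiB) == 2 * MiB

# ...but one extra byte exceeds 256 chunks, so the next allowed
# chunksize (3 MiB) is picked, matching the table above
assert up2k_chunksize(512 * MiB + 1) == 3 * MiB
```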


# hashed passwords

@@ -131,14 +172,19 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?tar=xz:9` | ...as an xz-level-9 gnu-tar file |
| GET | `?tar=pax` | ...as a pax-tar file |
| GET | `?tar=pax,xz` | ...as an xz-level-1 pax-tar file |
| GET | `?zip=utf-8` | ...as a zip file |
| GET | `?zip` | ...as a WinXP-compatible zip file |
| GET | `?zip` | ...as a zip file |
| GET | `?zip=dos` | ...as a WinXP-compatible zip file |
| GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
| GET | `?tar&w` | pregenerate webp thumbnails |
| GET | `?tar&j` | pregenerate jpg thumbnails |
| GET | `?tar&p` | pregenerate audio waveforms |
| GET | `?shares` | list your shared files/folders |
| GET | `?dls` | show active downloads (do this as admin) |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
| GET | `?ru` | show all recent uploads |
| GET | `?ru&filter=f` | ...where URL contains `f` |
| GET | `?ru&j` | ...as json |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?v` | render markdown file at URL |
| GET | `?v` | open image/video/audio in mediaplayer |
@@ -160,19 +206,28 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`

| method | params | result |
|--|--|--|
| POST | `?copy=/foo/bar` | copy the file/folder at URL to /foo/bar |
| POST | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |

| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
| PUT | `?j` | (binary data) | ...and reply with json |
| PUT | `?ck` | (binary data) | upload without checksum gen (faster) |
| PUT | `?ck=md5` | (binary data) | return md5 instead of sha512 |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
| mPOST | `?ck` | `f=FILE` | ...and disable checksum gen (faster) |
| mPOST | `?ck=md5` | `f=FILE` | ...and return md5 instead of sha512 |
| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| POST | `?delete` | | delete URL recursively |
| POST | `?eshare=rm` | | stop sharing a file/folder |
| POST | `?eshare=3` | | set expiration to 3 minutes |
| jPOST | `?share` | (complicated) | create temp URL for file/folder |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
@@ -182,8 +237,15 @@ upload modifiers:
| http-header | url-param | effect |
|--|--|--|
| `Accept: url` | `want=url` | return just the file URL |
| `Accept: json` | `want=json` | return upload info as json; same as `?j` |
| `Rand: 4` | `rand=4` | generate random filename with 4 characters |
| `Life: 30` | `life=30` | delete file after 30 seconds |
| `CK: no` | `ck` | disable serverside checksum (maybe faster) |
| `CK: md5` | `ck=md5` | return md5 checksum instead of sha512 |
| `CK: sha1` | `ck=sha1` | return sha1 checksum |
| `CK: sha256` | `ck=sha256` | return sha256 checksum |
| `CK: b2` | `ck=b2` | return blake2b checksum |
| `CK: b2s` | `ck=b2s` | return blake2s checksum |

* `life` only has an effect if the volume has a lifetime, and the volume lifetime must be greater than the file's
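
to tie the write-API and modifier tables together, a basic PUT upload might look like the sketch below (using the `requests` library; the server URL, destination path and password are placeholders, and as noted above `life` only kicks in if the volume actually has a lifetime):

```python
import requests

# hypothetical server, destination path and password
url = "https://example.com/inc/report.pdf"

with open("report.pdf", "rb") as f:
    r = requests.put(
        url,
        data=f,                               # (binary data) -> upload into file at URL
        params={"want": "url", "life": 30},   # return just the file URL; delete after 30s
        headers={"Cookie": "cppwd=hunter2"},  # or append &pw=hunter2 to the URL instead
    )

print(r.status_code, r.text.strip())  # expected: the URL of the uploaded file
```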

@@ -202,6 +264,38 @@ upload modifiers:
| method | params | result |
|--|--|--|
| GET | `?pw=x` | logout |
| GET | `?grid` | ui: show grid-view |
| GET | `?imgs` | ui: show grid-view with thumbnails |
| GET | `?grid=0` | ui: show list-view |
| GET | `?imgs=0` | ui: show list-view |
| GET | `?thumb` | ui, grid-mode: show thumbnails |
| GET | `?thumb=0` | ui, grid-mode: show icons |

# event hooks

on writing your own [hooks](../README.md#event-hooks)

## hook effects

hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout

* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
* `idx` informs copyparty about a new file to index as a consequence of this upload
* `del` tells copyparty to delete an unrelated file by vpath

for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)

a subset of effect types are available for a subset of hook types,

* most hook types (xbu/xau/xbr/xar/xbd/xad/xm) support `idx` and `del` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* most hook types will abort/reject the action if the hook returns nonzero, assuming flag `c` is given, see examples [reject-extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) and [reject-mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `xbu` supports `reloc` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* `xau` supports `reloc` for basic-uploader / webdav only, not up2k or ftp/tftp/smb
  * so clients like sharex are supported, but not dragdrop into browser

to trigger indexing of files `/foo/1.txt` and `/foo/bar/2.txt`, a hook can `print(json.dumps({"idx":{"vp":["/foo/1.txt","/foo/bar/2.txt"]}}))` (and replace "idx" with "del" to delete instead)
* note: paths starting with `/` are absolute URLs, but you can also do `../3.txt` relative to the destination folder of each uploaded file
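
for completeness, a minimal hook along those lines could look like the sketch below; it is a hypothetical example which simply emits the same `idx` effect as above on stdout, and it only takes effect if the hook is registered with the `c1` flag (see the linked reloc-by-ext example for the real calling convention and argument format):

```python
#!/usr/bin/env python3
# hypothetical hook: ask copyparty to index two files it does not yet
# know about; swap "idx" for "del" to delete files instead
import json

print(json.dumps({"idx": {"vp": ["/foo/1.txt", "/foo/bar/2.txt"]}}))
```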


# assumptions

@@ -247,6 +341,8 @@ the rest is mostly optional; if you need a working env for vscode or similar
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 strip_hints # MANDATORY
pip install argon2-cffi # password hashing
pip install pyzmq # send 0mq from hooks
pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install partftpy # tftp server
@@ -327,10 +423,6 @@ can be reproduced with `--no-sendfile --s-wr-sz 8192 --s-wr-slp 0.3 --rsp-slp 6`
* remove brokers / multiprocessing stuff; https://github.com/9001/copyparty/tree/no-broker
* reduce the nesting / indirections in `HttpCli` / `httpcli.py`
  * nearly zero benefit from stuff like replacing all the `self.conn.hsrv` with a local `hsrv` variable
* reduce up2k roundtrips
  * start from a chunk index and just go
  * terminate client on bad data
  * not worth the effort, just throw enough connections at it
* single sha512 across all up2k chunks?
  * crypto.subtle cannot into streaming, would have to use hashwasm, expensive
* separate sqlite table per tag
Some files were not shown because too many files have changed in this diff