Compare commits
402 Commits
.vscode/launch.json (vendored): 7 changes

@@ -9,14 +9,17 @@
      "console": "integratedTerminal",
      "cwd": "${workspaceFolder}",
      "justMyCode": false,
      "env": {
        "PYDEVD_DISABLE_FILE_VALIDATION": "1",
        "PYTHONWARNINGS": "always", //error
      },
      "args": [
        //"-nw",
        "-ed",
        "-emp",
        "-e2dsa",
        "-e2ts",
        "-mtp",
        ".bpm=f,bin/mtag/audio-bpm.py",
        "-mtp=.bpm=f,bin/mtag/audio-bpm.py",
        "-aed:wark",
        "-vsrv::r:rw,ed:c,dupe",
        "-vdist:dist:r"
.vscode/launch.py (vendored): 2 changes

@@ -41,7 +41,7 @@ if sfx:
    argv = [sys.executable, sfx] + argv
    sp.check_call(argv)
elif re.search(" -j ?[0-9]", " ".join(argv)):
    argv = [sys.executable, "-m", "copyparty"] + argv
    argv = [sys.executable, "-Wa", "-m", "copyparty"] + argv
    sp.check_call(argv)
else:
    sys.path.insert(0, os.getcwd())
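For reference, the debug profile above roughly corresponds to the following manual invocation; this is a sketch assembled from the args in launch.json and the new `-Wa` in launch.py, assuming the concatenated forms (`-aed:wark`, `-vsrv::r:rw,ed:c,dupe`) expand to the usual `-a`/`-v` pairs:

```bash
# run copyparty from a source checkout with python warnings enabled,
# using the same dev flags as the vscode launch config
python3 -Wa -m copyparty \
  -ed -emp -e2dsa -e2ts \
  -mtp=.bpm=f,bin/mtag/audio-bpm.py \
  -a ed:wark \
  -v srv::r:rw,ed:c,dupe \
  -v dist:dist:r
```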
.vscode/tasks.json (vendored): 1 change

@@ -11,6 +11,7 @@
      "type": "shell",
      "command": "${config:python.pythonPath}",
      "args": [
        "-Wa", //-We
        ".vscode/launch.py"
      ]
    }
@@ -1,3 +1,43 @@
* do something cool

really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍

but to be more specific,


# contribution ideas


## documentation

I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.


## crazy ideas & features

assuming they won't cause too much problems or side-effects :>

i think someone was working on a way to list directories over DNS for example...

if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍


## others

aside from documentation and ideas, some other things that would be cool to have some help with is:

* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>

* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.

* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!

* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
  * [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)

* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
  * [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
  * [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :>
LICENSE: 2 changes

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 ed
Copyright (c) 2019 ed <oss@ocv.me>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md: 399 changes

@@ -3,7 +3,7 @@
turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser

* server only needs Python (2 or 3), all dependencies optional
* 🔌 protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
* 🔌 protocols: [http](#the-browser) // [webdav](#webdav-server) // [ftp](#ftp-server) // [tftp](#tftp-server) // [smb/cifs](#smb-server)
* 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)

👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland

@@ -20,13 +20,13 @@ turn almost any device into a file server with resumable uploads/downloads using
* [testimonials](#testimonials) - small collection of user feedback
* [motivations](#motivations) - project goals / philosophy
* [notes](#notes) - general notes
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [bugs](#bugs) - roughly sorted by chance of encounter
* [not my bugs](#not-my-bugs) - same order here too
* [breaking changes](#breaking-changes) - upgrade notes
* [FAQ](#FAQ) - "frequently" asked questions
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
* [shadowing](#shadowing) - hiding specific subfolders
* [dotfiles](#dotfiles) - unix-style hidden files/folders
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
* [tabs](#tabs) - the main tabs in the ui
* [hotkeys](#hotkeys) - the browser has the following hotkeys

@@ -40,7 +40,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [media player](#media-player) - plays almost every audio format there is
* [audio equalizer](#audio-equalizer) - bass boosted
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
* [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks)

@@ -53,7 +53,9 @@ turn almost any device into a file server with resumable uploads/downloads using
* [ftp server](#ftp-server) - an FTP server can be started using `--ftp 3921`
* [webdav server](#webdav-server) - with read-write support
* [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
* [tftp server](#tftp-server) - a TFTP server (read/write) can be started using `--tftp 3969`
* [smb server](#smb-server) - unsafe, slow, not recommended for wan
* [browser ux](#browser-ux) - tweaking the ui
* [file indexing](#file-indexing) - enables dedup and music search ++
* [exclude-patterns](#exclude-patterns) - to save some time
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems

@@ -67,13 +69,16 @@ turn almost any device into a file server with resumable uploads/downloads using
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
* [using the cloud as storage](#using-the-cloud-as-storage) - connecting to an aws s3 bucket and similar
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [themes](#themes)
* [complete examples](#complete-examples)
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
* [prometheus](#prometheus) - metrics/stats can be enabled
* [packages](#packages) - the party might be closer than you think
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
* [fedora package](#fedora-package) - does not exist yet
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
* [nixos module](#nixos-module)
* [browser support](#browser-support) - TLDR: yes

@@ -84,9 +89,10 @@ turn almost any device into a file server with resumable uploads/downloads using
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [client-side](#client-side) - when uploading files
* [security](#security) - some notes on hardening
* [security](#security) - there is a [discord server](https://discord.gg/25J8CdTT6G)
* [gotchas](#gotchas) - behavior that might be unexpected
* [cors](#cors) - cross-site request config
* [filekeys](#filekeys) - prevent filename bruteforcing
* [password hashing](#password-hashing) - you can hash passwords
* [https](#https) - both HTTP and HTTPS are accepted
* [recovering from crashes](#recovering-from-crashes)

@@ -99,7 +105,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [sfx](#sfx) - the self-contained "binary"
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
* [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [reporting bugs](#reporting-bugs) - ideas for context to include, and where to submit them
* [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
@@ -117,8 +123,8 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:

* **Alpine:** `apk add py3-pillow ffmpeg`
* **Debian:** `apt install python3-pil ffmpeg`
* **Fedora:** `dnf install python3-pillow ffmpeg`
* **Debian:** `apt install --no-install-recommends python3-pil ffmpeg`
* **Fedora:** rpmfusion + `dnf install python3-pillow ffmpeg`
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
* **MacOS:** `port install py-Pillow ffmpeg`
* **MacOS** (alternative): `brew install pillow ffmpeg`
@@ -145,18 +151,19 @@ you may also want these, especially on servers:

* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
* [contrib/openrc/copyparty](contrib/openrc/copyparty) to run copyparty on Alpine / Gentoo
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)

and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
```
firewall-cmd --permanent --add-port={80,443,3921,3923,3945,3990}/tcp # --zone=libvirt
firewall-cmd --permanent --add-port=12000-12099/tcp --permanent # --zone=libvirt
firewall-cmd --permanent --add-port={1900,5353}/udp # --zone=libvirt
firewall-cmd --permanent --add-port=12000-12099/tcp # --zone=libvirt
firewall-cmd --permanent --add-port={69,1900,3969,5353}/udp # --zone=libvirt
firewall-cmd --reload
```
(1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp)
(69:tftp, 1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3969:tftp, 3990:ftps, 5353:mdns, 12000:passive-ftp)


## features
@@ -167,6 +174,7 @@ firewall-cmd --reload
* ☑ volumes (mountpoints)
* ☑ [accounts](#accounts-and-volumes)
* ☑ [ftp server](#ftp-server)
* ☑ [tftp server](#tftp-server)
* ☑ [webdav server](#webdav-server)
* ☑ [smb/cifs server](#smb-server)
* ☑ [qr-code](#qr-code) for quick access
@@ -261,20 +269,31 @@ server notes:

# bugs

* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
roughly sorted by chance of encounter

## general bugs

* general:
  * `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
  * `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
  * if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
    * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
  * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
  * probably more, pls let me know

* python 3.4 and older (including 2.7):
  * many rare and exciting edge-cases because [python didn't handle EINTR yet](https://peps.python.org/pep-0475/)
    * downloads from copyparty may suddenly fail, but uploads *should* be fine

* python 2.7 on Windows:
  * cannot index non-ascii filenames with `-e2d`
  * cannot handle filenames with mojibake

if you have a new exciting bug to share, see [reporting bugs](#reporting-bugs)

* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
  * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* probably more, pls let me know

## not my bugs

same order here too

* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded

* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
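The `--hist` workaround mentioned above looks like this in practice (paths are placeholders):

```bash
# global: keep every volume's .hist data (db, thumbnails, transcodes) on a local disk
python3 copyparty-sfx.py --hist /var/lib/copyparty/hist -v /mnt/nas/share::rw

# per-volume: same idea via the hist volflag, for just this one volume
python3 copyparty-sfx.py -v /mnt/nas/share::rw:c,hist=/var/lib/copyparty/hist
```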
@@ -284,8 +303,11 @@ server notes:
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)

* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
  * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
    * "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
    * `AudioContext` will probably never be a viable workaround as apple introduces new issues faster than they fix current ones

* iPhones: the preload feature (in the media-player-options tab) can cause a tiny audio glitch 20sec before the end of each song, but disabling it may cause worse iOS bugs to appear instead
  * just a hunch, but disabling preloading may cause playback to stop entirely, or possibly mess with bluetooth speakers
  * tried to add a tooltip regarding this but looks like apple broke my tooltips

* Windows: folders cannot be accessed if the name ends with `.`
  * python or windows bug

@@ -295,6 +317,7 @@ server notes:

* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
  * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
  * also happens on mergerfs, so put the db elsewhere

* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
  * due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
@@ -304,6 +327,8 @@ server notes:

upgrade notes

* `1.9.16` (2023-11-04):
  * `--stats`/prometheus: `cpp_bans` renamed to `cpp_active_bans`, and that + `cpp_uptime` are gauges
* `1.6.0` (2023-01-29):
  * http-api: delete/move is now `POST` instead of `GET`
  * everything other than `GET` and `HEAD` must pass [cors validation](#cors)
@@ -320,14 +345,32 @@ upgrade notes
  * yes, using the [`g` permission](#accounts-and-volumes), see the examples there
  * you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty

* can I link someone to a password-protected volume/file by including the password in the URL?
  * yes, by adding `?pw=hunter2` to the end; replace `?` with `&` if there are parameters in the URL already, meaning it contains a `?` near the end

* how do I stop `.hist` folders from appearing everywhere on my HDD?
  * by default, a `.hist` folder is created inside each volume for the filesystem index, thumbnails, audio transcodes, and markdown document history. Use the `--hist` global-option or the `hist` volflag to move it somewhere else; see [database location](#database-location)

* can I make copyparty download a file to my server if I give it a URL?
  * yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)

* firefox refuses to connect over https, saying "Secure Connection Failed" or "SEC_ERROR_BAD_SIGNATURE", but the usual button to "Accept the Risk and Continue" is not shown
  * firefox has corrupted its certstore; fix this by exiting firefox, then find and delete the file named `cert9.db` somewhere in your firefox profile folder

* copyparty seems to think I am using http, even though the URL is https
  * your reverse-proxy is not sending the `X-Forwarded-Proto: https` header; this could be because your reverse-proxy itself is confused. Ensure that none of the intermediates (such as cloudflare) are terminating https before the traffic hits your entrypoint

* i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
  * ```bash
    _| _ __ _ _|_
    (_| (_) | | (_) |_
    ```


# accounts and volumes

per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if the user has `a`/admin in any volume)
* changes to the `[global]` config section requires a restart to take effect

a quick summary can be seen using `--help-accounts`
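The `?pw=` trick from the FAQ above also works from scripts and the command line; a sketch with a made-up host and password:

```bash
# download one file from a password-protected volume
curl -o intro.opus 'https://example.com/music/album/intro.opus?pw=hunter2'

# the URL already has a parameter here, so append with & instead of ?
curl -o album.tar 'https://example.com/music/album/?tar&pw=hunter2'
```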
@@ -344,8 +387,12 @@ permissions:
* `w` (write): upload files, move files *into* this folder
* `m` (move): move files/folders *from* this folder
* `d` (delete): delete files/folders
* `.` (dots): user can ask to show dotfiles in directory listings
* `g` (get): only download files, cannot see folder contents or zip/tar
* `G` (upget): same as `g` except uploaders get to see their own filekeys (see `fk` in examples below)
* `G` (upget): same as `g` except uploaders get to see their own [filekeys](#filekeys) (see `fk` in examples below)
* `h` (html): same as `g` except folders return their index.html, and filekeys are not necessary for index.html
* `a` (admin): can see upload time, uploader IPs, config-reload
* `A` ("all"): same as `rwmda.` (read/write/move/delete/admin/dotfiles)

examples:
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
@@ -357,7 +404,7 @@ examples:
  * `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
  * `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable filekeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
  * `c,fk=4` sets the `fk` (filekey) volflag to 4, meaning each file gets a 4-character accesskey
  * `c,fk=4` sets the `fk` ([filekey](#filekeys)) volflag to 4, meaning each file gets a 4-character accesskey
  * `u1` can upload files, browse the folder, and see the generated filekeys
  * other users cannot browse the folder, but can access the files if they have the full file URL with the filekey
  * replacing the `g` permission with `wg` would let anonymous users upload files, but not see the required filekey to access it
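One more hedged example tying the permission letters together; this assumes the `.` (dots) permission chains with the other letters the same way the `A` = `rwmda.` shorthand implies:

```bash
# everyone can read /mnt/pub; u1 can also write/move/delete and list dotfiles
python3 copyparty-sfx.py -a u1:p1 -v /mnt/pub:pub:r:rwmd.,u1
```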
@@ -373,6 +420,17 @@ hiding specific subfolders by mounting another volume on top of them

for example `-v /mnt::r -v /var/empty:web/certs:r` mounts the server folder `/mnt` as the webroot, but another volume is mounted at `/web/certs` -- so visitors can only see the contents of `/mnt` and `/mnt/web` (at URLs `/` and `/web`), but not `/mnt/web/certs` because URL `/web/certs` is mapped to `/var/empty`


## dotfiles

unix-style hidden files/folders by starting the name with a dot

anyone can access these if they know the name, but they normally don't appear in directory listings

a client can request to see dotfiles in directory listings if global option `-ed` is specified, or the volume has volflag `dots`, or the user has permission `.`

dotfiles do not appear in search results unless one of the above is true, **and** the global option / volflag `dotsrch` is set
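A sketch of the three ways described above to allow dotfile listings (the `:c,dots` volflag follows the same `:c,flag` pattern as the other volflags in this README):

```bash
# globally, for every volume and every visitor
python3 copyparty-sfx.py -ed -v /srv/pub::r

# per-volume, with the dots volflag
python3 copyparty-sfx.py -v /srv/pub::r:c,dots

# per-user, by granting the . permission to u1 only
python3 copyparty-sfx.py -a u1:p1 -v /srv/pub::r:r.,u1
```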

# the browser

accessing a copyparty server using a web-browser
@@ -485,11 +543,14 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
audio files are covnerted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)

images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)

in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾

enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
* `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking


## zip downloads
@@ -500,12 +561,20 @@ select which type of archive you want in the `[⚙️] config` tab:

| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `pax` | `?tar=pax` | pax-format tar, futureproof, not as fast |
| `tgz` | `?tar=gz` | gzip compressed gnu-tar (slow), for `curl \| tar -xvz` |
| `txz` | `?tar=xz` | gnu-tar with xz / lzma compression (v.slow) |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |

* hidden files (dotfiles) are excluded unless `-ed`
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
* hidden files ([dotfiles](#dotfiles)) are excluded unless account is allowed to list them
* `up2k.db` and `dir.txt` is always excluded
* bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
  * good, because copyparty's zip is faster than tar on small files
* `zip_crc` will take longer to download since the server has to read each file twice
  * this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
  * how are you accessing copyparty actually
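The url-suffixes in the table are easy to drive from the command line; a sketch against a local server (folder name made up):

```bash
# stream a folder as plain gnutar and unpack it on the fly
curl 'http://127.0.0.1:3923/myfolder/?tar' | tar -xv

# same but gzip level 9, and a utf8 zip into bsdtar
curl 'http://127.0.0.1:3923/myfolder/?tar=gz:9' | tar -xvz
curl 'http://127.0.0.1:3923/myfolder/?zip=utf8' | bsdtar -xv
```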
@@ -514,6 +583,11 @@ you can also zip a selection of files or folders by clicking them in the browser

![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/129635374-e5136e01-470a-49b1-a762-848e8a4c9cde.png)

cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
* can also be used to pregenerate thumbnails; combine with `--th-maxage=9999999` or `--th-clean=0`
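As a concrete version of the trick above (host and folder names are made up):

```bash
# grab an album as a tar where the flacs are transcoded to opus on the fly
curl -o album.tar 'https://example.com/music/some-album/?tar&opus'

# jpeg thumbnails / spectrograms instead of the original files
curl -o thumbs.tar 'https://example.com/photos/2023/?tar&j'
```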

## uploading
@@ -550,7 +624,8 @@ the up2k UI is the epitome of polished inutitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[🥔]` shows a simpler UI for faster uploads from slow devices
* `[💭]` ask for confirmation before files are added to the queue
* `[🎲]` generate random filenames during upload
* `[📅]` preserve last-modified timestamps; server times will match yours
* `[🔎]` switch between upload and [file-search](#file-search) mode
  * ignore `[🔎]` if you add files by dragging them into the browser
@@ -612,6 +687,7 @@ file selection: click somewhere on the line (not the link itsef), then:
  * `up/down` to move
  * `shift-up/down` to move-and-select
  * `ctrl-shift-up/down` to also scroll
  * shift-click another line for range-select

* cut: select some files and `ctrl-x`
* paste: `ctrl-v` in another folder
@@ -681,7 +757,8 @@ some hilights:
click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)

open the `[🎺]` media-player-settings tab to configure it,
* switches:
* "switches":
  * `[🔀]` shuffles the files inside each folder
  * `[preload]` starts loading the next track when it's about to end, reduces the silence between songs
  * `[full]` does a full preload by downloading the entire next file; good for unreliable connections, bad for slow connections
  * `[~s]` toggles the seekbar waveform display
@@ -691,11 +768,13 @@ open the `[🎺]` media-player-settings tab to configure it,
  * `[art]` shows album art on the lockscreen
  * `[🎯]` keeps the playing song scrolled into view (good when using the player as a taskbar dock)
  * `[⟎]` shrinks the playback controls
* playback mode:
* "buttons":
  * `[uncache]` may fix songs that won't play correctly due to bad files in browser cache
* "at end of folder":
  * `[loop]` keeps looping the folder
  * `[next]` plays into the next folder
* transcode:
  * `[flac]` convers `flac` and `wav` files into opus
* "transcode":
  * `[flac]` converts `flac` and `wav` files into opus
  * `[aac]` converts `aac` and `m4a` files into opus
  * `[oth]` converts all other known formats into opus
    * `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
@@ -704,12 +783,14 @@ open the `[🎺]` media-player-settings tab to configure it,

### audio equalizer

bass boosted
and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)

can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)

has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)

not available on iPhones / iPads because AudioContext currently breaks background audio playback on iOS (15.7.8)


### fix unreliable playback on android
@@ -748,6 +829,8 @@ other notes,

* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)

* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`


## searching
@@ -773,9 +856,12 @@ for the above example to work, add the commandline argument `-e2ts` to also scan

using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf) and [./docs/example2.conf](docs/example2.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
  * or click the `[reload cfg]` button in the control-panel when logged in as admin
  * or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
* changes to the `[global]` config section requires a restart to take effect

**NB:** as humongous as this readme is, there is also a lot of undocumented features. Run copyparty with `--help` to see all available global options; all of those can be used in the `[global]` section of config files, and everything listed in `--help-flags` can be used in volumes as volflags.
* if running in docker/podman, try this: `docker run --rm -it copyparty/ac --help`
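A sketch of that config-file workflow (the file path is a placeholder):

```bash
# start from a config file instead of commandline arguments
python3 copyparty-sfx.py -c /etc/copyparty.d/main.conf

# reload accounts and volumes without restarting
# (equivalent to systemctl reload copyparty when running as a service)
kill -s USR1 $(pgrep -f copyparty-sfx.py)
```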

## zeroconf
@@ -875,21 +961,48 @@ known client bugs:
  * latin-1 is fine, hiragana is not (not even as shift-jis on japanese xp)


## tftp server

a TFTP server (read/write) can be started using `--tftp 3969` (you probably want [ftp](#ftp-server) instead unless you are *actually* communicating with hardware from the 90s (in which case we should definitely hang some time))

> that makes this the first RTX DECT Base that has been updated using copyparty 🎉

* based on [partftpy](https://github.com/9001/partftpy)
* no accounts; read from world-readable folders, write to world-writable, overwrite in world-deletable
* needs a dedicated port (cannot share with the HTTP/HTTPS API)
  * run as root (or see below) to use the spec-recommended port `69` (nice)
* can reply from a predefined portrange (good for firewalls)
* only supports the binary/octet/image transfer mode (no netascii)
* [RFC 7440](https://datatracker.ietf.org/doc/html/rfc7440) is **not** supported, so will be extremely slow over WAN
  * assuming default blksize (512), expect 1100 KiB/s over 100BASE-T, 400-500 KiB/s over wifi, 200 on bad wifi

most clients expect to find TFTP on port 69, but on linux and macos you need to be root to listen on that. Alternatively, listen on 3969 and use NAT on the server to forward 69 to that port;
* on linux: `iptables -t nat -A PREROUTING -i eth0 -p udp --dport 69 -j REDIRECT --to-port 3969`

some recommended TFTP clients:
* curl (cross-platform, read/write)
  * get: `curl --tftp-blksize 1428 tftp://127.0.0.1:3969/firmware.bin`
  * put: `curl --tftp-blksize 1428 -T firmware.bin tftp://127.0.0.1:3969/`
* windows: `tftp.exe` (you probably already have it)
  * `tftp -i 127.0.0.1 put firmware.bin`
* linux: `tftp-hpa`, `atftp`
  * `atftp --option "blksize 1428" 127.0.0.1 3969 -p -l firmware.bin -r firmware.bin`
  * `tftp -v -m binary 127.0.0.1 3969 -c put firmware.bin`
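Putting the notes above together into one start-to-finish sketch (interface name and paths are placeholders):

```bash
# start copyparty with the tftp listener on an unprivileged port
python3 copyparty-sfx.py --tftp 3969 -v /srv/tftp::rw

# optionally redirect the spec-standard port 69 to it (linux, as root),
# then use any of the clients listed above
iptables -t nat -A PREROUTING -i eth0 -p udp --dport 69 -j REDIRECT --to-port 3969
```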
## smb server

unsafe, slow, not recommended for wan, enable with `--smb` for read-only or `--smbw` for read-write

click the [connect](http://127.0.0.1:3923/?hc) button in the control-panel to see connection instructions for windows, linux, macos

dependencies: `python3 -m pip install --user -U impacket==0.10.0`
dependencies: `python3 -m pip install --user -U impacket==0.11.0`
* newer versions of impacket will hopefully work just fine but there is monkeypatching so maybe not

some **BIG WARNINGS** specific to SMB/CIFS, in decreasing importance:
* not entirely confident that read-only is read-only
* the smb backend is not fully integrated with vfs, meaning there could be security issues (path traversal). Please use `--smb-port` (see below) and [prisonparty](./bin/prisonparty.sh)
* account passwords work per-volume as expected, but account permissions are coalesced; all accounts have read-access to all volumes, and if a single account has write-access to some volume then all other accounts also do
  * if no accounts have write-access to a specific volume, or if `--smbw` is not set, then writing to that volume from smb *should* be impossible
  * will be fixed once [impacket v0.11.0](https://github.com/SecureAuthCorp/impacket/commit/d923c00f75d54b972bca573a211a82f09b55261a) is released
* account passwords work per-volume as expected, and so does account permissions (read/write/move/delete), but `--smbw` must be given to allow write-access from smb
* [shadowing](#shadowing) probably works as expected but no guarantees

and some minor issues,
@@ -900,14 +1013,14 @@ and some minor issues,
* win10 onwards does not allow connecting anonymously / without accounts
* on windows, creating a new file through rightclick --> new --> textfile throws an error due to impacket limitations -- hit OK and F5 to get your file
* python3 only
* slow
* slow (the builtin webdav support in windows is 5x faster, and rclone-webdav is 30x faster)

known client bugs:
* on win7 only, `--smb1` is much faster than smb2 (default) because it keeps rescanning folders on smb2
  * however smb1 is buggy and is not enabled by default on win10 onwards
* windows cannot access folders which contain filenames with invalid unicode or forbidden characters (`<>:"/\|?*`), or names ending with `.`

the smb protocol listens on TCP port 445, which is a privileged port on linux and macos, which would require running copyparty as root. However, this can be avoided by listening on another port using `--smb-port 3945` and then using NAT to forward the traffic from 445 to there;
the smb protocol listens on TCP port 445, which is a privileged port on linux and macos, which would require running copyparty as root. However, this can be avoided by listening on another port using `--smb-port 3945` and then using NAT on the server to forward the traffic from 445 to there;
* on linux: `iptables -t nat -A PREROUTING -i eth0 -p tcp --dport 445 -j REDIRECT --to-port 3945`

authenticate with one of the following:
@@ -915,6 +1028,16 @@ authenticate with one of the following:
* username `$password`, password `k`


## browser ux

tweaking the ui

* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort
  * the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Cirle tags/.tn tags/Artist tags/Title`
  * to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Cirle,tags/.tn,tags/Artist,tags/Title,href`
  * to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`
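The upload-date sorting tip above as one complete command (paths are placeholders):

```bash
# index uploads (-e2d), expose the .up_at key in listings, and sort by it by default
python3 copyparty-sfx.py -e2d -mte +.up_at --sort tags/.up_at -v /mnt/nas/music:/music:rw
```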

## file indexing

enables dedup and music search ++
@@ -941,6 +1064,7 @@ the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`,
* `-v ~/music::r:c,d2ts` same except only affecting tags

note:
* upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
@@ -954,6 +1078,8 @@ to save some time, you can provide a regex pattern for filepaths to only index

similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`

* when running on macos, all the usual apple metadata files are excluded by default

if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
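A sketch of the two exclusion mechanisms above (the regexes are just examples):

```bash
# don't hash anything under an "incoming" folder, and don't index iso files at all
python3 copyparty-sfx.py -e2dsa --no-hash '/incoming/' --no-idx '\.iso$' -v /mnt/nas::rw

# the per-volume equivalent of the iso exclusion, as a volflag
python3 copyparty-sfx.py -e2dsa -v '/mnt/nas::rw:c,noidx=\.iso$'
```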

### filesystem guards
@@ -998,6 +1124,9 @@ you can also set transaction limits which apply per-IP and per-volume, but these
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)

notes:
* `vmaxb` and `vmaxn` requires either the `e2ds` volflag or `-e2dsa` global-option
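A sketch of an anonymous upload-only inbox using the limits above (assuming multiple `:c,` groups can be chained onto one volume like in the other examples):

```bash
# write-only drop folder: max 250 files per hour and 1 GiB per 5 minutes, per IP
python3 copyparty-sfx.py -e2dsa -v /srv/inbox:inbox:w:c,maxn=250,3600:c,maxb=1g,300
```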

## compress uploads
@@ -1135,6 +1264,30 @@ redefine behavior with plugins ([examples](./bin/handlers/))
replace 404 and 403 errors with something completely different (that's it for now)


## identity providers

replace copyparty passwords with oauth and such

you can disable the built-in password-based login sysem, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions

a popular choice is [Authelia](https://www.authelia.com/) (config-file based), another one is [authentik](https://goauthentik.io/) (GUI-based, more complex)

there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik) which is hopefully a good starting point (alternatively see [./docs/idp.md](./docs/idp.md) if you're the DIY type)

a more complete example of the copyparty configuration options [look like this](./docs/examples/docker/idp/copyparty.conf)


## using the cloud as storage

connecting to an aws s3 bucket and similar

there is no built-in support for this, but you can use FUSE-software such as [rclone](https://rclone.org/) / [geesefs](https://github.com/yandex-cloud/geesefs) / [JuiceFS](https://juicefs.com/en/) to first mount your cloud storage as a local disk, and then let copyparty use (a folder in) that disk as a volume

you may experience poor upload performance this way, but that can sometimes be fixed by specifying the volflag `sparse` to force the use of sparse files; this has improved the upload speeds from `1.5 MiB/s` to over `80 MiB/s` in one case, but note that you are also more likely to discover funny bugs in your FUSE software this way, so buckle up

someone has also tested geesefs in combination with [gocryptfs](https://nuetzlich.net/gocryptfs/) with surprisingly good results, getting 60 MiB/s upload speeds on a gbit line, but JuiceFS won with 80 MiB/s using its built-in encryption
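A sketch of the rclone variant described above (remote name, bucket, and paths are placeholders; `sparse` is the volflag mentioned in this section):

```bash
# mount the bucket as a local folder first
rclone mount mys3:my-bucket /mnt/s3 --vfs-cache-mode writes --daemon

# then serve a folder inside it, forcing sparse files for better upload speeds
python3 copyparty-sfx.py -v /mnt/s3/share:share:rw:c,sparse
```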

## hiding from google

tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
@@ -1168,6 +1321,8 @@ the classname of the HTML tag is set according to the selected theme, which is u

see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where the color variables are set, and there's layout-specific stuff near the bottom

if you want to change the fonts, see [./docs/rice/](./docs/rice/)


## complete examples
@@ -1194,8 +1349,8 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
* anyone can upload, and receive "secret" links for each upload they do:
  `python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`

* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
  `python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
* anyone can browse (`r`), only `kevin` (password `okgo`) can upload/move/delete (`A`) files:
  `python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:A,kevin`

* read-only music server:
  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
@@ -1220,6 +1375,7 @@ you can either:
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below

some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
* **warning:** nginx-QUIC is still experimental and can make uploads much slower, so HTTP/2 is recommended for now

example webserver configs:
@@ -1227,27 +1383,86 @@ example webserver configs:
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
|
||||
|
||||
|
||||
## prometheus
|
||||
|
||||
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
|
||||
|
||||
must be enabled with `--stats` since it reduces startup time a tiny bit, and you probably want `-e2dsa` too
|
||||
|
||||
the endpoint is only accessible by `admin` accounts, meaning the `a` in `rwmda` in the following example commandline: `python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed --stats -e2dsa`
|
||||
|
||||
follow a guide for setting up `node_exporter` except have it read from copyparty instead; example `/etc/prometheus/prometheus.yml` below
|
||||
|
||||
```yaml
|
||||
scrape_configs:
|
||||
- job_name: copyparty
|
||||
metrics_path: /.cpr/metrics
|
||||
basic_auth:
|
||||
password: wark
|
||||
static_configs:
|
||||
- targets: ['192.168.123.1:3923']
|
||||
```
|
||||
|
||||
currently the following metrics are available,
|
||||
* `cpp_uptime_seconds` time since last copyparty restart
|
||||
* `cpp_boot_unixtime_seconds` same but as an absolute timestamp
|
||||
* `cpp_http_conns` number of open http(s) connections
|
||||
* `cpp_http_reqs` number of http(s) requests handled
|
||||
* `cpp_sus_reqs` number of 403/422/malicious requests
|
||||
* `cpp_active_bans` number of currently banned IPs
|
||||
* `cpp_total_bans` number of IPs banned since last restart
|
||||
|
||||
these are available unless `--nos-vst` is specified:
|
||||
* `cpp_db_idle_seconds` time since last database activity (upload/rename/delete)
|
||||
* `cpp_db_act_seconds` same but as an absolute timestamp
|
||||
* `cpp_idle_vols` number of volumes which are idle / ready
|
||||
* `cpp_busy_vols` number of volumes which are busy / indexing
|
||||
* `cpp_offline_vols` number of volumes which are offline / unavailable
|
||||
* `cpp_hashing_files` number of files queued for hashing / indexing
|
||||
* `cpp_tagq_files` number of files queued for metadata scanning
|
||||
* `cpp_mtpq_files` number of files queued for plugin-based analysis
|
||||
|
||||
and these are available per-volume only:
|
||||
* `cpp_disk_size_bytes` total HDD size
|
||||
* `cpp_disk_free_bytes` free HDD space
|
||||
|
||||
and these are per-volume and `total`:
|
||||
* `cpp_vol_bytes` size of all files in volume
|
||||
* `cpp_vol_files` number of files
|
||||
* `cpp_dupe_bytes` disk space presumably saved by deduplication
|
||||
* `cpp_dupe_files` number of dupe files
|
||||
* `cpp_unf_bytes` currently unfinished / incoming uploads
|
||||
|
||||
some of the metrics have additional requirements to function correctly,
|
||||
* `cpp_vol_*` requires either the `e2ds` volflag or `-e2dsa` global-option
|
||||
|
||||
the following options are available to disable some of the metrics:
|
||||
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
|
||||
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
|
||||
* `--nos-vst` disables volume state, reducing the worst-case prometheus query time by 0.5 sec
|
||||
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
|
||||
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
|
||||
|
||||
note: the following metrics are counted incorrectly if multiprocessing is enabled with `-j`: `cpp_http_conns`, `cpp_http_reqs`, `cpp_sus_reqs`, `cpp_active_bans`, `cpp_total_bans`
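putting the section together, a rough sketch of a commandline that enables the endpoint while skipping a couple of the heavier metrics (reusing the placeholder account/volume from above):

```bash
python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed -e2dsa --stats --nos-hdd --nos-dup
```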
|
||||
|
||||
|
||||
# packages
|
||||
|
||||
the party might be closer than you think
|
||||
|
||||
if your distro/OS is not mentioned below, there might be some hints in the [«on servers»](#on-servers) section
|
||||
|
||||
|
||||
## arch package
|
||||
|
||||
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
|
||||
it comes with a [systemd service](./contrib/package/arch/copyparty.service) and expects to find one or more [config files](./docs/example.conf) in `/etc/copyparty.d/`
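assuming the AUR package is installed the usual way, getting it running could look something like this sketch (any AUR helper works; `yay` is just an example):

```bash
yay -S copyparty
# drop one or more config files into /etc/copyparty.d/ (see ./docs/example.conf), then:
systemctl enable --now copyparty
```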
|
||||
|
||||
|
||||
## fedora package
|
||||
|
||||
now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) , maintained autonomously -- [track record](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/package/python-copyparty/) seems OK
|
||||
|
||||
```bash
|
||||
dnf copr enable @copr/PyPI
|
||||
dnf install python3-copyparty # just a minimal install, or...
|
||||
dnf install python3-{copyparty,pillow,argon2-cffi,pyftpdlib,pyOpenSSL} ffmpeg-free # with recommended deps
|
||||
```
|
||||
|
||||
this *may* also work on RHEL but [I'm not paying IBM to verify that](https://www.jeffgeerling.com/blog/2023/dear-red-hat-are-you-dumb)
|
||||
does not exist yet; using the [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) builds is **NOT recommended** because updates can be delayed by [several months](https://github.com/fedora-copr/copr/issues/3056)
|
||||
|
||||
|
||||
## nix package
|
||||
@@ -1377,15 +1592,16 @@ TLDR: yes
|
||||
| navpane | - | yep | yep | yep | yep | yep | yep | yep |
|
||||
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
|
||||
| video player | - | yep | yep | yep | yep | yep | yep | yep |
|
||||
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
|
||||
| markdown viewer | - | yep | yep | yep | yep | yep | yep | yep |
|
||||
| markdown editor | - | - | `*2` | `*2` | yep | yep | yep | yep |
|
||||
| markdown viewer | - | `*2` | `*2` | `*2` | yep | yep | yep | yep |
|
||||
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
|
||||
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
||||
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
||||
|
||||
* internet explorer 6 to 8 behave the same
|
||||
* internet explorer 6 through 8 behave the same
|
||||
* firefox 52 and chrome 49 are the final winxp versions
|
||||
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
||||
* `*2` only able to do plaintext documents (no markdown rendering)
|
||||
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
|
||||
|
||||
quick summary of more eccentric web-browsers trying to view a directory index:
|
||||
@@ -1411,10 +1627,12 @@ interact with copyparty using non-browser clients
|
||||
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
|
||||
|
||||
* curl/wget: upload some files (post=file, chunk=stdin)
|
||||
* `post(){ curl -F act=bput -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
|
||||
`post movie.mkv`
|
||||
* `post(){ curl -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
|
||||
`post movie.mkv` (gives HTML in return)
|
||||
* `post(){ curl -F f=@"$1" 'http://127.0.0.1:3923/?want=url&pw=wark';}`
|
||||
`post movie.mkv` (gives hotlink in return)
|
||||
* `post(){ curl -H pw:wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
|
||||
`post movie.mkv`
|
||||
`post movie.mkv` (randomized filename)
|
||||
* `post(){ wget --header='pw: wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
|
||||
`post movie.mkv`
|
||||
* `chunk(){ curl -H pw:wark -T- http://127.0.0.1:3923/;}`
|
||||
@@ -1436,6 +1654,10 @@ interact with copyparty using non-browser clients
|
||||
|
||||
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
|
||||
|
||||
* contextlet (web browser integration); see [contrib contextlet](contrib/#send-to-cppcontextletjson)
|
||||
|
||||
* [igloo irc](https://iglooirc.com/): Method: `post` Host: `https://you.com/up/?want=url&pw=hunter2` Multipart: `yes` File parameter: `f`
|
||||
|
||||
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
|
||||
|
||||
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
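for example (a sketch reusing the placeholder password and filename from the curl examples above), the string printed by the server after a PUT should match the locally computed one:

```bash
post(){ curl -H pw:wark -T "$1" http://127.0.0.1:3923/;}
post movie.mkv     # server replies with the truncated checksum
b512 <movie.mkv    # should print the same value
```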
|
||||
@@ -1454,7 +1676,7 @@ the commandline uploader [u2c.py](https://github.com/9001/copyparty/tree/hovudst
|
||||
|
||||
alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare
|
||||
|
||||
* starting from rclone v1.63 (currently [in beta](https://beta.rclone.org/?filter=latest)), rclone will also be faster than u2c.py
|
||||
* starting from rclone v1.63, rclone is faster than u2c.py on low-latency connections
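a minimal sketch, assuming a webdav remote named `party` has already been configured as described in [./docs/rclone.md](./docs/rclone.md):

```bash
rclone copy ./music party:music --transfers 4
```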
|
||||
|
||||
|
||||
## mount as drive
|
||||
@@ -1463,7 +1685,7 @@ a remote copyparty server as a local filesystem; go to the control-panel and cl
|
||||
|
||||
alternatively, here are some other options, roughly sorted by speed (unreproducible benchmark), best first:
|
||||
|
||||
* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE ([v1.63-beta](https://beta.rclone.org/?filter=latest))
|
||||
* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE (rclone v1.63 or later)
|
||||
* [rclone-http](./docs/rclone.md) (26s), read-only
|
||||
* [partyfuse.py](./bin/#partyfusepy) (35s), read-only
|
||||
* [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
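as a sketch of the fastest option above, again assuming a preconfigured webdav remote named `party` (see [./docs/rclone.md](./docs/rclone.md) for the actual settings):

```bash
mkdir -p ~/party
rclone mount party: ~/party --vfs-cache-mode writes
```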
|
||||
@@ -1503,13 +1725,15 @@ below are some tweaks roughly ordered by usefulness:
|
||||
|
||||
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
||||
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
||||
* and also makes thumbnails load faster, regardless of e2d/e2t
|
||||
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
||||
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
|
||||
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
|
||||
* lots of connections (many users or heavy clients)
|
||||
* simultaneous downloads and uploads saturating a 20gbps connection
|
||||
* if `-e2d` is enabled, `-j2` gives 4x performance for directory listings; `-j4` gives 16x
|
||||
|
||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||
...however it also increases the server/filesystem/HDD load during uploads, and adds an overhead to internal communication, so it is usually a better idea not to
|
||||
* using [pypy](https://www.pypy.org/) instead of [cpython](https://www.python.org/) *can* be 70% faster for some workloads, but slower for many others
|
||||
* and pypy can sometimes crash on startup with `-j0` (TODO make issue)
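a combined sketch of the commandline tweaks above (paths are placeholders):

```bash
# -q + -lo: quiet logging to a date-stamped file; --hist on an ssd; -j0 for multiprocessing
# add "--no-hash ." if /mnt/nas is a network disk and you only need names/tags searchable
python copyparty-sfx.py -q -lo /var/log/copyparty-%Y-%m%d.log \
  --hist /ssd/copyparty-hist -e2dsa -j0 \
  -v /mnt/nas/share::rw
```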
|
||||
|
||||
@@ -1518,7 +1742,7 @@ below are some tweaks roughly ordered by usefulness:
|
||||
|
||||
when uploading files,
|
||||
|
||||
* chrome is recommended, at least compared to firefox:
|
||||
* chrome is recommended (unfortunately), at least compared to firefox:
|
||||
* up to 90% faster when hashing, especially on SSDs
|
||||
* up to 40% faster when uploading over extremely fast internets
|
||||
* but [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) can be 40% faster than chrome again
|
||||
@@ -1532,10 +1756,14 @@ when uploading files,
|
||||
|
||||
# security
|
||||
|
||||
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all important updates (for lack of better ideas)
|
||||
|
||||
some notes on hardening
|
||||
|
||||
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
|
||||
* cors doesn't work right otherwise
|
||||
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml`
|
||||
* this returns html documents as plaintext, and also disables markdown rendering
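for example, a hardened anonymous-upload volume directly facing the internet might look like this sketch (path and permissions are illustrative):

```bash
# no reverse-proxy in front (--rproxy 0), write-only anonymous uploads,
# and the nohtml volflag so uploaded html/markdown can't run scripts for other visitors
python copyparty-sfx.py --rproxy 0 -v /srv/drop:/drop:w:c,nohtml
```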
|
||||
|
||||
safety profiles:
|
||||
|
||||
@@ -1549,9 +1777,7 @@ safety profiles:
|
||||
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
* however note if you edit one file it will also affect the other copies
|
||||
* `--vague-401` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
|
||||
* **NB:** will ban anyone who enables up2k turbo
|
||||
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--nih` removes the server hostname from directory listings
|
||||
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
@@ -1563,9 +1789,9 @@ safety profiles:
|
||||
other misc notes:
|
||||
|
||||
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
|
||||
* combine this with volflag `c,fk` to generate filekeys (per-file accesskeys); users who have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
* the default filekey entropy is fairly small so give `--fk-salt` around 30 characters if you want filekeys longer than 16 chars
|
||||
* permissions `wG` lets users upload files and receive their own filekeys, still without being able to see other uploads
|
||||
* you may want [filekeys](#filekeys) to prevent filename bruteforcing
|
||||
* permission `h` instead of `r` makes copyparty behave like a traditional webserver with directory listing/index disabled, returning index.html instead
|
||||
* compatibility with filekeys: index.html itself can be retrieved without the correct filekey, but all other files are protected
|
||||
|
||||
|
||||
## gotchas
|
||||
@@ -1573,10 +1799,12 @@ other misc notes:
|
||||
behavior that might be unexpected
|
||||
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users can submit `<script>`s which autorun for other visitors in a few ways;
|
||||
* users can submit `<script>`s which autorun (in a sandbox) for other visitors in a few ways;
|
||||
* uploading a `README.md` -- avoid with `--no-readme`
|
||||
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not 100% safe, see below
|
||||
* markdown documents can contain html and `<script>`s; attempts are made to prevent scripts from executing (unless `-emp` is specified) but this is not 100% bulletproof, so setting the `nohtml` volflag is still the safest choice
|
||||
* or eliminate the problem entirely by only giving write-access to trustworthy people :^)
|
||||
|
||||
|
||||
## cors
|
||||
@@ -1591,6 +1819,17 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
|
||||
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
|
||||
|
||||
|
||||
## filekeys
|
||||
|
||||
prevent filename bruteforcing
|
||||
|
||||
volflag `c,fk` generates filekeys (per-file accesskeys) for all files; users who have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
|
||||
by default, filekeys are generated based on salt (`--fk-salt`) + filesystem-path + file-size + inode (if not windows); add volflag `fka` to generate slightly weaker filekeys which will not be invalidated if the file is edited (only salt + path)
|
||||
|
||||
permissions `wG` (write + upget) lets users upload files and receive their own filekeys, still without being able to see other uploads
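a small sketch combining the pieces above (salt value and paths are placeholders):

```bash
# anonymous users can upload (w) and see filekeys for their own uploads (G) without
# being able to browse; 8-character filekeys, with a longer salt for more entropy
python copyparty-sfx.py --fk-salt 'replace-with-a-long-random-string' \
  -v /srv/drop:/drop:wG:c,fk=8
```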
|
||||
|
||||
|
||||
## password hashing
|
||||
|
||||
you can hash passwords before putting them into config files / providing them as arguments; see `--help-pwhash` for all the details
|
||||
@@ -1667,7 +1906,7 @@ enable [thumbnails](#thumbnails) of...
|
||||
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
|
||||
|
||||
enable [smb](#smb-server) support (**not** recommended):
|
||||
* `impacket==0.10.0`
|
||||
* `impacket==0.11.0`
|
||||
|
||||
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
|
||||
|
||||
@@ -1701,11 +1940,11 @@ can be convenient on machines where installing python is problematic, however is
|
||||
|
||||
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
|
||||
|
||||
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
|
||||
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.8.7/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
|
||||
|
||||
meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date
|
||||
|
||||
then again, if you are already into downloading shady binaries from the internet, you may also want my [minimal builds](./scripts/pyinstaller#ffmpeg) of [ffmpeg](https://ocv.me/stuff/bin/ffmpeg.exe) and [ffprobe](https://ocv.me/stuff/bin/ffprobe.exe) which enable copyparty to extract multimedia-info, do audio-transcoding, and thumbnails/spectrograms/waveforms, however it's much better to instead grab a [recent official build](https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip) every once in a while if you can afford the size
|
||||
then again, if you are already into downloading shady binaries from the internet, you may also want my [minimal builds](./scripts/pyinstaller#ffmpeg) of [ffmpeg](https://ocv.me/stuff/bin/ffmpeg.exe) and [ffprobe](https://ocv.me/stuff/bin/ffprobe.exe) which enable copyparty to extract multimedia-info, do audio-transcoding, and thumbnails/spectrograms/waveforms, however it's much better to instead grab a [recent official build](https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z) every once in a while if you can afford the size
|
||||
|
||||
|
||||
# install on android
|
||||
@@ -1725,7 +1964,12 @@ if you want thumbnails (photos+videos) and you're okay with spending another 132
|
||||
|
||||
# reporting bugs
|
||||
|
||||
ideas for context to include in bug reports
|
||||
ideas for context to include, and where to submit them
|
||||
|
||||
please get in touch using any of the following URLs:
|
||||
* https://github.com/9001/copyparty/ **(primary)**
|
||||
* https://gitlab.com/9001/copyparty/ *(mirror)*
|
||||
* https://codeberg.org/9001/copyparty *(mirror)*
|
||||
|
||||
in general, commandline arguments (and config file if any)
|
||||
|
||||
@@ -1740,3 +1984,6 @@ if there's a wall of base64 in the log (thread stacks) then please include that,
|
||||
# devnotes
|
||||
|
||||
for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
|
||||
|
||||
see [./docs/TODO.md](./docs/TODO.md) for planned features / fixes / changes
|
||||
|
||||
|
||||
@@ -207,7 +207,7 @@ def examples():
|
||||
|
||||
|
||||
def main():
|
||||
global NC, BY_PATH
|
||||
global NC, BY_PATH # pylint: disable=global-statement
|
||||
os.system("")
|
||||
print()
|
||||
|
||||
@@ -282,7 +282,8 @@ def main():
|
||||
if ver == "corrupt":
|
||||
die("{} database appears to be corrupt, sorry")
|
||||
|
||||
if ver < DB_VER1 or ver > DB_VER2:
|
||||
iver = int(ver)
|
||||
if iver < DB_VER1 or iver > DB_VER2:
|
||||
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
|
||||
die(m)
|
||||
|
||||
|
||||
123
bin/hooks/msg-log.py
Executable file
123
bin/hooks/msg-log.py
Executable file
@@ -0,0 +1,123 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from datetime import datetime, timezone
|
||||
except:
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
"""
|
||||
use copyparty as a dumb messaging server / guestbook thing;
|
||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||
|
||||
Sample usage:
|
||||
|
||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||
|
||||
Where:
|
||||
|
||||
xm = execute on message-to-server-log
|
||||
j = provide message information as json; not just the text - this script REQUIRES json
|
||||
t10 = timeout and kill the hook after 10 secs (optional; not used in the sample above)
|
||||
"""
|
||||
|
||||
|
||||
# output filename
|
||||
FILENAME = os.environ.get("COPYPARTY_MESSAGE_FILENAME", "") or "README.md"
|
||||
|
||||
# set True to write in descending order (newest message at top of file);
|
||||
# note that this becomes very slow/expensive as the file gets bigger
|
||||
DESCENDING = True
|
||||
|
||||
# the message template; the following parameters are provided by copyparty and can be referenced below:
|
||||
# 'ap' = absolute filesystem path where the message was posted
|
||||
# 'vp' = virtual path (URL 'path') where the message was posted
|
||||
# 'mt' = 'at' = unix-timestamp when the message was posted
|
||||
# 'datetime' = ISO-8601 time when the message was posted
|
||||
# 'sz' = message size in bytes
|
||||
# 'host' = the server hostname which the user was accessing (URL 'host')
|
||||
# 'user' = username (if logged in), otherwise '*'
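# 'ip' = the IP address the message was sent from (referenced by TEMPLATE below)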
|
||||
# 'txt' = the message text itself
|
||||
# (uncomment the print(msg_info) to see if additional information has been introduced by copyparty since this was written)
|
||||
TEMPLATE = """
|
||||
🕒 %(datetime)s, 👤 %(user)s @ %(ip)s
|
||||
%(txt)s
|
||||
"""
|
||||
|
||||
|
||||
def write_ascending(filepath, msg_text):
|
||||
with open(filepath, "a", encoding="utf-8", errors="replace") as outfile:
|
||||
outfile.write(msg_text)
|
||||
|
||||
|
||||
def write_descending(filepath, msg_text):
|
||||
lockpath = filepath + ".lock"
|
||||
got_it = False
|
||||
for _ in range(16):
|
||||
try:
|
||||
os.mkdir(lockpath)
|
||||
got_it = True
|
||||
break
|
||||
except:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
if not got_it:
|
||||
return sys.exit(1)
|
||||
|
||||
try:
|
||||
oldpath = filepath + ".old"
|
||||
os.rename(filepath, oldpath)
|
||||
with open(oldpath, "r", encoding="utf-8", errors="replace") as infile, open(
|
||||
filepath, "w", encoding="utf-8", errors="replace"
|
||||
) as outfile:
|
||||
outfile.write(msg_text)
|
||||
while True:
|
||||
buf = infile.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
outfile.write(buf)
|
||||
finally:
|
||||
try:
|
||||
os.unlink(oldpath)
|
||||
except:
|
||||
pass
|
||||
os.rmdir(lockpath)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
msg_info = json.loads(sys.argv[1])
|
||||
# print(msg_info)
|
||||
|
||||
try:
|
||||
dt = datetime.fromtimestamp(msg_info["at"], timezone.utc)
|
||||
except:
|
||||
dt = datetime.utcfromtimestamp(msg_info["at"])
|
||||
|
||||
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
||||
|
||||
msg_text = TEMPLATE % msg_info
|
||||
|
||||
filepath = os.path.join(msg_info["ap"], FILENAME)
|
||||
|
||||
if DESCENDING and os.path.exists(filepath):
|
||||
write_descending(filepath, msg_text)
|
||||
else:
|
||||
write_ascending(filepath, msg_text)
|
||||
|
||||
print(msg_text)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -4,7 +4,7 @@ import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from plyer import notification
|
||||
|
||||
|
||||
@@ -43,7 +43,8 @@ def main():
|
||||
fp = inf["ap"]
|
||||
sz = humansize(inf["sz"])
|
||||
dp, fn = os.path.split(fp)
|
||||
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
|
||||
dt = datetime.fromtimestamp(inf["mt"], timezone.utc)
|
||||
mt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||
title = "File received"
|
||||
|
||||
@@ -37,6 +37,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
_ = r"""
|
||||
@@ -43,8 +43,11 @@ except:
|
||||
return p
|
||||
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
def humantime(ts):
|
||||
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
|
||||
return datetime.fromtimestamp(ts, UTC).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def find_files_root(inf):
|
||||
@@ -96,7 +99,7 @@ def main():
|
||||
|
||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||
ret.append("")
|
||||
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
|
||||
ftime = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S.%f")
|
||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||
|
||||
@@ -7,6 +7,7 @@ set -e
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins
|
||||
# linux/arch: requires gcc make cmake patchelf python3 ffmpeg fftw libsndfile python-{numpy,wheel,pip,setuptools}
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
@@ -227,15 +228,16 @@ install_vamp() {
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
|
||||
) <vamp-plugin-sdk-2.9.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
|
||||
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
||||
) <vamp-plugin-sdk-2.10.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.10.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
./configure --prefix=$HOME/pe/vamp-sdk
|
||||
printf '%s\n' "int main(int argc, char **argv) { return 0; }" > host/vamp-simple-host.cpp
|
||||
./configure --disable-programs --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
@@ -250,8 +252,9 @@ install_vamp() {
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux ||
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I/usr/include/vamp-sdk`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS="-L$HOME/pe/vamp-sdk/lib -L/usr/lib64"
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
|
||||
@@ -65,6 +65,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
@@ -46,13 +46,20 @@ import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
info = log = dbg = None
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
print(
|
||||
@@ -64,6 +71,13 @@ print(
|
||||
)
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = log = dbg = null_log
|
||||
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
@@ -83,13 +97,6 @@ except:
|
||||
raise
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
def termsafe(txt):
|
||||
try:
|
||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||
@@ -118,10 +125,6 @@ def fancy_log(msg):
|
||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
def hexler(binary):
|
||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||
@@ -176,7 +179,7 @@ class RecentLog(object):
|
||||
def put(self, msg):
|
||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||
if self.f:
|
||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
||||
fmsg = " ".join([datetime.now(UTC).strftime("%H%M%S.%f"), str(msg)])
|
||||
self.f.write(fmsg.encode("utf-8"))
|
||||
|
||||
with self.mtx:
|
||||
|
||||
@@ -20,12 +20,13 @@ import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
@@ -155,11 +156,10 @@ th {
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
dt = datetime.fromtimestamp(at if at > 0 else mt, UTC)
|
||||
v = [
|
||||
w[:16],
|
||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
dt.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
@@ -12,13 +12,13 @@ done
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
|
||||
example for running straight from source (instead of using an sfx):
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
|
||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||
you should add /home/foo/.local as a VOLDIR
|
||||
@@ -28,6 +28,16 @@ exit 1
|
||||
}
|
||||
|
||||
|
||||
errs=
|
||||
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
|
||||
command -v $c >/dev/null || {
|
||||
echo ERROR: command not found: $c
|
||||
errs=1
|
||||
}
|
||||
done
|
||||
[ $errs ] && exit 1
|
||||
|
||||
|
||||
# read arguments
|
||||
trap help EXIT
|
||||
jail="$(realpath "$1")"; shift
|
||||
@@ -58,11 +68,18 @@ cpp="$1"; shift
|
||||
}
|
||||
trap - EXIT
|
||||
|
||||
usr="$(getent passwd $uid | cut -d: -f1)"
|
||||
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
|
||||
uid="$(getent passwd $uid | cut -d: -f3)"
|
||||
|
||||
grp="$(getent group $gid | cut -d: -f1)"
|
||||
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
|
||||
gid="$(getent group $gid | cut -d: -f3)"
|
||||
|
||||
# debug/vis
|
||||
echo
|
||||
echo "chroot-dir = $jail"
|
||||
echo "user:group = $uid:$gid"
|
||||
echo "user:group = $uid:$gid ($usr:$grp)"
|
||||
echo " copyparty = $cpp"
|
||||
echo
|
||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||
@@ -80,34 +97,39 @@ jail="${jail%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
||||
i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
|
||||
mkdir -p "$jail$v"
|
||||
mount --bind "$v" "$jail$v"
|
||||
done
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
wait -f -p rv $p || true
|
||||
trap - EXIT
|
||||
wait -f -n $p && rv=0 || rv=$?
|
||||
cd /
|
||||
echo "stopping chroot..."
|
||||
lsof "$jail" | grep -F "$jail" &&
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
|
||||
echo "chroot is in use; will not unmount" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
LC_ALL=C sort -r | while IFS= read -r v; do
|
||||
umount "$v" && echo "umount OK: $v"
|
||||
done
|
||||
}
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
@@ -128,8 +150,8 @@ chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
export HOME="$(getent passwd $uid | cut -d: -f6)"
|
||||
export USER="$usr"
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
@@ -137,5 +159,5 @@ export LOGNAME="$USER"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill -USR1 $p' USR1
|
||||
trap 'kill $p' INT TERM
|
||||
trap 'trap - INT TERM; kill $p' INT TERM
|
||||
wait
|
||||
|
||||
118
bin/u2c.py
118
bin/u2c.py
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
S_VERSION = "1.9"
|
||||
S_BUILD_DT = "2023-05-07"
|
||||
S_VERSION = "1.15"
|
||||
S_BUILD_DT = "2024-02-18"
|
||||
|
||||
"""
|
||||
u2c.py: upload to copyparty
|
||||
@@ -14,6 +14,7 @@ https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
|
||||
- if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
@@ -28,7 +29,7 @@ import platform
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
EXE = sys.executable.endswith("exe")
|
||||
EXE = bool(getattr(sys, "frozen", False))
|
||||
|
||||
try:
|
||||
import argparse
|
||||
@@ -39,7 +40,7 @@ except:
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
except ImportError as ex:
|
||||
if EXE:
|
||||
raise
|
||||
elif sys.version_info > (2, 7):
|
||||
@@ -50,7 +51,7 @@ except ImportError:
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
|
||||
|
||||
print(m.format(sys.executable))
|
||||
print(m.format(sys.executable), "\nspecifically,", ex)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -104,12 +105,14 @@ class File(object):
|
||||
# set by handshake
|
||||
self.recheck = False # duplicate; redo handshake after all files done
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
self.wark = "" # type: str
|
||||
self.url = "" # type: str
|
||||
self.nhs = 0
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
self.cd = 0
|
||||
|
||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
@@ -220,6 +223,7 @@ class MTHash(object):
|
||||
|
||||
def hash_at(self, nch):
|
||||
f = self.f
|
||||
assert f
|
||||
ofs = ofs0 = nch * self.csz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||
@@ -411,10 +415,11 @@ def walkdir(err, top, seen):
|
||||
err.append((ap, str(ex)))
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
def walkdirs(err, tops, excl):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
if not VT100:
|
||||
excl = excl.replace("/", r"\\")
|
||||
za = []
|
||||
for td in tops:
|
||||
try:
|
||||
@@ -431,6 +436,8 @@ def walkdirs(err, tops):
|
||||
za = [x.replace(b"/", b"\\") for x in za]
|
||||
tops = za
|
||||
|
||||
ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
|
||||
|
||||
for top in tops:
|
||||
isdir = os.path.isdir(top)
|
||||
if top[-1:] == sep:
|
||||
@@ -443,6 +450,8 @@ def walkdirs(err, tops):
|
||||
|
||||
if isdir:
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
if ptn.match(ap):
|
||||
continue
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
@@ -455,7 +464,7 @@ def quotep(btxt):
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
return quot1.replace(b" ", b"+") # type: ignore
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
@@ -492,7 +501,7 @@ def up2k_chunksize(filesize):
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb, mth):
|
||||
# type: (File, any, any) -> None
|
||||
# type: (File, Any, Any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
@@ -551,8 +560,11 @@ def handshake(ar, file, search):
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
elif ar.dr:
|
||||
req["replace"] = True
|
||||
else:
|
||||
if ar.touch:
|
||||
req["umod"] = True
|
||||
if ar.dr:
|
||||
req["replace"] = True
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
||||
if pw:
|
||||
@@ -592,7 +604,7 @@ def handshake(ar, file, search):
|
||||
raise
|
||||
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
time.sleep(ar.cd)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
@@ -654,7 +666,7 @@ class Ctl(object):
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
for _, _, inf in walkdirs(err, ar.files, ar.x):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
continue
|
||||
|
||||
@@ -683,6 +695,7 @@ class Ctl(object):
|
||||
|
||||
def __init__(self, ar, stats=None):
|
||||
self.ok = False
|
||||
self.errs = 0
|
||||
self.ar = ar
|
||||
self.stats = stats or self._scan()
|
||||
if not self.stats:
|
||||
@@ -696,7 +709,7 @@ class Ctl(object):
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
self.filegen = walkdirs([], ar.files, ar.x)
|
||||
self.recheck = [] # type: list[File]
|
||||
|
||||
if ar.safe:
|
||||
@@ -730,7 +743,7 @@ class Ctl(object):
|
||||
|
||||
self._fancy()
|
||||
|
||||
self.ok = True
|
||||
self.ok = not self.errs
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
@@ -833,12 +846,12 @@ class Ctl(object):
|
||||
txt = " "
|
||||
|
||||
if not self.up_br:
|
||||
spd = self.hash_b / (time.time() - self.t0)
|
||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||
spd = self.hash_b / ((time.time() - self.t0) or 1)
|
||||
eta = (self.nbytes - self.hash_b) / (spd or 1)
|
||||
else:
|
||||
spd = self.up_br / (time.time() - self.t0_up)
|
||||
spd = self.up_br / ((time.time() - self.t0_up) or 1)
|
||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
eta = (self.nbytes - self.up_b) / (spd or 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
@@ -864,6 +877,8 @@ class Ctl(object):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
ptn = re.compile(self.ar.x.encode("utf-8"), re.I) if self.ar.x else None
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
@@ -896,13 +911,29 @@ class Ctl(object):
|
||||
if self.ar.drd:
|
||||
dp = os.path.join(top, rd)
|
||||
lnodes = set(os.listdir(dp))
|
||||
bnames = [x for x in ls if x not in lnodes]
|
||||
if bnames:
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
||||
req_ses.post(self.ar.url + "?delete", json=locs)
|
||||
if ptn:
|
||||
zs = dp.replace(sep, b"/").rstrip(b"/") + b"/"
|
||||
zls = [zs + x for x in lnodes]
|
||||
zls = [x for x in zls if not ptn.match(x)]
|
||||
lnodes = [x.split(b"/")[-1] for x in zls]
|
||||
bnames = [x for x in ls if x not in lnodes and x != b".hist"]
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
while locs:
|
||||
req = locs
|
||||
while req:
|
||||
print("DELETING ~%s/#%s" % (srd, len(req)))
|
||||
r = req_ses.post(self.ar.url + "?delete", json=req)
|
||||
if r.status_code == 413 and "json 2big" in r.text:
|
||||
print(" (delete request too big; slicing...)")
|
||||
req = req[: len(req) // 2]
|
||||
continue
|
||||
elif not r:
|
||||
t = "delete request failed: %r %s"
|
||||
raise Exception(t % (r, r.text))
|
||||
break
|
||||
locs = locs[len(req) :]
|
||||
|
||||
if isdir:
|
||||
continue
|
||||
@@ -955,13 +986,22 @@ class Ctl(object):
|
||||
self.q_upload.put(None)
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
if not VT100:
|
||||
upath = upath.lstrip("\\?")
|
||||
|
||||
file.nhs += 1
|
||||
if file.nhs > 32:
|
||||
print("ERROR: giving up on file %s" % (upath))
|
||||
self.errs += 1
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
while time.time() < file.cd:
|
||||
time.sleep(0.1)
|
||||
|
||||
hs, sprs = handshake(self.ar, file, search)
|
||||
if search:
|
||||
if hs:
|
||||
@@ -1027,14 +1067,13 @@ class Ctl(object):
|
||||
self.uploader_busy += 1
|
||||
self.t0_up = self.t0_up or time.time()
|
||||
|
||||
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
|
||||
stats = zs.format(
|
||||
stats = "%d/%d/%d/%d %d/%d %s" % (
|
||||
self.up_f,
|
||||
len(self.recheck),
|
||||
self.uploader_busy,
|
||||
self.nfiles - self.up_f,
|
||||
int(self.nbytes / (1024 * 1024)),
|
||||
int((self.nbytes - self.up_b) / (1024 * 1024)),
|
||||
self.nbytes // (1024 * 1024),
|
||||
(self.nbytes - self.up_b) // (1024 * 1024),
|
||||
self.eta,
|
||||
)
|
||||
|
||||
@@ -1044,6 +1083,7 @@ class Ctl(object):
|
||||
except Exception as ex:
|
||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||
eprint(t.format(file.name, cid[:8], ex))
|
||||
file.cd = time.time() + self.ar.cd
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
@@ -1097,7 +1137,9 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
|
||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||
|
||||
ap = app.add_argument_group("compatibility")
|
||||
@@ -1113,7 +1155,8 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
||||
ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
@@ -1170,7 +1213,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
raise
|
||||
|
||||
if ar.cls:
|
||||
eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
eprint("\033[H\033[2J\033[3J", end="")
|
||||
|
||||
ctl = Ctl(ar)
|
||||
|
||||
@@ -1180,6 +1223,9 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ar.z = True
|
||||
ctl = Ctl(ar, ctl.stats)
|
||||
|
||||
if ctl.errs:
|
||||
print("WARNING: %d errors" % (ctl.errs))
|
||||
|
||||
sys.exit(0 if ctl.ok else 1)
|
||||
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ def main():
|
||||
ofs = ln.find("{")
|
||||
j = json.loads(ln[ofs:])
|
||||
except:
|
||||
pass
|
||||
continue
|
||||
|
||||
w = j["wark"]
|
||||
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
||||
|
||||
@@ -16,11 +16,13 @@
|
||||
* sharex config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
* the `act:bput` thing is optional since copyparty v1.9.29
|
||||
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
||||
|
||||
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
|
||||
* `pw`: password (remove `Parameters` if anon-write)
|
||||
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
||||
* browser integration, kind of? custom rightclick actions and stuff
|
||||
* rightclick a pic and send it to copyparty straight from your browser
|
||||
* for the [contextlet](https://addons.mozilla.org/en-US/firefox/addon/contextlets/) firefox extension
|
||||
|
||||
### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
|
||||
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
|
||||
|
||||
@@ -26,8 +26,8 @@ a {
|
||||
<script>
|
||||
|
||||
var a = document.getElementById('redir'),
|
||||
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = window.location.hostname || '127.0.0.1',
|
||||
proto = location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = location.hostname || '127.0.0.1',
|
||||
port = a.getAttribute('href').split(':').pop().split('/')[0],
|
||||
url = proto + '://' + loc + ':' + port + '/';
|
||||
|
||||
@@ -35,7 +35,7 @@ a.setAttribute('href', url);
|
||||
document.getElementById('desc').innerHTML = 'redirecting to';
|
||||
|
||||
setTimeout(function() {
|
||||
window.location.href = url;
|
||||
location.href = url;
|
||||
}, 500);
|
||||
|
||||
</script>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
server 127.0.0.1:3923 fail_timeout=1s;
|
||||
keepalive 1;
|
||||
}
|
||||
server {
|
||||
@@ -34,6 +34,8 @@ server {
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# NOTE: with cloudflare you want this instead:
|
||||
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
|
||||
@@ -138,6 +138,8 @@ in {
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"h" (html): "get", but folders return their index.html
|
||||
"a" (admin): can see uploader IPs, config-reload
|
||||
|
||||
For example: "rwmd"
|
||||
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
# Maintainer: icxes <dev.null@need.moe>
|
||||
pkgname=copyparty
|
||||
pkgver="1.8.0"
|
||||
pkgver="1.10.2"
|
||||
pkgrel=1
|
||||
pkgdesc="Portable file sharing hub"
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("python" "lsof" "python-jinja")
|
||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"cfssl: generate TLS certificates on startup (pointless when reverse-proxied)"
|
||||
"python-mutagen: music tags (alternative)"
|
||||
"python-pillow: thumbnails for images"
|
||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||
@@ -20,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}.d/init" )
|
||||
sha256sums=("e8ac0442abbf4c95428db0ad35a6965ad73038e0f384795907cde08af9e76612")
|
||||
sha256sums=("001be979a0fdd8ace7d48cab79a137c13b87b78be35fc9633430f45a2831c3ed")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere
|
||||
# and read config from `/etc/copyparty.d/*.conf`
|
||||
# in a chroot, preventing accidental access elsewhere,
|
||||
# and read copyparty config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `1000` and `--`
|
||||
# by listing them between the last `cpp` and `--`
|
||||
#
|
||||
# `1000 1000` = what user to run copyparty as
|
||||
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
@@ -24,7 +24,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
|
||||
/etc/copyparty.d \
|
||||
-- \
|
||||
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||
withHashedPasswords ? true,
|
||||
|
||||
# generate TLS certificates on startup (pointless when reverse-proxied)
|
||||
withCertgen ? false,
|
||||
|
||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||
withThumbnails ? true,
|
||||
|
||||
@@ -34,6 +37,7 @@ let
|
||||
]
|
||||
++ lib.optional withSMB impacket
|
||||
++ lib.optional withFTPS pyopenssl
|
||||
++ lib.optional withCertgen cfssl
|
||||
++ lib.optional withThumbnails pillow
|
||||
++ lib.optional withFastThumbnails pyvips
|
||||
++ lib.optional withMediaProcessing ffmpeg
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.8.0/copyparty-sfx.py",
|
||||
"version": "1.8.0",
|
||||
"hash": "sha256-7e0XL1r+m7e7scOsvbG9I7jk/Z24OGBuWJHxST8Ko7Y="
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.10.2/copyparty-sfx.py",
|
||||
"version": "1.10.2",
|
||||
"hash": "sha256-O9lkN30gy3kwIH+39O4dN7agZPkuH36BDTk8mEsQCVg="
|
||||
}
|
||||
@@ -10,7 +10,7 @@ name="copyparty"
|
||||
rcvar="copyparty_enable"
|
||||
copyparty_user="copyparty"
|
||||
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
pidfile="/var/run/copyparty/${name}.pid"
|
||||
command="/usr/sbin/daemon"
|
||||
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||
|
||||
11
contrib/send-to-cpp.contextlet.json
Normal file
11
contrib/send-to-cpp.contextlet.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"code": "// https://addons.mozilla.org/en-US/firefox/addon/contextlets/\n// https://github.com/davidmhammond/contextlets\n\nvar url = 'http://partybox.local:3923/';\nvar pw = 'wark';\n\nvar xhr = new XMLHttpRequest();\nxhr.msg = this.info.linkUrl || this.info.srcUrl;\nxhr.open('POST', url, true);\nxhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');\nxhr.setRequestHeader('PW', pw);\nxhr.send('msg=' + xhr.msg);\n",
|
||||
"contexts": [
|
||||
"link"
|
||||
],
|
||||
"icons": null,
|
||||
"patterns": "",
|
||||
"scope": "background",
|
||||
"title": "send to cpp",
|
||||
"type": "normal"
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"Version": "13.5.0",
|
||||
"Name": "copyparty-html",
|
||||
"DestinationType": "ImageUploader",
|
||||
"RequestMethod": "POST",
|
||||
"RequestURL": "http://127.0.0.1:3923/sharex",
|
||||
"Parameters": {
|
||||
"pw": "wark"
|
||||
},
|
||||
"Body": "MultipartFormData",
|
||||
"Arguments": {
|
||||
"act": "bput"
|
||||
},
|
||||
"FileFormName": "f",
|
||||
"RegexList": [
|
||||
"bytes // <a href=\"/([^\"]+)\""
|
||||
],
|
||||
"URL": "http://127.0.0.1:3923/$regex:1|1$"
|
||||
}
|
||||
@@ -1,17 +1,19 @@
{
"Version": "13.5.0",
"Version": "15.0.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Headers": {
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
"URL": "{json:files[0].url}"
}
13
contrib/sharex12.sxcu
Normal file
@@ -0,0 +1,13 @@
{
"Name": "copyparty",
"DestinationType": "ImageUploader, TextUploader, FileUploader",
"RequestURL": "http://127.0.0.1:3923/sharex",
"FileFormName": "f",
"Arguments": {
"act": "bput"
},
"Headers": {
"accept": "url",
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
}
}
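A rough Python equivalent of what these .sxcu profiles make ShareX do (a multipart POST of form-field "f" to /sharex with act=bput and the pw header), assuming the requests library and a local copyparty on the default port; the filename is just an example:

import requests

with open("screenshot.png", "rb") as f:
    r = requests.post(
        "http://127.0.0.1:3923/sharex",
        headers={"pw": "wark", "accept": "url"},  # password + ask for a plaintext URL back
        data={"act": "bput"},                     # same as "Arguments" in the profiles
        files={"f": f},                           # "FileFormName": "f"
    )
print(r.status_code, r.text.strip())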
42
contrib/systemd/copyparty.conf
Normal file
@@ -0,0 +1,42 @@
# not actually YAML but lets pretend:
# -*- mode: yaml -*-
# vim: ft=yaml:


# put this file in /etc/


[global]
e2dsa # enable file indexing and filesystem scanning
e2ts # and enable multimedia indexing
ansi # and colors in log messages

# disable logging to stdout/journalctl and log to a file instead;
# $LOGS_DIRECTORY is usually /var/log/copyparty (comes from systemd)
# and copyparty replaces %Y-%m%d with Year-MonthDay, so the
# full path will be something like /var/log/copyparty/2023-1130.txt
# (note: enable compression by adding .xz at the end)
q, lo: $LOGS_DIRECTORY/%Y-%m%d.log

# p: 80,443,3923 # listen on 80/443 as well (requires CAP_NET_BIND_SERVICE)
# i: 127.0.0.1 # only allow connections from localhost (reverse-proxies)
# ftp: 3921 # enable ftp server on port 3921
# p: 3939 # listen on another port
# df: 16 # stop accepting uploads if less than 16 GB free disk space
# ver # show copyparty version in the controlpanel
# grid # show thumbnails/grid-view by default
# theme: 2 # monokai
# name: datasaver # change the server-name that's displayed in the browser
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
# no-robots, force-js # make it harder for search engines to read your server


[accounts]
ed: wark # username: password


[/] # create a volume at "/" (the webroot), which will
/mnt # share the contents of the "/mnt" folder
accs:
rw: * # everyone gets read-write access, but
rwmda: ed # the user "ed" gets read-write-move-delete-admin
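A quick sanity-check of the config above, as a sketch: assuming copyparty was started with it and is listening on the default port 3923, the "ed"/"wark" account from [accounts] should be able to reach the webroot volume:

from urllib import request

req = request.Request("http://127.0.0.1:3923/")
req.add_header("PW", "wark")  # the ed:wark account from [accounts]
with request.urlopen(req) as resp:
    print("webroot reachable, HTTP", resp.status)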
@@ -1,28 +1,27 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
# this will start `/usr/local/bin/copyparty-sfx.py` and
# read copyparty config from `/etc/copyparty.conf`, for example:
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/systemd/copyparty.conf
#
# installation:
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
# cp -pv copyparty.service /etc/systemd/system/
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
# firewall-cmd --reload
# cp -pv copyparty.service /etc/systemd/system/
# cp -pv copyparty.conf /etc/
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
# systemctl daemon-reload && systemctl enable --now copyparty
#
# if it fails to start, first check this: systemctl status copyparty
# then try starting it while viewing logs: journalctl -fan 100
# then try starting it while viewing logs:
# journalctl -fan 100
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
#
# you may want to:
# change "User=cpp" and "/home/cpp/" to another user
# remove the nft lines to only listen on port 3923
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
# - edit /etc/copyparty.conf to configure copyparty
# and in the ExecStart= line:
# change '/usr/bin/python3' to another interpreter
# change '/mnt::rw' to another location or permission-set
# add '-q' to disable logging on busy servers
# add '-i 127.0.0.1' to only allow local connections
# add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing
# remove '--ansi' to disable colored logs
# - change '/usr/bin/python3' to another interpreter
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
@@ -30,11 +29,9 @@
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
#
# unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
#
# keep ExecStartPre before ExecStart, at least on rhel8
########################################################################
########################################################################


[Unit]
Description=copyparty file server
@@ -44,23 +41,52 @@ Type=notify
SyslogIdentifier=copyparty
Environment=PYTHONUNBUFFERED=x
ExecReload=/bin/kill -s USR1 $MAINPID
PermissionsStartOnly=true

# user to run as + where the TLS certificate is (if any)
User=cpp
Environment=XDG_CONFIG_HOME=/home/cpp/.config
## user to run as + where the TLS certificate is (if any)
##
User=copyparty
Group=copyparty
WorkingDirectory=/var/lib/copyparty
Environment=XDG_CONFIG_HOME=/var/lib/copyparty/.config

# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
ExecStartPre=+nft add table ip nat
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
## OPTIONAL: allow copyparty to listen on low ports (like 80/443);
## you need to uncomment the "p: 80,443,3923" in the config too
## ------------------------------------------------------------
## a slightly safer alternative is to enable partyalone.service
## which does portforwarding with nftables instead, but an even
## better option is to use a reverse-proxy (nginx/caddy/...)
##
AmbientCapabilities=CAP_NET_BIND_SERVICE

# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
## some quick hardening; TODO port more from the nixos package
##
MemoryMax=50%
MemorySwapMax=50%
ProtectClock=true
ProtectControlGroups=true
ProtectHostname=true
ProtectKernelLogs=true
ProtectKernelModules=true
ProtectKernelTunables=true
ProtectProc=invisible
RemoveIPC=true
RestrictNamespaces=true
RestrictRealtime=true
RestrictSUIDSGID=true

# copyparty settings
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
## create a directory for logfiles;
## this defines $LOGS_DIRECTORY which is used in copyparty.conf
##
LogsDirectory=copyparty

## finally, start copyparty and give it the config file:
##
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -c /etc/copyparty.conf

# NOTE: if you installed copyparty from an OS package repo (nice)
# then you probably want something like this instead:
#ExecStart=/usr/bin/copyparty -c /etc/copyparty.conf

[Install]
WantedBy=multi-user.target
@@ -1,5 +1,5 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere
# in a chroot, preventing accidental access elsewhere,
# and share '/mnt' with anonymous read+write
#
# installation:
@@ -7,9 +7,9 @@
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
#
# expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--`
# by listing them between the last `cpp` and `--`
#
# `1000 1000` = what user to run copyparty as
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
#
# you may want to:
# change '/mnt::rw' to another location or permission-set
@@ -32,7 +32,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

# run copyparty
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail cpp cpp \
/mnt \
-- \
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw

[Install]
2
contrib/windows/copyparty-ctmp.bat
Executable file
@@ -0,0 +1,2 @@
rem run copyparty.exe on machines with busted environment variables
cmd /v /c "set TMP=\tmp && copyparty.exe"
@@ -23,7 +23,7 @@ if not PY2:
|
||||
unicode: Callable[[Any], str] = str
|
||||
else:
|
||||
sys.dont_write_bytecode = True
|
||||
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||
unicode = unicode # type: ignore
|
||||
|
||||
WINDOWS: Any = (
|
||||
[int(x) for x in platform.version().split(".")]
|
||||
|
||||
@@ -19,23 +19,39 @@ import threading
|
||||
import time
|
||||
import traceback
|
||||
import uuid
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__init__ import (
|
||||
ANYWIN,
|
||||
CORES,
|
||||
EXE,
|
||||
MACOS,
|
||||
PY2,
|
||||
VT100,
|
||||
WINDOWS,
|
||||
E,
|
||||
EnvParams,
|
||||
unicode,
|
||||
)
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .authsrv import expand_config_file, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .cfg import flagcats, onedash
|
||||
from .svchub import SvcHub
|
||||
from .util import (
|
||||
APPLESAN_TXT,
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
IMPLICATIONS,
|
||||
JINJA_VER,
|
||||
PARTFTPY_VER,
|
||||
PY_DESC,
|
||||
PYFTPD_VER,
|
||||
SQLITE_VER,
|
||||
UNPLICATIONS,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
dedent,
|
||||
min_ex,
|
||||
py_desc,
|
||||
pybin,
|
||||
termsize,
|
||||
wrap,
|
||||
@@ -140,9 +156,11 @@ def warn(msg: str) -> None:
|
||||
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def init_E(E: EnvParams) -> None:
|
||||
def init_E(EE: EnvParams) -> None:
|
||||
# __init__ runs 18 times when oxidized; do expensive stuff here
|
||||
|
||||
E = EE # pylint: disable=redefined-outer-name
|
||||
|
||||
def get_unixdir() -> str:
|
||||
paths: list[tuple[Callable[..., Any], str]] = [
|
||||
(os.environ.get, "XDG_CONFIG_HOME"),
|
||||
@@ -184,7 +202,10 @@ def init_E(E: EnvParams) -> None:
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
try:
|
||||
tf.extractall(tdn, filter="tar")
|
||||
except TypeError:
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
|
||||
return tdn
|
||||
|
||||
@@ -240,18 +261,13 @@ def get_srvname() -> str:
|
||||
return ret
|
||||
|
||||
|
||||
def get_fk_salt(cert_path) -> str:
|
||||
def get_fk_salt() -> str:
|
||||
fp = os.path.join(E.cfg, "fk-salt.txt")
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
if os.path.exists(cert_path):
|
||||
zi = os.path.getmtime(cert_path)
|
||||
ret = "{}".format(zi).encode("utf-8")
|
||||
else:
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
@@ -319,6 +335,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
assert ssl
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
@@ -352,6 +369,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
assert ssl
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -377,7 +395,7 @@ def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
|
||||
def args_from_cfg(cfg_path: str) -> list[str]:
|
||||
lines: list[str] = []
|
||||
expand_config_file(lines, cfg_path, "")
|
||||
expand_config_file(None, lines, cfg_path, "")
|
||||
lines = upgrade_cfg_fmt(None, argparse.Namespace(vc=False), lines, "")
|
||||
|
||||
ret: list[str] = []
|
||||
@@ -431,9 +449,9 @@ def disable_quickedit() -> None:
|
||||
if PY2:
|
||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||
|
||||
k32.GetStdHandle.errcheck = ecb
|
||||
k32.GetConsoleMode.errcheck = ecb
|
||||
k32.SetConsoleMode.errcheck = ecb
|
||||
k32.GetStdHandle.errcheck = ecb # type: ignore
|
||||
k32.GetConsoleMode.errcheck = ecb # type: ignore
|
||||
k32.SetConsoleMode.errcheck = ecb # type: ignore
|
||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
||||
|
||||
@@ -485,6 +503,10 @@ def get_sects():
|
||||
* "\033[33mperm\033[0m" is "permissions,username1,username2,..."
|
||||
* "\033[32mvolflag\033[0m" is config flags to set on this volume
|
||||
|
||||
--grp takes groupname:username1,username2,...
|
||||
and groupnames can be used instead of usernames in -v
|
||||
by prefixing the groupname with %
|
||||
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
@@ -492,6 +514,10 @@ def get_sects():
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"h" (html): "get", but folders return their index.html
|
||||
"." (dots): user can ask to show dotfiles in listings
|
||||
"a" (admin): can see uploader IPs, config-reload
|
||||
"A" ("all"): same as "rwmda." (read/write/move/delete/admin/dotfiles)
|
||||
|
||||
too many volflags to list here, see --help-flags
|
||||
|
||||
@@ -586,6 +612,7 @@ def get_sects():
|
||||
\033[36mxbd\033[35m executes CMD before a file delete
|
||||
\033[36mxad\033[35m executes CMD after a file delete
|
||||
\033[36mxm\033[35m executes CMD on message
|
||||
\033[36mxban\033[35m executes CMD if someone gets banned
|
||||
\033[0m
|
||||
can be defined as --args or volflags; for example \033[36m
|
||||
--xau notify-send
|
||||
@@ -621,6 +648,9 @@ def get_sects():
|
||||
executed program on STDIN instead of as argv arguments, and
|
||||
it also includes the wark (file-id/hash) as a json property
|
||||
|
||||
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
||||
if the program returns 0 (true/OK) then the ban will NOT happen
|
||||
|
||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||
@@ -640,6 +670,47 @@ def get_sects():
|
||||
"""
|
||||
),
|
||||
],
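As a concrete illustration (not part of the diff), a minimal event-hook script for something like --xau could look as follows; this assumes the default invocation style where the affected file's path arrives as a command-line argument, rather than the stdin/json variant described above:

#!/usr/bin/env python3
import sys

def main() -> int:
    fpath = sys.argv[1] if len(sys.argv) > 1 else ""
    print("upload finished:", fpath)
    # for an xban hook, returning 0 would cancel the ban as described above
    return 0

if __name__ == "__main__":
    sys.exit(main())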
|
||||
[
|
||||
"exp",
|
||||
"text expansion",
|
||||
dedent(
|
||||
"""
|
||||
specify --exp or the "exp" volflag to enable placeholder expansions
|
||||
in README.md / .prologue.html / .epilogue.html
|
||||
|
||||
--exp-md (volflag exp_md) holds the list of placeholders which can be
|
||||
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
|
||||
any placeholder not given in those lists will be ignored and shown as-is
|
||||
|
||||
the default list will expand the following placeholders:
|
||||
\033[36m{{self.ip}} \033[35mclient ip
|
||||
\033[36m{{self.ua}} \033[35mclient user-agent
|
||||
\033[36m{{self.uname}} \033[35mclient username
|
||||
\033[36m{{self.host}} \033[35mthe "Host" header, or the server's external IP otherwise
|
||||
\033[36m{{cfg.name}} \033[35mthe --name global-config
|
||||
\033[36m{{cfg.logout}} \033[35mthe --logout global-config
|
||||
\033[36m{{vf.scan}} \033[35mthe "scan" volflag
|
||||
\033[36m{{vf.thsize}} \033[35mthumbnail size
|
||||
\033[36m{{srv.itime}} \033[35mserver time in seconds
|
||||
\033[36m{{srv.htime}} \033[35mserver time as YY-mm-dd, HH:MM:SS (UTC)
|
||||
\033[36m{{hdr.cf_ipcountry}} \033[35mthe "CF-IPCountry" client header (probably blank)
|
||||
\033[0m
|
||||
so the following types of placeholders can be added to the lists:
|
||||
* any client header can be accessed through {{hdr.*}}
|
||||
* any variable in httpcli.py can be accessed through {{self.*}}
|
||||
* any global server setting can be accessed through {{cfg.*}}
|
||||
* any volflag can be accessed through {{vf.*}}
|
||||
|
||||
remove vf.scan from default list using --exp-md /vf.scan
|
||||
add "accept" header to def. list using --exp-md +hdr.accept
|
||||
|
||||
for performance reasons, expansion only happens while embedding
|
||||
documents into directory listings, and when accessing a ?doc=...
|
||||
link, but never otherwise, so if you click a -txt- link you'll
|
||||
have to refresh the page to apply expansion
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"ls",
|
||||
"volume inspection",
|
||||
@@ -653,6 +724,7 @@ def get_sects():
|
||||
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
||||
\033[36mp\033[0m exits 1 if any such symlinks are found
|
||||
\033[36mr\033[0m resumes startup after the listing
|
||||
|
||||
examples:
|
||||
--ls '**' # list all files which are possible to read
|
||||
--ls '**,*,ln' # check for dangerous symlinks
|
||||
@@ -686,9 +758,12 @@ def get_sects():
|
||||
"""
|
||||
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
|
||||
|
||||
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (password which do not start with '+') and then terminate afterwards
|
||||
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but
|
||||
copyparty will also hash and print any passwords that are non-hashed
|
||||
(passwords which do not start with '+') and then terminate afterwards
|
||||
|
||||
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
|
||||
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a
|
||||
list of optional comma-separated arguments:
|
||||
|
||||
\033[36m--ah-alg argon2\033[0m # which is the same as:
|
||||
\033[36m--ah-alg argon2,3,256,4,19\033[0m
|
||||
@@ -711,6 +786,40 @@ def get_sects():
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"zm",
|
||||
"mDNS debugging",
|
||||
dedent(
|
||||
"""
|
||||
the mDNS protocol is multicast-based, which means there are thousands
|
||||
of fun and interesting ways for it to break unexpectedly
|
||||
|
||||
things to check if it does not work at all:
|
||||
|
||||
* is there a firewall blocking port 5353 on either the server or client?
|
||||
(for example, clients may be able to send queries to copyparty,
|
||||
but the replies could get lost)
|
||||
|
||||
* is multicast accidentally disabled on either the server or client?
|
||||
(look for mDNS log messages saying "new client on [...]")
|
||||
|
||||
* the router/switch must be multicast and igmp capable
|
||||
|
||||
things to check if it works for a while but then it doesn't:
|
||||
|
||||
* is there a firewall blocking port 5353 on either the server or client?
|
||||
(copyparty may be unable to see the queries from the clients, but the
|
||||
clients may still be able to see the initial unsolicited announce,
|
||||
so it works for about 2 minutes after startup until TTL expires)
|
||||
|
||||
* does the client have multiple IPs on its interface, and some of the
|
||||
IPs are in subnets which the copyparty server is not a member of?
|
||||
|
||||
for both of the above intermittent issues, try --zm-spam 30
|
||||
(not spec-compliant but nothing will mind)
|
||||
"""
|
||||
),
|
||||
],
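To narrow down the firewall questions above, a small stand-alone probe can be run from a client machine; this is only a sketch using the standard DNS-SD service-enumeration name, and a silent timeout is not conclusive since some responders only reply via multicast to port 5353:

import socket, struct

MDNS_GRP, MDNS_PORT = "224.0.0.251", 5353

def ptr_query(name: str) -> bytes:
    # one PTR question; id=0 and no flags, as is normal for mDNS
    hdr = struct.pack("!6H", 0, 0, 1, 0, 0, 0)
    qname = b"".join(bytes([len(p)]) + p.encode() for p in name.split(".")) + b"\x00"
    return hdr + qname + struct.pack("!2H", 12, 1)  # QTYPE=PTR, QCLASS=IN

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(3)
s.sendto(ptr_query("_services._dns-sd._udp.local"), (MDNS_GRP, MDNS_PORT))
try:
    data, src = s.recvfrom(4096)
    print("got", len(data), "bytes of mDNS reply from", src[0])
except socket.timeout:
    print("no reply in 3s; check UDP port 5353 / multicast routing")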
|
||||
]
|
||||
|
||||
|
||||
@@ -734,12 +843,11 @@ def add_general(ap, nc, srvname):
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=nc, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m], see --help-accounts")
|
||||
ap2.add_argument("--grp", metavar="G:N,N", type=u, action="append", help="add group, \033[33mNAME\033[0m:\033[33mUSER1\033[0m,\033[33mUSER2\033[0m,\033[33m...\033[0m; example [\033[32madmins:ed,foo,bar\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||
@@ -750,33 +858,44 @@ def add_qr(ap, tty):
|
||||
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
|
||||
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
|
||||
ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]")
|
||||
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with PREFIX; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
||||
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with \033[33mPREFIX\033[0m; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
||||
ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable")
|
||||
ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)")
|
||||
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
|
||||
ap2.add_argument("--qrz", metavar="N", type=int, default=0, help="[\033[32m1\033[0m]=1x, [\033[32m2\033[0m]=2x, [\033[32m0\033[0m]=auto (try [\033[32m2\033[0m] on broken fonts)")
|
||||
|
||||
|
||||
def add_fs(ap):
|
||||
ap2 = ap.add_argument_group("filesystem options")
|
||||
rm_re_def = "5/0.1" if ANYWIN else "0/0"
|
||||
ap2.add_argument("--rm-retry", metavar="T/R", type=u, default=rm_re_def, help="if a file cannot be deleted because it is busy, continue trying for \033[33mT\033[0m seconds, retry every \033[33mR\033[0m seconds; disable with 0/0 (volflag=rm_retry)")
|
||||
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled, default=12h")
|
||||
ap2.add_argument("--u2abort", metavar="NUM", type=int, default=1, help="clients can abort incomplete uploads by using the unpost tab (requires \033[33m-e2d\033[0m). [\033[32m0\033[0m] = never allowed (disable feature), [\033[32m1\033[0m] = allow if client has the same IP as the upload AND is using the same account, [\033[32m2\033[0m] = just check the IP, [\033[32m3\033[0m] = just check account-name (volflag=u2abort)")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
|
||||
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m0\033[0m] = off and warn if enabled, [\033[32m1\033[0m] = off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
@@ -785,9 +904,12 @@ def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd), [\033[32m2\033[0m]=cloudflare, [\033[32m3\033[0m]=nginx, [\033[32m-1\033[0m]=closest proxy")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||
ap2.add_argument("--xff-src", metavar="CIDR", type=u, default="127.0.0.0/8, ::1/128", help="comma-separated list of trusted reverse-proxy CIDRs; only accept the real-ip header (\033[33m--xff-hdr\033[0m) and IdP headers if the incoming connection is from an IP within either of these subnets. Specify [\033[32mlan\033[0m] to allow all LAN / private / non-internet IPs. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
|
||||
ap2.add_argument("--ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
|
||||
if ANYWIN:
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
@@ -797,7 +919,7 @@ def add_network(ap):
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||
|
||||
|
||||
def add_tls(ap, cert_path):
|
||||
@@ -816,7 +938,7 @@ def add_cert(ap, cert_path):
|
||||
ap2 = ap.add_argument_group('TLS certificate generator options')
|
||||
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
||||
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each \033[33m--crt-ns\033[0m")
|
||||
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||
@@ -827,7 +949,15 @@ def add_cert(ap, cert_path):
|
||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
||||
|
||||
|
||||
def add_auth(ap):
|
||||
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||
ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
|
||||
ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
@@ -835,47 +965,49 @@ def add_zeroconf(ap):
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
||||
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
|
||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every \033[33mSEC\033[0m seconds (0=disable)")
|
||||
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
|
||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
|
||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every \033[33mSEC\033[0m seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]\n └─note: can be due to firewalls; make sure UDP port 5353 is open in both directions (on clients too)")
|
||||
|
||||
|
||||
def add_zc_mdns(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
|
||||
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable mDNS ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable mDNS on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
|
||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set)")
|
||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
||||
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
||||
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
||||
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
|
||||
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
|
||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not in")
|
||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||
|
||||
|
||||
def add_zc_ssdp(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
||||
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable SSDP ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable SSDP on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
||||
|
||||
|
||||
def add_ftp(ap):
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||
ap2 = ap.add_argument_group('FTP options (TCP only)')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--ftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
|
||||
@@ -889,14 +1021,27 @@ def add_webdav(ap):
|
||||
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
||||
|
||||
|
||||
def add_tftp(ap):
|
||||
ap2 = ap.add_argument_group('TFTP options (UDP only)')
|
||||
ap2.add_argument("--tftp", metavar="PORT", type=int, help="enable TFTP server on \033[33mPORT\033[0m, for example \033[32m69 \033[0mor \033[32m3969")
|
||||
ap2.add_argument("--tftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--tftpvv", action="store_true", help="verboser")
|
||||
ap2.add_argument("--tftp-no-fast", action="store_true", help="debug: disable optimizations")
|
||||
ap2.add_argument("--tftp-lsf", metavar="PTN", type=u, default="\\.?(dir|ls)(\\.txt)?", help="return a directory listing if a file with this name is requested and it does not exist; default matches .ls, dir, .dir.txt, ls.txt, ...")
|
||||
ap2.add_argument("--tftp-nols", action="store_true", help="if someone tries to download a directory, return an error instead of showing its directory listing")
|
||||
ap2.add_argument("--tftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--tftp-pr", metavar="P-P", type=u, help="the range of UDP ports to use for data transfer, for example \033[32m12000-13000")
|
||||
|
||||
|
||||
def add_smb(ap):
|
||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless \033[33m--smb-port\033[0m is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is DANGEROUS and buggy! Never expose to the internet!")
|
||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
||||
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
||||
ap2.add_argument("--smb-nwa-1", action="store_true", help="disable impacket#1433 workaround (truncate directory listings to 64kB)")
|
||||
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
|
||||
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
|
||||
ap2.add_argument("--smbv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--smbvv", action="store_true", help="verboser")
|
||||
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
||||
@@ -904,21 +1049,32 @@ def add_smb(ap):
|
||||
|
||||
def add_handlers(ap):
|
||||
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
|
||||
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing PY file")
|
||||
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing PY file")
|
||||
ap2.add_argument("--hot-handlers", action="store_true", help="reload handlers on each request -- expensive but convenient when hacking on stuff")
|
||||
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing \033[33mPY\033[0m file")
|
||||
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing \033[33mPY\033[0m file")
|
||||
ap2.add_argument("--hot-handlers", action="store_true", help="recompile handlers on each request -- expensive but convenient when hacking on stuff")
|
||||
|
||||
|
||||
def add_hooks(ap):
|
||||
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
|
||||
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
|
||||
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
|
||||
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
|
||||
ap2.add_argument("--nos-vst", action="store_true", help="disable volume state metrics (indexing, analyzing, activity)")
|
||||
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
|
||||
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
|
||||
|
||||
|
||||
def add_yolo(ap):
|
||||
@@ -930,47 +1086,54 @@ def add_yolo(ap):
|
||||
def add_optouts(ap):
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection in the terminal window, as this would pause execution)")
|
||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
|
||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
def add_safety(ap):
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (invalid requests, attempted exploits ++)")
|
||||
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to permissions g/G/h (decent replacement for \033[33m--ban-404\033[0m if that can't be used)")
|
||||
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for \033[33mMIN\033[0m minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for \033[33mB\033[0m minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)")
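
The --ban-pw / --ban-404 / --ban-403 / --ban-422 / --ban-url options above all share the same "N,W,B" value format (more than N hits within W minutes bans for B minutes, or "no" to disable). A hedged, illustrative way to read such a triple, not copyparty's actual parser:

```python
# illustrative only, not the actual copyparty implementation
def parse_nwb(val: str):
    """split an N,W,B triple such as "50,60,1440"; None means the ban is disabled"""
    if not val or val == "no":
        return None
    n, w, b = (int(x) for x in val.split(","))
    return n, w, b  # more than N hits in W minutes => ban for B minutes

assert parse_nwb("no") is None
assert parse_nwb("50,60,1440") == (50, 60, 1440)
```
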
def add_salt(ap, fk_salt, ah_salt):
|
||||
ap2 = ap.add_argument_group('salting options')
|
||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
|
||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
|
||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)")
|
||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)")
|
||||
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
||||
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||
ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
||||
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
@@ -979,20 +1142,23 @@ def add_shutdown(ap):
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after \033[33mWHEN\033[0m has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
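
The -lo and --stackmon values above are strftime patterns; the doubled %% in the help strings is only there because argparse %-formats help text, so a single literal percent sign reaches the user. For reference, expanding the -lo example with the standard library:

```python
import time

# the pattern from the -lo example above ("cpp-%Y-%m%d-%H%M%S.txt.xz")
pattern = "cpp-%Y-%m%d-%H%M%S.txt.xz"
print(time.strftime(pattern))  # e.g. cpp-2024-0315-181200.txt.xz
```
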
def add_admin(ap):
|
||||
@@ -1007,25 +1173,27 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=6, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
||||
ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32mfy\033[0m]=crop, [\033[32mfn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
|
||||
ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32mfy\033[0m]=yes, [\033[32mfn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs (avoids issues on some FFmpeg builds)")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs (faster, lower accuracy, avoids issues on some FFmpeg builds)")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than \033[33m--th-poke\033[0m seconds will get deleted every \033[33m--th-clean\033[0m seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling \033[33m-e2d\033[0m will make these case-insensitive, and try them as dotfiles (.folder.jpg), and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
@@ -1033,29 +1201,31 @@ def add_thumbnail(ap):
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
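
--ac-maxage above is a simple age limit on cached transcode output. The general idea (not copyparty's actual cleanup code) is ordinary mtime-based pruning; cache_dir below is a placeholder path:

```python
# generic age-based pruning sketch; cache_dir is a placeholder, not copyparty's layout
import os
import time

def prune_cache(cache_dir: str, maxage: int = 86400) -> None:
    cutoff = time.time() - maxage
    for entry in os.scandir(cache_dir):
        if entry.is_file() and entry.stat().st_mtime < cutoff:
            os.unlink(entry.path)
```
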
def add_db_general(ap, hcores):
|
||||
noidx = APPLESAN_TXT if MACOS else ""
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
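
Both --no-hash and --no-idx take a regex that is tested against absolute filesystem paths during e2ds scans (and --no-idx now defaults to APPLESAN_TXT on macOS). An illustrative check, reusing the "\.iso$" pattern from the volflag examples later in this diff:

```python
# illustrative only; mirrors the nohash=\.iso$ volflag example further down
import re

no_hash = re.compile(r"\.iso$")
for abspath in ("/srv/media/disc1.iso", "/srv/media/song.flac"):
    skip = bool(no_hash.search(abspath))
    print(abspath, "-> skip hashing" if skip else "-> hash")
```
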
@@ -1063,40 +1233,50 @@ def add_db_general(ap, hcores):
|
||||
def add_db_metadata(ap):
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan newly discovered files for metadata on startup; sets \033[33m-e2t\033[0m")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets \033[33m-e2ts\033[0m")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will detect more tags")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for ffprobe tag-scan")
|
||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for FFprobe tag-scan")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/ffprobe parsers")
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/FFprobe parsers")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.); either an entire replacement list, or add/remove stuff on the default-list with +foo or /bar", default=DEF_MTE)
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.); assign/add/remove same as \033[33m-mte\033[0m", default=DEF_MTH)
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag \033[33mM\033[0m using program \033[33mBIN\033[0m to parse the file")
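
The reworked -mte / -mth help above says the value can either replace the default list entirely, or modify it with +foo (add) and /bar (remove). A hedged sketch of that merge behaviour, not the parser copyparty actually uses:

```python
# illustrative only, not copyparty's actual list-merge logic
def merge_tags(default: str, spec: str) -> str:
    tags = [t for t in default.split(",") if t]
    mods = [m for m in spec.split(",") if m]
    if not any(m.startswith(("+", "/")) for m in mods):
        return spec  # a plain list fully replaces the default
    for m in mods:
        if m.startswith("+") and m[1:] not in tags:
            tags.append(m[1:])
        elif m.startswith("/") and m[1:] in tags:
            tags.remove(m[1:])
    return ",".join(tags)

print(merge_tags("artist,title,.bpm", "+key"))   # artist,title,.bpm,key
print(merge_tags("artist,title,.bpm", "/.bpm"))  # artist,title
```
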
def add_txt(ap):
|
||||
ap2 = ap.add_argument_group('textfile options')
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
|
||||
ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
|
||||
ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible by -np)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
@@ -1107,17 +1287,18 @@ def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; [\033[32m.\033[0m] (a single dot) = all files")
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
# fmt: on
|
||||
@@ -1134,10 +1315,13 @@ def run_argparse(
|
||||
|
||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||
|
||||
fk_salt = get_fk_salt(cert_path)
|
||||
fk_salt = get_fk_salt()
|
||||
ah_salt = get_ah_salt()
|
||||
|
||||
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
|
||||
# alpine peaks at 5 threads for some reason,
|
||||
# all others scale past that (but try to avoid SMT),
|
||||
# 5 should be plenty anyways (3 GiB/s on most machines)
|
||||
hcores = min(CORES, 5 if CORES > 8 else 4)
|
||||
|
||||
tty = os.environ.get("TERM", "").lower() == "linux"
|
||||
|
||||
@@ -1147,10 +1331,12 @@ def run_argparse(
|
||||
add_network(ap)
|
||||
add_tls(ap, cert_path)
|
||||
add_cert(ap, cert_path)
|
||||
add_auth(ap)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
add_zc_ssdp(ap)
|
||||
add_fs(ap)
|
||||
add_upload(ap)
|
||||
add_db_general(ap, hcores)
|
||||
add_db_metadata(ap)
|
||||
@@ -1158,6 +1344,7 @@ def run_argparse(
|
||||
add_transcoding(ap)
|
||||
add_ftp(ap)
|
||||
add_webdav(ap)
|
||||
add_tftp(ap)
|
||||
add_smb(ap)
|
||||
add_safety(ap)
|
||||
add_salt(ap, fk_salt, ah_salt)
|
||||
@@ -1166,6 +1353,8 @@ def run_argparse(
|
||||
add_yolo(ap)
|
||||
add_handlers(ap)
|
||||
add_hooks(ap)
|
||||
add_stats(ap)
|
||||
add_txt(ap)
|
||||
add_ui(ap, retry)
|
||||
add_admin(ap)
|
||||
add_logging(ap)
|
||||
@@ -1193,7 +1382,7 @@ def run_argparse(
|
||||
for k, h, t in sects:
|
||||
k2 = "help_" + k.replace("-", "_")
|
||||
if vars(ret)[k2]:
|
||||
lprint("# {} help page".format(k))
|
||||
lprint("# %s help page (%s)" % (k, h))
|
||||
lprint(t + "\033[0m")
|
||||
sys.exit(0)
|
||||
|
||||
@@ -1209,15 +1398,16 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite v{} | jinja2 v{} | pyftpd v{}\n\033[0m'
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite {} | jinja {} | pyftpd {} | tftp {}\n\033[0m'
|
||||
f = f.format(
|
||||
S_VERSION,
|
||||
CODENAME,
|
||||
S_BUILD_DT,
|
||||
py_desc().replace("[", "\033[90m["),
|
||||
PY_DESC.replace("[", "\033[90m["),
|
||||
SQLITE_VER,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
PARTFTPY_VER,
|
||||
)
|
||||
lprint(f)
|
||||
|
||||
@@ -1246,7 +1436,12 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
supp = args_from_cfg(v)
|
||||
argv.extend(supp)
|
||||
|
||||
deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
|
||||
deprecated: list[tuple[str, str]] = [
|
||||
("--salt", "--warksalt"),
|
||||
("--hdr-au-usr", "--idp-h-usr"),
|
||||
("--idp-h-sep", "--idp-gsep"),
|
||||
("--th-no-crop", "--th-crop=n"),
|
||||
]
|
||||
for dk, nk in deprecated:
|
||||
idx = -1
|
||||
ov = ""
|
||||
@@ -1284,7 +1479,7 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
|
||||
_, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
if hard > 0: # -1 == infinite
|
||||
nc = min(nc, hard // 4)
|
||||
nc = min(nc, int(hard / 4))
|
||||
except:
|
||||
nc = 512
|
||||
|
||||
@@ -1321,42 +1516,6 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
if al.ansi:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs: list[str] = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
m = re_vol.match(ostr)
|
||||
if not m:
|
||||
# not our problem
|
||||
nstrs.append(ostr)
|
||||
continue
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
na = [src, dst]
|
||||
mod = False
|
||||
for opt in perms.split(":"):
|
||||
if re.match("c[^,]", opt):
|
||||
mod = True
|
||||
na.append("c," + opt[1:])
|
||||
elif re.sub("^[rwmdgG]*", "", opt) and "," not in opt:
|
||||
mod = True
|
||||
perm = opt[0]
|
||||
if perm == "a":
|
||||
perm = "rw"
|
||||
na.append(perm + "," + opt[1:])
|
||||
else:
|
||||
na.append(opt)
|
||||
|
||||
nstr = ":".join(na)
|
||||
nstrs.append(nstr if mod else ostr)
|
||||
if mod:
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
|
||||
lprint(msg.format(ostr, nstr))
|
||||
anymod = True
|
||||
|
||||
if anymod:
|
||||
al.v = nstrs
|
||||
time.sleep(2)
|
||||
|
||||
# propagate implications
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if getattr(al, k1):
|
||||
@@ -1412,6 +1571,9 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
if sys.version_info < (3, 6):
|
||||
al.no_scandir = True
|
||||
|
||||
if not hasattr(os, "sendfile"):
|
||||
al.no_sendfile = True
|
||||
|
||||
# signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
SvcHub(al, dal, argv, "".join(printed)).run()
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (1, 8, 1)
|
||||
CODENAME = "argon"
|
||||
BUILD_DT = (2023, 7, 7)
|
||||
VERSION = (1, 11, 0)
|
||||
CODENAME = "You Can (Not) Proceed"
|
||||
BUILD_DT = (2024, 3, 15)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
File diff suppressed because it is too large
@@ -43,6 +43,10 @@ def open(p: str, *a, **ka) -> int:
|
||||
return os.open(fsenc(p), *a, **ka)
|
||||
|
||||
|
||||
def readlink(p: str) -> str:
|
||||
return fsdec(os.readlink(fsenc(p)))
|
||||
|
||||
|
||||
def rename(src: str, dst: str) -> None:
|
||||
return os.rename(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
@@ -46,8 +46,8 @@ class BrokerMp(object):
|
||||
self.num_workers = self.args.j or CORES
|
||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||
for n in range(1, self.num_workers + 1):
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) # type: ignore
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) # type: ignore
|
||||
|
||||
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
||||
Daemon(self.collector, "mp-sink-{}".format(n), (proc,))
|
||||
@@ -69,7 +69,7 @@ class BrokerMp(object):
|
||||
|
||||
while procs:
|
||||
if procs[-1].is_alive():
|
||||
time.sleep(0.1)
|
||||
time.sleep(0.05)
|
||||
continue
|
||||
|
||||
procs.pop()
|
||||
|
||||
@@ -76,7 +76,7 @@ class MpWorker(BrokerCli):
|
||||
pass
|
||||
|
||||
def logw(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("mp{}".format(self.n), msg, c)
|
||||
self.log("mp%d" % (self.n,), msg, c)
|
||||
|
||||
def main(self) -> None:
|
||||
while True:
|
||||
|
||||
@@ -132,7 +132,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
|
||||
try:
|
||||
expiry, inf = _read_crt(args, "srv.pem")
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
||||
if "sans" not in inf:
|
||||
raise Exception("no useable cert found")
|
||||
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
for n in names:
|
||||
if n not in inf["sans"]:
|
||||
@@ -181,6 +184,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
raise Exception("failed to translate cert: {}, {}".format(rc, se))
|
||||
|
||||
bname = os.path.join(args.crt_dir, "srv")
|
||||
try:
|
||||
os.unlink(bname + ".key")
|
||||
except:
|
||||
pass
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
@@ -216,7 +223,7 @@ def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
HAVE_CFSSL = False
|
||||
log("cert", "could not create TLS certificates: {}".format(ex), 3)
|
||||
if getattr(ex, "errno", 0) == errno.ENOENT:
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
|
||||
log("cert", t, 6)
|
||||
|
||||
ensure_cert(log, args)
|
||||
|
||||
@@ -9,21 +9,30 @@ onedash = set(zs.split())
|
||||
def vf_bmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple bools"""
|
||||
ret = {
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
"ed": "dots",
|
||||
"never_symlink": "neversymlink",
|
||||
"no_dedup": "copydupes",
|
||||
"no_dupe": "nodupe",
|
||||
"no_forget": "noforget",
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
"no_robots": "norobots",
|
||||
"no_thumb": "dthumb",
|
||||
"no_vthumb": "dvthumb",
|
||||
"no_athumb": "dathumb",
|
||||
}
|
||||
for k in (
|
||||
"dotsrch",
|
||||
"e2d",
|
||||
"e2ds",
|
||||
"e2dsa",
|
||||
"e2t",
|
||||
"e2ts",
|
||||
"e2tsr",
|
||||
"e2v",
|
||||
"e2vu",
|
||||
"e2vp",
|
||||
"exp",
|
||||
"grid",
|
||||
"hardlink",
|
||||
"magic",
|
||||
@@ -40,8 +49,26 @@ def vf_bmap() -> dict[str, str]:
|
||||
|
||||
def vf_vmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple values"""
|
||||
ret = {}
|
||||
for k in ("lg_sbf", "md_sbf", "unlist"):
|
||||
ret = {
|
||||
"no_hash": "nohash",
|
||||
"no_idx": "noidx",
|
||||
"re_maxage": "scan",
|
||||
"th_convt": "convt",
|
||||
"th_size": "thsize",
|
||||
"th_crop": "crop",
|
||||
"th_x3": "th3x",
|
||||
}
|
||||
for k in (
|
||||
"dbd",
|
||||
"lg_sbf",
|
||||
"md_sbf",
|
||||
"nrand",
|
||||
"rm_retry",
|
||||
"sort",
|
||||
"unlist",
|
||||
"u2abort",
|
||||
"u2ts",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
@@ -49,7 +76,23 @@ def vf_vmap() -> dict[str, str]:
|
||||
def vf_cmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: complex/lists"""
|
||||
ret = {}
|
||||
for k in ("dbd", "html_head", "mte", "mth", "nrand"):
|
||||
for k in (
|
||||
"exp_lg",
|
||||
"exp_md",
|
||||
"html_head",
|
||||
"mte",
|
||||
"mth",
|
||||
"mtp",
|
||||
"xad",
|
||||
"xar",
|
||||
"xau",
|
||||
"xban",
|
||||
"xbd",
|
||||
"xbr",
|
||||
"xbu",
|
||||
"xiu",
|
||||
"xm",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
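
vf_bmap, vf_vmap and vf_cmap only return name mappings (argv attribute name to volflag name); something else has to walk them and copy values across. A hedged sketch of how such a bool-mapping could be applied, with a stand-in Namespace rather than the real copyparty objects:

```python
# illustrative only; args is a stand-in argparse.Namespace, not the real copyparty object
import argparse

def apply_bmap(args: argparse.Namespace, flags: dict, bmap: dict) -> None:
    # copy each enabled boolean argv switch into the volume's flag dict
    for argv_name, volflag in bmap.items():
        if getattr(args, argv_name, False):
            flags[volflag] = True

args = argparse.Namespace(no_robots=True, no_thumb=False)
flags: dict = {}
apply_bmap(args, flags, {"no_robots": "norobots", "no_thumb": "dthumb"})
print(flags)  # {'norobots': True}
```
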
@@ -59,8 +102,12 @@ permdescs = {
|
||||
"w": 'write; upload files; need "r" to see the uploads',
|
||||
"m": 'move; move files and folders; need "w" at destination',
|
||||
"d": "delete; permanently delete files and folders",
|
||||
".": "dots; user can ask to show dotfiles in listings",
|
||||
"g": "get; download files, but cannot see folder contents",
|
||||
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
||||
"h": 'html; same as "g" but folders return their index.html',
|
||||
"a": "admin; can see uploader IPs, config-reload",
|
||||
"A": "all; same as 'rwmda.' (read/write/move/delete/dotfiles)",
|
||||
}
|
||||
|
||||
|
||||
@@ -70,6 +117,7 @@ flagcats = {
|
||||
"hardlink": "does dedup with hardlinks instead of symlinks",
|
||||
"neversymlink": "disables symlink fallback; full copy instead",
|
||||
"copydupes": "disables dedup, always saves full copies of dupes",
|
||||
"sparse": "force use of sparse files, mainly for s3-backed storage",
|
||||
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
||||
"nosub": "forces all uploads into the top folder of the vfs",
|
||||
"magic": "enables filetype detection for nameless uploads",
|
||||
@@ -83,6 +131,8 @@ flagcats = {
|
||||
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||
"rand": "force randomized filenames, 9 chars long by default",
|
||||
"nrand=N": "randomized filenames are N chars long",
|
||||
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
|
||||
"u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
|
||||
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||
"df=1g": "ensure 1 GiB free disk space",
|
||||
},
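
Several flags in the upload-rules block above (vmaxb, vmaxn, sz, df) take sizes with the b/k/m/g/t suffixes noted in the vmaxn entry. A hedged, illustrative suffix parser (not the one copyparty ships):

```python
# illustrative only, not copyparty's actual suffix parser
def parse_size(txt: str) -> int:
    mult = {"b": 1, "k": 1024, "m": 1024 ** 2, "g": 1024 ** 3, "t": 1024 ** 4}
    txt = txt.strip().lower()
    if txt and txt[-1] in mult:
        return int(float(txt[:-1]) * mult[txt[-1]])
    return int(txt)

lo, hi = (parse_size(x) for x in "1k-3m".split("-"))  # the sz=1k-3m example above
print(lo, hi)  # 1024 3145728
```
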
@@ -108,6 +158,7 @@ flagcats = {
|
||||
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
||||
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
||||
"noforget": "don't forget files when deleted from disk",
|
||||
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||
"xlink": "cross-volume dupe detection / linking",
|
||||
"xdev": "do not descend into other filesystems",
|
||||
@@ -124,6 +175,10 @@ flagcats = {
|
||||
"dvthumb": "disables video thumbnails",
|
||||
"dathumb": "disables audio thumbnails (spectrograms)",
|
||||
"dithumb": "disables image thumbnails",
|
||||
"thsize": "thumbnail res; WxH",
|
||||
"crop": "center-cropping (y/n/fy/fn)",
|
||||
"th3x": "3x resolution (y/n/fy/fn)",
|
||||
"convt": "conversion timeout in seconds",
|
||||
},
|
||||
"handlers\n(better explained in --help-handlers)": {
|
||||
"on404=PY": "handle 404s by executing PY file",
|
||||
@@ -138,9 +193,11 @@ flagcats = {
|
||||
"xbd=CMD": "execute CMD before a file delete",
|
||||
"xad=CMD": "execute CMD after a file delete",
|
||||
"xm=CMD": "execute CMD on message",
|
||||
"xban=CMD": "execute CMD if someone gets banned",
|
||||
},
|
||||
"client and ux": {
|
||||
"grid": "show grid/thumbnails by default",
|
||||
"sort": "default sort order",
|
||||
"unlist": "dont list files matching REGEX",
|
||||
"html_head=TXT": "includes TXT in the <head>",
|
||||
"robots": "allows indexing by search engines (default)",
|
||||
@@ -151,9 +208,13 @@ flagcats = {
|
||||
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||
"nohtml": "return html and markdown as text/html",
|
||||
},
|
||||
"others": {
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
|
||||
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
||||
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
||||
"rm_retry": "ms-windows: timeout for deleting busy files",
|
||||
"davauth": "ask webdav clients to login for all folders",
|
||||
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
|
||||
},
|
||||
|
||||
@@ -12,13 +12,15 @@ import time
|
||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||
from .__init__ import PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
Daemon,
|
||||
ODict,
|
||||
Pebkac,
|
||||
exclude_dotfiles,
|
||||
fsenc,
|
||||
@@ -30,15 +32,6 @@ from .util import (
|
||||
vjoin,
|
||||
)
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
@@ -81,6 +74,7 @@ class FtpAuth(DummyAuthorizer):
|
||||
asrv = self.hub.asrv
|
||||
uname = "*"
|
||||
if username != "anonymous":
|
||||
uname = ""
|
||||
for zs in (password, username):
|
||||
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||
if zs:
|
||||
@@ -94,6 +88,12 @@ class FtpAuth(DummyAuthorizer):
|
||||
if bonk:
|
||||
logging.warning("client banned: invalid passwords")
|
||||
bans[ip] = bonk
|
||||
try:
|
||||
# only possible if multiprocessing disabled
|
||||
self.hub.broker.httpsrv.bans[ip] = bonk # type: ignore
|
||||
self.hub.broker.httpsrv.nban += 1 # type: ignore
|
||||
except:
|
||||
pass
|
||||
|
||||
raise AuthenticationFailed("Authentication failed.")
|
||||
|
||||
@@ -134,6 +134,7 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
self.can_read = self.can_write = self.can_move = False
|
||||
self.can_delete = self.can_get = self.can_upget = False
|
||||
self.can_admin = self.can_dot = False
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
@@ -149,7 +150,7 @@ class FtpFs(AbstractedFS):
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").strip("/")
|
||||
rd, fn = os.path.split(vpath)
|
||||
if ANYWIN and relchk(rd):
|
||||
if relchk(rd):
|
||||
logging.warning("malicious vpath: %s", vpath)
|
||||
t = "Unsupported characters in [{}]"
|
||||
raise FSE(t.format(vpath), 1)
|
||||
@@ -168,7 +169,7 @@ class FtpFs(AbstractedFS):
|
||||
if not avfs:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
cr, cw, cm, cd, _, _ = avfs.can_access("", self.h.uname)
|
||||
cr, cw, cm, cd, _, _, _, _ = avfs.can_access("", self.h.uname)
|
||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
@@ -243,6 +244,8 @@ class FtpFs(AbstractedFS):
|
||||
self.can_delete,
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
self.can_admin,
|
||||
self.can_dot,
|
||||
) = avfs.can_access("", self.h.uname)
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
@@ -265,7 +268,7 @@ class FtpFs(AbstractedFS):
|
||||
vfs_ls = [x[0] for x in vfs_ls1]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
if not self.args.ed:
|
||||
if not self.can_dot:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
vfs_ls.sort()
|
||||
@@ -297,7 +300,7 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False, False)
|
||||
except Exception as ex:
|
||||
raise FSE(str(ex))
|
||||
|
||||
@@ -404,7 +407,16 @@ class FtpHandler(FTPHandler):
|
||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||
|
||||
cip = self.remote_ip
|
||||
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
|
||||
if self.args.ftp_ipa_nm and not self.args.ftp_ipa_nm.map(cip):
|
||||
logging.warning("client rejected (--ftp-ipa): %s", cip)
|
||||
self.connected = False
|
||||
conn.close()
|
||||
return
|
||||
|
||||
self.cli_ip = cip
|
||||
|
||||
# abspath->vpath mapping to resolve log_transfer paths
|
||||
self.vfs_map: dict[str, str] = {}
|
||||
@@ -534,6 +546,8 @@ class Ftpd(object):
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in ips:
|
||||
for h, lp in hs:
|
||||
|
||||
copyparty/httpcli.py: 1090 changes (file diff suppressed because it is too large)
@@ -23,7 +23,7 @@ from .mtag import HAVE_FFMPEG
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL, HAVE_VIPS
from .u2idx import U2idx
from .util import HMaccas, shut_socket
from .util import HMaccas, NetMap, shut_socket

if True: # pylint: disable=using-constant-test
from typing import Optional, Pattern, Union

@@ -50,11 +50,13 @@ class HttpConn(object):
self.addr = addr
self.hsrv = hsrv

self.mutex: threading.Lock = hsrv.mutex # mypy404
self.u2mutex: threading.Lock = hsrv.u2mutex # mypy404
self.args: argparse.Namespace = hsrv.args # mypy404
self.E: EnvParams = self.args.E
self.asrv: AuthSrv = hsrv.asrv # mypy404
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
self.ipa_nm: NetMap = hsrv.ipa_nm
self.xff_nm: NetMap = hsrv.xff_nm
self.iphash: HMaccas = hsrv.broker.iphash
self.bans: dict[str, int] = hsrv.bans
self.aclose: dict[str, int] = hsrv.aclose

@@ -93,7 +95,7 @@ class HttpConn(object):
self.rproxy = ip

self.ip = ip
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
return self.log_src

def respath(self, res_name: str) -> str:

@@ -112,32 +114,30 @@ class HttpConn(object):
return self.u2idx

def _detect_https(self) -> bool:
method = None
if True:
try:
method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout:
return False
except AttributeError:
# jython does not support msg_peek; forget about https
method = self.s.recv(4)
self.sr = Util.Unrecv(self.s, self.log)
self.sr.buf = method
try:
method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout:
return False
except AttributeError:
# jython does not support msg_peek; forget about https
method = self.s.recv(4)
self.sr = Util.Unrecv(self.s, self.log)
self.sr.buf = method

# jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now
if not getattr(self.s, "sendall", None):
self.s.sendall = self.s.send # type: ignore
# jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now
if not getattr(self.s, "sendall", None):
self.s.sendall = self.s.send # type: ignore

if len(method) != 4:
err = "need at least 4 bytes in the first packet; got {}".format(
len(method)
)
if method:
self.log(err)
if len(method) != 4:
err = "need at least 4 bytes in the first packet; got {}".format(
len(method)
)
if method:
self.log(err)

self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return False
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return False

return not method or not bool(PTN_HTTP.match(method))

@@ -178,7 +178,7 @@ class HttpConn(object):
self.s = ctx.wrap_socket(self.s, server_side=True)
msg = [
"\033[1;3{:d}m{}".format(c, s)
"\033[1;3%dm%s" % (c, s)
for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
]
self.log(" ".join(msg) + "\033[0m")
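The _detect_https hunk above distinguishes plaintext HTTP from TLS by peeking at the first four bytes of the socket and matching them against an HTTP-verb pattern. A self-contained sketch of the same idea, using its own rough pattern in place of the module's PTN_HTTP:

import re
import socket

PTN_VERB = re.compile(rb"[A-Z]{3}[A-Z ]")  # "GET ", "POST", "HEAD", ... (rough approximation)

def looks_like_tls(sck: socket.socket) -> bool:
    try:
        head = sck.recv(4, socket.MSG_PEEK)  # peek; the bytes stay in the kernel buffer
    except socket.timeout:
        return False
    # a TLS ClientHello starts with 0x16 0x03 and can never look like an HTTP verb
    return not head or not PTN_VERB.match(head)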
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
@@ -54,8 +55,8 @@ except SyntaxError:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .metrics import Metrics
|
||||
from .u2idx import U2idx
|
||||
from .util import (
|
||||
E_SCK,
|
||||
@@ -65,6 +66,8 @@ from .util import (
|
||||
Magician,
|
||||
Netdev,
|
||||
NetMap,
|
||||
absreal,
|
||||
build_netmap,
|
||||
ipnorm,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
@@ -98,18 +101,24 @@ class HttpSrv(object):
|
||||
# redefine in case of multiprocessing
|
||||
socket.setdefaulttimeout(120)
|
||||
|
||||
self.t0 = time.time()
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
self.magician = Magician()
|
||||
self.nm = NetMap([], {})
|
||||
self.ssdp: Optional["SSDPr"] = None
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422, False)
|
||||
self.gmal = Garda(self.args.ban_422)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.aclose: dict[str, int] = {}
|
||||
|
||||
self.bound: set[tuple[str, int]] = set()
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.u2mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
|
||||
self.tp_nthr = 0 # actual
|
||||
@@ -121,6 +130,10 @@ class HttpSrv(object):
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.metrics = Metrics(self)
|
||||
self.nreq = 0
|
||||
self.nsus = 0
|
||||
self.nban = 0
|
||||
self.srvs: list[socket.socket] = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients: set[HttpConn] = set() # laggy
|
||||
@@ -138,6 +151,16 @@ class HttpSrv(object):
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
self.ipa_nm = build_netmap(self.args.ipa)
|
||||
self.xff_nm = build_netmap(self.args.xff_src)
|
||||
self.xff_lan = build_netmap("lan")
|
||||
|
||||
self.statics: set[str] = set()
|
||||
self._build_statics()
|
||||
|
||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
|
||||
|
||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||
if not self.args.no_dav:
|
||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||
@@ -158,7 +181,7 @@ class HttpSrv(object):
|
||||
if self.args.log_thrs:
|
||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||
|
||||
self.th_cfg: dict[str, Any] = {}
|
||||
self.th_cfg: dict[str, set[str]] = {}
|
||||
Daemon(self.post_init, "hsrv-init2")
|
||||
|
||||
def post_init(self) -> None:
|
||||
@@ -168,12 +191,20 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def _build_statics(self) -> None:
|
||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
||||
for fn in df:
|
||||
ap = absreal(os.path.join(dp, fn))
|
||||
self.statics.add(ap)
|
||||
if ap.endswith(".gz"):
|
||||
self.statics.add(ap[:-3])
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
ips = set()
|
||||
for ip, _ in self.bound:
|
||||
ips.add(ip)
|
||||
|
||||
self.nm = NetMap(list(ips), netdevs)
|
||||
self.nm = NetMap(list(ips), list(netdevs))
|
||||
|
||||
def start_threads(self, n: int) -> None:
|
||||
self.tp_nthr += n
|
||||
@@ -195,7 +226,7 @@ class HttpSrv(object):
|
||||
def periodic(self) -> None:
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
with self.u2mutex, self.mutex:
|
||||
self.u2fh.clean()
|
||||
if self.tp_q:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
@@ -341,7 +372,7 @@ class HttpSrv(object):
|
||||
if not self.t_periodic:
|
||||
name = "hsrv-pt"
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
name += "-%d" % (self.nid,)
|
||||
|
||||
self.t_periodic = Daemon(self.periodic, name)
|
||||
|
||||
@@ -360,7 +391,7 @@ class HttpSrv(object):
|
||||
|
||||
Daemon(
|
||||
self.thr_client,
|
||||
"httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
|
||||
"httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1]),
|
||||
(sck, addr),
|
||||
)
|
||||
|
||||
@@ -377,9 +408,7 @@ class HttpSrv(object):
|
||||
try:
|
||||
sck, addr = task
|
||||
me = threading.current_thread()
|
||||
me.name = "httpconn-{}-{}".format(
|
||||
addr[0].split(".", 2)[-1][-6:], addr[1]
|
||||
)
|
||||
me.name = "httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1])
|
||||
self.thr_client(sck, addr)
|
||||
me.name = self.name + "-poolw"
|
||||
except Exception as ex:
|
||||
|
||||
@@ -4,10 +4,11 @@ from __future__ import print_function, unicode_literals
import argparse # typechk
import colorsys
import hashlib
import re

from .__init__ import PY2
from .th_srv import HAVE_PIL
from .util import BytesIO
from .th_srv import HAVE_PIL, HAVE_PILF
from .util import BytesIO, html_escape # type: ignore


class Ico(object):

@@ -21,27 +22,52 @@ class Ico(object):
ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4]
if PY2:
zb = [ord(x) for x in zb]
zb = [ord(x) for x in zb] # type: ignore

c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
ci = [int(x * 255) for x in list(c1) + list(c2)]
c = "".join(["{:02x}".format(x) for x in ci])
c = "".join(["%02x" % (x,) for x in ci])

w = 100
h = 30
if not self.args.th_no_crop and as_thumb:
if as_thumb:
sw, sh = self.args.th_size.split("x")
h = int(100 / (float(sw) / float(sh)))
w = 100
h = int(100.0 / (float(sw) / float(sh)))

if chrome:
# cannot handle more than ~2000 unique SVGs
if HAVE_PILF:
# pillow 10.1 made this the default font;
# svg: 3.7s, this: 36s
try:
from PIL import Image, ImageDraw

# [.lt] are hard to see lowercase / unspaced
ext2 = re.sub("(.)", "\\1 ", ext).upper()

h = int(128.0 * h / w)
w = 128
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)
_, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
xy = (int((w - tw) / 2), int((h - th) / 2))
pb.text(xy, ext2, fill="#" + c[6:], font_size=16)

img = img.resize((w * 2, h * 2), Image.NEAREST)

buf = BytesIO()
img.save(buf, format="PNG", compress_level=1)
return "image/png", buf.getvalue()

except:
pass

if HAVE_PIL:
# svg: 3s, cache: 6s, this: 8s
from PIL import Image, ImageDraw

h = int(64 * h / w)
h = int(64.0 * h / w)
w = 64
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)

@@ -64,20 +90,6 @@ class Ico(object):
img.save(buf, format="PNG", compress_level=1)
return "image/png", buf.getvalue()

elif False:
# 48s, too slow
import pyvips

h = int(192 * h / w)
w = 192
img = pyvips.Image.text(
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
)
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
buf = img.write_to_buffer(".png[compression=1]")
return "image/png", buf

svg = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>

@@ -86,6 +98,6 @@ class Ico(object):
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg>
"""
svg = svg.format(h, c[:6], c[6:], ext)
svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))

return "image/svg+xml", svg.encode("utf-8")
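The new HAVE_PILF branch above renders the file extension onto a small solid-color PNG with Pillow's bundled font instead of emitting an SVG. A rough standalone sketch of that approach (the font_size keyword on text()/textbbox() needs a recent Pillow, and the colors and sizes here are arbitrary):

from io import BytesIO
from PIL import Image, ImageDraw

def ext_icon(ext: str, bg: str = "#203040", fg: str = "#80c0ff") -> bytes:
    txt = " ".join(ext.upper())            # space the letters out for legibility
    img = Image.new("RGB", (128, 96), bg)
    pb = ImageDraw.Draw(img)
    _, _, tw, th = pb.textbbox((0, 0), txt, font_size=16)
    pb.text(((128 - tw) // 2, (96 - th) // 2), txt, fill=fg, font_size=16)
    img = img.resize((256, 192), Image.NEAREST)   # cheap 2x upscale keeps it crisp
    buf = BytesIO()
    img.save(buf, format="PNG", compress_level=1)
    return buf.getvalue()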
@@ -295,7 +295,9 @@ class MDNS(MCast):
while self.running:
timeout = (
0.02 + random.random() * 0.07
if self.probing or self.q or self.defend or self.unsolicited
if self.probing or self.q or self.defend
else max(0.05, self.unsolicited[0] - time.time())
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)

@@ -513,6 +515,10 @@ class MDNS(MCast):
for srv in self.srv.values():
tx.add(srv)

if not self.unsolicited and self.args.zm_spam:
zf = time.time() + self.args.zm_spam + random.random() * 0.07
self.unsolicited.append(zf)

for srv, deadline in list(self.defend.items()):
if now < deadline:
continue
copyparty/metrics.py: 236 lines (new file)
@@ -0,0 +1,236 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import json
import time

from .__init__ import TYPE_CHECKING
from .util import Pebkac, get_df, unhumanize

if TYPE_CHECKING:
from .httpcli import HttpCli
from .httpsrv import HttpSrv


class Metrics(object):
def __init__(self, hsrv: "HttpSrv") -> None:
self.hsrv = hsrv

def tx(self, cli: "HttpCli") -> bool:
if not cli.avol:
raise Pebkac(403, "not allowed for user " + cli.uname)

args = cli.args
if not args.stats:
raise Pebkac(403, "the stats feature is not enabled in server config")

conn = cli.conn
vfs = conn.asrv.vfs
allvols = list(sorted(vfs.all_vols.items()))

idx = conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
idx = None

ret: list[str] = []

def addc(k: str, v: str, desc: str) -> None:
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))

def adduc(k: str, unit: str, v: str, desc: str) -> None:
k += "_" + unit
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))

def addg(k: str, v: str, desc: str) -> None:
zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
ret.append(zs % (k, k, desc, k, v))

def addug(k: str, unit: str, v: str, desc: str) -> None:
k += "_" + unit
zs = "# TYPE %s gauge\n# UNIT %s %s\n# HELP %s %s\n%s %s"
ret.append(zs % (k, k, unit, k, desc, k, v))

def addh(k: str, typ: str, desc: str) -> None:
zs = "# TYPE %s %s\n# HELP %s %s"
ret.append(zs % (k, typ, k, desc))

def addbh(k: str, desc: str) -> None:
zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
ret.append(zs % (k, k, k, desc))

def addv(k: str, v: str) -> None:
ret.append("%s %s" % (k, v))

t = "time since last copyparty restart"
v = "{:.3f}".format(time.time() - self.hsrv.t0)
addug("cpp_uptime", "seconds", v, t)

# timestamps are gauges because initial value is not zero
t = "unixtime of last copyparty restart"
v = "{:.3f}".format(self.hsrv.t0)
addug("cpp_boot_unixtime", "seconds", v, t)

t = "number of open http(s) client connections"
addg("cpp_http_conns", str(self.hsrv.ncli), t)

t = "number of http(s) requests since last restart"
addc("cpp_http_reqs", str(self.hsrv.nreq), t)

t = "number of 403/422/malicious reqs since restart"
addc("cpp_sus_reqs", str(self.hsrv.nsus), t)

v = str(len(conn.bans or []))
addg("cpp_active_bans", v, "number of currently banned IPs")

t = "number of IPs banned since last restart"
addg("cpp_total_bans", str(self.hsrv.nban), t)

if not args.nos_vst:
x = self.hsrv.broker.ask("up2k.get_state")
vs = json.loads(x.get())

nvidle = 0
nvbusy = 0
nvoffline = 0
for v in vs["volstate"].values():
if v == "online, idle":
nvidle += 1
elif "OFFLINE" in v:
nvoffline += 1
else:
nvbusy += 1

addg("cpp_idle_vols", str(nvidle), "number of idle/ready volumes")
addg("cpp_busy_vols", str(nvbusy), "number of busy/indexing volumes")
addg("cpp_offline_vols", str(nvoffline), "number of offline volumes")

t = "time since last database activity (upload/rename/delete)"
addug("cpp_db_idle", "seconds", str(vs["dbwt"]), t)

t = "unixtime of last database activity (upload/rename/delete)"
addug("cpp_db_act", "seconds", str(vs["dbwu"]), t)

t = "number of files queued for hashing/indexing"
addg("cpp_hashing_files", str(vs["hashq"]), t)

t = "number of files queued for metadata scanning"
addg("cpp_tagq_files", str(vs["tagq"]), t)

try:
t = "number of files queued for plugin-based analysis"
addg("cpp_mtpq_files", str(int(vs["mtpq"])), t)
except:
pass

if not args.nos_hdd:
addbh("cpp_disk_size_bytes", "total HDD size of volume")
addbh("cpp_disk_free_bytes", "free HDD space in volume")
for vpath, vol in allvols:
free, total = get_df(vol.realpath)
if free is None or total is None:
continue

addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))

if idx and not args.nos_vol:
addbh("cpp_vol_bytes", "num bytes of data in volume")
addh("cpp_vol_files", "gauge", "num files in volume")
addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
tnbytes = 0
tnfiles = 0

volsizes = []
try:
ptops = [x.realpath for _, x in allvols]
x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
volsizes = x.get()
except Exception as ex:
cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)

for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
tnbytes += nbytes
tnfiles += nfiles
addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))

if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):

zi = unhumanize(vol.flags.get("vmaxb") or "0")
if zi:
v = str(zi - nbytes)
addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)

zi = unhumanize(vol.flags.get("vmaxn") or "0")
if zi:
v = str(zi - nfiles)
addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)

if volsizes:
addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
addv('cpp_vol_files{vol="total"}', str(tnfiles))

if idx and not args.nos_dup:
addbh("cpp_dupe_bytes", "num dupe bytes in volume")
addh("cpp_dupe_files", "gauge", "num dupe files in volume")
tnbytes = 0
tnfiles = 0
for vpath, vol in allvols:
cur = idx.get_cur(vol.realpath)
if not cur:
continue

nbytes = 0
nfiles = 0
q = "select sz, count(*)-1 c from up group by w having c"
for sz, c in cur.execute(q):
nbytes += sz * c
nfiles += c

tnbytes += nbytes
tnfiles += nfiles
addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))

addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
addv('cpp_dupe_files{vol="total"}', str(tnfiles))

if not args.nos_unf:
addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
tnbytes = 0
tnfiles = 0
try:
x = self.hsrv.broker.ask("up2k.get_unfinished")
xs = x.get()
if not xs:
raise Exception("up2k mutex acquisition timed out")

xj = json.loads(xs)
for ptop, (nbytes, nfiles) in xj.items():
tnbytes += nbytes
tnfiles += nfiles
vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
if not vol:
t = "tx_stats get_unfinished: could not map {}"
cli.log(t.format(ptop), 3)
continue

addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))

addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
addv('cpp_unf_files{vol="total"}', str(tnfiles))

except Exception as ex:
cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)

ret.append("# EOF")

mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
mime = cli.uparam.get("mime") or mime
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
return True
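For reference, the addc/addg helpers above build plain OpenMetrics text blocks that get joined and sent as application/openmetrics-text. A tiny sketch reproducing the counter layout (the metric value and timestamp below are made up for illustration):

t0 = 1700000000          # server start time, illustrative
lines = []

def addc(k, v, desc):
    # same layout as the helper above: TYPE and HELP lines, then _created and _total samples
    zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
    lines.append(zs % (k, k, desc, k, t0, k, v))

addc("cpp_http_reqs", "1234", "number of http(s) requests since last restart")
print("\n".join(lines + ["# EOF"]))
# output:
#   # TYPE cpp_http_reqs counter
#   # HELP cpp_http_reqs number of http(s) requests since last restart
#   cpp_http_reqs_created 1700000000
#   cpp_http_reqs_total 1234
#   # EOF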
@@ -8,7 +8,7 @@ import shutil
import subprocess as sp
import sys

from .__init__ import EXE, PY2, WINDOWS, E, unicode
from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
from .bos import bos
from .util import (
FFMPEG_URL,

@@ -29,6 +29,9 @@ if True: # pylint: disable=using-constant-test


def have_ff(scmd: str) -> bool:
if ANYWIN:
scmd += ".exe"

if PY2:
print("# checking {}".format(scmd))
acmd = (scmd + " -version").encode("ascii").split(b" ")

@@ -115,7 +118,7 @@ def ffprobe(
b"--",
fsenc(abspath),
]
rc, so, se = runcmd(cmd, timeout=timeout)
rc, so, se = runcmd(cmd, timeout=timeout, nice=True, oom=200)
retchk(rc, cmd, se)
return parse_ffprobe(so)

@@ -237,7 +240,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
if "/" in fps:
fa, fb = fps.split("/")
try:
fps = int(fa) * 1.0 / int(fb)
fps = float(fa) / float(fb)
except:
fps = 9001
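The float() switch above matters for fractional rate strings: int() only accepts whole numbers, so something like "29.97/1" used to fall through to the 9001 fallback, while the usual NTSC fraction works out the same either way:

fa, fb = "30000/1001".split("/")
print(float(fa) / float(fb))   # 29.97002997002997 (NTSC)

fa, fb = "29.97/1".split("/")
print(float(fa) / float(fb))   # 29.97; int("29.97") would have raised and produced 9001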
@@ -258,7 +261,8 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])

zd = {k: (0, v) for k, v in ret.items()}
zero = int("0")
zd = {k: (zero, v) for k, v in ret.items()}

return zd, md

@@ -559,6 +563,8 @@ class MTag(object):

args = {
"env": env,
"nice": True,
"oom": 300,
"timeout": parser.timeout,
"kill": parser.kill,
"capture": parser.capture,

@@ -569,11 +575,6 @@ class MTag(object):
zd.update(ret)
args["sin"] = json.dumps(zd).encode("utf-8", "replace")

if WINDOWS:
args["creationflags"] = 0x4000
else:
cmd = ["nice"] + cmd

bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
rc, v, err = runcmd(bcmd, **args) # type: ignore
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
@@ -15,7 +15,7 @@ from ipaddress import (
)

from .__init__ import MACOS, TYPE_CHECKING
from .util import Netdev, find_prefix, min_ex, spack
from .util import Daemon, Netdev, find_prefix, min_ex, spack

if TYPE_CHECKING:
from .svchub import SvcHub

@@ -110,7 +110,7 @@ class MCast(object):
)

ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
ips = find_prefix(ips, netdevs)
ips = find_prefix(ips, list(netdevs))

on = self.on[:]
off = self.off[:]

@@ -228,6 +228,7 @@ class MCast(object):
for srv in self.srv.values():
assert srv.ip in self.sips

Daemon(self.hopper, "mc-hop")
return bound

def setup_socket(self, srv: MC_Sck) -> None:

@@ -299,33 +300,57 @@ class MCast(object):
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
self.log(t, 3)

self.hop(srv)
if self.hop(srv, False):
self.log("igmp was already joined?? chilling for a sec", 3)
time.sleep(1.2)

self.hop(srv, True)
self.b4.sort(reverse=True)
self.b6.sort(reverse=True)

def hop(self, srv: MC_Sck) -> None:
def hop(self, srv: MC_Sck, on: bool) -> bool:
"""rejoin to keepalive on routers/switches without igmp-snooping"""
sck = srv.sck
req = srv.mreq
if ":" in srv.ip:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
except:
pass

sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
if not on:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
return True
except:
return False
else:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
else:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
time.sleep(1.2)
except:
pass
if not on:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
return True
except:
return False
else:
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)

# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
return True

def hopper(self):
while self.args.mc_hop and self.running:
time.sleep(self.args.mc_hop)
if not self.running:
return

for srv in self.srv.values():
self.hop(srv, False)

# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
if not self.running:
return

for srv in self.srv.values():
self.hop(srv, True)

def map_client(self, cip: str) -> Optional[MC_Sck]:
try:
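The hop()/hopper() rework above turns the multicast rejoin into a periodic leave-then-rejoin so memberships stay alive on switches without IGMP snooping. A condensed IPv4-only sketch of the same keepalive pattern, assuming sck is already bound and mreq is the packed membership request for the group:

import socket
import time

def mcast_keepalive(sck: socket.socket, mreq: bytes, interval: float) -> None:
    while True:
        time.sleep(interval)
        try:
            # leave first; ignore errors if the membership was already gone
            sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, mreq)
        except OSError:
            pass
        time.sleep(1.2)   # give the stack a moment, as the hunk above does
        sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)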
@@ -136,8 +136,12 @@ class PWHash(object):
import getpass

while True:
p1 = getpass.getpass("password> ")
p2 = getpass.getpass("again or just hit ENTER> ")
try:
p1 = getpass.getpass("password> ")
p2 = getpass.getpass("again or just hit ENTER> ")
except EOFError:
return

if p2 and p1 != p2:
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
continue
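The try/except added above keeps the interactive prompt from crashing when stdin closes (piped input or Ctrl-D). The same loop in isolation:

import getpass
import sys

def read_password() -> str:
    while True:
        try:
            p1 = getpass.getpass("password> ")
            p2 = getpass.getpass("again or just hit ENTER> ")
        except EOFError:
            return ""   # stdin closed; give up quietly instead of raising
        if p2 and p1 != p2:
            print("inputs don't match; try again", file=sys.stderr)
            continue
        return p1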
@@ -32,6 +32,8 @@ class SMB(object):
|
||||
self.asrv = hub.asrv
|
||||
self.log = hub.log
|
||||
self.files: dict[int, tuple[float, str]] = {}
|
||||
self.noacc = self.args.smba
|
||||
self.accs = not self.args.smba
|
||||
|
||||
lg.setLevel(logging.DEBUG if self.args.smbvvv else logging.INFO)
|
||||
for x in ["impacket", "impacket.smbserver"]:
|
||||
@@ -94,6 +96,14 @@ class SMB(object):
|
||||
|
||||
port = int(self.args.smb_port)
|
||||
srv = smbserver.SimpleSMBServer(listenAddress=ip, listenPort=port)
|
||||
try:
|
||||
if self.accs:
|
||||
srv.setAuthCallback(self._auth_cb)
|
||||
except:
|
||||
self.accs = False
|
||||
self.noacc = True
|
||||
t = "impacket too old; access permissions will not work! all accounts are admin!"
|
||||
self.log("smb", t, 1)
|
||||
|
||||
ro = "no" if self.args.smbw else "yes" # (does nothing)
|
||||
srv.addShare("A", "/", readOnly=ro)
|
||||
@@ -119,24 +129,74 @@ class SMB(object):
def start(self) -> None:
Daemon(self.srv.start)

def _v2a(self, caller: str, vpath: str, *a: Any) -> tuple[VFS, str]:
def _auth_cb(self, *a, **ka):
debug("auth-result: %s %s", a, ka)
conndata = ka["connData"]
auth_ok = conndata["Authenticated"]
uname = ka["user_name"] if auth_ok else "*"
uname = self.asrv.iacct.get(uname, uname) or "*"
oldname = conndata.get("partygoer", "*") or "*"
cli_ip = conndata["ClientIP"]
cli_hn = ka["host_name"]
if uname != "*":
conndata["partygoer"] = uname
info("client %s [%s] authed as %s", cli_ip, cli_hn, uname)
elif oldname != "*":
info("client %s [%s] keeping old auth as %s", cli_ip, cli_hn, oldname)
elif auth_ok:
info("client %s [%s] authed as [*] (anon)", cli_ip, cli_hn)
else:
info("client %s [%s] rejected", cli_ip, cli_hn)

def _uname(self) -> str:
if self.noacc:
return LEELOO_DALLAS

try:
# you found it! my single worst bit of code so far
# (if you can think of a better way to track users through impacket i'm all ears)
cf0 = inspect.currentframe().f_back.f_back
cf = cf0.f_back
for n in range(3):
cl = cf.f_locals
if "connData" in cl:
return cl["connData"]["partygoer"]
cf = cf.f_back
raise Exception()
except:
warning(
"nyoron... %s <<-- %s <<-- %s <<-- %s",
cf0.f_code.co_name,
cf0.f_back.f_code.co_name,
cf0.f_back.f_back.f_code.co_name,
cf0.f_back.f_back.f_back.f_code.co_name,
)
return "*"
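_uname() above recovers the authenticated username by walking up the call stack until it finds impacket's connData dict in a caller's locals. A generic sketch of that frame walk; the search depth and the key name depend entirely on who calls you, so treat it as illustrative only:

import inspect

def find_caller_local(name: str, max_depth: int = 6):
    # walk outward from our caller, looking for a local variable with the given name
    cf = inspect.currentframe().f_back
    for _ in range(max_depth):
        if cf is None:
            return None
        if name in cf.f_locals:
            return cf.f_locals[name]
        cf = cf.f_back
    return None

# e.g. deep inside a callback: conndata = find_caller_local("connData")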
def _v2a(
|
||||
self, caller: str, vpath: str, *a: Any, uname="", perms=None
|
||||
) -> tuple[VFS, str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
# cf = inspect.currentframe().f_back
|
||||
# c1 = cf.f_back.f_code.co_name
|
||||
# c2 = cf.f_code.co_name
|
||||
debug('%s("%s", %s)\033[K\033[0m', caller, vpath, str(a))
|
||||
if not uname:
|
||||
uname = self._uname()
|
||||
if not perms:
|
||||
perms = [True, True]
|
||||
|
||||
# TODO find a way to grab `identity` in smbComSessionSetupAndX and smb2SessionSetup
|
||||
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, True, True)
|
||||
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
# caller = inspect.currentframe().f_back.f_code.co_name
|
||||
debug('listdir("%s", %s)\033[K\033[0m', vpath, str(a))
|
||||
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, False, False)
|
||||
uname = self._uname()
|
||||
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
|
||||
_, vfs_ls, vfs_virt = vfs.ls(
|
||||
rem, LEELOO_DALLAS, not self.args.no_scandir, [[False, False]]
|
||||
rem, uname, not self.args.no_scandir, [[False, False]]
|
||||
)
|
||||
dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
fils = [x[0] for x in vfs_ls if x[0] not in dirs]
|
||||
@@ -149,8 +209,8 @@ class SMB(object):
|
||||
sz = 112 * 2 # ['.', '..']
|
||||
for n, fn in enumerate(ls):
|
||||
if sz >= 64000:
|
||||
t = "listing only %d of %d files (%d byte); see impacket#1433"
|
||||
warning(t, n, len(ls), sz)
|
||||
t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
|
||||
warning(t, n, len(ls), sz, vpath)
|
||||
break
|
||||
|
||||
nsz = len(fn.encode("utf-16", "replace"))
|
||||
@@ -171,10 +231,12 @@ class SMB(object):
|
||||
if wr and not self.args.smbw:
|
||||
yeet("blocked write (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("open", vpath, *a)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("open", vpath, *a, uname=uname, perms=[True, wr])
|
||||
if wr:
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked write (no-write-acc): " + vpath)
|
||||
t = "blocked write (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
@@ -204,7 +266,7 @@ class SMB(object):
|
||||
|
||||
_, vp = self.files.pop(fd)
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, LEELOO_DALLAS, False, True)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self._uname(), False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
@@ -224,15 +286,18 @@ class SMB(object):
|
||||
vp1 = vp1.lstrip("/")
|
||||
vp2 = vp2.lstrip("/")
|
||||
|
||||
vfs2, ap2 = self._v2a("rename", vp2, vp1)
|
||||
uname = self._uname()
|
||||
vfs2, ap2 = self._v2a("rename", vp2, vp1, uname=uname)
|
||||
if not vfs2.axs.uwrite:
|
||||
yeet("blocked rename (no-write-acc): " + vp2)
|
||||
t = "blocked write (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs2.axs.uwrite, vp2, uname))
|
||||
|
||||
vfs1, _ = self.asrv.vfs.get(vp1, LEELOO_DALLAS, True, True)
|
||||
vfs1, _ = self.asrv.vfs.get(vp1, uname, True, True, True)
|
||||
if not vfs1.axs.umove:
|
||||
yeet("blocked rename (no-move-acc): " + vp1)
|
||||
t = "blocked rename (no-move-acc %s): /%s @%s"
|
||||
yeet(t % (vfs1.axs.umove, vp1, uname))
|
||||
|
||||
self.hub.up2k.handle_mv(LEELOO_DALLAS, vp1, vp2)
|
||||
self.hub.up2k.handle_mv(uname, vp1, vp2)
|
||||
try:
|
||||
bos.makedirs(ap2)
|
||||
except:
|
||||
@@ -242,52 +307,74 @@ class SMB(object):
|
||||
if not self.args.smbw:
|
||||
yeet("blocked mkdir (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("mkdir", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("mkdir", vpath, uname=uname)
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked mkdir (no-write-acc): " + vpath)
|
||||
t = "blocked mkdir (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
return bos.mkdir(ap)
|
||||
|
||||
def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result:
|
||||
return bos.stat(self._v2a("stat", vpath, *a)[1], *a, **ka)
|
||||
try:
|
||||
ap = self._v2a("stat", vpath, *a, perms=[True, False])[1]
|
||||
ret = bos.stat(ap, *a, **ka)
|
||||
# debug(" `-stat:ok")
|
||||
return ret
|
||||
except:
|
||||
# white lie: windows freaks out if we raise due to an offline volume
|
||||
# debug(" `-stat:NOPE (faking a directory)")
|
||||
ts = int(time.time())
|
||||
return os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, ts, ts, ts))
|
||||
|
||||
def _unlink(self, vpath: str) -> None:
|
||||
if not self.args.smbw:
|
||||
yeet("blocked delete (no --smbw): " + vpath)
|
||||
|
||||
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
|
||||
vfs, ap = self._v2a("delete", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a(
|
||||
"delete", vpath, uname=uname, perms=[True, False, False, True]
|
||||
)
|
||||
if not vfs.axs.udel:
|
||||
yeet("blocked delete (no-del-acc): " + vpath)
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
|
||||
self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False, False)
|
||||
|
||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||
if not self.args.smbw:
|
||||
yeet("blocked utime (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("utime", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("utime", vpath, uname=uname)
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked utime (no-write-acc): " + vpath)
|
||||
t = "blocked utime (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
return bos.utime(ap, times)
|
||||
|
||||
def _p_exists(self, vpath: str) -> bool:
|
||||
# ap = "?"
|
||||
try:
|
||||
bos.stat(self._v2a("p.exists", vpath)[1])
|
||||
ap = self._v2a("p.exists", vpath, perms=[True, False])[1]
|
||||
bos.stat(ap)
|
||||
# debug(" `-exists((%s)->(%s)):ok", vpath, ap)
|
||||
return True
|
||||
except:
|
||||
# debug(" `-exists((%s)->(%s)):NOPE", vpath, ap)
|
||||
return False
|
||||
|
||||
def _p_getsize(self, vpath: str) -> int:
|
||||
st = bos.stat(self._v2a("p.getsize", vpath)[1])
|
||||
st = bos.stat(self._v2a("p.getsize", vpath, perms=[True, False])[1])
|
||||
return st.st_size
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
st = bos.stat(self._v2a("p.isdir", vpath)[1])
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
st = bos.stat(self._v2a("p.isdir", vpath, perms=[True, False])[1])
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
# debug(" `-isdir:%s:%s", st.st_mode, ret)
|
||||
return ret
|
||||
except:
|
||||
return False
|
||||
|
||||
@@ -319,6 +406,7 @@ class SMB(object):
|
||||
|
||||
smbserver.os.path.abspath = self._hook
|
||||
smbserver.os.path.expanduser = self._hook
|
||||
smbserver.os.path.expandvars = self._hook
|
||||
smbserver.os.path.getatime = self._hook
|
||||
smbserver.os.path.getctime = self._hook
|
||||
smbserver.os.path.getmtime = self._hook
|
||||
|
||||
@@ -81,7 +81,7 @@ class SSDPr(object):
ubase = "{}://{}:{}".format(proto, sip, sport)
zsl = self.args.zsl
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
name = "{} @ {}".format(self.args.doctitle, self.args.name)
name = self.args.doctitle
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
hc.reply(zs.encode("utf-8", "replace"))
return False # close connectino

@@ -214,7 +214,7 @@ CONFIGID.UPNP.ORG: 1
srv.sck.sendto(zb, addr[:2])

if cip not in self.txc.c:
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), "6")
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), 6)

self.txc.add(cip)
self.txc.cln()
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import re
import stat
import tarfile

@@ -44,6 +45,7 @@ class StreamTar(StreamArc):
self,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
cmp: str = "",
**kwargs: Any
):
super(StreamTar, self).__init__(log, fgen)

@@ -53,14 +55,41 @@ class StreamTar(StreamArc):
self.qfile = QFile()
self.errf: dict[str, Any] = {}

# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
# python 3.8 changed to PAX_FORMAT as default;
# slower, bigger, and no particular advantage
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
if "pax" in cmp:
# unless a client asks for it (currently
# gnu-tar has wider support than pax-tar)
fmt = tarfile.PAX_FORMAT
cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)

try:
cmp, zs = cmp.replace(":", ",").split(",")
lv = int(zs)
except:
lv = -1

arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
if cmp == "gz":
fun = tarfile.TarFile.gzopen
arg["compresslevel"] = lv if lv >= 0 else 3
elif cmp == "bz2":
fun = tarfile.TarFile.bz2open
arg["compresslevel"] = lv if lv >= 0 else 2
elif cmp == "xz":
fun = tarfile.TarFile.xzopen
arg["preset"] = lv if lv >= 0 else 1
else:
fun = tarfile.open
arg["mode"] = "w|"

self.tar = fun(**arg)

Daemon(self._gen, "star-gen")

def gen(self) -> Generator[Optional[bytes], None, None]:
buf = b""
try:
while True:
buf = self.qfile.q.get()

@@ -72,6 +101,12 @@ class StreamTar(StreamArc):

yield None
finally:
while buf:
try:
buf = self.qfile.q.get()
except:
pass

if self.errf:
bos.unlink(self.errf["ap"])

@@ -101,6 +136,9 @@ class StreamTar(StreamArc):
errors.append((f["vp"], f["err"]))
continue

if self.stopped:
break

try:
self.ser(f)
except:
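The constructor above turns a client-supplied compression spec such as "gz:9", "xz,2" or "pax,bz2" into tarfile arguments. A trimmed sketch of the same parsing, with the copyparty-specific QFile plumbing left out (supply your own fileobj before opening):

import re
import tarfile

def tar_open_args(cmp: str) -> tuple:
    fmt = tarfile.GNU_FORMAT
    if "pax" in cmp:
        fmt = tarfile.PAX_FORMAT
        cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
    try:
        cmp, zs = cmp.replace(":", ",").split(",")
        lv = int(zs)                  # optional level after ":" or ","
    except ValueError:
        lv = -1
    arg = {"name": None, "fileobj": None, "mode": "w", "format": fmt}
    if cmp == "gz":
        fun, arg["compresslevel"] = tarfile.TarFile.gzopen, lv if lv >= 0 else 3
    elif cmp == "bz2":
        fun, arg["compresslevel"] = tarfile.TarFile.bz2open, lv if lv >= 0 else 2
    elif cmp == "xz":
        fun, arg["preset"] = tarfile.TarFile.xzopen, lv if lv >= 0 else 1
    else:
        fun, arg["mode"] = tarfile.open, "w|"   # plain uncompressed stream
    return fun, arg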
@@ -61,7 +61,7 @@ class Adapter(object):
)


if True:
if True: # pylint: disable=using-constant-test
# Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
_IPv4Address = str
@@ -1,10 +1,14 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import os
import tempfile
from datetime import datetime

from .__init__ import CORES
from .bos import bos
from .th_cli import ThumbCli
from .util import UTC, vjoin

if True: # pylint: disable=using-constant-test
from typing import Any, Generator, Optional

@@ -21,10 +25,78 @@ class StreamArc(object):
):
self.log = log
self.fgen = fgen
self.stopped = False

def gen(self) -> Generator[Optional[bytes], None, None]:
raise Exception("override me")

def stop(self) -> None:
self.stopped = True


def gfilter(
fgen: Generator[dict[str, Any], None, None],
thumbcli: ThumbCli,
uname: str,
vtop: str,
fmt: str,
) -> Generator[dict[str, Any], None, None]:
from concurrent.futures import ThreadPoolExecutor

pend = []
with ThreadPoolExecutor(max_workers=CORES) as tp:
try:
for f in fgen:
task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
pend.append((task, f))
if pend[0][0].done() or len(pend) > CORES * 4:
task, f = pend.pop(0)
try:
f = task.result(600)
except:
pass
yield f

for task, f in pend:
try:
f = task.result(600)
except:
pass
yield f
except Exception as ex:
thumbcli.log("gfilter flushing ({})".format(ex))
for task, f in pend:
try:
task.result(600)
except:
pass
thumbcli.log("gfilter flushed")


def enthumb(
thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
) -> dict[str, Any]:
rem = f["vp"]
ext = rem.rsplit(".", 1)[-1].lower()
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
raise Exception()

vp = vjoin(vtop, rem.split("/", 1)[1])
vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
dbv, vrem = vn.get_dbv(rem)
thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
if not thp:
raise Exception()

ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
sz = bos.path.getsize(thp)
st: os.stat_result = f["st"]
ts = st.st_mtime
f["ap"] = thp
f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
return f
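gfilter() above keeps a small window of thumbnail jobs in flight and still yields results in their original order. The same bounded-pipeline shape in isolation (worker and items are placeholders for whatever you are transforming):

from concurrent.futures import ThreadPoolExecutor

def pipeline(items, worker, nworkers=4):
    pend = []
    with ThreadPoolExecutor(max_workers=nworkers) as tp:
        for it in items:
            pend.append((tp.submit(worker, it), it))
            # drain from the front once the oldest job is done or the queue gets long
            if pend[0][0].done() or len(pend) > nworkers * 4:
                task, it = pend.pop(0)
                try:
                    it = task.result(600)
                except Exception:
                    pass            # keep the untouched item on failure
                yield it
        for task, it in pend:       # flush the tail, still in order
            try:
                it = task.result(600)
            except Exception:
                pass
            yield it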
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""]

@@ -36,7 +108,7 @@ def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace"))

dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")

bos.chmod(tf_path, 0o444)
return {
@@ -28,24 +28,31 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .__init__ import ANYWIN, E, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .authsrv import BAD_CFG, AuthSrv
|
||||
from .cert import ensure_cert
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
FFMPEG_URL,
|
||||
UTC,
|
||||
VERSIONS,
|
||||
Daemon,
|
||||
Garda,
|
||||
HLog,
|
||||
HMaccas,
|
||||
ODict,
|
||||
alltrace,
|
||||
ansi_re,
|
||||
build_netmap,
|
||||
min_ex,
|
||||
mp,
|
||||
odfusion,
|
||||
pybin,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
@@ -88,7 +95,7 @@ class SvcHub(object):
|
||||
self.stopping = False
|
||||
self.stopped = False
|
||||
self.reload_req = False
|
||||
self.reloading = False
|
||||
self.reloading = 0
|
||||
self.stop_cond = threading.Condition()
|
||||
self.nsigs = 3
|
||||
self.retcode = 0
|
||||
@@ -100,11 +107,6 @@ class SvcHub(object):
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.no_dav = True
|
||||
@@ -120,7 +122,6 @@ class SvcHub(object):
|
||||
args.no_mv = True
|
||||
args.hardlink = True
|
||||
args.vague_403 = True
|
||||
args.ban_404 = "50,60,1440"
|
||||
args.nih = True
|
||||
|
||||
if args.s:
|
||||
@@ -131,8 +132,20 @@ class SvcHub(object):
|
||||
args.force_js = True
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception("bad config")
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
# for non-http clients (ftp, tftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422, False)
|
||||
self.gmal = Garda(self.args.ban_422)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
|
||||
self.log_div = 10 ** (6 - args.log_tdec)
|
||||
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
|
||||
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
self._setup_logfile(printed)
|
||||
@@ -142,6 +155,8 @@ class SvcHub(object):
|
||||
lg.handlers = [lh]
|
||||
lg.setLevel(logging.DEBUG)
|
||||
|
||||
self._check_env()
|
||||
|
||||
if args.stackmon:
|
||||
start_stackmon(args.stackmon, 0)
|
||||
|
||||
@@ -162,6 +177,14 @@ class SvcHub(object):
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if args.nih:
|
||||
args.vname = ""
|
||||
args.doctitle = args.doctitle.replace(" @ --name", "")
|
||||
else:
|
||||
args.vname = args.name
|
||||
args.doctitle = args.doctitle.replace("--name", args.vname)
|
||||
args.bname = args.bname.replace("--name", args.vname) or args.vname
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
@@ -183,6 +206,10 @@ class SvcHub(object):
|
||||
self.log("root", "max clients: {}".format(self.args.nc))
|
||||
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
|
||||
if not self.tcpsrv.srv and self.args.ign_ebind_all:
|
||||
self.args.no_fastboot = True
|
||||
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
@@ -244,6 +271,12 @@ class SvcHub(object):
|
||||
Daemon(self.start_ftpd, "start_ftpd")
|
||||
zms += "f" if args.ftp else "F"
|
||||
|
||||
if args.tftp:
|
||||
from .tftpd import Tftpd
|
||||
|
||||
self.tftpd: Optional[Tftpd] = None
|
||||
Daemon(self.start_ftpd, "start_tftpd")
|
||||
|
||||
if args.smb:
|
||||
# impacket.dcerpc is noisy about listen timeouts
|
||||
sto = socket.getdefaulttimeout()
|
||||
@@ -273,10 +306,12 @@ class SvcHub(object):
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
if self.ftpd:
|
||||
return
|
||||
|
||||
self.restart_ftpd()
|
||||
if hasattr(self, "ftpd") and not self.ftpd:
|
||||
self.restart_ftpd()
|
||||
|
||||
if hasattr(self, "tftpd") and not self.tftpd:
|
||||
self.restart_tftpd()
|
||||
|
||||
def restart_ftpd(self) -> None:
|
||||
if not hasattr(self, "ftpd"):
|
||||
@@ -293,6 +328,17 @@ class SvcHub(object):
|
||||
self.ftpd = Ftpd(self)
|
||||
self.log("root", "started FTPd")
|
||||
|
||||
def restart_tftpd(self) -> None:
|
||||
if not hasattr(self, "tftpd"):
|
||||
return
|
||||
|
||||
from .tftpd import Tftpd
|
||||
|
||||
if self.tftpd:
|
||||
return # todo
|
||||
|
||||
self.tftpd = Tftpd(self)
|
||||
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
@@ -324,16 +370,35 @@ class SvcHub(object):
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
|
||||
time.sleep(0.1) # purely cosmetic dw
|
||||
ar = self.args
|
||||
for _ in range(10 if ar.ftp or ar.ftps else 0):
|
||||
time.sleep(0.03)
|
||||
if self.ftpd:
|
||||
break
|
||||
|
||||
if self.tcpsrv.qr:
|
||||
self.log("qr-code", self.tcpsrv.qr)
|
||||
else:
|
||||
self.log("root", "workers OK\n")
|
||||
|
||||
self.after_httpsrv_up()
|
||||
|
||||
def after_httpsrv_up(self) -> None:
|
||||
self.up2k.init_vols()
|
||||
|
||||
Daemon(self.sd_notify, "sd-notify")
|
||||
|
||||
def _check_env(self) -> None:
|
||||
try:
|
||||
files = os.listdir(E.cfg)
|
||||
except:
|
||||
files = []
|
||||
|
||||
hits = [x for x in files if x.lower().endswith(".conf")]
|
||||
if hits:
|
||||
t = "WARNING: found config files in [%s]: %s\n config files are not expected here, and will NOT be loaded (unless your setup is intentionally hella funky)"
|
||||
self.log("root", t % (E.cfg, ", ".join(hits)), 3)
|
||||
|
||||
def _process_config(self) -> bool:
|
||||
al = self.args
|
||||
|
||||
@@ -373,29 +438,102 @@ class SvcHub(object):
|
||||
if al.rsp_jtr:
|
||||
al.rsp_slp = 0.000001
|
||||
|
||||
al.th_covers = set(al.th_covers.split(","))
|
||||
zsl = al.th_covers.split(",")
|
||||
zsl = [x.strip() for x in zsl]
|
||||
zsl = [x for x in zsl if x]
|
||||
al.th_covers = set(zsl)
|
||||
al.th_coversd = set(zsl + ["." + x for x in zsl])
|
||||
|
||||
for k in "c".split(" "):
|
||||
vl = getattr(al, k)
|
||||
if not vl:
|
||||
continue
|
||||
|
||||
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
|
||||
vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
|
||||
setattr(al, k, vl)
|
||||
|
||||
for k in "lo hist ssl_log".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if vs and vs.startswith("~"):
|
||||
setattr(al, k, os.path.expanduser(vs))
|
||||
if vs:
|
||||
vs = os.path.expandvars(os.path.expanduser(vs))
|
||||
setattr(al, k, vs)
|
||||
|
||||
for k in "sus_urls nonsus_urls".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if not vs or vs == "no":
|
||||
setattr(al, k, None)
|
||||
else:
|
||||
setattr(al, k, re.compile(vs))
|
||||
|
||||
for k in "tftp_lsf".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if not vs or vs == "no":
|
||||
setattr(al, k, None)
|
||||
else:
|
||||
setattr(al, k, re.compile("^" + vs + "$"))
|
||||
|
||||
if not al.sus_urls:
|
||||
al.ban_url = "no"
|
||||
elif al.ban_url == "no":
|
||||
al.sus_urls = None
|
||||
|
||||
al.xff_hdr = al.xff_hdr.lower()
|
||||
al.idp_h_usr = al.idp_h_usr.lower()
|
||||
al.idp_h_grp = al.idp_h_grp.lower()
|
||||
al.idp_h_key = al.idp_h_key.lower()
|
||||
|
||||
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
|
||||
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
|
||||
|
||||
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||
al.mte = odfusion(mte, al.mte)
|
||||
|
||||
mth = ODict.fromkeys(DEF_MTH.split(","), True)
|
||||
al.mth = odfusion(mth, al.mth)
|
||||
|
||||
exp = ODict.fromkeys(DEF_EXP.split(" "), True)
|
||||
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||
|
||||
for k in ["no_hash", "no_idx"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if ptn:
|
||||
setattr(self.args, k, re.compile(ptn))
|
||||
|
||||
for k in ["idp_gsep"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if "]" in ptn:
|
||||
ptn = "]" + ptn.replace("]", "")
|
||||
if "[" in ptn:
|
||||
ptn = ptn.replace("[", "") + "["
|
||||
if "-" in ptn:
|
||||
ptn = ptn.replace("-", "") + "-"
|
||||
|
||||
ptn = ptn.replace("\\", "\\\\").replace("^", "\\^")
|
||||
setattr(self.args, k, re.compile("[%s]" % (ptn,)))
|
||||
|
||||
try:
|
||||
zf1, zf2 = self.args.rm_retry.split("/")
|
||||
self.args.rm_re_t = float(zf1)
|
||||
self.args.rm_re_r = float(zf2)
|
||||
except:
|
||||
raise Exception("invalid --rm-retry [%s]" % (self.args.rm_retry,))
|
||||
|
||||
return True
|
||||
|
||||
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||
if txt in ("any", "0", ""):
|
||||
return None
|
||||
|
||||
zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
|
||||
return re.compile("^(?:" + zs + ")")
|
||||
|
||||
def _setlimits(self) -> None:
|
||||
try:
|
||||
import resource
|
||||
|
||||
soft, hard = [
|
||||
x if x > 0 else 1024 * 1024
|
||||
int(x) if x > 0 else 1024 * 1024
|
||||
for x in list(resource.getrlimit(resource.RLIMIT_NOFILE))
|
||||
]
|
||||
except:
|
||||
@@ -431,7 +569,7 @@ class SvcHub(object):
|
||||
self.args.nc = min(self.args.nc, soft // 2)
|
||||
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
@@ -452,12 +590,17 @@ class SvcHub(object):
|
||||
sel_fn = "{}.{}".format(fn, ctr)
|
||||
|
||||
fn = sel_fn
|
||||
try:
|
||||
os.makedirs(os.path.dirname(fn))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if do_xz:
|
||||
import lzma
|
||||
|
||||
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
|
||||
self.args.no_logflush = True
|
||||
else:
|
||||
lh = open(fn, "wt", encoding="utf-8", errors="replace")
|
||||
except:
|
||||
@@ -544,21 +687,37 @@ class SvcHub(object):
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)

def reload(self) -> str:
if self.reloading:
return "cannot reload; already in progress"
with self.up2k.mutex:
if self.reloading:
return "cannot reload; already in progress"
self.reloading = 1

self.reloading = True
Daemon(self._reload, "reloading")
return "reload initiated"

def _reload(self) -> None:
self.log("root", "reload scheduled")
def _reload(self, rescan_all_vols: bool = True) -> None:
with self.up2k.mutex:
if self.reloading != 1:
return
self.reloading = 2
self.log("root", "reloading config")
self.asrv.reload()
self.up2k.reload()
self.up2k.reload(rescan_all_vols)
self.broker.reload()
self.reloading = 0

self.reloading = False
def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
while True:
with self.up2k.mutex:
if self.reloading < 2:
self.reloading = 1
break
time.sleep(0.05)

# try to handle multiple pending IdP reloads at once:
time.sleep(0.2)

self._reload(rescan_all_vols=rescan_all_vols)
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
@@ -618,19 +777,25 @@ class SvcHub(object):
|
||||
ret = 1
|
||||
try:
|
||||
self.pr("OPYTHAT")
|
||||
tasks = []
|
||||
slp = 0.0
|
||||
|
||||
if self.mdns:
|
||||
Daemon(self.mdns.stop)
|
||||
tasks.append(Daemon(self.mdns.stop, "mdns"))
|
||||
slp = time.time() + 0.5
|
||||
|
||||
if self.ssdp:
|
||||
Daemon(self.ssdp.stop)
|
||||
tasks.append(Daemon(self.ssdp.stop, "ssdp"))
|
||||
slp = time.time() + 0.5
|
||||
|
||||
self.broker.shutdown()
|
||||
self.tcpsrv.shutdown()
|
||||
self.up2k.shutdown()
|
||||
|
||||
if hasattr(self, "smbd"):
|
||||
slp = max(slp, time.time() + 0.5)
|
||||
tasks.append(Daemon(self.smbd.stop, "smbd"))
|
||||
|
||||
if self.thumbsrv:
|
||||
self.thumbsrv.shutdown()
|
||||
|
||||
@@ -640,17 +805,19 @@ class SvcHub(object):
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
self.pr("waiting for thumbsrv (10sec)...")
|
||||
self.log("root", "waiting for thumbsrv (10sec)...")
|
||||
|
||||
if hasattr(self, "smbd"):
|
||||
slp = max(slp, time.time() + 0.5)
|
||||
Daemon(self.kill9, a=(1,))
|
||||
Daemon(self.smbd.stop)
|
||||
zf = max(time.time() - slp, 0)
|
||||
Daemon(self.kill9, a=(zf + 0.5,))
|
||||
|
||||
while time.time() < slp:
|
||||
time.sleep(0.1)
|
||||
if not next((x for x in tasks if x.is_alive), None):
|
||||
break
|
||||
|
||||
self.pr("nailed it", end="")
|
||||
time.sleep(0.05)
|
||||
|
||||
self.log("root", "nailed it")
|
||||
ret = self.retcode
|
||||
except:
|
||||
self.pr("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
|
||||
@@ -660,7 +827,7 @@ class SvcHub(object):
|
||||
print("\033]0;\033\\", file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
self.pr("\033[0m")
|
||||
self.pr("\033[0m", end="")
|
||||
if self.logf:
|
||||
self.logf.close()
|
||||
|
||||
@@ -672,17 +839,34 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
zd = datetime.utcnow()
|
||||
ts = "%04d-%04d-%06d.%03d" % (
|
||||
zd = datetime.now(UTC)
|
||||
ts = self.log_dfmt % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // 1000,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
|
||||
|
||||
if c and not self.args.no_ansi:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3%sm%s\033[0m" % (c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[%sm%s\033[0m" % (c, msg)
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
if "\033" in src:
|
||||
src += "\033[0m"
|
||||
|
||||
if "\033" in msg:
|
||||
msg += "\033[0m"
|
||||
|
||||
self.logf.write("@%s [%-21s] %s\n" % (ts, src, msg))
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
if int(now) >= self.next_day:
|
||||
self._set_next_day()
|
||||
|
||||
def _set_next_day(self) -> None:
|
||||
@@ -690,7 +874,7 @@ class SvcHub(object):
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -698,14 +882,20 @@ class SvcHub(object):
|
||||
dt += timedelta(hours=12)
|
||||
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
try:
|
||||
tt = dt.utctimetuple()
|
||||
except:
|
||||
# still makes me hella uncomfortable
|
||||
tt = dt.timetuple()
|
||||
|
||||
self.next_day = calendar.timegm(tt)
|
||||
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
if int(now) >= self.next_day:
|
||||
dt = datetime.fromtimestamp(now, UTC)
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
@@ -728,12 +918,12 @@ class SvcHub(object):
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
zd = datetime.utcfromtimestamp(now)
|
||||
ts = "%02d:%02d:%02d.%03d" % (
|
||||
zd = datetime.fromtimestamp(now, UTC)
|
||||
ts = self.log_efmt % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // 1000,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
@@ -749,6 +939,8 @@ class SvcHub(object):
|
||||
|
||||
if self.logf:
|
||||
self.logf.write(msg)
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
def pr(self, *a: Any, **ka: Any) -> None:
|
||||
try:
|
||||
|
||||
@@ -221,6 +221,7 @@ class StreamZip(StreamArc):
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
**kwargs: Any
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
|
||||
@@ -275,6 +276,7 @@ class StreamZip(StreamArc):
|
||||
def gen(self) -> Generator[bytes, None, None]:
|
||||
errf: dict[str, Any] = {}
|
||||
errors = []
|
||||
mbuf = b""
|
||||
try:
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
@@ -283,13 +285,20 @@ class StreamZip(StreamArc):
|
||||
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
yield x
|
||||
mbuf += x
|
||||
if len(mbuf) >= 16384:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
except GeneratorExit:
|
||||
raise
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if mbuf:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
@@ -299,20 +308,23 @@ class StreamZip(StreamArc):
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
if len(mbuf) >= 16384:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
cdir_end = self.pos
|
||||
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
yield mbuf + self._ct(ecdr)
|
||||
finally:
|
||||
if errf:
|
||||
bos.unlink(errf["ap"])
|
||||
|
||||
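The StreamZip changes above stop yielding each small header on its own and instead collect output in `mbuf`, flushing once it reaches 16 KiB, which avoids a flood of tiny socket writes when archiving many small files. A generic sketch of that buffering idiom (illustrative only, not the copyparty implementation):

```python
from typing import Generator, Iterable

def coalesce(chunks: Iterable[bytes], limit: int = 16384) -> Generator[bytes, None, None]:
    # gather small chunks and emit them in blocks of at least `limit` bytes
    mbuf = b""
    for chunk in chunks:
        mbuf += chunk
        if len(mbuf) >= limit:
            yield mbuf
            mbuf = b""
    if mbuf:
        yield mbuf  # flush whatever remains at the end

# example: a thousand 100-byte pieces become a handful of ~16 KiB writes
parts = (b"x" * 100 for _ in range(1000))
print([len(b) for b in coalesce(parts)])
```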
@@ -15,6 +15,7 @@ from .util import (
|
||||
E_ADDR_IN_USE,
|
||||
E_ADDR_NOT_AVAIL,
|
||||
E_UNREACH,
|
||||
IP6ALL,
|
||||
Netdev,
|
||||
min_ex,
|
||||
sunpack,
|
||||
@@ -240,6 +241,11 @@ class TcpSrv(object):
|
||||
raise OSError(E_ADDR_IN_USE[0], "")
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
try:
|
||||
srv.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||
@@ -254,6 +260,9 @@ class TcpSrv(object):
|
||||
srvs: list[socket.socket] = []
|
||||
for srv in self.srv:
|
||||
ip, port = srv.getsockname()[:2]
|
||||
if ip == IP6ALL:
|
||||
ip = "::" # jython
|
||||
|
||||
try:
|
||||
srv.listen(self.args.nc)
|
||||
try:
|
||||
@@ -275,6 +284,8 @@ class TcpSrv(object):
|
||||
srv.close()
|
||||
continue
|
||||
|
||||
t = "\n\nERROR: could not open listening socket, probably because one of the server ports ({}) is busy on one of the requested interfaces ({}); avoid this issue by specifying a different port (-p 3939) and/or a specific interface to listen on (-i 192.168.56.1)\n"
|
||||
self.log("tcpsrv", t.format(port, ip), 1)
|
||||
raise
|
||||
|
||||
bound.append((ip, port))
|
||||
@@ -298,6 +309,7 @@ class TcpSrv(object):
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
self.hub.restart_ftpd()
|
||||
self.hub.restart_tftpd()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
|
||||
copyparty/tftpd.py (433 lines, new file)
@@ -0,0 +1,433 @@
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
try:
|
||||
from types import SimpleNamespace
|
||||
except:
|
||||
|
||||
class SimpleNamespace(object):
|
||||
def __init__(self, **attr):
|
||||
self.__dict__.update(attr)
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import stat
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
pass
|
||||
|
||||
from partftpy import (
|
||||
TftpContexts,
|
||||
TftpPacketFactory,
|
||||
TftpPacketTypes,
|
||||
TftpServer,
|
||||
TftpStates,
|
||||
)
|
||||
from partftpy.TftpShared import TftpException
|
||||
|
||||
from .__init__ import EXE, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
|
||||
lg = logging.getLogger("tftp")
|
||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||
|
||||
|
||||
def noop(*a, **ka) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _serverInitial(self, pkt: Any, raddress: str, rport: int) -> bool:
|
||||
info("connection from %s:%s", raddress, rport)
|
||||
ret = _sinitial[0](self, pkt, raddress, rport)
|
||||
nm = _hub[0].args.tftp_ipa_nm
|
||||
if nm and not nm.map(raddress):
|
||||
yeet("client rejected (--tftp-ipa): %s" % (raddress,))
|
||||
return ret
|
||||
|
||||
|
||||
# patch ipa-check into partftpd (part 1/2)
|
||||
_hub: list["SvcHub"] = []
|
||||
_sinitial: list[Any] = []
|
||||
|
||||
|
||||
class Tftpd(object):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.asrv = hub.asrv
|
||||
self.log = hub.log
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
_hub[:] = []
|
||||
_hub.append(hub)
|
||||
|
||||
lg.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||
for x in ["partftpy", "partftpy.TftpStates", "partftpy.TftpServer"]:
|
||||
lgr = logging.getLogger(x)
|
||||
lgr.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||
|
||||
if not self.args.tftpv and not self.args.tftpvv:
|
||||
# contexts -> states -> packettypes -> shared
|
||||
# contexts -> packetfactory
|
||||
# packetfactory -> packettypes
|
||||
Cs = [
|
||||
TftpPacketTypes,
|
||||
TftpPacketFactory,
|
||||
TftpStates,
|
||||
TftpContexts,
|
||||
TftpServer,
|
||||
]
|
||||
cbak = []
|
||||
if not self.args.tftp_no_fast and not EXE:
|
||||
try:
|
||||
import inspect
|
||||
|
||||
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
||||
for C in Cs:
|
||||
cbak.append(C.__dict__)
|
||||
src1 = inspect.getsource(C).split("\n")
|
||||
src2 = "\n".join([ptn.sub("\\1pass", ln) for ln in src1])
|
||||
cfn = C.__spec__.origin
|
||||
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
||||
except Exception:
|
||||
t = "failed to optimize tftp code; run with --tftp-noopt if there are issues:\n"
|
||||
self.log("tftp", t + min_ex(), 3)
|
||||
for n, zd in enumerate(cbak):
|
||||
Cs[n].__dict__ = zd
|
||||
|
||||
for C in Cs:
|
||||
C.log.debug = noop
|
||||
|
||||
# patch ipa-check into partftpd (part 2/2)
|
||||
_sinitial[:] = []
|
||||
_sinitial.append(TftpStates.TftpServerState.serverInitial)
|
||||
TftpStates.TftpServerState.serverInitial = _serverInitial
|
||||
|
||||
# patch vfs into partftpy
|
||||
TftpContexts.open = self._open
|
||||
TftpStates.open = self._open
|
||||
|
||||
fos = SimpleNamespace()
|
||||
for k in os.__dict__:
|
||||
try:
|
||||
setattr(fos, k, getattr(os, k))
|
||||
except:
|
||||
pass
|
||||
fos.access = self._access
|
||||
fos.mkdir = self._mkdir
|
||||
fos.unlink = self._unlink
|
||||
fos.sep = "/"
|
||||
TftpContexts.os = fos
|
||||
TftpServer.os = fos
|
||||
TftpStates.os = fos
|
||||
|
||||
fop = SimpleNamespace()
|
||||
for k in os.path.__dict__:
|
||||
try:
|
||||
setattr(fop, k, getattr(os.path, k))
|
||||
except:
|
||||
pass
|
||||
fop.abspath = self._p_abspath
|
||||
fop.exists = self._p_exists
|
||||
fop.isdir = self._p_isdir
|
||||
fop.normpath = self._p_normpath
|
||||
fos.path = fop
|
||||
|
||||
self._disarm(fos)
|
||||
|
||||
ip = next((x for x in self.args.i if ":" not in x), None)
|
||||
if not ip:
|
||||
self.log("tftp", "IPv6 not supported for tftp; listening on 0.0.0.0", 3)
|
||||
ip = "0.0.0.0"
|
||||
|
||||
self.port = int(self.args.tftp)
|
||||
self.srv = []
|
||||
self.ips = []
|
||||
|
||||
ports = []
|
||||
if self.args.tftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.tftp_pr.split("-")]
|
||||
ports = list(range(p1, p2 + 1))
|
||||
|
||||
ips = self.args.i
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
for ip in ips:
|
||||
name = "tftp_%s" % (ip,)
|
||||
Daemon(self._start, name, [ip, ports])
|
||||
time.sleep(0.2) # give dualstack a chance
|
||||
|
||||
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("tftp", msg, c)
|
||||
|
||||
def _start(self, ip, ports):
|
||||
fam = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
||||
have_been_alive = False
|
||||
while True:
|
||||
srv = TftpServer.TftpServer("/", self._ls)
|
||||
with self.mutex:
|
||||
self.srv.append(srv)
|
||||
self.ips.append(ip)
|
||||
|
||||
try:
|
||||
# this is the listen loop; it should block forever
|
||||
srv.listen(ip, self.port, af_family=fam, ports=ports)
|
||||
except:
|
||||
with self.mutex:
|
||||
self.srv.remove(srv)
|
||||
self.ips.remove(ip)
|
||||
|
||||
try:
|
||||
srv.sock.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
bound = bool(srv.listenport)
|
||||
except:
|
||||
bound = False
|
||||
|
||||
if bound:
|
||||
# this instance has managed to bind at least once
|
||||
have_been_alive = True
|
||||
|
||||
if have_been_alive:
|
||||
t = "tftp server [%s]:%d crashed; restarting in 3 sec:\n%s"
|
||||
error(t, ip, self.port, min_ex())
|
||||
time.sleep(3)
|
||||
continue
|
||||
|
||||
# server failed to start; could be due to dualstack (ipv6 managed to bind and this is ipv4)
|
||||
if ip != "0.0.0.0" or "::" not in self.ips:
|
||||
# nope, it's fatal
|
||||
t = "tftp server [%s]:%d failed to start:\n%s"
|
||||
error(t, ip, self.port, min_ex())
|
||||
|
||||
# yep; ignore
|
||||
# (TODO: move the "listening @ ..." infolog in partftpy to
|
||||
# after the bind attempt so it doesn't print twice)
|
||||
return
|
||||
|
||||
info("tftp server [%s]:%d terminated", ip, self.port)
|
||||
break
|
||||
|
||||
def stop(self):
|
||||
with self.mutex:
|
||||
srvs = self.srv[:]
|
||||
|
||||
for srv in srvs:
|
||||
srv.stop()
|
||||
|
||||
def _v2a(self, caller: str, vpath: str, perms: list, *a: Any) -> tuple[VFS, str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
if not perms:
|
||||
perms = [True, True]
|
||||
|
||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||
# generate file listing if vpath is dir.txt and return as file object
|
||||
if not force:
|
||||
vpath, fn = os.path.split(vpath.replace("\\", "/"))
|
||||
ptn = self.args.tftp_lsf
|
||||
if not ptn or not ptn.match(fn.lower()):
|
||||
return None
|
||||
|
||||
vn, rem = self.asrv.vfs.get(vpath, "*", True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem,
|
||||
"*",
|
||||
not self.args.no_scandir,
|
||||
[[True, False]],
|
||||
)
|
||||
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||
real1 = dirs1 + fils1
|
||||
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
||||
reals = [
|
||||
(
|
||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||
% (
|
||||
zd.year,
|
||||
zd.month,
|
||||
zd.day,
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
),
|
||||
sz,
|
||||
fn,
|
||||
)
|
||||
for zd, sz, fn in realt
|
||||
]
|
||||
virs = [("????-??-?? ??:??:??", 0, k + "/") for k in vfs_virt.keys()]
|
||||
ls = virs + reals
|
||||
|
||||
if "*" not in vn.axs.udot:
|
||||
names = set(exclude_dotfiles([x[2] for x in ls]))
|
||||
ls = [x for x in ls if x[2] in names]
|
||||
|
||||
try:
|
||||
biggest = max([x[1] for x in ls])
|
||||
except:
|
||||
biggest = 0
|
||||
|
||||
perms = []
|
||||
if "*" in vn.axs.uread:
|
||||
perms.append("read")
|
||||
if "*" in vn.axs.udot:
|
||||
perms.append("hidden")
|
||||
if "*" in vn.axs.uwrite:
|
||||
if "*" in vn.axs.udel:
|
||||
perms.append("overwrite")
|
||||
else:
|
||||
perms.append("write")
|
||||
|
||||
fmt = "{{}} {{:{},}} {{}}"
|
||||
fmt = fmt.format(len("{:,}".format(biggest)))
|
||||
retl = ["# permissions: %s" % (", ".join(perms),)]
|
||||
retl += [fmt.format(*x) for x in ls]
|
||||
ret = "\n".join(retl).encode("utf-8", "replace")
|
||||
return BytesIO(ret + b"\n")
|
||||
|
||||
def _open(self, vpath: str, mode: str, *a: Any, **ka: Any) -> Any:
|
||||
rd = wr = False
|
||||
if mode == "rb":
|
||||
rd = True
|
||||
elif mode == "wb":
|
||||
wr = True
|
||||
else:
|
||||
raise Exception("bad mode %s" % (mode,))
|
||||
|
||||
vfs, ap = self._v2a("open", vpath, [rd, wr])
|
||||
if wr:
|
||||
if "*" not in vfs.axs.uwrite:
|
||||
yeet("blocked write; folder not world-writable: /%s" % (vpath,))
|
||||
|
||||
if bos.path.exists(ap) and "*" not in vfs.axs.udel:
|
||||
yeet("blocked write; folder not world-deletable: /%s" % (vpath,))
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "8.3.8.7", 0, ""
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
|
||||
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||
return self._ls(vpath, "", 0, True)
|
||||
|
||||
return open(ap, mode, *a, **ka)
|
||||
|
||||
def _mkdir(self, vpath: str, *a) -> None:
|
||||
vfs, ap = self._v2a("mkdir", vpath, [])
|
||||
if "*" not in vfs.axs.uwrite:
|
||||
yeet("blocked mkdir; folder not world-writable: /%s" % (vpath,))
|
||||
|
||||
return bos.mkdir(ap)
|
||||
|
||||
def _unlink(self, vpath: str) -> None:
|
||||
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
|
||||
vfs, ap = self._v2a("delete", vpath, [True, False, False, True])
|
||||
|
||||
try:
|
||||
inf = bos.stat(ap)
|
||||
except:
|
||||
return
|
||||
|
||||
if not stat.S_ISREG(inf.st_mode) or inf.st_size:
|
||||
yeet("attempted delete of non-empty file")
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm("*", "8.3.8.7", [vpath], [], False, False)
|
||||
|
||||
def _access(self, *a: Any) -> bool:
|
||||
return True
|
||||
|
||||
def _p_abspath(self, vpath: str) -> str:
|
||||
return "/" + undot(vpath)
|
||||
|
||||
def _p_normpath(self, *a: Any) -> str:
|
||||
return ""
|
||||
|
||||
def _p_exists(self, vpath: str) -> bool:
|
||||
try:
|
||||
ap = self._v2a("p.exists", vpath, [False, False])[1]
|
||||
bos.stat(ap)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
st = bos.stat(self._v2a("p.isdir", vpath, [False, False])[1])
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
return ret
|
||||
except:
|
||||
return False
|
||||
|
||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||
src = inspect.currentframe().f_back.f_code.co_name
|
||||
error("\033[31m%s:hook(%s)\033[0m", src, a)
|
||||
raise Exception("nope")
|
||||
|
||||
def _disarm(self, fos: SimpleNamespace) -> None:
|
||||
fos.chmod = self._hook
|
||||
fos.chown = self._hook
|
||||
fos.close = self._hook
|
||||
fos.ftruncate = self._hook
|
||||
fos.lchown = self._hook
|
||||
fos.link = self._hook
|
||||
fos.listdir = self._hook
|
||||
fos.lstat = self._hook
|
||||
fos.open = self._hook
|
||||
fos.remove = self._hook
|
||||
fos.rename = self._hook
|
||||
fos.replace = self._hook
|
||||
fos.scandir = self._hook
|
||||
fos.stat = self._hook
|
||||
fos.symlink = self._hook
|
||||
fos.truncate = self._hook
|
||||
fos.utime = self._hook
|
||||
fos.walk = self._hook
|
||||
|
||||
fos.path.expanduser = self._hook
|
||||
fos.path.expandvars = self._hook
|
||||
fos.path.getatime = self._hook
|
||||
fos.path.getctime = self._hook
|
||||
fos.path.getmtime = self._hook
|
||||
fos.path.getsize = self._hook
|
||||
fos.path.isabs = self._hook
|
||||
fos.path.isfile = self._hook
|
||||
fos.path.islink = self._hook
|
||||
fos.path.realpath = self._hook
|
||||
|
||||
|
||||
def yeet(msg: str) -> None:
|
||||
warning(msg)
|
||||
raise TftpException(msg)
|
||||
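The new tftpd.py above sandboxes partftpy by handing it a `SimpleNamespace` that mirrors `os` and `os.path`, swapping the path helpers for VFS-aware versions and pointing every other filesystem call at a `_hook` that raises. A stripped-down sketch of that disarming trick, using hypothetical names and only the standard library:

```python
import os
from types import SimpleNamespace

def _blocked(*a, **ka):
    # anything that was not explicitly kept fails loudly instead of touching the disk
    raise RuntimeError("filesystem access blocked: %r" % (a,))

def make_sandboxed_os(allowed_root: str) -> SimpleNamespace:
    fos = SimpleNamespace(**{k: getattr(os, k) for k in dir(os) if not k.startswith("_")})
    fop = SimpleNamespace(**{k: getattr(os.path, k) for k in dir(os.path) if not k.startswith("_")})

    # disarm the destructive entry points
    for name in ("remove", "rename", "unlink", "rmdir", "chmod", "chown", "symlink", "truncate"):
        setattr(fos, name, _blocked)

    # keep path checks, but pin them under one root
    fop.abspath = lambda p: os.path.join(allowed_root, p.lstrip("/"))
    fos.path = fop
    return fos

sandbox = make_sandboxed_os("/srv/tftp")
print(sandbox.path.abspath("firmware.bin"))  # /srv/tftp/firmware.bin
# sandbox.remove("x") would raise RuntimeError
```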
@@ -31,7 +31,7 @@ class ThumbCli(object):
|
||||
if not c:
|
||||
raise Exception()
|
||||
except:
|
||||
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
c = {k: set() for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
|
||||
self.thumbable = c["thumbable"]
|
||||
self.fmt_pil = c["pil"]
|
||||
@@ -78,23 +78,41 @@ class ThumbCli(object):
|
||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
|
||||
return os.path.join(ptop, rem)
|
||||
|
||||
if fmt == "j" and self.args.th_no_jpg:
|
||||
fmt = "w"
|
||||
if fmt[:1] in "jw":
|
||||
sfmt = fmt[:1]
|
||||
|
||||
if fmt == "w":
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
fmt = "j"
|
||||
if sfmt == "j" and self.args.th_no_jpg:
|
||||
sfmt = "w"
|
||||
|
||||
if sfmt == "w":
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
sfmt = "j"
|
||||
|
||||
vf_crop = dbv.flags["crop"]
|
||||
vf_th3x = dbv.flags["th3x"]
|
||||
|
||||
if "f" in vf_crop:
|
||||
sfmt += "f" if "n" in vf_crop else ""
|
||||
else:
|
||||
sfmt += "f" if "f" in fmt else ""
|
||||
|
||||
if "f" in vf_th3x:
|
||||
sfmt += "3" if "y" in vf_th3x else ""
|
||||
else:
|
||||
sfmt += "3" if "3" in fmt else ""
|
||||
|
||||
fmt = sfmt
|
||||
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
tpaths = [tpath]
|
||||
if fmt == "w":
|
||||
# also check for jpg (maybe webp is unavailable)
|
||||
@@ -108,6 +126,7 @@ class ThumbCli(object):
|
||||
if st.st_size:
|
||||
ret = tpath = tp
|
||||
fmt = ret.rsplit(".")[1]
|
||||
break
|
||||
else:
|
||||
abort = True
|
||||
except:
|
||||
|
||||
@@ -13,11 +13,12 @@ import time
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
BytesIO,
|
||||
BytesIO, # type: ignore
|
||||
Cooldown,
|
||||
Daemon,
|
||||
Pebkac,
|
||||
@@ -27,6 +28,7 @@ from .util import (
|
||||
runcmd,
|
||||
statdir,
|
||||
vsplit,
|
||||
wunlink,
|
||||
)
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
@@ -36,14 +38,21 @@ if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_PILF = False
|
||||
HAVE_HEIF = False
|
||||
HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import ExifTags, Image, ImageOps
|
||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
ImageFont.load_default(size=16)
|
||||
HAVE_PILF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
@@ -78,17 +87,23 @@ except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
# base64 = 64 = 4096
|
||||
rd, fn = vsplit(rem)
|
||||
if rd:
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
else:
|
||||
rd = "top"
|
||||
if not rd:
|
||||
rd = "\ntop"
|
||||
|
||||
# spectrograms are never cropped; strip fullsize flag
|
||||
ext = rem.split(".")[-1].lower()
|
||||
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
||||
fmt = fmt.replace("f", "")
|
||||
|
||||
rd += "\n" + fmt
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(afsenc(fn)).digest()
|
||||
@@ -97,10 +112,11 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
if fmt in ("opus", "caf"):
|
||||
cat = "ac"
|
||||
else:
|
||||
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
|
||||
fc = fmt[:1]
|
||||
fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
|
||||
cat = "th"
|
||||
|
||||
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||
return "%s/%s/%s/%s.%x.%s" % (histpath, cat, rd, fn, int(mtime), fmt)
|
||||
|
||||
|
||||
class ThumbSrv(object):
|
||||
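`thumb_path` above now folds the requested format into the directory hash and strips the fullsize flag for spectrogram sources, so equivalent requests resolve to the same cached file. For reference, a small self-contained sketch of this bucketed-hash cache layout (illustrative, not the exact copyparty helper):

```python
import base64
import hashlib

def cache_path(histpath: str, rd: str, fn: str, mtime: float, fmt: str) -> str:
    # hash the directory plus the format into a two-level bucket...
    h = hashlib.sha512((rd + "\n" + fmt).encode("utf-8", "replace")).digest()
    b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
    bucket = "%s/%s/%s" % (b64[:2].lower(), b64[2:4].lower(), b64)

    # ...and the filename into a short opaque name, to stay well under path-length limits
    h = hashlib.sha512(fn.encode("utf-8", "replace")).digest()
    name = base64.urlsafe_b64encode(h).decode("ascii")[:24]

    ext = "webp" if fmt.startswith("w") else "jpg"
    return "%s/th/%s/%s.%x.%s" % (histpath, bucket, name, int(mtime), ext)

print(cache_path("/srv/.hist", "music/album", "cover.flac", 1700000000.0, "w3"))
```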
@@ -110,16 +126,16 @@ class ThumbSrv(object):
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
res = hub.args.th_size.split("x")
|
||||
self.res = tuple([int(x) for x in res])
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy: dict[str, list[threading.Condition]] = {}
|
||||
self.ram: dict[str, float] = {}
|
||||
self.memcond = threading.Condition(self.mutex)
|
||||
self.stopping = False
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
|
||||
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
|
||||
|
||||
@@ -184,20 +200,25 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def getres(self, vn: VFS, fmt: str) -> tuple[int, int]:
|
||||
mul = 3 if "3" in fmt else 1
|
||||
w, h = vn.flags["thsize"].split("x")
|
||||
return int(w) * mul, int(h) * mul
|
||||
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition(self.mutex)
|
||||
do_conv = False
|
||||
with self.mutex:
|
||||
try:
|
||||
self.busy[tpath].append(cond)
|
||||
self.log("wait {}".format(tpath))
|
||||
self.log("joined waiting room for %s" % (tpath,))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
bos.makedirs(os.path.join(thdir, "w"))
|
||||
@@ -211,8 +232,14 @@ class ThumbSrv(object):
|
||||
do_conv = True
|
||||
|
||||
if do_conv:
|
||||
self.q.put((abspath, tpath))
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
allvols = list(self.asrv.vfs.all_vols.values())
|
||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||
if not vn:
|
||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
||||
vn = self.asrv.vfs.all_aps[0][1]
|
||||
|
||||
self.q.put((abspath, tpath, fmt, vn))
|
||||
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
with self.mutex:
|
||||
@@ -242,13 +269,30 @@ class ThumbSrv(object):
"ffa": self.fmt_ffa,
}

def wait4ram(self, need: float, ttpath: str) -> None:
ram = self.args.th_ram_max
if need > ram * 0.99:
t = "file too big; need %.2f GiB RAM, but --th-ram-max is only %.1f"
raise Exception(t % (need, ram))

while True:
with self.mutex:
used = sum([v for k, v in self.ram.items() if k != ttpath]) + need
if used < ram:
# self.log("XXX self.ram: %s" % (self.ram,), 5)
self.ram[ttpath] = need
return
with self.memcond:
# self.log("at RAM limit; used %.2f GiB, need %.2f more" % (used-need, need), 1)
self.memcond.wait(3)

def worker(self) -> None:
while not self.stopping:
task = self.q.get()
if not task:
break

abspath, tpath = task
abspath, tpath, fmt, vn = task
ext = abspath.split(".")[-1].lower()
png_ok = False
funs = []
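`wait4ram` above gives the thumbnailer a soft memory budget: each pending conversion registers its estimated need, and a worker that would push the total past `--th-ram-max` blocks on a condition variable until another job finishes and notifies. A minimal generic sketch of that gate, with a hypothetical class name rather than the real ThumbSrv API:

```python
import threading

class RamGate:
    def __init__(self, budget_gib: float) -> None:
        self.budget = budget_gib
        self.cond = threading.Condition()
        self.claims: dict[str, float] = {}  # job-id -> reserved GiB

    def acquire(self, job: str, need: float) -> None:
        if need > self.budget * 0.99:
            raise Exception("need %.2f GiB but the budget is only %.1f" % (need, self.budget))
        with self.cond:
            # re-check every few seconds until the reservation fits
            while sum(v for k, v in self.claims.items() if k != job) + need >= self.budget:
                self.cond.wait(3)
            self.claims[job] = need

    def release(self, job: str) -> None:
        with self.cond:
            self.claims.pop(job, None)
            self.cond.notify_all()
```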
@@ -275,13 +319,13 @@ class ThumbSrv(object):
|
||||
tdir, tfn = os.path.split(tpath)
|
||||
ttpath = os.path.join(tdir, "w", tfn)
|
||||
try:
|
||||
bos.unlink(ttpath)
|
||||
wunlink(self.log, ttpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
for fun in funs:
|
||||
try:
|
||||
fun(abspath, ttpath)
|
||||
fun(abspath, ttpath, fmt, vn)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
@@ -295,7 +339,7 @@ class ThumbSrv(object):
|
||||
else:
|
||||
# ffmpeg may spawn empty files on windows
|
||||
try:
|
||||
os.unlink(ttpath)
|
||||
wunlink(self.log, ttpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -307,17 +351,22 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
subs = self.busy[tpath]
|
||||
del self.busy[tpath]
|
||||
self.ram.pop(ttpath, None)
|
||||
|
||||
for x in subs:
|
||||
with x:
|
||||
x.notify_all()
|
||||
|
||||
with self.memcond:
|
||||
self.memcond.notify_all()
|
||||
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
|
||||
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
r = max(*self.res) * 2
|
||||
res = self.getres(vn, fmt)
|
||||
r = max(*res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
try:
|
||||
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
|
||||
@@ -331,23 +380,24 @@ class ThumbSrv(object):
|
||||
if rot in rots:
|
||||
im = im.transpose(rots[rot])
|
||||
|
||||
if self.args.th_no_crop:
|
||||
im.thumbnail(self.res, resample=Image.LANCZOS)
|
||||
if "f" in fmt:
|
||||
im.thumbnail(res, resample=Image.LANCZOS)
|
||||
else:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
dw, dh = res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath: str, tpath: str) -> None:
|
||||
def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im = self.fancy_pillow(im)
|
||||
im = self.fancy_pillow(im, fmt, vn)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "90")
|
||||
im.thumbnail(self.res)
|
||||
im.thumbnail(self.getres(vn, fmt))
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
args = {"quality": 40}
|
||||
@@ -358,7 +408,7 @@ class ThumbSrv(object):
|
||||
# method 0 = pillow-default, fast
|
||||
# method 4 = ffmpeg-default
|
||||
# method 6 = max, slow
|
||||
fmts += ["RGBA", "LA"]
|
||||
fmts.extend(("RGBA", "LA"))
|
||||
args["method"] = 6
|
||||
else:
|
||||
# default q = 75
|
||||
@@ -370,12 +420,13 @@ class ThumbSrv(object):
|
||||
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_vips(self, abspath: str, tpath: str) -> None:
|
||||
def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
crops = ["centre", "none"]
|
||||
if self.args.th_no_crop:
|
||||
if "f" in fmt:
|
||||
crops = ["none"]
|
||||
|
||||
w, h = self.res
|
||||
w, h = self.getres(vn, fmt)
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
@@ -387,10 +438,12 @@ class ThumbSrv(object):
|
||||
if c == crops[-1]:
|
||||
raise
|
||||
|
||||
assert img # type: ignore
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -402,12 +455,13 @@ class ThumbSrv(object):
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
if "f" in fmt:
|
||||
scale += "decrease,setsar=1:1"
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
bscale = scale.format(*list(self.res)).encode("utf-8")
|
||||
res = self.getres(vn, fmt)
|
||||
bscale = scale.format(*list(res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -439,11 +493,11 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def _run_ff(self, cmd: list[bytes]) -> None:
|
||||
def _run_ff(self, cmd: list[bytes], vn: VFS, oom: int = 400) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
|
||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=oom)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -486,13 +540,26 @@ class ThumbSrv(object):
|
||||
self.log(t + txt, c=c)
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def conv_waves(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_waves(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
flt = (
|
||||
b"[0:a:0]"
|
||||
# jt_versi.xm: 405M/839s
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||
need = 0.2 + dur / 3000
|
||||
speedup = b""
|
||||
if need > self.args.th_ram_max * 0.7:
|
||||
self.log("waves too big (need %.2f GiB); trying to optimize" % (need,))
|
||||
need = 0.2 + dur / 4200 # only helps about this much...
|
||||
speedup = b"aresample=8000,"
|
||||
if need > self.args.th_ram_max * 0.96:
|
||||
raise Exception("file too big; cannot waves")
|
||||
|
||||
self.wait4ram(need, tpath)
|
||||
|
||||
flt = b"[0:a:0]" + speedup
|
||||
flt += (
|
||||
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
|
||||
b",volume=2"
|
||||
b",showwavespic=s=2048x64:colors=white"
|
||||
@@ -512,14 +579,27 @@ class ThumbSrv(object):
|
||||
# fmt: on
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||
# https://trac.ffmpeg.org/ticket/10797
|
||||
# expect 1 GiB every 600 seconds when duration is tricky;
|
||||
# simple filetypes are generally safer so let's special-case those
|
||||
safe = ("flac", "wav", "aif", "aiff", "opus")
|
||||
coeff = 1800 if abspath.split(".")[-1].lower() in safe else 600
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||
need = 0.2 + dur / coeff
|
||||
self.wait4ram(need, tpath)
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s="
|
||||
if "3" in fmt:
|
||||
fc += "1280x1024,crop=1420:1056:70:48[o]"
|
||||
else:
|
||||
fc += "640x512,crop=780:544:70:48[o]"
|
||||
|
||||
if self.args.th_ff_swr:
|
||||
fco = ":filter_size=128:cutoff=0.877"
|
||||
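The spectrogram converter above now estimates its RAM footprint before calling ffmpeg: a 0.2 GiB floor plus roughly 1 GiB per 600 seconds of audio for tricky containers, or per 1800 seconds for simple ones like flac/wav, and feeds that number into `wait4ram`. A quick worked example of the heuristic (these are scheduling estimates, not measurements):

```python
def spec_ram_estimate(ext: str, dur_sec: float) -> float:
    safe = ("flac", "wav", "aif", "aiff", "opus")
    coeff = 1800 if ext.lower() in safe else 600
    return 0.2 + dur_sec / coeff

print(spec_ram_estimate("flac", 600))  # 10-minute flac -> ~0.53 GiB
print(spec_ram_estimate("m4a", 600))   # 10-minute m4a  -> ~1.2 GiB
```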
@@ -555,13 +635,14 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str) -> None:
|
||||
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
self.wait4ram(0.2, tpath)
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -576,7 +657,7 @@ class ThumbSrv(object):
|
||||
if want_caf:
|
||||
tmp_opus = tpath + ".opus"
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
wunlink(self.log, tmp_opus, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -597,7 +678,7 @@ class ThumbSrv(object):
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
# iOS fails to play some "insufficiently complex" files
|
||||
# (average file shorter than 8 seconds), so of course we
|
||||
@@ -621,7 +702,7 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
elif want_caf:
|
||||
# simple remux should be safe
|
||||
@@ -639,11 +720,11 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
if tmp_opus != tpath:
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
wunlink(self.log, tmp_opus, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -670,7 +751,10 @@ class ThumbSrv(object):
|
||||
else:
|
||||
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
|
||||
|
||||
ndirs += self.clean(histpath)
|
||||
try:
|
||||
ndirs += self.clean(histpath)
|
||||
except Exception as ex:
|
||||
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
|
||||
|
||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .authsrv import LEELOO_DALLAS, VFS
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
from .util import (
|
||||
@@ -20,6 +21,7 @@ from .util import (
|
||||
min_ex,
|
||||
quotep,
|
||||
s3dec,
|
||||
vjoin,
|
||||
)
|
||||
|
||||
if HAVE_SQLITE3:
|
||||
@@ -61,7 +63,7 @@ class U2idx(object):
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
||||
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -75,7 +77,7 @@ class U2idx(object):
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
||||
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
@@ -101,7 +103,7 @@ class U2idx(object):
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
|
||||
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("up")').fetchone()
|
||||
self.log("ro: {}".format(db_path))
|
||||
@@ -113,14 +115,14 @@ class U2idx(object):
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
|
||||
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
||||
self, uname: str, vols: list[VFS], uq: str, lim: int
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -129,7 +131,6 @@ class U2idx(object):
|
||||
q = ""
|
||||
v: Union[str, int] = ""
|
||||
va: list[Union[str, int]] = []
|
||||
have_up = False # query has up.* operands
|
||||
have_mt = False
|
||||
is_key = True
|
||||
is_size = False
|
||||
@@ -174,21 +175,21 @@ class U2idx(object):
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
have_up = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
have_up = True
|
||||
|
||||
elif v == "up_at":
|
||||
v = "up.at"
|
||||
is_date = True
|
||||
|
||||
elif v == "path":
|
||||
v = "trim(?||up.rd,'/')"
|
||||
va.append("\nrd")
|
||||
have_up = True
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
have_up = True
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
have_mt = True
|
||||
@@ -264,19 +265,24 @@ class U2idx(object):
|
||||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(vols, q, va, have_up, have_mt, lim)
|
||||
return self.run_query(uname, vols, q, va, have_mt, lim)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(
|
||||
self,
|
||||
vols: list[tuple[str, str, dict[str, Any]]],
|
||||
uname: str,
|
||||
vols: list[VFS],
|
||||
uq: str,
|
||||
uv: list[Union[str, int]],
|
||||
have_up: bool,
|
||||
have_mt: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
if self.args.srch_dbg:
|
||||
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
|
||||
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
|
||||
self.log(t % (len(vols), zs), 5)
|
||||
|
||||
done_flag: list[bool] = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
@@ -295,13 +301,35 @@ class U2idx(object):
|
||||
|
||||
ret = []
|
||||
seen_rps: set[str] = set()
|
||||
lim = min(lim, int(self.args.srch_hits))
|
||||
clamp = int(self.args.srch_hits)
|
||||
if lim >= clamp:
|
||||
lim = clamp
|
||||
clamped = True
|
||||
else:
|
||||
clamped = False
|
||||
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
for vol in vols:
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
vtop = vol.vpath
|
||||
ptop = vol.realpath
|
||||
flags = vol.flags
|
||||
|
||||
cur = self.get_cur(ptop)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
excl = []
|
||||
for vp2 in self.asrv.vfs.all_vols.keys():
|
||||
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
|
||||
excl.append(vp2[len(vtop) :].lstrip("/"))
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "searching in volume /%s (%s), excludelist %s"
|
||||
self.log(t % (vtop, ptop, excl), 5)
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
vuv = []
|
||||
@@ -313,7 +341,8 @@ class U2idx(object):
|
||||
|
||||
sret = []
|
||||
fk = flags.get("fk")
|
||||
dots = flags.get("dotsrch")
|
||||
dots = flags.get("dotsrch") and uname in vol.axs.udot
|
||||
fk_alg = 2 if "fka" in flags else 1
|
||||
c = cur.execute(uq, tuple(vuv))
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
@@ -321,6 +350,13 @@ class U2idx(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
|
||||
if self.args.srch_dbg:
|
||||
zs = vjoin(vjoin(vtop, rd), fn)
|
||||
t = "database inconsistency in volume '/%s'; ignoring: %s"
|
||||
self.log(t % (vtop, zs), 1)
|
||||
continue
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
||||
if not dots and "/." in ("/" + rp):
|
||||
continue
|
||||
@@ -333,21 +369,35 @@ class U2idx(object):
|
||||
else:
|
||||
try:
|
||||
ap = absreal(os.path.join(ptop, rd, fn))
|
||||
inf = bos.stat(ap)
|
||||
ino = 0 if ANYWIN or fk_alg == 2 else bos.stat(ap).st_ino
|
||||
except:
|
||||
continue
|
||||
|
||||
suf = (
|
||||
"?k="
|
||||
+ gen_filekey(
|
||||
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||
)[:fk]
|
||||
)
|
||||
suf = "?k=" + gen_filekey(
|
||||
fk_alg,
|
||||
self.args.fk_salt,
|
||||
ap,
|
||||
sz,
|
||||
ino,
|
||||
)[:fk]
|
||||
|
||||
lim -= 1
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "in volume '/%s': hit: %s"
|
||||
self.log(t % (vtop, rp), 5)
|
||||
|
||||
zs = vjoin(vtop, rp)
|
||||
chk_vn, _ = self.asrv.vfs.get(zs, LEELOO_DALLAS, True, False)
|
||||
chk_vn = chk_vn.dbv or chk_vn
|
||||
if chk_vn.vpath != vtop:
|
||||
raise Exception(
|
||||
"database inconsistency! in volume '/%s' (%s), found file [%s] which belongs to volume '/%s' (%s)"
|
||||
% (vtop, ptop, zs, chk_vn.vpath, chk_vn.realpath)
|
||||
)
|
||||
|
||||
seen_rps.add(rp)
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
|
||||
|
||||
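The search results above now build their `?k=` suffix with the updated `gen_filekey` call, which takes an algorithm number up front and only stats the inode when the legacy algorithm needs it. The underlying idea is a salted, truncated hash that gates direct downloads; a toy illustration with a hypothetical helper, not copyparty's exact derivation:

```python
import base64
import hashlib

def filekey(salt: str, abspath: str, size: int, inode: int, length: int = 8) -> str:
    msg = "%s %s %d %d" % (salt, abspath, size, inode)
    dig = hashlib.sha512(msg.encode("utf-8")).digest()
    return base64.urlsafe_b64encode(dig).decode("ascii")[:length]

# appended to result URLs as ?k=..., so only clients holding the key can fetch the file
print("/music/song.flac?k=" + filekey("s3cr3t", "/srv/music/song.flac", 12345678, 0))
```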
@@ -365,12 +415,16 @@ class U2idx(object):
|
||||
ret.extend(sret)
|
||||
# print("[{}] {}".format(ptop, sret))
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "in volume '/%s': got %d hits, %d total so far"
|
||||
self.log(t % (vtop, len(sret), len(ret)), 5)
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = ""
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys()), lim < 0
|
||||
return ret, list(taglist.keys()), lim < 0 and not clamped
|
||||
|
||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||
for _ in range(self.timeout):
|
||||
|
||||
copyparty/up2k.py (1181 lines changed; diff suppressed because it is too large)
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import contextlib
|
||||
import errno
|
||||
@@ -25,7 +26,6 @@ import threading
|
||||
import time
|
||||
import traceback
|
||||
from collections import Counter
|
||||
from datetime import datetime
|
||||
from email.utils import formatdate
|
||||
|
||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||
@@ -35,6 +35,35 @@ from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
|
||||
from .__version__ import S_BUILD_DT, S_VERSION
|
||||
from .stolen import surrogateescape
|
||||
|
||||
try:
from datetime import datetime, timezone

UTC = timezone.utc
except:
from datetime import datetime, timedelta, tzinfo

TD_ZERO = timedelta(0)

class _UTC(tzinfo):
def utcoffset(self, dt):
return TD_ZERO

def tzname(self, dt):
return "UTC"

def dst(self, dt):
return TD_ZERO

UTC = _UTC()


if sys.version_info >= (3, 7) or (
sys.version_info >= (3, 6) and platform.python_implementation() == "CPython"
):
ODict = dict
else:
from collections import OrderedDict as ODict


def _ens(want: str) -> tuple[int, ...]:
ret: list[int] = []
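The `UTC` shim above lets the rest of the codebase call timezone-aware `datetime.now(UTC)` everywhere, replacing the deprecated `datetime.utcnow()`, and falls back to a hand-rolled `tzinfo` on interpreters without `datetime.timezone`. A quick usage sketch, assuming the modern branch is available:

```python
from datetime import datetime, timezone

UTC = timezone.utc

now = datetime.now(UTC)                  # aware datetime, replaces datetime.utcnow()
stamp = now.strftime("%Y-%m-%d %H:%M:%S")
epoch = int(now.timestamp())             # no local-timezone surprises
print(stamp, epoch)
```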
@@ -56,6 +85,8 @@ E_ADDR_IN_USE = _ens("EADDRINUSE WSAEADDRINUSE")
|
||||
E_ACCESS = _ens("EACCES WSAEACCES")
|
||||
E_UNREACH = _ens("EHOSTUNREACH WSAEHOSTUNREACH ENETUNREACH WSAENETUNREACH")
|
||||
|
||||
IP6ALL = "0:0:0:0:0:0:0:0"
|
||||
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
@@ -66,7 +97,9 @@ except:
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3 # pylint: disable=unused-import # typechk
|
||||
import sqlite3
|
||||
|
||||
assert hasattr(sqlite3, "connect") # graalpy
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
|
||||
@@ -83,6 +116,11 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Generator, Optional, Pattern, Protocol, Union
|
||||
|
||||
try:
|
||||
from typing import LiteralString
|
||||
except:
|
||||
pass
|
||||
|
||||
class RootLogger(Protocol):
|
||||
def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
return None
|
||||
@@ -112,15 +150,15 @@ if not PY2:
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
else:
|
||||
from StringIO import StringIO as BytesIO
|
||||
from urllib import quote # pylint: disable=no-name-in-module
|
||||
from urllib import unquote # pylint: disable=no-name-in-module
|
||||
from StringIO import StringIO as BytesIO # type: ignore
|
||||
from urllib import quote # type: ignore # pylint: disable=no-name-in-module
|
||||
from urllib import unquote # type: ignore # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
try:
|
||||
struct.unpack(b">i", b"idgi")
|
||||
spack = struct.pack
|
||||
sunpack = struct.unpack
|
||||
spack = struct.pack # type: ignore
|
||||
sunpack = struct.unpack # type: ignore
|
||||
except:
|
||||
|
||||
def spack(fmt: bytes, *a: Any) -> bytes:
|
||||
@@ -130,6 +168,12 @@ except:
|
||||
return struct.unpack(fmt.decode("ascii"), a)
|
||||
|
||||
|
||||
try:
|
||||
BITNESS = struct.calcsize(b"P") * 8
|
||||
except:
|
||||
BITNESS = struct.calcsize("P") * 8
|
||||
|
||||
|
||||
ansi_re = re.compile("\033\\[[^mK]*[mK]")
|
||||
|
||||
|
||||
@@ -142,7 +186,7 @@ else:
|
||||
|
||||
SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
|
||||
|
||||
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
|
||||
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'
|
||||
|
||||
FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
|
||||
|
||||
@@ -171,6 +215,7 @@ HTTPCODE = {
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
503: "Service Unavailable",
|
||||
999: "MissingNo",
|
||||
}
|
||||
|
||||
|
||||
@@ -256,6 +301,13 @@ EXTS["vnd.mozilla.apng"] = "png"
|
||||
MAGIC_MAP = {"jpeg": "jpg"}
|
||||
|
||||
|
||||
DEF_EXP = "self.ip self.ua self.uname self.host cfg.name cfg.logout vf.scan vf.thsize hdr.cf_ipcountry srv.itime srv.htime"
|
||||
|
||||
DEF_MTE = "circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash"
|
||||
|
||||
DEF_MTH = ".vq,.aq,vc,ac,fmt,res,.fps"
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
v: ln.split(" ", 1)[0]
|
||||
for ln in """
|
||||
@@ -293,11 +345,24 @@ REKOBO_KEY = {
|
||||
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
|
||||
|
||||
_exestr = "python3 python ffmpeg ffprobe cfssl cfssljson cfssl-certinfo"
|
||||
CMD_EXEB = set(_exestr.encode("utf-8").split())
|
||||
CMD_EXES = set(_exestr.split())
|
||||
|
||||
|
||||
# mostly from https://github.com/github/gitignore/blob/main/Global/macOS.gitignore
|
||||
APPLESAN_TXT = r"/(__MACOS|Icon\r\r)|/\.(_|DS_Store|AppleDouble|LSOverride|DocumentRevisions-|fseventsd|Spotlight-|TemporaryItems|Trashes|VolumeIcon\.icns|com\.apple\.timemachine\.donotpresent|AppleDB|AppleDesktop|apdisk)"
|
||||
APPLESAN_RE = re.compile(APPLESAN_TXT)
|
||||
|
||||
|
||||
pybin = sys.executable or ""
|
||||
if EXE:
|
||||
pybin = ""
|
||||
for zsg in "python3 python".split():
|
||||
try:
|
||||
if ANYWIN:
|
||||
zsg += ".exe"
|
||||
|
||||
zsg = shutil.which(zsg)
|
||||
if zsg:
|
||||
pybin = zsg
|
||||
@@ -313,11 +378,6 @@ def py_desc() -> str:
|
||||
if ofs > 0:
|
||||
py_ver = py_ver[:ofs]
|
||||
|
||||
try:
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
except:
|
||||
bitness = struct.calcsize("P") * 8
|
||||
|
||||
host_os = platform.system()
|
||||
compiler = platform.python_compiler().split("http")[0]
|
||||
|
||||
@@ -325,11 +385,12 @@ def py_desc() -> str:
|
||||
os_ver = m.group(1) if m else ""
|
||||
|
||||
return "{:>9} v{} on {}{} {} [{}]".format(
|
||||
interp, py_ver, host_os, bitness, os_ver, compiler
|
||||
interp, py_ver, host_os, BITNESS, os_ver, compiler
|
||||
)
|
||||
|
||||
|
||||
def _sqlite_ver() -> str:
|
||||
assert sqlite3 # type: ignore
|
||||
try:
|
||||
co = sqlite3.connect(":memory:")
|
||||
cur = co.cursor()
|
||||
@@ -362,14 +423,32 @@ try:
|
||||
except:
|
||||
PYFTPD_VER = "(None)"
|
||||
|
||||
try:
|
||||
from partftpy.__init__ import __version__ as PARTFTPY_VER
|
||||
except:
|
||||
PARTFTPY_VER = "(None)"
|
||||
|
||||
VERSIONS = "copyparty v{} ({})\n{}\n sqlite v{} | jinja v{} | pyftpd v{}".format(
|
||||
S_VERSION, S_BUILD_DT, py_desc(), SQLITE_VER, JINJA_VER, PYFTPD_VER
|
||||
|
||||
PY_DESC = py_desc()
|
||||
|
||||
VERSIONS = (
|
||||
"copyparty v{} ({})\n{}\n sqlite {} | jinja {} | pyftpd {} | tftp {}".format(
|
||||
S_VERSION, S_BUILD_DT, PY_DESC, SQLITE_VER, JINJA_VER, PYFTPD_VER, PARTFTPY_VER
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
_: Any = (mp, BytesIO, quote, unquote, SQLITE_VER, JINJA_VER, PYFTPD_VER)
|
||||
__all__ = ["mp", "BytesIO", "quote", "unquote", "SQLITE_VER", "JINJA_VER", "PYFTPD_VER"]
|
||||
_: Any = (mp, BytesIO, quote, unquote, SQLITE_VER, JINJA_VER, PYFTPD_VER, PARTFTPY_VER)
|
||||
__all__ = [
|
||||
"mp",
|
||||
"BytesIO",
|
||||
"quote",
|
||||
"unquote",
|
||||
"SQLITE_VER",
|
||||
"JINJA_VER",
|
||||
"PYFTPD_VER",
|
||||
"PARTFTPY_VER",
|
||||
]
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
@@ -473,25 +552,33 @@ class HLog(logging.Handler):
|
||||
elif record.name.startswith("impacket"):
|
||||
if self.ptn_smb_ign.match(msg):
|
||||
return
|
||||
elif record.name.startswith("partftpy."):
|
||||
record.name = record.name[9:]
|
||||
|
||||
self.log_func(record.name[-21:], msg, c)
|
||||
|
||||
|
||||
class NetMap(object):
def __init__(self, ips: list[str], netdevs: dict[str, Netdev]) -> None:
def __init__(self, ips: list[str], cidrs: list[str], keep_lo=False) -> None:
"""
ips: list of plain ipv4/ipv6 IPs, not cidr
cidrs: list of cidr-notation IPs (ip/prefix)
"""
if "::" in ips:
ips = [x for x in ips if x != "::"] + list(
[x.split("/")[0] for x in netdevs if ":" in x]
[x.split("/")[0] for x in cidrs if ":" in x]
)
ips.append("0.0.0.0")

if "0.0.0.0" in ips:
ips = [x for x in ips if x != "0.0.0.0"] + list(
[x.split("/")[0] for x in netdevs if ":" not in x]
[x.split("/")[0] for x in cidrs if ":" not in x]
)

ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
ips = find_prefix(ips, netdevs)
if not keep_lo:
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]

ips = find_prefix(ips, cidrs)

self.cache: dict[str, str] = {}
self.b2sip: dict[bytes, str] = {}
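`NetMap` above now takes plain CIDR strings instead of Netdev objects, expands the `::` and `0.0.0.0` wildcards into the concrete interface addresses, and can optionally keep loopback. The point of the class is to map a client IP onto the matching local prefix; a rough sketch of that kind of lookup using only the stdlib `ipaddress` module (illustrative, copyparty's `find_prefix` and `map` internals differ):

```python
import ipaddress

def best_prefix(client_ip: str, cidrs: list[str]) -> str:
    # return the most specific network containing the client, or "" if none match
    ip = ipaddress.ip_address(client_ip)
    nets = [ipaddress.ip_network(c, strict=False) for c in cidrs]
    hits = [n for n in nets if n.version == ip.version and ip in n]
    return str(max(hits, key=lambda n: n.prefixlen)) if hits else ""

print(best_prefix("192.168.56.7", ["10.0.0.0/8", "192.168.56.0/24", "0.0.0.0/0"]))
# -> 192.168.56.0/24
```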
@@ -508,6 +595,9 @@ class NetMap(object):
|
||||
self.bip.sort(reverse=True)
|
||||
|
||||
def map(self, ip: str) -> str:
|
||||
if ip.startswith("::ffff:"):
|
||||
ip = ip[7:]
|
||||
|
||||
try:
|
||||
return self.cache[ip]
|
||||
except:
|
||||
@@ -569,9 +659,14 @@ class _Unrecv(object):
|
||||
while nbytes > len(ret):
|
||||
ret += self.recv(nbytes - len(ret))
|
||||
except OSError:
|
||||
t = "client only sent {} of {} expected bytes".format(len(ret), nbytes)
|
||||
if len(ret) <= 16:
|
||||
t += "; got {!r}".format(ret)
|
||||
t = "client stopped sending data; expected at least %d more bytes"
|
||||
if not ret:
|
||||
t = t % (nbytes,)
|
||||
else:
|
||||
t += ", only got %d"
|
||||
t = t % (nbytes, len(ret))
|
||||
if len(ret) <= 16:
|
||||
t += "; %r" % (ret,)
|
||||
|
||||
if raise_on_trunc:
|
||||
raise UnrecvEOF(5, t)
|
||||
@@ -721,16 +816,20 @@ class ProgressPrinter(threading.Thread):
|
||||
periodically print progress info without linefeeds
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
def __init__(self, log: "NamedLogger", args: argparse.Namespace) -> None:
|
||||
threading.Thread.__init__(self, name="pp")
|
||||
self.daemon = True
|
||||
self.log = log
|
||||
self.args = args
|
||||
self.msg = ""
|
||||
self.end = False
|
||||
self.n = -1
|
||||
self.start()
|
||||
|
||||
def run(self) -> None:
|
||||
tp = 0
|
||||
msg = None
|
||||
no_stdout = self.args.q
|
||||
fmt = " {}\033[K\r" if VT100 else " {} $\r"
|
||||
while not self.end:
|
||||
time.sleep(0.1)
|
||||
@@ -738,10 +837,21 @@ class ProgressPrinter(threading.Thread):
|
||||
continue
|
||||
|
||||
msg = self.msg
|
||||
now = time.time()
|
||||
if msg and now - tp > 10:
|
||||
tp = now
|
||||
self.log("progress: %s" % (msg,), 6)
|
||||
|
||||
if no_stdout:
|
||||
continue
|
||||
|
||||
uprint(fmt.format(msg))
|
||||
if PY2:
|
||||
sys.stdout.flush()
|
||||
|
||||
if no_stdout:
|
||||
return
|
||||
|
||||
if VT100:
|
||||
print("\033[K", end="")
|
||||
elif msg:
|
||||
@@ -795,7 +905,7 @@ class MTHash(object):
|
||||
ex = ex or str(qe)
|
||||
|
||||
if pp:
|
||||
mb = int((fsz - nch * chunksz) / 1024 / 1024)
|
||||
mb = (fsz - nch * chunksz) // (1024 * 1024)
|
||||
pp.msg = prefix + str(mb) + suffix
|
||||
|
||||
if ex:
|
||||
@@ -925,7 +1035,8 @@ class Magician(object):
|
||||
class Garda(object):
|
||||
"""ban clients for repeated offenses"""
|
||||
|
||||
def __init__(self, cfg: str) -> None:
|
||||
def __init__(self, cfg: str, uniq: bool = True) -> None:
|
||||
self.uniq = uniq
|
||||
try:
|
||||
a, b, c = cfg.strip().split(",")
|
||||
self.lim = int(a)
|
||||
@@ -971,7 +1082,7 @@ class Garda(object):
|
||||
# assume /64 clients; drop 4 groups
|
||||
ip = IPv6Address(ip).exploded[:-20]
|
||||
|
||||
if prev:
|
||||
if prev and self.uniq:
|
||||
if self.prev.get(ip) == prev:
|
||||
return 0, ip
|
||||
|
||||
@@ -1016,7 +1127,18 @@ def uprint(msg: str) -> None:


def nuprint(msg: str) -> None:
uprint("{}\n".format(msg))
uprint("%s\n" % (msg,))


def dedent(txt: str) -> str:
pad = 64
lns = txt.replace("\r", "").split("\n")
for ln in lns:
zs = ln.lstrip()
pad2 = len(ln) - len(zs)
if zs and pad > pad2:
pad = pad2
return "\n".join([ln[pad:] for ln in lns])

def rice_tid() -> str:
@@ -1028,10 +1150,10 @@ def rice_tid() -> str:
def trace(*args: Any, **kwargs: Any) -> None:
t = time.time()
stack = "".join(
"\033[36m{}\033[33m{}".format(x[0].split(os.sep)[-1][:-3], x[1])
"\033[36m%s\033[33m%s" % (x[0].split(os.sep)[-1][:-3], x[1])
for x in traceback.extract_stack()[3:-1]
)
parts = ["{:.6f}".format(t), rice_tid(), stack]
parts = ["%.6f" % (t,), rice_tid(), stack]

if args:
parts.append(repr(args))
@@ -1048,17 +1170,17 @@ def alltrace() -> str:
threads: dict[str, types.FrameType] = {}
names = dict([(t.ident, t.name) for t in threading.enumerate()])
for tid, stack in sys._current_frames().items():
name = "{} ({:x})".format(names.get(tid), tid)
name = "%s (%x)" % (names.get(tid), tid)
threads[name] = stack

rret: list[str] = []
bret: list[str] = []
for name, stack in sorted(threads.items()):
ret = ["\n\n# {}".format(name)]
ret = ["\n\n# %s" % (name,)]
pad = None
for fn, lno, name, line in traceback.extract_stack(stack):
fn = os.sep.join(fn.split(os.sep)[-3:])
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
ret.append('File: "%s", line %d, in %s' % (fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
if "self.not_empty.wait()" in line:
@@ -1067,7 +1189,7 @@ def alltrace() -> str:
if pad:
bret += [ret[0]] + [pad + x for x in ret[1:]]
else:
rret += ret
rret.extend(ret)

return "\n".join(rret + bret) + "\n"

@@ -1104,7 +1226,7 @@ def stackmon(fp: str, ival: float, suffix: str) -> None:
|
||||
buf = lzma.compress(buf, preset=0)
|
||||
|
||||
if "%" in fp:
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
if fs in fp:
|
||||
@@ -1150,12 +1272,20 @@ def log_thrs(log: Callable[[str, str, int], None], ival: float, name: str) -> No
|
||||
|
||||
|
||||
def vol_san(vols: list["VFS"], txt: bytes) -> bytes:
|
||||
txt0 = txt
|
||||
for vol in vols:
|
||||
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
|
||||
txt = txt.replace(
|
||||
vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
|
||||
vol.vpath.encode("utf-8"),
|
||||
)
|
||||
bap = vol.realpath.encode("utf-8")
|
||||
bhp = vol.histpath.encode("utf-8")
|
||||
bvp = vol.vpath.encode("utf-8")
|
||||
bvph = b"$hist(/" + bvp + b")"
|
||||
|
||||
txt = txt.replace(bap, bvp)
|
||||
txt = txt.replace(bhp, bvph)
|
||||
txt = txt.replace(bap.replace(b"\\", b"\\\\"), bvp)
|
||||
txt = txt.replace(bhp.replace(b"\\", b"\\\\"), bvph)
|
||||
|
||||
if txt != txt0:
|
||||
txt += b"\r\nNOTE: filepaths sanitized; see serverlog for correct values"
|
||||
|
||||
return txt
|
||||
|
||||
@@ -1163,9 +1293,9 @@ def vol_san(vols: list["VFS"], txt: bytes) -> bytes:
|
||||
def min_ex(max_lines: int = 8, reverse: bool = False) -> str:
|
||||
et, ev, tb = sys.exc_info()
|
||||
stb = traceback.extract_tb(tb)
|
||||
fmt = "{} @ {} <{}>: {}"
|
||||
ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in stb]
|
||||
ex.append("[{}] {}".format(et.__name__ if et else "(anonymous)", ev))
|
||||
fmt = "%s @ %d <%s>: %s"
|
||||
ex = [fmt % (fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in stb]
|
||||
ex.append("[%s] %s" % (et.__name__ if et else "(anonymous)", ev))
|
||||
return "\n".join(ex[-max_lines:][:: -1 if reverse else 1])
|
||||
|
||||
|
||||
@@ -1216,7 +1346,7 @@ def ren_open(
|
||||
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
assert fdir
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = "fn-trunc.%s.txt" % (b64,)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
with open(fsenc(fp2), "wb") as f2:
|
||||
f2.write(orig_name.encode("utf-8"))
|
||||
@@ -1227,12 +1357,15 @@ def ren_open(
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
|
||||
if ex.errno == errno.EINVAL and not asciified:
|
||||
# EPERM: android13
|
||||
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
|
||||
asciified = True
|
||||
bname, fname = [
|
||||
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
|
||||
for zs in [bname, fname]
|
||||
]
|
||||
zsl = []
|
||||
for zs in (bname, fname):
|
||||
zs = zs.encode("ascii", "replace").decode("ascii")
|
||||
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
|
||||
zsl.append(zs)
|
||||
bname, fname = zsl
|
||||
continue
|
||||
|
||||
# ENOTSUP: zfs on ubuntu 20.04
|
||||
@@ -1242,7 +1375,7 @@ def ren_open(
|
||||
raise
|
||||
|
||||
if not b64:
|
||||
zs = "{}\n{}".format(orig_name, suffix).encode("utf-8", "replace")
|
||||
zs = ("%s\n%s" % (orig_name, suffix)).encode("utf-8", "replace")
|
||||
zs = hashlib.sha512(zs).digest()[:12]
|
||||
b64 = base64.urlsafe_b64encode(zs).decode("utf-8")
|
||||
|
||||
@@ -1262,7 +1395,7 @@ def ren_open(
|
||||
# okay do the first letter then
|
||||
ext = "." + ext[2:]
|
||||
|
||||
fname = "{}~{}{}".format(bname, b64, ext)
|
||||
fname = "%s~%s%s" % (bname, b64, ext)
|
||||
|
||||
|
||||
class MultipartParser(object):
|
||||
@@ -1443,20 +1576,25 @@ class MultipartParser(object):
|
||||
for buf in iterable:
|
||||
ret += buf
|
||||
if len(ret) > max_len:
|
||||
raise Pebkac(400, "field length is too long")
|
||||
raise Pebkac(422, "field length is too long")
|
||||
|
||||
return ret
|
||||
|
||||
def parse(self) -> None:
|
||||
boundary = get_boundary(self.headers)
|
||||
self.log("boundary=%r" % (boundary,))
|
||||
|
||||
# spec says there might be junk before the first boundary,
|
||||
# can't have the leading \r\n if that's not the case
|
||||
self.boundary = b"--" + get_boundary(self.headers).encode("utf-8")
|
||||
self.boundary = b"--" + boundary.encode("utf-8")
|
||||
|
||||
# discard junk before the first boundary
|
||||
for junk in self._read_data():
|
||||
self.log(
|
||||
"discarding preamble: [{}]".format(junk.decode("utf-8", "replace"))
|
||||
)
|
||||
if not junk:
|
||||
continue
|
||||
|
||||
jtxt = junk.decode("utf-8", "replace")
|
||||
self.log("discarding preamble |%d| %r" % (len(junk), jtxt))
|
||||
|
||||
# nice, now make it fast
|
||||
self.boundary = b"\r\n" + self.boundary
|
||||
@@ -1468,11 +1606,9 @@ class MultipartParser(object):
|
||||
raises if the field name is not as expected
|
||||
"""
|
||||
assert self.gen
|
||||
p_field, _, p_data = next(self.gen)
|
||||
p_field, p_fname, p_data = next(self.gen)
|
||||
if p_field != field_name:
|
||||
raise Pebkac(
|
||||
422, 'expected field "{}", got "{}"'.format(field_name, p_field)
|
||||
)
|
||||
raise WrongPostKey(field_name, p_field, p_fname, p_data)
|
||||
|
||||
return self._read_value(p_data, max_len).decode("utf-8", "surrogateescape")
|
||||
|
||||
@@ -1511,8 +1647,8 @@ def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
||||
|
||||
raise Pebkac(
|
||||
400,
|
||||
"protocol error while reading headers:\n"
|
||||
+ ret.decode("utf-8", "replace"),
|
||||
"protocol error while reading headers",
|
||||
log=ret.decode("utf-8", "replace"),
|
||||
)
|
||||
|
||||
ofs = ret.find(b"\r\n\r\n")
|
||||
@@ -1541,7 +1677,7 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
|
||||
break
|
||||
|
||||
nc = rnd + extra
|
||||
nb = int((6 + 6 * nc) / 8)
|
||||
nb = (6 + 6 * nc) // 8
|
||||
zb = os.urandom(nb)
|
||||
zb = base64.urlsafe_b64encode(zb)
|
||||
fn = zb[:nc].decode("utf-8") + ext
|
||||
@@ -1550,15 +1686,18 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
|
||||
return fn
|
||||
|
||||
|
||||
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
return base64.urlsafe_b64encode(
|
||||
hashlib.sha512(
|
||||
("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
|
||||
).digest()
|
||||
).decode("ascii")
|
||||
def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
if alg == 1:
|
||||
zs = "%s %s %s %s" % (salt, fspath, fsize, inode)
|
||||
else:
|
||||
zs = "%s %s" % (salt, fspath)
|
||||
|
||||
zb = zs.encode("utf-8", "replace")
|
||||
return base64.urlsafe_b64encode(hashlib.sha512(zb).digest()).decode("ascii")
|
||||
|
||||
|
||||
def gen_filekey_dbg(
|
||||
alg: int,
|
||||
salt: str,
|
||||
fspath: str,
|
||||
fsize: int,
|
||||
@@ -1566,7 +1705,7 @@ def gen_filekey_dbg(
|
||||
log: "NamedLogger",
|
||||
log_ptn: Optional[Pattern[str]],
|
||||
) -> str:
|
||||
ret = gen_filekey(salt, fspath, fsize, inode)
|
||||
ret = gen_filekey(alg, salt, fspath, fsize, inode)
|
||||
|
||||
assert log_ptn
|
||||
if log_ptn.search(fspath):
|
||||
@@ -1592,16 +1731,15 @@ def gen_filekey_dbg(
|
||||
return ret
|
||||
|
||||
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
|
||||
v = v.replace("%", "%25").replace(";", "%3B")
|
||||
if dur:
|
||||
exp = formatdate(time.time() + dur, usegmt=True)
|
||||
else:
|
||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||
|
||||
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
|
||||
k, v, r, exp, "; Secure" if tls else ""
|
||||
)
|
||||
t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=Lax"
|
||||
return t % (k, v, r, exp, "; Secure" if tls else "", txt)
|
||||
|
||||
|
||||
def humansize(sz: float, terse: bool = False) -> str:
|
||||
@@ -1639,10 +1777,10 @@ def get_spd(nbyte: int, t0: float, t: Optional[float] = None) -> str:
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
bps = nbyte / ((t - t0) or 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
return "%s \033[0m%s/s\033[0m" % (s1, s2)
|
||||
|
||||
|
||||
def s2hms(s: float, optional_h: bool = False) -> str:
|
||||
@@ -1650,9 +1788,9 @@ def s2hms(s: float, optional_h: bool = False) -> str:
|
||||
h, s = divmod(s, 3600)
|
||||
m, s = divmod(s, 60)
|
||||
if not h and optional_h:
|
||||
return "{}:{:02}".format(m, s)
|
||||
return "%d:%02d" % (m, s)
|
||||
|
||||
return "{}:{:02}:{:02}".format(h, m, s)
|
||||
return "%d:%02d:%02d" % (h, m, s)
|
||||
|
||||
|
||||
def djoin(*paths: str) -> str:
|
||||
@@ -1718,7 +1856,16 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
|
||||
return fn.strip()
|
||||
|
||||
|
||||
def sanitize_vpath(vp: str, ok: str, bad: list[str]) -> str:
|
||||
parts = vp.replace(os.sep, "/").split("/")
|
||||
ret = [sanitize_fn(x, ok, bad) for x in parts]
|
||||
return "/".join(ret)
|
||||
|
||||
|
||||
def relchk(rp: str) -> str:
|
||||
if "\x00" in rp:
|
||||
return "[nul]"
|
||||
|
||||
if ANYWIN:
|
||||
if "\n" in rp or "\r" in rp:
|
||||
return "x\nx"
|
||||
@@ -1754,6 +1901,26 @@ def exclude_dotfiles(filepaths: list[str]) -> list[str]:
|
||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||
|
||||
|
||||
def odfusion(
|
||||
base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str
|
||||
) -> ODict[str, bool]:
|
||||
# merge an "ordered set" (just a dict really) with another list of keys
|
||||
words0 = [x for x in oth.split(",") if x]
|
||||
words1 = [x for x in oth[1:].split(",") if x]
|
||||
|
||||
ret = base.copy()
|
||||
if oth.startswith("+"):
|
||||
for k in words1:
|
||||
ret[k] = True
|
||||
elif oth[:1] in ("-", "/"):
|
||||
for k in words1:
|
||||
ret.pop(k, None)
|
||||
else:
|
||||
ret = ODict.fromkeys(words0, True)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def ipnorm(ip: str) -> str:
|
||||
if ":" in ip:
|
||||
# assume /64 clients; drop 4 groups
|
||||
@@ -1762,10 +1929,10 @@ def ipnorm(ip: str) -> str:
|
||||
return ip
|
||||
|
||||
|
||||
def find_prefix(ips: list[str], netdevs: dict[str, Netdev]) -> list[str]:
|
||||
def find_prefix(ips: list[str], cidrs: list[str]) -> list[str]:
|
||||
ret = []
|
||||
for ip in ips:
|
||||
hit = next((x for x in netdevs if x.startswith(ip + "/")), None)
|
||||
hit = next((x for x in cidrs if x.startswith(ip + "/") or ip == x), None)
|
||||
if hit:
|
||||
ret.append(hit)
|
||||
return ret
|
||||
@@ -1906,10 +2073,10 @@ else:
|
||||
# moonrunes become \x3f with bytestrings,
|
||||
# losing mojibake support is worth
|
||||
def _not_actually_mbcs_enc(txt: str) -> bytes:
|
||||
return txt
|
||||
return txt # type: ignore
|
||||
|
||||
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
||||
return txt
|
||||
return txt # type: ignore
|
||||
|
||||
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
||||
fsdec = _not_actually_mbcs_dec
|
||||
@@ -1965,9 +2132,51 @@ def atomic_move(usrc: str, udst: str) -> None:
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
def wunlink(log: "NamedLogger", abspath: str, flags: dict[str, Any]) -> bool:
|
||||
maxtime = flags.get("rm_re_t", 0.0)
|
||||
bpath = fsenc(abspath)
|
||||
if not maxtime:
|
||||
os.unlink(bpath)
|
||||
return True
|
||||
|
||||
chill = flags.get("rm_re_r", 0.0)
|
||||
if chill < 0.001:
|
||||
chill = 0.1
|
||||
|
||||
ino = 0
|
||||
t0 = now = time.time()
|
||||
for attempt in range(90210):
|
||||
try:
|
||||
if ino and os.stat(bpath).st_ino != ino:
|
||||
log("inode changed; aborting delete")
|
||||
return False
|
||||
os.unlink(bpath)
|
||||
if attempt:
|
||||
now = time.time()
|
||||
t = "deleted in %.2f sec, attempt %d"
|
||||
log(t % (now - t0, attempt + 1))
|
||||
return True
|
||||
except OSError as ex:
|
||||
now = time.time()
|
||||
if ex.errno == errno.ENOENT:
|
||||
return False
|
||||
if now - t0 > maxtime or attempt == 90209:
|
||||
raise
|
||||
if not attempt:
|
||||
if not PY2:
|
||||
ino = os.stat(bpath).st_ino
|
||||
t = "delete failed (err.%d); retrying for %d sec: %s"
|
||||
log(t % (ex.errno, maxtime + 0.99, abspath))
|
||||
|
||||
time.sleep(chill)
|
||||
|
||||
return False # makes pylance happy
|
||||
|
||||
|
||||
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
|
||||
try:
|
||||
# some fuses misbehave
|
||||
assert ctypes
|
||||
if ANYWIN:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
|
||||
@@ -2092,7 +2301,7 @@ def read_socket_chunked(
|
||||
raise Pebkac(400, t.format(x))
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
log("receiving %d byte chunk" % (chunklen,))
|
||||
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
@@ -2117,10 +2326,45 @@ def list_ips() -> list[str]:
|
||||
return list(ret)
|
||||
|
||||
|
||||
def build_netmap(csv: str):
|
||||
csv = csv.lower().strip()
|
||||
|
||||
if csv in ("any", "all", "no", ",", ""):
|
||||
return None
|
||||
|
||||
if csv in ("lan", "local", "private", "prvt"):
|
||||
csv = "10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16, fd00::/8" # lan
|
||||
csv += ", 169.254.0.0/16, fe80::/10" # link-local
|
||||
csv += ", 127.0.0.0/8, ::1/128" # loopback
|
||||
|
||||
srcs = [x.strip() for x in csv.split(",") if x.strip()]
|
||||
cidrs = []
|
||||
for zs in srcs:
|
||||
if not zs.endswith("."):
|
||||
cidrs.append(zs)
|
||||
continue
|
||||
|
||||
# translate old syntax "172.19." => "172.19.0.0/16"
|
||||
words = len(zs.rstrip(".").split("."))
|
||||
if words == 1:
|
||||
zs += "0.0.0/8"
|
||||
elif words == 2:
|
||||
zs += "0.0/16"
|
||||
elif words == 3:
|
||||
zs += "0/24"
|
||||
else:
|
||||
raise Exception("invalid config value [%s]" % (zs,))
|
||||
|
||||
cidrs.append(zs)
|
||||
|
||||
ips = [x.split("/")[0] for x in cidrs]
|
||||
return NetMap(ips, cidrs, True)
|
||||
|
||||
|
||||
def yieldfile(fn: str) -> Generator[bytes, None, None]:
|
||||
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
buf = f.read(128 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
@@ -2264,6 +2508,12 @@ def statdir(
|
||||
print(t)
|
||||
|
||||
|
||||
def dir_is_empty(logger: "RootLogger", scandir: bool, top: str):
|
||||
for _ in statdir(logger, scandir, False, top):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def rmdirs(
|
||||
logger: "RootLogger", scandir: bool, lstat: bool, top: str, depth: int
|
||||
) -> tuple[list[str], list[str]]:
|
||||
@@ -2370,6 +2620,7 @@ def getalive(pids: list[int], pgid: int) -> list[int]:
|
||||
alive.append(pid)
|
||||
else:
|
||||
# windows doesn't have pgroups; assume
|
||||
assert psutil
|
||||
psutil.Process(pid)
|
||||
alive.append(pid)
|
||||
except:
|
||||
@@ -2387,6 +2638,7 @@ def killtree(root: int) -> None:
|
||||
pgid = 0
|
||||
|
||||
if HAVE_PSUTIL:
|
||||
assert psutil
|
||||
pids = [root]
|
||||
parent = psutil.Process(root)
|
||||
for child in parent.children(recursive=True):
|
||||
@@ -2426,9 +2678,35 @@ def killtree(root: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _find_nice() -> str:
|
||||
if WINDOWS:
|
||||
return "" # use creationflags
|
||||
|
||||
try:
|
||||
zs = shutil.which("nice")
|
||||
if zs:
|
||||
return zs
|
||||
except:
|
||||
pass
|
||||
|
||||
# busted PATHs and/or py2
|
||||
for zs in ("/bin", "/sbin", "/usr/bin", "/usr/sbin"):
|
||||
zs += "/nice"
|
||||
if os.path.exists(zs):
|
||||
return zs
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
NICES = _find_nice()
|
||||
NICEB = NICES.encode("utf-8")
|
||||
|
||||
|
||||
def runcmd(
|
||||
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
|
||||
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
|
||||
) -> tuple[int, str, str]:
|
||||
isbytes = isinstance(argv[0], (bytes, bytearray))
|
||||
oom = ka.pop("oom", 0) # 0..1000
|
||||
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
||||
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
||||
|
||||
@@ -2441,7 +2719,32 @@ def runcmd(
|
||||
bout: bytes
|
||||
berr: bytes
|
||||
|
||||
if ANYWIN:
|
||||
if isbytes:
|
||||
if argv[0] in CMD_EXEB:
|
||||
argv[0] += b".exe"
|
||||
else:
|
||||
if argv[0] in CMD_EXES:
|
||||
argv[0] += ".exe"
|
||||
|
||||
if ka.pop("nice", None):
|
||||
if WINDOWS:
|
||||
ka["creationflags"] = 0x4000
|
||||
elif NICEB:
|
||||
if isbytes:
|
||||
argv = [NICEB] + argv
|
||||
else:
|
||||
argv = [NICES] + argv
|
||||
|
||||
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
|
||||
|
||||
if oom and not ANYWIN and not MACOS:
|
||||
try:
|
||||
with open("/proc/%d/oom_score_adj" % (p.pid,), "wb") as f:
|
||||
f.write(("%d\n" % (oom,)).encode("utf-8"))
|
||||
except:
|
||||
pass
|
||||
|
||||
if not timeout or PY2:
|
||||
bout, berr = p.communicate(sin)
|
||||
else:
|
||||
@@ -2480,7 +2783,7 @@ def chkcmd(argv: Union[list[bytes], list[str]], **ka: Any) -> tuple[str, str]:
|
||||
return sout, serr
|
||||
|
||||
|
||||
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: int = 10) -> None:
|
||||
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: float = 10) -> None:
|
||||
if PY2:
|
||||
with open(os.devnull, "wb") as f:
|
||||
rv = sp.call(argv, stdout=f, stderr=f)
|
||||
@@ -2588,13 +2891,14 @@ def _parsehook(
|
||||
|
||||
sp_ka = {
|
||||
"env": env,
|
||||
"nice": True,
|
||||
"oom": 300,
|
||||
"timeout": tout,
|
||||
"kill": kill,
|
||||
"capture": cap,
|
||||
}
|
||||
|
||||
if cmd.startswith("~"):
|
||||
cmd = os.path.expanduser(cmd)
|
||||
cmd = os.path.expandvars(os.path.expanduser(cmd))
|
||||
|
||||
return chk, fork, jtxt, wait, sp_ka, cmd
|
||||
|
||||
@@ -2733,9 +3037,7 @@ def loadpy(ap: str, hot: bool) -> Any:
|
||||
depending on what other inconveniently named files happen
|
||||
to be in the same folder
|
||||
"""
|
||||
if ap.startswith("~"):
|
||||
ap = os.path.expanduser(ap)
|
||||
|
||||
ap = os.path.expandvars(os.path.expanduser(ap))
|
||||
mdir, mfile = os.path.split(absreal(ap))
|
||||
mname = mfile.rsplit(".", 1)[0]
|
||||
sys.path.insert(0, mdir)
|
||||
@@ -2743,7 +3045,7 @@ def loadpy(ap: str, hot: bool) -> Any:
|
||||
if PY2:
|
||||
mod = __import__(mname)
|
||||
if hot:
|
||||
reload(mod)
|
||||
reload(mod) # type: ignore
|
||||
else:
|
||||
import importlib
|
||||
|
||||
@@ -2807,7 +3109,7 @@ def visual_length(txt: str) -> int:
|
||||
pend = None
|
||||
else:
|
||||
if ch == "\033":
|
||||
pend = "{0}".format(ch)
|
||||
pend = "%s" % (ch,)
|
||||
else:
|
||||
co = ord(ch)
|
||||
# the safe parts of latin1 and cp437 (no greek stuff)
|
||||
@@ -2886,6 +3188,7 @@ def termsize() -> tuple[int, int]:
|
||||
def hidedir(dp) -> None:
|
||||
if ANYWIN:
|
||||
try:
|
||||
assert ctypes
|
||||
k32 = ctypes.WinDLL("kernel32")
|
||||
attrs = k32.GetFileAttributesW(dp)
|
||||
if attrs >= 0:
|
||||
@@ -2895,9 +3198,29 @@ def hidedir(dp) -> None:
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
||||
def __init__(
|
||||
self, code: int, msg: Optional[str] = None, log: Optional[str] = None
|
||||
) -> None:
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
self.log = log
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
||||
|
||||
class WrongPostKey(Pebkac):
|
||||
def __init__(
|
||||
self,
|
||||
expected: str,
|
||||
got: str,
|
||||
fname: Optional[str],
|
||||
datagen: Generator[bytes, None, None],
|
||||
) -> None:
|
||||
msg = 'expected field "{}", got "{}"'.format(expected, got)
|
||||
super(WrongPostKey, self).__init__(422, msg)
|
||||
|
||||
self.expected = expected
|
||||
self.got = got
|
||||
self.fname = fname
|
||||
self.datagen = datagen
|
||||
|
||||
@@ -17,8 +17,10 @@ window.baguetteBox = (function () {
|
||||
titleTag: false,
|
||||
async: false,
|
||||
preload: 2,
|
||||
refocus: true,
|
||||
afterShow: null,
|
||||
afterHide: null,
|
||||
duringHide: null,
|
||||
onChange: null,
|
||||
},
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
|
||||
@@ -127,7 +129,7 @@ window.baguetteBox = (function () {
|
||||
var gallery = [];
|
||||
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
|
||||
var imageElementClickHandler = function (e) {
|
||||
if (ctrl(e))
|
||||
if (ctrl(e) || e && e.shiftKey)
|
||||
return true;
|
||||
|
||||
e.preventDefault ? e.preventDefault() : e.returnValue = false;
|
||||
@@ -144,7 +146,7 @@ window.baguetteBox = (function () {
|
||||
selectorData.galleries.push(gallery);
|
||||
});
|
||||
|
||||
return selectorData.galleries;
|
||||
return [selectorData.galleries, options];
|
||||
}
|
||||
|
||||
function clearCachedData() {
|
||||
@@ -255,19 +257,19 @@ window.baguetteBox = (function () {
|
||||
if (anymod(e, true))
|
||||
return;
|
||||
|
||||
var k = e.code + '', v = vid(), pos = -1;
|
||||
var k = (e.code || e.key) + '', v = vid(), pos = -1;
|
||||
|
||||
if (k == "BracketLeft")
|
||||
setloop(1);
|
||||
else if (k == "BracketRight")
|
||||
setloop(2);
|
||||
else if (e.shiftKey)
|
||||
else if (e.shiftKey && k != "KeyR" && k != "R")
|
||||
return;
|
||||
else if (k == "ArrowLeft" || k == "KeyJ")
|
||||
else if (k == "ArrowLeft" || k == "KeyJ" || k == "Left" || k == "j")
|
||||
showPreviousImage();
|
||||
else if (k == "ArrowRight" || k == "KeyL")
|
||||
else if (k == "ArrowRight" || k == "KeyL" || k == "Right" || k == "l")
|
||||
showNextImage();
|
||||
else if (k == "Escape")
|
||||
else if (k == "Escape" || k == "Esc")
|
||||
hideOverlay();
|
||||
else if (k == "Home")
|
||||
showFirstImage(e);
|
||||
@@ -295,9 +297,9 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
else if (k == "KeyF")
|
||||
tglfull();
|
||||
else if (k == "KeyS")
|
||||
else if (k == "KeyS" || k == "s")
|
||||
tglsel();
|
||||
else if (k == "KeyR")
|
||||
else if (k == "KeyR" || k == "r" || k == "R")
|
||||
rotn(e.shiftKey ? -1 : 1);
|
||||
else if (k == "KeyY")
|
||||
dlpic();
|
||||
@@ -310,7 +312,7 @@ window.baguetteBox = (function () {
|
||||
options = {};
|
||||
setOptions(o);
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -356,7 +358,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
setVmode();
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function findfile() {
|
||||
@@ -376,7 +378,12 @@ window.baguetteBox = (function () {
|
||||
else
|
||||
(vid() || ebi('bbox-overlay')).requestFullscreen();
|
||||
}
|
||||
catch (ex) { alert(ex); }
|
||||
catch (ex) {
|
||||
if (IPHONE)
|
||||
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
|
||||
else
|
||||
alert(ex);
|
||||
}
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
@@ -519,7 +526,7 @@ window.baguetteBox = (function () {
|
||||
options[item] = newOptions[item];
|
||||
}
|
||||
|
||||
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
|
||||
var an = options.animation = sread('ganim', anims) || anims[ANIM ? 0 : 2];
|
||||
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
|
||||
btnAnim.setAttribute('tt', 'animation: ' + an);
|
||||
|
||||
@@ -588,6 +595,9 @@ window.baguetteBox = (function () {
|
||||
if (overlay.style.display === 'none')
|
||||
return;
|
||||
|
||||
if (options.duringHide)
|
||||
options.duringHide();
|
||||
|
||||
sethash('');
|
||||
unbindEvents();
|
||||
try {
|
||||
@@ -608,9 +618,45 @@ window.baguetteBox = (function () {
|
||||
if (options.afterHide)
|
||||
options.afterHide();
|
||||
|
||||
documentLastFocus && documentLastFocus.focus();
|
||||
options.refocus && documentLastFocus && documentLastFocus.focus();
|
||||
isOverlayVisible = false;
|
||||
}, 500);
|
||||
unvid();
|
||||
unfig();
|
||||
}, 250);
|
||||
}
|
||||
|
||||
function unvid(keep) {
|
||||
var vids = QSA('#bbox-overlay video');
|
||||
for (var a = vids.length - 1; a >= 0; a--) {
|
||||
var v = vids[a];
|
||||
if (v == keep)
|
||||
continue;
|
||||
|
||||
v.src = '';
|
||||
v.load();
|
||||
|
||||
var p = v.parentNode;
|
||||
p.removeChild(v);
|
||||
p.parentNode.removeChild(p);
|
||||
}
|
||||
}
|
||||
|
||||
function unfig(keep) {
|
||||
var figs = QSA('#bbox-overlay figure'),
|
||||
npre = options.preload || 0,
|
||||
k = [];
|
||||
|
||||
if (keep === undefined)
|
||||
keep = -9;
|
||||
|
||||
for (var a = keep - npre; a <= keep + npre; a++)
|
||||
k.push('bbox-figure-' + a);
|
||||
|
||||
for (var a = figs.length - 1; a >= 0; a--) {
|
||||
var f = figs[a];
|
||||
if (!has(k, f.getAttribute('id')))
|
||||
f.parentNode.removeChild(f);
|
||||
}
|
||||
}
|
||||
|
||||
function loadImage(index, callback) {
|
||||
@@ -703,6 +749,7 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function show(index, gallery) {
|
||||
gallery = gallery || currentGallery;
|
||||
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
|
||||
prepareOverlay(gallery, options);
|
||||
showOverlay(index);
|
||||
@@ -715,12 +762,10 @@ window.baguetteBox = (function () {
|
||||
if (index >= imagesElements.length)
|
||||
return bounceAnimation('right');
|
||||
|
||||
var v = vid();
|
||||
if (v) {
|
||||
v.src = '';
|
||||
v.load();
|
||||
v.parentNode.removeChild(v);
|
||||
try {
|
||||
vid().pause();
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
currentIndex = index;
|
||||
loadImage(currentIndex, function () {
|
||||
@@ -729,6 +774,15 @@ window.baguetteBox = (function () {
|
||||
});
|
||||
updateOffset();
|
||||
|
||||
if (options.animation == 'none')
|
||||
unvid(vid());
|
||||
else
|
||||
setTimeout(function () {
|
||||
unvid(vid());
|
||||
}, 100);
|
||||
|
||||
unfig(index);
|
||||
|
||||
if (options.onChange)
|
||||
options.onChange(currentIndex, imagesElements.length);
|
||||
|
||||
@@ -870,7 +924,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
if (loopB !== null) {
|
||||
timer.add(loopchk);
|
||||
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
sethash(location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -968,7 +1022,7 @@ window.baguetteBox = (function () {
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
|
||||
if (Date.now() - ctime <= 500)
|
||||
if (Date.now() - ctime <= 500 && !IPHONE)
|
||||
tglfull();
|
||||
|
||||
ctime = Date.now();
|
||||
|
||||
@@ -493,6 +493,8 @@ html.dz {
|
||||
--err-ts: #500;
|
||||
|
||||
text-shadow: none;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
html.dy {
|
||||
--fg: #000;
|
||||
@@ -602,6 +604,7 @@ html {
|
||||
color: var(--fg);
|
||||
background: var(--bgg);
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
text-shadow: 1px 1px 0px var(--bg-max);
|
||||
}
|
||||
html, body {
|
||||
@@ -610,6 +613,7 @@ html, body {
|
||||
}
|
||||
pre, code, tt, #doc, #doc>code {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
.ayjump {
|
||||
position: fixed;
|
||||
@@ -727,18 +731,22 @@ a:hover {
|
||||
html.y #files thead th {
|
||||
box-shadow: 0 1px 0 rgba(0,0,0,0.12);
|
||||
}
|
||||
html #files.hhpick thead th {
|
||||
color: #f7d;
|
||||
background: #000;
|
||||
box-shadow: .1em .2em 0 #f6c inset, -.1em -.1em 0 #f6c inset;
|
||||
}
|
||||
#files td {
|
||||
margin: 0;
|
||||
padding: .3em .5em;
|
||||
background: var(--bg);
|
||||
max-width: var(--file-td-w);
|
||||
word-wrap: break-word;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr:nth-child(2n) td {
|
||||
background: var(--row-alt);
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files td+td {
|
||||
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
|
||||
}
|
||||
@@ -754,6 +762,7 @@ html.y #files thead th {
|
||||
}
|
||||
#files tbody td:nth-child(3) {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
text-align: right;
|
||||
padding-right: 1em;
|
||||
white-space: nowrap;
|
||||
@@ -813,6 +822,11 @@ html.y #path a:hover {
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
.logue.raw {
|
||||
white-space: pre;
|
||||
font-family: 'scp', 'consolas', monospace;
|
||||
font-family: var(--font-mono), 'scp', 'consolas', monospace;
|
||||
}
|
||||
#doc>iframe,
|
||||
.logue>iframe {
|
||||
background: var(--bgg);
|
||||
@@ -861,7 +875,7 @@ html.y #path a:hover {
|
||||
}
|
||||
.mdo,
|
||||
.mdo * {
|
||||
line-height: 1.4em;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#srv_info,
|
||||
#srv_info2,
|
||||
@@ -976,6 +990,10 @@ html.y #path a:hover {
|
||||
margin: 0 auto;
|
||||
display: block;
|
||||
}
|
||||
#ggrid.nocrop>a img {
|
||||
max-height: 20em;
|
||||
max-height: calc(var(--grid-sz)*2);
|
||||
}
|
||||
#ggrid>a.dir:before {
|
||||
content: '📂';
|
||||
}
|
||||
@@ -1142,9 +1160,6 @@ html.y #widget.open {
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
}
|
||||
@media (prefers-reduced-motion) {
|
||||
@keyframes spin { }
|
||||
}
|
||||
@keyframes fadein {
|
||||
0% {opacity: 0}
|
||||
100% {opacity: 1}
|
||||
@@ -1230,13 +1245,21 @@ html.y #widget.open {
|
||||
#wfm a.hide {
|
||||
display: none;
|
||||
}
|
||||
#files tbody tr.fcut td {
|
||||
#files tbody tr.fcut td,
|
||||
#ggrid>a.fcut {
|
||||
animation: fcut .5s ease-out;
|
||||
}
|
||||
@keyframes fcut {
|
||||
0% {opacity:0}
|
||||
100% {opacity:1}
|
||||
}
|
||||
#ggrid>a.glow {
|
||||
animation: gexit .6s ease-out;
|
||||
}
|
||||
@keyframes gexit {
|
||||
0% {box-shadow: 0 0 0 2em var(--a)}
|
||||
100% {box-shadow: 0 0 0em 0em var(--a)}
|
||||
}
|
||||
#wzip a {
|
||||
font-size: .4em;
|
||||
margin: -.3em .1em;
|
||||
@@ -1397,6 +1420,10 @@ input[type="checkbox"]:checked+label {
|
||||
color: #0e0;
|
||||
color: var(--a);
|
||||
}
|
||||
html.dz input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
.opwide div>span>input+label {
|
||||
padding: .3em 0 .3em .3em;
|
||||
margin: 0 0 0 -.3em;
|
||||
@@ -1405,14 +1432,17 @@ input[type="checkbox"]:checked+label {
|
||||
.opview input.i {
|
||||
width: calc(100% - 16.2em);
|
||||
}
|
||||
input.drc_v,
|
||||
input.eq_gain {
|
||||
width: 3em;
|
||||
text-align: center;
|
||||
margin: 0 .6em;
|
||||
}
|
||||
#audio_drc table,
|
||||
#audio_eq table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#audio_drc td,
|
||||
#audio_eq td {
|
||||
text-align: center;
|
||||
}
|
||||
@@ -1421,11 +1451,15 @@ input.eq_gain {
|
||||
display: block;
|
||||
padding: 0;
|
||||
}
|
||||
#au_drc,
|
||||
#au_eq {
|
||||
display: block;
|
||||
margin-top: .5em;
|
||||
padding: 1.3em .3em;
|
||||
}
|
||||
#au_drc {
|
||||
padding: .4em .3em;
|
||||
}
|
||||
#ico1 {
|
||||
cursor: pointer;
|
||||
}
|
||||
@@ -1466,6 +1500,8 @@ input.eq_gain {
|
||||
width: calc(100% - 2em);
|
||||
margin: .3em 0 0 1.4em;
|
||||
}
|
||||
@media (max-width: 130em) { #srch_form.tags #tq_raw { width: calc(100% - 34em) } }
|
||||
@media (max-width: 95em) { #srch_form.tags #tq_raw { width: calc(100% - 2em) } }
|
||||
#tq_raw td+td {
|
||||
width: 100%;
|
||||
}
|
||||
@@ -1577,6 +1613,7 @@ html.cz .btn {
|
||||
border-bottom: .2em solid #709;
|
||||
}
|
||||
html.dz .btn {
|
||||
font-size: 1em;
|
||||
box-shadow: 0 0 0 .1em #080 inset;
|
||||
}
|
||||
html.dz .tgl.btn.on {
|
||||
@@ -1620,6 +1657,12 @@ html.cz .tgl.btn.on {
|
||||
list-style: none;
|
||||
border-top: 1px solid var(--bg-u5);
|
||||
}
|
||||
#tree li.offline>a:first-child:before {
|
||||
content: '❌';
|
||||
position: absolute;
|
||||
margin-left: -.25em;
|
||||
z-index: 3;
|
||||
}
|
||||
#tree ul a.sel {
|
||||
background: #000;
|
||||
background: var(--bg-d3);
|
||||
@@ -1628,7 +1671,9 @@ html.cz .tgl.btn.on {
|
||||
color: var(--fg-max);
|
||||
}
|
||||
#tree ul a.hl {
|
||||
color: #fff;
|
||||
color: var(--btn-1-fg);
|
||||
background: #000;
|
||||
background: var(--btn-1-bg);
|
||||
text-shadow: none;
|
||||
}
|
||||
@@ -1663,6 +1708,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
.ntree a:first-child {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
font-size: 1.2em;
|
||||
line-height: 0;
|
||||
}
|
||||
@@ -1744,6 +1790,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 0;
|
||||
}
|
||||
#thumbs,
|
||||
#au_prescan,
|
||||
#au_fullpre,
|
||||
#au_os_seek,
|
||||
#au_osd_cv,
|
||||
@@ -1751,7 +1798,8 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
opacity: .3;
|
||||
}
|
||||
#griden.on+#thumbs,
|
||||
#au_preload.on+#au_fullpre,
|
||||
#au_preload.on+#au_prescan,
|
||||
#au_preload.on+#au_prescan+#au_fullpre,
|
||||
#au_os_ctl.on+#au_os_seek,
|
||||
#au_os_ctl.on+#au_os_seek+#au_osd_cv,
|
||||
#u2turbo.on+#u2tdate {
|
||||
@@ -1761,6 +1809,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
display: none;
|
||||
}
|
||||
.ghead {
|
||||
background: #fff;
|
||||
background: var(--bg-u2);
|
||||
border-radius: .3em;
|
||||
padding: .2em .5em;
|
||||
@@ -1790,7 +1839,12 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
#unpost td:nth-child(3),
|
||||
#unpost td:nth-child(4) {
|
||||
text-align: right;
|
||||
}
|
||||
#rui {
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
position: fixed;
|
||||
top: 0;
|
||||
@@ -1816,6 +1870,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
#rn_vadv input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#rui td+td,
|
||||
#rui td input[type="text"] {
|
||||
@@ -1847,6 +1902,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
#doc {
|
||||
overflow: visible;
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
margin: -1em 0 .5em 0;
|
||||
padding: 1em 0 1em 0;
|
||||
@@ -1863,6 +1919,10 @@ html.y #doc {
|
||||
text-align: center;
|
||||
padding: .5em;
|
||||
}
|
||||
#docul li.bn span {
|
||||
font-weight: bold;
|
||||
color: var(--fg-max);
|
||||
}
|
||||
#doc.prism {
|
||||
padding-left: 3em;
|
||||
}
|
||||
@@ -1874,6 +1934,7 @@ html.y #doc {
|
||||
#doc.mdo {
|
||||
white-space: normal;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
}
|
||||
#doc.prism * {
|
||||
line-height: 1.5em;
|
||||
@@ -1933,6 +1994,7 @@ a.btn,
|
||||
}
|
||||
#hkhelp td:first-child {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
html.noscroll,
|
||||
html.noscroll .sbar {
|
||||
@@ -2101,12 +2163,12 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
.bbox-btn,
|
||||
#bbox-btns {
|
||||
opacity: 1;
|
||||
opacity: 1;
|
||||
animation: opacity .2s infinite ease-in-out;
|
||||
}
|
||||
.bbox-btn.off,
|
||||
#bbox-btns.off {
|
||||
opacity: 0;
|
||||
opacity: 0;
|
||||
}
|
||||
#bbox-overlay button {
|
||||
cursor: pointer;
|
||||
@@ -2142,6 +2204,7 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
#bbox-halp {
|
||||
color: var(--fg-max);
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
position: absolute;
|
||||
top: 0;
|
||||
@@ -2376,7 +2439,7 @@ html.y #bbox-overlay figcaption a {
|
||||
display: block;
|
||||
}
|
||||
#u2bm sup {
|
||||
font-weight: bold;
|
||||
font-weight: bold;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
@@ -2441,6 +2504,7 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
#op_up2k.srch td.prog {
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
font-size: 1em;
|
||||
width: auto;
|
||||
}
|
||||
@@ -2455,6 +2519,7 @@ html.y #bbox-overlay figcaption a {
|
||||
white-space: nowrap;
|
||||
display: inline-block;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#u2etas.o {
|
||||
width: 20em;
|
||||
@@ -2483,14 +2548,14 @@ html.y #bbox-overlay figcaption a {
|
||||
min-width: 24em;
|
||||
}
|
||||
#u2cards.w {
|
||||
width: 44em;
|
||||
width: 48em;
|
||||
text-align: left;
|
||||
}
|
||||
#u2cards.ww {
|
||||
display: inline-block;
|
||||
}
|
||||
#u2etaw.w {
|
||||
width: 52em;
|
||||
width: 55em;
|
||||
text-align: right;
|
||||
margin: 2em auto -2.7em auto;
|
||||
}
|
||||
@@ -2524,6 +2589,7 @@ html.y #bbox-overlay figcaption a {
|
||||
#u2cards span {
|
||||
color: var(--fg-max);
|
||||
font-family: 'scp', monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace;
|
||||
}
|
||||
#u2cards > a:nth-child(4) > span {
|
||||
display: inline-block;
|
||||
@@ -2535,10 +2601,10 @@ html.y #bbox-overlay figcaption a {
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.w {
|
||||
width: 48em;
|
||||
width: 51em;
|
||||
}
|
||||
#u2conf.ww {
|
||||
width: 78em;
|
||||
width: 82em;
|
||||
}
|
||||
#u2conf.ww #u2c3w {
|
||||
width: 29em;
|
||||
@@ -2689,6 +2755,7 @@ html.b #u2conf a.b:hover {
|
||||
}
|
||||
.prog {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#u2tab span.inf,
|
||||
#u2tab span.ok,
|
||||
@@ -2760,6 +2827,9 @@ html.c .opbox,
|
||||
html.a .opbox {
|
||||
margin: 1.5em 0 0 0;
|
||||
}
|
||||
html.dz .opview input.i {
|
||||
width: calc(100% - 18em);
|
||||
}
|
||||
html.c #tree,
|
||||
html.c #treeh,
|
||||
html.a #tree,
|
||||
@@ -2812,6 +2882,9 @@ html.a #u2btn {
|
||||
html.ay #u2btn {
|
||||
box-shadow: .4em .4em 0 #ccc;
|
||||
}
|
||||
html.dz #u2btn {
|
||||
letter-spacing: -.033em;
|
||||
}
|
||||
html.c #u2conf.ww #u2btn,
|
||||
html.a #u2conf.ww #u2btn {
|
||||
margin: -2em .5em -3em 0;
|
||||
@@ -2974,13 +3047,13 @@ html.b .btn {
|
||||
top: -.1em;
|
||||
}
|
||||
html.b #op_up2k.srch sup {
|
||||
color: #fc0;
|
||||
color: #fc0;
|
||||
}
|
||||
html.by #u2btn sup {
|
||||
color: #06b;
|
||||
color: #06b;
|
||||
}
|
||||
html.by #op_up2k.srch sup {
|
||||
color: #b70;
|
||||
color: #b70;
|
||||
}
|
||||
html.bz #u2cards a.act {
|
||||
box-shadow: 0 -.1em .2em var(--bg-d2);
|
||||
@@ -3029,6 +3102,16 @@ html.d #treepar {
|
||||
|
||||
|
||||
|
||||
@media (max-width: 32em) {
|
||||
#u2conf {
|
||||
font-size: .9em;
|
||||
}
|
||||
}
|
||||
@media (max-width: 28em) {
|
||||
#u2conf {
|
||||
font-size: .8em;
|
||||
}
|
||||
}
|
||||
@media (min-width: 70em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
@@ -3081,7 +3164,7 @@ html.d #treepar {
|
||||
margin-top: 1.7em;
|
||||
}
|
||||
}
|
||||
@supports (display: grid) {
|
||||
@supports (display: grid) and (gap: 1em) {
|
||||
#ggrid {
|
||||
display: grid;
|
||||
margin: 0em 0.25em;
|
||||
@@ -3106,3 +3189,24 @@ html.d #treepar {
|
||||
padding: 0.2em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
@keyframes spin { }
|
||||
@keyframes gexit { }
|
||||
@keyframes bounce { }
|
||||
@keyframes bounceFromLeft { }
|
||||
@keyframes bounceFromRight { }
|
||||
|
||||
#ggrid>a:before,
|
||||
#widget.anim,
|
||||
#u2tabw,
|
||||
.dropdesc,
|
||||
.dropdesc b,
|
||||
.dropdesc>div>div {
|
||||
transition: none;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,11 +7,11 @@
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
{%- if css %}
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
<div id="op_player" class="opview opbox opwide"></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="op_bup" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
@@ -39,7 +39,7 @@
|
||||
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<div id="op_mkdir" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
📂<input type="text" name="name" class="i" placeholder="awesome mix vol.1">
|
||||
@@ -55,7 +55,7 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox act">
|
||||
<div id="op_msg" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
📟<input type="text" name="msg" class="i" placeholder="lorem ipsum dolor sit amet">
|
||||
<input type="submit" value="send msg to srv log">
|
||||
@@ -135,45 +135,30 @@
|
||||
|
||||
<script>
|
||||
var SR = {{ r|tojson }},
|
||||
CGV = {{ cgv|tojson }},
|
||||
TS = "{{ ts }}",
|
||||
acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
dgrid = {{ dgrid|tojson }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
s_name = "{{ s_name }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
have_acode = {{ have_acode|tojson }},
|
||||
have_mv = {{ have_mv|tojson }},
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost }},
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
sb_md = "{{ sb_md }}",
|
||||
sb_lg = "{{ sb_lg }}",
|
||||
lifetime = {{ lifetime }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
idxh = {{ idxh }},
|
||||
frand = {{ frand|tojson }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.className = localStorage.theme || dtheme;
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || dtheme;
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}?_={{ ts }}"></script>
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
|
||||
@@ -6,12 +6,12 @@
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<style>
|
||||
html{font-family:sans-serif}
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
a{display:block}
|
||||
</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -61,3 +61,4 @@
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@

<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
</head>
@@ -25,3 +25,4 @@
</body>

</html>

|
||||
@@ -2,6 +2,7 @@ html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
html.y #helpbox a {
|
||||
@@ -67,6 +68,7 @@ a {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
font-weight: bold;
|
||||
font-size: 1.3em;
|
||||
line-height: .1em;
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
|
||||
{%- endif %}
|
||||
{{ html_head }}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn"></div>
|
||||
@@ -31,7 +31,7 @@
|
||||
<span id="lno">L#</span>
|
||||
{%- else %}
|
||||
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}edit2" id="edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
@@ -139,16 +139,15 @@ var md_opt = {
|
||||
};
|
||||
|
||||
(function () {
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
var l = window.localStorage,
|
||||
drk = (l && l.light) != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.light = drk? 0:1;
|
||||
try { l.light = drk? 0:1; } catch (ex) { }
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
f();
|
||||
})();
|
||||
@@ -161,3 +160,4 @@ l.light = drk? 0:1;
|
||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -52,7 +52,7 @@ var img_load = (function () {
|
||||
var r = {};
|
||||
r.callbacks = [];
|
||||
|
||||
function fire() {
|
||||
var fire = function () {
|
||||
for (var a = 0; a < r.callbacks.length; a++)
|
||||
r.callbacks[a]();
|
||||
}
|
||||
@@ -212,8 +212,15 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
try {
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
if (!have_emp)
|
||||
md_html = DOMPurify.sanitize(md_html);
|
||||
}
|
||||
catch (ex) {
|
||||
if (IE) {
|
||||
dest_dom.innerHTML = 'IE cannot into markdown ;_;';
|
||||
return;
|
||||
}
|
||||
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
@@ -470,7 +477,7 @@ img_load.callbacks = [toc.refresh];
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = true;
|
||||
function onresize() {
|
||||
var onresize = function () {
|
||||
if (window.matchMedia)
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
|
||||
@@ -483,7 +490,7 @@ var redraw = (function () {
|
||||
onscroll();
|
||||
}
|
||||
|
||||
function onscroll() {
|
||||
var onscroll = function () {
|
||||
toc.refresh();
|
||||
}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 55em);
|
||||
left: max(0em, calc(100% - 55em));
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
@@ -56,6 +56,7 @@
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
|
||||
@@ -92,7 +92,7 @@ var action_stack = null;
|
||||
var nlines = 0;
|
||||
var draw_md = (function () {
|
||||
var delay = 1;
|
||||
function draw_md() {
|
||||
var draw_md = function () {
|
||||
var t0 = Date.now();
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
@@ -135,7 +135,7 @@ img_load.callbacks = [function () {
|
||||
|
||||
// resize handler
|
||||
redraw = (function () {
|
||||
function onresize() {
|
||||
var onresize = function () {
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
dom_wrap.style.top = y;
|
||||
dom_swrap.style.top = y;
|
||||
@@ -143,12 +143,12 @@ redraw = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}
|
||||
function setsbs() {
|
||||
var setsbs = function () {
|
||||
dom_wrap.className = '';
|
||||
dom_swrap.className = '';
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
var modetoggle = function () {
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
@@ -163,7 +163,7 @@ redraw = (function () {
|
||||
dom_sbs.onclick = setsbs;
|
||||
dom_nsbs.onclick = modetoggle;
|
||||
|
||||
onresize();
|
||||
(IE ? modetoggle : onresize)();
|
||||
return onresize;
|
||||
})();
|
||||
|
||||
@@ -172,7 +172,7 @@ redraw = (function () {
|
||||
(function () {
|
||||
var skip_src = false, skip_pre = false;
|
||||
|
||||
function scroll(src, srcmap, dst, dstmap) {
|
||||
var scroll = function (src, srcmap, dst, dstmap) {
|
||||
var y = src.scrollTop;
|
||||
if (y < 8) {
|
||||
dst.scrollTop = 0;
|
||||
@@ -278,6 +278,7 @@ function Modpoll() {
|
||||
return;
|
||||
|
||||
var new_md = this.responseText,
|
||||
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
|
||||
server_ref = server_md.replace(/\r/g, ''),
|
||||
server_now = new_md.replace(/\r/g, '');
|
||||
|
||||
@@ -285,6 +286,7 @@ function Modpoll() {
|
||||
if (r.initial && server_ref != server_now)
|
||||
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
|
||||
dom_src.value = server_md = new_md;
|
||||
last_modified = new_mt;
|
||||
draw_md();
|
||||
}, null);
|
||||
|
||||
@@ -366,14 +368,14 @@ function save(e) {
|
||||
|
||||
function save_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
@@ -416,7 +418,7 @@ function run_savechk(lastmod, txt, btn, ntry) {
|
||||
|
||||
function savechk_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
@@ -898,12 +900,12 @@ var set_lno = (function () {
|
||||
pv = null,
|
||||
lno = ebi('lno');
|
||||
|
||||
function poke() {
|
||||
var poke = function () {
|
||||
clearTimeout(t);
|
||||
t = setTimeout(fire, 20);
|
||||
}
|
||||
|
||||
function fire() {
|
||||
var fire = function () {
|
||||
try {
|
||||
clearTimeout(t);
|
||||
|
||||
@@ -928,8 +930,14 @@ var set_lno = (function () {
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
ev = ev || window.event;
|
||||
var keydown = function (ev) {
|
||||
if (!ev && window.event) {
|
||||
ev = window.event;
|
||||
if (dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
}
|
||||
var kc = ev.code || ev.keyCode || ev.which,
|
||||
editing = document.activeElement == dom_src;
|
||||
|
||||
@@ -1001,7 +1009,7 @@ var set_lno = (function () {
|
||||
md_home(ev.shiftKey);
|
||||
return false;
|
||||
}
|
||||
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
||||
if (!ev.shiftKey && ((ev.code + '').endsWith("Enter") || kc == 13)) {
|
||||
return md_newline();
|
||||
}
|
||||
if (!ev.shiftKey && kc == 8) {
|
||||
@@ -1056,7 +1064,7 @@ action_stack = (function () {
|
||||
var ignore = false;
|
||||
var ref = dom_src.value;
|
||||
|
||||
function diff(from, to, cpos) {
|
||||
var diff = function (from, to, cpos) {
|
||||
if (from === to)
|
||||
return null;
|
||||
|
||||
@@ -1087,14 +1095,14 @@ action_stack = (function () {
|
||||
};
|
||||
}
|
||||
|
||||
function undiff(from, change) {
|
||||
var undiff = function (from, change) {
|
||||
return {
|
||||
txt: from.substring(0, change.car) + change.txt + from.substring(change.cdr),
|
||||
cpos: change.cpos
|
||||
};
|
||||
}
|
||||
|
||||
function apply(src, dst) {
|
||||
var apply = function (src, dst) {
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
|
||||
if (src.length === 0)
|
||||
@@ -1118,7 +1126,7 @@ action_stack = (function () {
|
||||
return true;
|
||||
}
|
||||
|
||||
function schedule_push() {
|
||||
var schedule_push = function () {
|
||||
if (ignore) {
|
||||
ignore = false;
|
||||
return;
|
||||
@@ -1129,7 +1137,7 @@ action_stack = (function () {
|
||||
sched_timer = setTimeout(push, 500);
|
||||
}
|
||||
|
||||
function undo() {
|
||||
var undo = function () {
|
||||
if (hist.re.length == 0) {
|
||||
clearTimeout(sched_timer);
|
||||
push();
|
||||
@@ -1137,11 +1145,11 @@ action_stack = (function () {
|
||||
return apply(hist.un, hist.re);
|
||||
}
|
||||
|
||||
function redo() {
|
||||
var redo = function () {
|
||||
return apply(hist.re, hist.un);
|
||||
}
|
||||
|
||||
function push() {
|
||||
var push = function () {
|
||||
var newtxt = dom_src.value;
|
||||
var change = diff(ref, newtxt, sched_cpos);
|
||||
if (change !== null)
|
||||
|
||||
@@ -17,6 +17,7 @@ html, body {
|
||||
padding: 0;
|
||||
min-height: 100%;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mw">
|
||||
@@ -37,12 +37,12 @@ var md_opt = {
};

var lightswitch = (function () {
var l = localStorage,
drk = l.light != 1,
var l = window.localStorage,
drk = (l && l.light) != 1,
f = function (e) {
if (e) drk = !drk;
document.documentElement.className = drk? "z":"y";
l.light = drk? 0:1;
try { l.light = drk? 0:1; } catch (ex) { }
};
f();
return f;
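The lightswitch change stops assuming localStorage is present and writable; reads tolerate a missing object and writes are wrapped so a throwing storage (private browsing, sandboxed iframes) no longer breaks the toggle. A minimal standalone sketch of that guard pattern, assuming only standard DOM APIs:

// sketch of the guarded-storage pattern above: read defensively,
// swallow write failures instead of crashing the page
function safe_get(key) {
    try { return window.localStorage ? window.localStorage.getItem(key) : null; }
    catch (ex) { return null; }
}
function safe_set(key, val) {
    try { window.localStorage.setItem(key, val); } catch (ex) { }
}
// usage: var dark = safe_get('light') != 1; safe_set('light', dark ? 0 : 1);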

@@ -54,3 +54,4 @@ l.light = drk? 0:1;
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
</body></html>

@@ -134,14 +134,14 @@ function save(mde) {
|
||||
|
||||
function save_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
|
||||
}
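save_cb now treats an unparseable reply as a probable save failure and surfaces the raw server text (run through unpre) instead of silently dying in JSON.parse. A standalone sketch of that parse-and-report pattern; report is a hypothetical stand-in for toast.err:

// sketch of the defensive reply parsing in the hunk above
function parse_reply(xhr, report) {
    var r;
    try {
        r = JSON.parse(xhr.responseText);
    }
    catch (ex) {
        // reply was not JSON; show what the server actually said
        report('failed to parse reply:\n\n' + xhr.responseText);
        return null;
    }
    return r;
}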
|
||||
|
||||
if (!r.ok) {
|
||||
@@ -180,7 +180,7 @@ function save_cb() {
|
||||
|
||||
function save_chk() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
:root {
|
||||
--font-main: sans-serif;
|
||||
--font-serif: serif;
|
||||
--font-mono: 'scp';
|
||||
}
|
||||
html,body,tr,th,td,#files,a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
@@ -10,6 +15,7 @@ html {
|
||||
color: #ccc;
|
||||
background: #333;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
text-shadow: 1px 1px 0px #000;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
@@ -23,6 +29,7 @@ html, body {
|
||||
}
|
||||
pre {
|
||||
font-family: monospace, monospace;
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -42,10 +42,11 @@
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
@@ -127,6 +128,7 @@ pre, code {
|
||||
color: #480;
|
||||
background: #fff;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
border: 1px solid rgba(128,128,128,0.3);
|
||||
border-radius: .2em;
|
||||
padding: .15em .2em;
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -78,13 +78,15 @@
|
||||
|
||||
<h1 id="cc">client config:</h1>
|
||||
<ul>
|
||||
{% if k304 or k304vis %}
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
|
||||
{% endif %}
|
||||
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
@@ -110,10 +112,12 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ var Ls = {
|
||||
"d1": "tilstand",
|
||||
"d2": "vis tilstanden til alle tråder",
|
||||
"e1": "last innst.",
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer",
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
|
||||
"f1": "du kan betrakte:",
|
||||
"g1": "du kan laste opp til:",
|
||||
"cc1": "klient-konfigurasjon",
|
||||
@@ -30,12 +30,12 @@ var Ls = {
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
|
||||
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
|
||||
"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
|
||||
}
|
||||
},
|
||||
d = Ls[sread("lang") || lang];
|
||||
d = Ls[sread("cpp_lang", ["eng", "nor"]) || lang] || Ls.eng || Ls.nor;
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
@@ -49,6 +49,15 @@ for (var k in (d || {})) {
|
||||
o[a].setAttribute("tt", d[k]);
|
||||
}
|
||||
|
||||
try {
|
||||
if (is_idp) {
|
||||
var z = ['#l+div', '#l', '#c'];
|
||||
for (var a = 0; a < z.length; a++)
|
||||
QS(z[a]).style.display = 'none';
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
tt.init();
|
||||
var o = QS('input[name="cppwd"]');
|
||||
if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
|
||||
|
||||
@@ -3,13 +3,14 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ args.doctitle }} @ {{ args.name }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -48,9 +49,13 @@
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if s %}
|
||||
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<pre>
|
||||
@@ -73,10 +78,13 @@
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
{% endif %}
|
||||
|
||||
<ul>
|
||||
{% if s %}
|
||||
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>or the emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -123,8 +131,14 @@
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if args.ftps %}
|
||||
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||
<pre>
|
||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
@@ -145,8 +159,14 @@
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if args.ftps %}
|
||||
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -178,7 +198,7 @@
|
||||
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
|
||||
<ul><li>if you are on LAN (or just dont have valid certificates), add <code>-td</code></li></ul>
|
||||
{% endif %}
|
||||
<p>
|
||||
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
|
||||
@@ -218,10 +238,12 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ args.theme }}";
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -1,9 +1,30 @@
|
||||
:root {
|
||||
--font-main: sans-serif;
|
||||
--font-serif: serif;
|
||||
--font-mono: 'scp';
|
||||
|
||||
--fg: #ccc;
|
||||
--fg-max: #fff;
|
||||
--bg-u2: #2b2b2b;
|
||||
--bg-u5: #444;
|
||||
}
|
||||
html.y {
|
||||
--fg: #222;
|
||||
--fg-max: #000;
|
||||
--bg-u2: #f7f7f7;
|
||||
--bg-u5: #ccc;
|
||||
}
|
||||
html.bz {
|
||||
--bg-u2: #202231;
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
font-display: swap;
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#tt, #toast {
|
||||
@@ -12,7 +33,9 @@ html {
|
||||
max-width: min(34em, 90%);
|
||||
max-width: min(34em, calc(100% - 7em));
|
||||
color: #ddd;
|
||||
color: var(--fg);
|
||||
background: #333;
|
||||
background: var(--bg-u2);
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #111;
|
||||
border-radius: .4em;
|
||||
@@ -86,6 +109,9 @@ html {
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
#toast.hide {
|
||||
display: none;
|
||||
}
|
||||
#toast.vis {
|
||||
right: 1.3em;
|
||||
transform: inherit;
|
||||
@@ -125,6 +151,10 @@ html {
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#toast code {
|
||||
padding: 0 .2em;
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
#tth {
|
||||
color: #fff;
|
||||
background: #111;
|
||||
@@ -157,9 +187,10 @@ html {
|
||||
#modalc code,
|
||||
#tt code {
|
||||
color: #eee;
|
||||
color: var(--fg-max);
|
||||
background: #444;
|
||||
background: var(--bg-u5);
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
@@ -167,22 +198,15 @@ html {
|
||||
color: #f6a;
|
||||
}
|
||||
html.y #tt {
|
||||
color: #333;
|
||||
background: #fff;
|
||||
border-color: #888 #000 #777 #000;
|
||||
}
|
||||
html.bz #tt {
|
||||
background: #202231;
|
||||
border-color: #3b3f58;
|
||||
}
|
||||
html.y #tt,
|
||||
html.y #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
html.y #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
#modalc code {
|
||||
color: #060;
|
||||
background: transparent;
|
||||
@@ -320,6 +344,9 @@ html.y .btn:focus {
|
||||
box-shadow: 0 .1em .2em #037 inset;
|
||||
outline: #037 solid .1em;
|
||||
}
|
||||
input[type="submit"] {
|
||||
cursor: pointer;
|
||||
}
|
||||
input[type="text"]:focus,
|
||||
input:not([type]):focus,
|
||||
textarea:focus {
|
||||
@@ -355,6 +382,7 @@ html.y textarea:focus {
|
||||
.mdo code,
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
@@ -424,6 +452,7 @@ html.y textarea:focus {
|
||||
}
|
||||
.mdo blockquote {
|
||||
font-family: serif;
|
||||
font-family: var(--font-serif), serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
@@ -557,3 +586,11 @@ hr {
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
#toast,
|
||||
#toast a#toastc,
|
||||
#tt {
|
||||
transition: none;
|
||||
}
|
||||
}
|
||||
@@ -431,7 +431,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
if (sread('potato') === null) {
|
||||
btn.click();
|
||||
toast.inf(30, L.u_gotpot);
|
||||
localStorage.removeItem('potato');
|
||||
sdrop('potato');
|
||||
}
|
||||
|
||||
u2f.appendChild(ode);
|
||||
@@ -588,7 +588,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
btns[a].onclick = function (e) {
|
||||
ev(e);
|
||||
var newtab = this.getAttribute('act');
|
||||
function go() {
|
||||
var go = function () {
|
||||
for (var b = 0; b < btns.length; b++) {
|
||||
btns[b].className = (
|
||||
btns[b].getAttribute('act') == newtab) ? 'act' : '';
|
||||
@@ -723,7 +723,7 @@ function Donut(uc, st) {
|
||||
|
||||
function strobe() {
|
||||
var txt = strobes.pop();
|
||||
wintitle(txt);
|
||||
wintitle(txt, false);
|
||||
if (!txt)
|
||||
clearInterval(tstrober);
|
||||
}
|
||||
@@ -807,7 +807,7 @@ function up2k_init(subtle) {
|
||||
function init_deps() {
|
||||
if (!loading_deps && !got_deps()) {
|
||||
var fn = 'sha512.' + sha_js + '.js',
|
||||
m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||
m = L.u_https1 + ' <a href="' + (location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||
|
||||
showmodal('<h1>loading ' + fn + '</h1>');
|
||||
import_js(SR + '/.cpr/deps/' + fn, unmodal);
|
||||
@@ -852,7 +852,7 @@ function up2k_init(subtle) {
|
||||
|
||||
setmsg(suggest_up2k, 'msg');
|
||||
|
||||
var parallel_uploads = icfg_get('nthread'),
|
||||
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||
uc = {},
|
||||
fdom_ctr = 0,
|
||||
biggest_file = 0;
|
||||
@@ -861,6 +861,8 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||
bcfg_bind(uc, 'umod', 'umod', false, null, false);
|
||||
bcfg_bind(uc, 'u2ts', 'u2ts', !u2ts.endsWith('u'), set_u2ts, false);
|
||||
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||
|
||||
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||
@@ -893,6 +895,7 @@ function up2k_init(subtle) {
|
||||
"bytes": {
|
||||
"total": 0,
|
||||
"hashed": 0,
|
||||
"inflight": 0,
|
||||
"uploaded": 0,
|
||||
"finished": 0
|
||||
},
|
||||
@@ -971,7 +974,7 @@ function up2k_init(subtle) {
|
||||
if (++nenters <= 0)
|
||||
nenters = 1;
|
||||
|
||||
if (onover.bind(this)(e))
|
||||
if (onover.call(this, e))
|
||||
return true;
|
||||
|
||||
var mup, up = QS('#up_zd');
|
||||
@@ -995,16 +998,29 @@ function up2k_init(subtle) {
|
||||
function onoverb(e) {
|
||||
// zones are alive; disable cuo2duo branch
|
||||
document.body.ondragover = document.body.ondrop = null;
|
||||
return onover.bind(this)(e);
|
||||
return onover.call(this, e);
|
||||
}
|
||||
function onover(e) {
|
||||
return onovercmn(this, e, false);
|
||||
}
|
||||
function onoverbtn(e) {
|
||||
return onovercmn(this, e, true);
|
||||
}
|
||||
function onovercmn(self, e, btn) {
|
||||
try {
|
||||
var ok = false, dt = e.dataTransfer.types;
|
||||
for (var a = 0; a < dt.length; a++)
|
||||
if (dt[a] == 'Files')
|
||||
ok = true;
|
||||
else if (dt[a] == 'text/uri-list')
|
||||
return true;
|
||||
else if (dt[a] == 'text/uri-list') {
|
||||
if (btn) {
|
||||
ok = true;
|
||||
if (toast.txt == L.u_uri)
|
||||
toast.hide();
|
||||
}
|
||||
else
|
||||
return toast.inf(10, L.u_uri) || true;
|
||||
}
|
||||
|
||||
if (!ok)
|
||||
return true;
|
||||
@@ -1020,13 +1036,16 @@ function up2k_init(subtle) {
|
||||
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
|
||||
return modal.alert('your browser does not support drag-and-drop uploading');
|
||||
}
|
||||
if (btn)
|
||||
return;
|
||||
|
||||
clmod(ebi('drops'), 'vis', 1);
|
||||
var v = this.getAttribute('v');
|
||||
var v = self.getAttribute('v');
|
||||
if (v)
|
||||
clmod(ebi(v), 'hl', 1);
|
||||
}
|
||||
function offdrag(e) {
|
||||
ev(e);
|
||||
noope(e);
|
||||
|
||||
var v = this.getAttribute('v');
|
||||
if (v)
|
||||
@@ -1045,6 +1064,8 @@ function up2k_init(subtle) {
|
||||
document.body.ondragleave = offdrag;
|
||||
document.body.ondragover = onover;
|
||||
document.body.ondrop = gotfile;
|
||||
ebi('u2btn').ondrop = gotfile;
|
||||
ebi('u2btn').ondragover = onoverbtn;
|
||||
|
||||
var drops = [ebi('up_dz'), ebi('srch_dz')];
|
||||
for (var a = 0; a < 2; a++) {
|
||||
@@ -1088,7 +1109,7 @@ function up2k_init(subtle) {
|
||||
function gotfile(e) {
|
||||
ev(e);
|
||||
nenters = 0;
|
||||
offdrag.bind(this)();
|
||||
offdrag.call(this);
|
||||
var dz = this && this.getAttribute('id');
|
||||
if (!dz && e && e.clientY)
|
||||
// cuo2duo fallback
|
||||
@@ -1132,7 +1153,7 @@ function up2k_init(subtle) {
|
||||
dst = good_files;
|
||||
|
||||
if (is_itemlist) {
|
||||
if (fobj.kind !== 'file')
|
||||
if (fobj.kind !== 'file' && fobj.type !== 'text/uri-list')
|
||||
continue;
|
||||
|
||||
try {
|
||||
@@ -1144,6 +1165,8 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) { }
|
||||
fobj = fobj.getAsFile();
|
||||
if (!fobj)
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
if (fobj.size < 1)
|
||||
@@ -1311,7 +1334,8 @@ function up2k_init(subtle) {
|
||||
return modal.confirm(msg.join('') + '</ul>', function () {
|
||||
start_actx();
|
||||
up_them(good_files);
|
||||
toast.inf(15, L.u_unpt, L.u_unpt);
|
||||
if (have_up2k_idx)
|
||||
toast.inf(15, L.u_unpt, L.u_unpt);
|
||||
}, null);
|
||||
|
||||
up_them(good_files);
|
||||
@@ -1319,6 +1343,7 @@ function up2k_init(subtle) {
|
||||
|
||||
function up_them(good_files) {
|
||||
start_actx();
|
||||
draw_turbo();
|
||||
var evpath = get_evpath(),
|
||||
draw_each = good_files.length < 50;
|
||||
|
||||
@@ -1341,7 +1366,7 @@ function up2k_init(subtle) {
|
||||
name = good_files[a][1],
|
||||
fdir = evpath,
|
||||
now = Date.now(),
|
||||
lmod = fobj.lastModified || now,
|
||||
lmod = uc.u2ts ? (fobj.lastModified || now) : 0,
|
||||
ofs = name.lastIndexOf('/') + 1;
|
||||
|
||||
if (ofs) {
|
||||
@@ -1369,6 +1394,8 @@ function up2k_init(subtle) {
|
||||
entry.rand = true;
|
||||
entry.name = 'a\n' + entry.name;
|
||||
}
|
||||
else if (uc.umod)
|
||||
entry.umod = true;
|
||||
|
||||
if (biggest_file < entry.size)
|
||||
biggest_file = entry.size;
|
||||
@@ -1385,7 +1412,7 @@ function up2k_init(subtle) {
|
||||
|
||||
pvis.addfile([
|
||||
uc.fsearch ? esc(entry.name) : linksplit(
|
||||
entry.purl + uricom_enc(entry.name)).join(' '),
|
||||
entry.purl + uricom_enc(entry.name)).join(' / '),
|
||||
'📐 ' + L.u_hashing,
|
||||
''
|
||||
], entry.size, draw_each);
|
||||
@@ -1517,17 +1544,21 @@ function up2k_init(subtle) {
|
||||
if (uc.fsearch)
|
||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||
}
|
||||
|
||||
var b_up = st.bytes.inflight + st.bytes.uploaded,
|
||||
b_fin = st.bytes.inflight + st.bytes.finished;
|
||||
|
||||
if (nsend) {
|
||||
st.time.uploading += td;
|
||||
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
||||
t.push(['u2etau', b_up, b_fin, st.time.uploading]);
|
||||
}
|
||||
if ((nhash || nsend) && !uc.fsearch) {
|
||||
if (!st.bytes.finished) {
|
||||
if (!b_fin) {
|
||||
ebi('u2etat').innerHTML = L.u_etaprep;
|
||||
}
|
||||
else {
|
||||
st.time.busy += td;
|
||||
t.push(['u2etat', st.bytes.finished, st.bytes.finished, st.time.busy]);
|
||||
t.push(['u2etat', b_fin, b_fin, st.time.busy]);
|
||||
}
|
||||
}
|
||||
for (var a = 0; a < t.length; a++) {
|
||||
@@ -1568,7 +1599,7 @@ function up2k_init(subtle) {
|
||||
return;
|
||||
|
||||
st.oserr = true;
|
||||
var msg = HTTPS ? L.u_emtleak3 : L.u_emtleak2.format((window.location + '').replace(':', 's:'));
|
||||
var msg = HTTPS ? L.u_emtleak3 : L.u_emtleak2.format((location + '').replace(':', 's:'));
|
||||
modal.alert(L.u_emtleak1 + msg + (CHROME ? L.u_emtleakc : FIREFOX ? L.u_emtleakf : ''));
|
||||
}
|
||||
|
||||
@@ -1634,11 +1665,11 @@ function up2k_init(subtle) {
|
||||
var running = false,
|
||||
was_busy = false;
|
||||
|
||||
function defer() {
|
||||
var defer = function () {
|
||||
running = false;
|
||||
}
|
||||
|
||||
function taskerd() {
|
||||
var taskerd = function () {
|
||||
if (running)
|
||||
return;
|
||||
|
||||
@@ -1668,7 +1699,7 @@ function up2k_init(subtle) {
|
||||
is_busy = st.todo.handshake.length;
|
||||
try {
|
||||
if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
|
||||
treectl.goto(get_evpath());
|
||||
treectl.goto();
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
@@ -1691,8 +1722,6 @@ function up2k_init(subtle) {
|
||||
ebi('u2etas').style.textAlign = 'left';
|
||||
}
|
||||
etafun();
|
||||
if (pvis.act == 'bz')
|
||||
pvis.changecard('bz');
|
||||
}
|
||||
|
||||
if (flag) {
|
||||
@@ -1828,6 +1857,9 @@ function up2k_init(subtle) {
|
||||
timer.rm(donut.do);
|
||||
ebi('u2tabw').style.minHeight = '0px';
|
||||
utw_minh = 0;
|
||||
|
||||
if (pvis.act == 'bz')
|
||||
pvis.changecard('bz');
|
||||
}
|
||||
|
||||
function chill(t) {
|
||||
@@ -1936,7 +1968,7 @@ function up2k_init(subtle) {
|
||||
st.bytes.hashed += cdr - car;
|
||||
st.etac.h++;
|
||||
|
||||
function orz(e) {
|
||||
var orz = function (e) {
|
||||
bpend--;
|
||||
segm_next();
|
||||
hash_calc(nch, e.target.result);
|
||||
@@ -2140,6 +2172,9 @@ function up2k_init(subtle) {
|
||||
|
||||
function exec_head() {
|
||||
var t = st.todo.head.shift();
|
||||
if (t.done)
|
||||
return console.log('done; skip head1', t.name, t);
|
||||
|
||||
st.busy.head.push(t);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
@@ -2152,6 +2187,9 @@ function up2k_init(subtle) {
|
||||
st.todo.head.unshift(t);
|
||||
};
|
||||
function orz(e) {
|
||||
if (t.done)
|
||||
return console.log('done; skip head2', t.name, t);
|
||||
|
||||
var ok = false;
|
||||
if (xhr.status == 200) {
|
||||
var srv_sz = xhr.getResponseHeader('Content-Length'),
|
||||
@@ -2198,6 +2236,9 @@ function up2k_init(subtle) {
|
||||
keepalive = t.keepalive,
|
||||
me = Date.now();
|
||||
|
||||
if (t.done)
|
||||
return console.log('done; skip hs', t.name, t);
|
||||
|
||||
st.busy.handshake.push(t);
|
||||
t.keepalive = undefined;
|
||||
t.t_busied = me;
|
||||
@@ -2207,34 +2248,43 @@ function up2k_init(subtle) {
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
if (t.t_busied != me) {
|
||||
console.log('zombie handshake onerror,', t.name, t);
|
||||
return;
|
||||
}
|
||||
if (t.t_busied != me) // t.done ok
|
||||
return console.log('zombie handshake onerror', t.name, t);
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_eneths + "\n\nfile: " + t.name, t);
|
||||
|
||||
console.log('handshake onerror, retrying', t.name, t);
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
t.keepalive = keepalive;
|
||||
};
|
||||
function orz(e) {
|
||||
if (t.t_busied != me) {
|
||||
console.log('zombie handshake onload,', t.name, t);
|
||||
return;
|
||||
}
|
||||
var orz = function (e) {
|
||||
if (t.t_busied != me || t.done)
|
||||
return console.log('zombie handshake onload', t.name, t);
|
||||
|
||||
if (xhr.status == 200) {
|
||||
try {
|
||||
var response = JSON.parse(xhr.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
return toast.err(0, 'Handshake error; will retry...\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
|
||||
}
|
||||
|
||||
t.t_handshake = Date.now();
|
||||
if (keepalive) {
|
||||
apop(st.busy.handshake, t);
|
||||
tasker();
|
||||
return;
|
||||
}
|
||||
|
||||
if (toast.tag === t)
|
||||
toast.ok(5, L.u_fixed);
|
||||
|
||||
var response = JSON.parse(xhr.responseText);
|
||||
if (!response.name) {
|
||||
var msg = '',
|
||||
smsg = '';
|
||||
@@ -2255,7 +2305,7 @@ function up2k_init(subtle) {
|
||||
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
||||
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
||||
|
||||
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||
msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||
}
|
||||
msg = msg.join('<br />\n');
|
||||
}
|
||||
@@ -2289,7 +2339,7 @@ function up2k_init(subtle) {
|
||||
url += '?k=' + fk;
|
||||
}
|
||||
|
||||
pvis.seth(t.n, 0, linksplit(url).join(' '));
|
||||
pvis.seth(t.n, 0, linksplit(url).join(' / '));
|
||||
}
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
@@ -2373,15 +2423,12 @@ function up2k_init(subtle) {
|
||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||
|
||||
var err = "",
|
||||
rsp = (xhr.responseText + ''),
|
||||
rsp = unpre(xhr.responseText),
|
||||
ofs = rsp.lastIndexOf('\nURL: ');
|
||||
|
||||
if (ofs !== -1)
|
||||
rsp = rsp.slice(0, ofs);
|
||||
|
||||
if (rsp.indexOf('<pre>') === 0)
|
||||
rsp = rsp.slice(5);
|
||||
|
||||
if (rsp.indexOf('rate-limit ') !== -1) {
|
||||
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
||||
console.log("rate-limit: " + penalty);
|
||||
@@ -2400,7 +2447,7 @@ function up2k_init(subtle) {
|
||||
err = rsp;
|
||||
ofs = err.indexOf('\n/');
|
||||
if (ofs !== -1) {
|
||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' / ');
|
||||
}
|
||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||
t.rechecks = 0;
|
||||
@@ -2441,6 +2488,8 @@ function up2k_init(subtle) {
|
||||
req.srch = 1;
|
||||
else if (t.rand)
|
||||
req.rand = true;
|
||||
else if (t.umod)
|
||||
req.umod = true;
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -2483,14 +2532,17 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function exec_upload() {
|
||||
var upt = st.todo.upload.shift();
|
||||
var upt = st.todo.upload.shift(),
|
||||
t = st.files[upt.nfile],
|
||||
npart = upt.npart,
|
||||
tries = 0;
|
||||
|
||||
if (t.done)
|
||||
return console.log('done; skip chunk', t.name, t);
|
||||
|
||||
st.busy.upload.push(upt);
|
||||
st.nfile.upload = upt.nfile;
|
||||
|
||||
var npart = upt.npart,
|
||||
t = st.files[upt.nfile],
|
||||
tries = 0;
|
||||
|
||||
if (!t.t_uploading)
|
||||
t.t_uploading = Date.now();
|
||||
|
||||
@@ -2503,8 +2555,9 @@ function up2k_init(subtle) {
|
||||
if (cdr >= t.size)
|
||||
cdr = t.size;
|
||||
|
||||
function orz(xhr) {
|
||||
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
|
||||
var orz = function (xhr) {
|
||||
st.bytes.inflight -= xhr.bsent;
|
||||
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||
if (txt.indexOf('upload blocked by x') + 1) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
@@ -2532,7 +2585,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
orz2(xhr);
|
||||
}
|
||||
function orz2(xhr) {
|
||||
var orz2 = function (xhr) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
if (!t.postlist.length) {
|
||||
@@ -2548,7 +2601,10 @@ function up2k_init(subtle) {
|
||||
btot = Math.floor(st.bytes.total / 1024 / 1024);
|
||||
|
||||
xhr.upload.onprogress = function (xev) {
|
||||
pvis.prog(t, npart, xev.loaded);
|
||||
var nb = xev.loaded;
|
||||
st.bytes.inflight += nb - xhr.bsent;
|
||||
xhr.bsent = nb;
|
||||
pvis.prog(t, npart, nb);
|
||||
};
|
||||
xhr.onload = function (xev) {
|
||||
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
@@ -2557,6 +2613,8 @@ function up2k_init(subtle) {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
st.bytes.inflight -= (xhr.bsent || 0);
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
|
||||
@@ -2573,6 +2631,7 @@ function up2k_init(subtle) {
|
||||
if (xhr.overrideMimeType)
|
||||
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
|
||||
|
||||
xhr.bsent = 0;
|
||||
xhr.responseType = 'text';
|
||||
xhr.send(t.fobj.slice(car, cdr));
|
||||
}
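The upload path above tracks bytes that are on the wire by diffing each progress event against the previous xhr.bsent, and rolls them back on error so the ETA does not drift. A standalone sketch of that bookkeeping; stats and send_chunk are illustrative names, not copyparty's real objects:

// sketch of the in-flight byte accounting used above
var stats = { inflight: 0, finished: 0 };
function send_chunk(url, blob) {
    var xhr = new XMLHttpRequest();
    xhr.bsent = 0;
    xhr.upload.onprogress = function (ev) {
        stats.inflight += ev.loaded - xhr.bsent;  // only the delta since the last event
        xhr.bsent = ev.loaded;
    };
    xhr.onload = function () {
        stats.inflight -= xhr.bsent;   // bytes are no longer in flight
        stats.finished += blob.size;
    };
    xhr.onerror = function () {
        stats.inflight -= xhr.bsent;   // roll back so totals stay consistent
    };
    xhr.open('POST', url, true);
    xhr.send(blob);
}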
|
||||
@@ -2590,7 +2649,7 @@ function up2k_init(subtle) {
|
||||
wpx = window.innerWidth,
|
||||
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
||||
wem = wpx * 1.0 / fpx,
|
||||
wide = wem > 54 ? 'w' : '',
|
||||
wide = wem > 57 ? 'w' : '',
|
||||
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
||||
btn = ebi('u2btn');
|
||||
|
||||
@@ -2599,7 +2658,7 @@ function up2k_init(subtle) {
|
||||
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
||||
}
|
||||
|
||||
wide = wem > 82 ? 'ww' : wide;
|
||||
wide = wem > 86 ? 'ww' : wide;
|
||||
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
||||
var its = [ebi('u2etaw'), ebi('u2cards')];
|
||||
if (its[0].parentNode !== parent) {
|
||||
@@ -2610,8 +2669,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
}
|
||||
window.addEventListener('resize', onresize);
|
||||
onresize();
|
||||
onresize100.add(onresize, true);
|
||||
|
||||
if (MOBILE) {
|
||||
// android-chrome wobbles for a bit; firefox / iOS-safari are OK
|
||||
@@ -2657,7 +2715,11 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
parallel_uploads = v;
|
||||
swrite('nthread', v);
|
||||
if (v == u2j)
|
||||
sdrop('nthread');
|
||||
else
|
||||
swrite('nthread', v);
|
||||
|
||||
clmod(obj, 'err');
|
||||
return;
|
||||
}
|
||||
@@ -2670,8 +2732,11 @@ function up2k_init(subtle) {
|
||||
if (parallel_uploads > 16)
|
||||
parallel_uploads = 16;
|
||||
|
||||
if (parallel_uploads > 7)
|
||||
toast.warn(10, L.u_maxconn);
|
||||
|
||||
obj.value = parallel_uploads;
|
||||
bumpthread({ "target": 1 })
|
||||
bumpthread({ "target": 1 });
|
||||
}
|
||||
|
||||
function tgl_fsearch() {
|
||||
@@ -2679,6 +2744,16 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function draw_turbo() {
|
||||
if (turbolvl < 0 && uc.turbo) {
|
||||
bcfg_set('u2turbo', uc.turbo = false);
|
||||
toast.err(10, L.u_turbo_c);
|
||||
}
|
||||
|
||||
if (uc.turbo && !has(perms, 'read')) {
|
||||
bcfg_set('u2turbo', uc.turbo = false);
|
||||
toast.warn(30, L.u_turbo_g);
|
||||
}
|
||||
|
||||
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
||||
html = ebi('u2foot').innerHTML;
|
||||
|
||||
@@ -2777,6 +2852,8 @@ function up2k_init(subtle) {
|
||||
|
||||
function set_fsearch(new_state) {
|
||||
var fixed = false,
|
||||
persist = new_state !== undefined,
|
||||
preferred = bcfg_get('fsearch', undefined),
|
||||
can_write = false;
|
||||
|
||||
if (!ebi('fsearch')) {
|
||||
@@ -2791,10 +2868,18 @@ function up2k_init(subtle) {
|
||||
new_state = false;
|
||||
fixed = true;
|
||||
}
|
||||
if (new_state === undefined)
|
||||
new_state = can_write ? false : have_up2k_idx ? true : undefined;
|
||||
}
|
||||
|
||||
if (new_state === undefined)
|
||||
new_state = preferred;
|
||||
|
||||
if (new_state !== undefined)
|
||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||
if (persist)
|
||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||
else
|
||||
bcfg_upd_ui('fsearch', uc.fsearch = new_state);
|
||||
|
||||
try {
|
||||
clmod(ebi('u2c3w'), 's', !can_write);
|
||||
@@ -2817,6 +2902,9 @@ function up2k_init(subtle) {
|
||||
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
||||
ebi('u2mu').style.display = potato ? '' : 'none';
|
||||
|
||||
if (u2ts.startsWith('f') || !sread('u2ts'))
|
||||
uc.u2ts = bcfg_upd_ui('u2ts', !u2ts.endsWith('u'));
|
||||
|
||||
draw_turbo();
|
||||
draw_life();
|
||||
onresize();
|
||||
@@ -2841,12 +2929,24 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
|
||||
function set_u2sort() {
|
||||
function set_u2sort(en) {
|
||||
if (u2sort.indexOf('f') < 0)
|
||||
return;
|
||||
|
||||
bcfg_set('u2sort', uc.az = u2sort.indexOf('n') + 1);
|
||||
localStorage.removeItem('u2sort');
|
||||
var fen = uc.az = u2sort.indexOf('n') + 1;
|
||||
bcfg_upd_ui('u2sort', fen);
|
||||
if (en != fen)
|
||||
toast.warn(10, L.ul_btnlk);
|
||||
}
|
||||
|
||||
function set_u2ts(en) {
|
||||
if (u2ts.indexOf('f') < 0)
|
||||
return;
|
||||
|
||||
var fen = !u2ts.endsWith('u');
|
||||
bcfg_upd_ui('u2ts', fen);
|
||||
if (en != fen)
|
||||
toast.warn(10, L.ul_btnlk);
|
||||
}
|
||||
|
||||
function set_hashw() {
|
||||
@@ -2944,7 +3044,7 @@ ebi('ico1').onclick = function () {
|
||||
if (QS('#op_up2k.act'))
|
||||
goto_up2k();
|
||||
|
||||
apply_perms({ "perms": perms, "frand": frand });
|
||||
apply_perms({ "perms": perms, "frand": frand, "u2ts": u2ts });
|
||||
|
||||
|
||||
(function () {
|
||||
|
||||
@@ -6,13 +6,21 @@ if (!window.console || !console.log)
|
||||
};
|
||||
|
||||
|
||||
if (window.CGV)
|
||||
for (var k in CGV)
|
||||
window[k] = CGV[k];
|
||||
|
||||
|
||||
var wah = '',
|
||||
STG = null,
|
||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||
CB = '?_=' + Date.now(),
|
||||
T0 = Date.now(),
|
||||
CB = '?_=' + Math.floor(T0 / 1000).toString(36),
|
||||
R = SR.slice(1),
|
||||
RS = R ? "/" + R : "",
|
||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||
HTTPS = (window.location + '').indexOf('https:') === 0,
|
||||
HTTPS = ('' + location).indexOf('https:') === 0,
|
||||
TOUCH = 'ontouchstart' in window,
|
||||
MOBILE = TOUCH,
|
||||
CHROME = !!window.chrome,
|
||||
@@ -33,6 +41,16 @@ if (!window.Notification || !Notification.permission)
|
||||
if (!window.FormData)
|
||||
window.FormData = false;
|
||||
|
||||
try {
|
||||
STG = window.localStorage;
|
||||
STG.STG;
|
||||
}
|
||||
catch (ex) {
|
||||
STG = null;
|
||||
if ((ex + '').indexOf('sandbox') < 0)
|
||||
console.log('no localStorage: ' + ex);
|
||||
}
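The new STG global is probed once at startup so later reads and writes can go through a single nullable handle instead of touching window.localStorage directly every time. A minimal sketch of that probe, assuming nothing beyond standard DOM (variable names are illustrative):

// sketch of the one-time storage probe above; the access itself can
// throw (e.g. sandboxed iframes), in which case the handle stays null
var storage = null;
try {
    storage = window.localStorage;
    storage.getItem('probe');  // force an access so a broken impl throws here
}
catch (ex) {
    storage = null;
}
// later: if (storage) storage.setItem('k', 'v');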
|
||||
|
||||
try {
|
||||
CB = '?' + document.currentScript.src.split('?').pop();
|
||||
|
||||
@@ -139,29 +157,41 @@ catch (ex) {
|
||||
}
|
||||
var crashed = false, ignexd = {}, evalex_fatal = false;
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
if ((msg + '').indexOf('ResizeObserver') + 1)
|
||||
return; // chrome issue 809574 (benign, from <video>)
|
||||
|
||||
if ((msg + '').indexOf('l2d.js') + 1)
|
||||
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
|
||||
|
||||
if (!/\.js($|\?)/.exec('' + url))
|
||||
return; // chrome debugger
|
||||
|
||||
if ((url + '').indexOf(' > eval') + 1 && !evalex_fatal)
|
||||
return; // md timer
|
||||
|
||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||
if (ignexd[ekey] || crashed)
|
||||
return;
|
||||
|
||||
msg = String(msg);
|
||||
url = String(url);
|
||||
|
||||
if (msg.indexOf('ResizeObserver') + 1)
|
||||
return; // chrome issue 809574 (benign, from <video>)
|
||||
|
||||
if (msg.indexOf('l2d.js') + 1)
|
||||
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
|
||||
|
||||
if (!/\.js($|\?)/.exec(url))
|
||||
return; // chrome debugger
|
||||
|
||||
if (url.indexOf(' > eval') + 1 && !evalex_fatal)
|
||||
return; // md timer
|
||||
|
||||
if (IE && url.indexOf('prism.js') + 1)
|
||||
return;
|
||||
|
||||
if (url.indexOf('easymde.js') + 1)
|
||||
return; // clicking the preview pane
|
||||
|
||||
if (url.indexOf('deps/marked.js') + 1 && !window.WebAssembly)
|
||||
return; // ff<52
|
||||
|
||||
crashed = true;
|
||||
window.onerror = undefined;
|
||||
var html = [
|
||||
'<h1>you hit a bug!</h1>',
|
||||
'<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(msg).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
||||
];
|
||||
|
||||
@@ -190,19 +220,24 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
}
|
||||
ignexd[ekey] = true;
|
||||
|
||||
var ls = jcp(localStorage);
|
||||
if (ls.fman_clip)
|
||||
ls.fman_clip = ls.fman_clip.length + ' items';
|
||||
var ls = {},
|
||||
lsk = Object.keys(localStorage),
|
||||
nka = lsk.length,
|
||||
nk = Math.min(200, nka);
|
||||
|
||||
var lsk = Object.keys(ls);
|
||||
lsk.sort();
|
||||
html.push('<p class="b">');
|
||||
for (var a = 0; a < lsk.length; a++) {
|
||||
if (ls[lsk[a]].length > 9000)
|
||||
continue;
|
||||
for (var a = 0; a < nk; a++) {
|
||||
var k = lsk[a],
|
||||
v = localStorage.getItem(k);
|
||||
|
||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||
ls[k] = v.length > 256 ? v.slice(0, 32) + '[...' + v.length + 'b]' : v;
|
||||
}
|
||||
|
||||
lsk = Object.keys(ls);
|
||||
lsk.sort();
|
||||
html.push('<p class="b"><b>' + nka + ': </b>');
|
||||
for (var a = 0; a < nk; a++)
|
||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||
|
||||
html.push('</p>');
|
||||
}
|
||||
catch (e) { }
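The rewritten crash-report block above caps the dump at 200 localStorage keys and truncates long values to a short prefix plus their length, instead of copying everything. A standalone sketch of that truncation; the function and limit names are illustrative:

// sketch of the capped localStorage dump used above
function dump_storage(max_keys, max_len) {
    var out = {}, keys = Object.keys(localStorage).slice(0, max_keys);
    for (var a = 0; a < keys.length; a++) {
        var v = localStorage.getItem(keys[a]) || '';
        out[keys[a]] = v.length > max_len ? v.slice(0, 32) + '[...' + v.length + 'b]' : v;
    }
    return out;
}
// usage: console.log(dump_storage(200, 256));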
|
||||
@@ -264,8 +299,15 @@ function anymod(e, shift_ok) {
|
||||
}
|
||||
|
||||
|
||||
var dev_fbw = sread('dev_fbw');
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e && window.event) {
|
||||
e = window.event;
|
||||
if (dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
}
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
@@ -284,7 +326,7 @@ function ev(e) {
|
||||
|
||||
|
||||
function noope(e) {
|
||||
ev(e);
|
||||
try { ev(e); } catch (ex) { }
|
||||
}
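ev() above now falls back to window.event explicitly (optionally warning via the dev_fbw flag when the fallback fires), and noope() tolerates ev() itself throwing. A minimal sketch of the same normalization, with hypothetical helper names:

// sketch of the event-object fallback above: prefer the argument,
// fall back to the legacy global, and never let the helper throw
function get_event(e) {
    if (!e && window.event)
        e = window.event;  // old-IE style callers pass nothing
    return e || null;
}
function swallow(e) {
    try {
        e = get_event(e);
        if (e && e.preventDefault) e.preventDefault();
    } catch (ex) { }
}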
|
||||
|
||||
|
||||
@@ -352,14 +394,30 @@ catch (ex) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!window.Set)
|
||||
window.Set = function () {
|
||||
var r = this;
|
||||
r.size = 0;
|
||||
r.d = {};
|
||||
r.add = function (k) {
|
||||
if (!r.d[k]) {
|
||||
r.d[k] = 1;
|
||||
r.size++;
|
||||
}
|
||||
};
|
||||
r.has = function (k) {
|
||||
return r.d[k];
|
||||
};
|
||||
};
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
function import_js(url, cb, ecb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = mknod('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
script.src = url + '?_=' + (window.TS || 'a');
|
||||
script.onload = cb;
|
||||
script.onerror = function () {
|
||||
script.onerror = ecb || function () {
|
||||
var m = 'Failed to load module:\n' + url;
|
||||
console.log(m);
|
||||
toast.err(0, m);
|
||||
@@ -368,6 +426,34 @@ function import_js(url, cb) {
|
||||
}
|
||||
|
||||
|
||||
function unsmart(txt) {
|
||||
return !IPHONE ? txt : (txt.
|
||||
replace(/[\u2014]/g, "--").
|
||||
replace(/[\u2022]/g, "*").
|
||||
replace(/[\u2018\u2019]/g, "'").
|
||||
replace(/[\u201c\u201d]/g, '"'));
|
||||
}
|
||||
|
||||
|
||||
function namesan(txt, win, fslash) {
|
||||
if (win)
|
||||
txt = (txt.
|
||||
replace(/</g, "<").
|
||||
replace(/>/g, ">").
|
||||
replace(/:/g, ":").
|
||||
replace(/"/g, """).
|
||||
replace(/\\/g, "\").
|
||||
replace(/\|/g, "|").
|
||||
replace(/\?/g, "?").
|
||||
replace(/\*/g, "*"));
|
||||
|
||||
if (fslash)
|
||||
txt = txt.replace(/\//g, "/");
|
||||
|
||||
return txt;
|
||||
}
|
||||
|
||||
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
@@ -462,7 +548,7 @@ function yscroll() {
|
||||
|
||||
function showsort(tab) {
|
||||
var v, vn, v1, v2, th = tab.tHead,
|
||||
sopts = jread('fsort', [["href", 1, ""]]);
|
||||
sopts = jread('fsort', jcp(dsort));
|
||||
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
|
||||
@@ -601,9 +687,8 @@ function linksplit(rp, id) {
|
||||
}
|
||||
var vlink = esc(uricom_dec(link));
|
||||
|
||||
if (link.indexOf('/') !== -1) {
|
||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||
}
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = vlink.slice(0, -1);
|
||||
|
||||
if (!rp) {
|
||||
if (q)
|
||||
@@ -735,17 +820,6 @@ function noq_href(el) {
|
||||
}
|
||||
|
||||
|
||||
function get_pwd() {
|
||||
var k = HTTPS ? 's=' : 'd=',
|
||||
pwd = ('; ' + document.cookie).split('; cppw' + k);
|
||||
|
||||
if (pwd.length < 2)
|
||||
return null;
|
||||
|
||||
return decodeURIComponent(pwd[1].split(';')[0]);
|
||||
}
|
||||
|
||||
|
||||
function unix2iso(ts) {
|
||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||
}
|
||||
@@ -866,9 +940,17 @@ function jcp(obj) {
|
||||
}
|
||||
|
||||
|
||||
function sread(key) {
|
||||
function sdrop(key) {
|
||||
try {
|
||||
return localStorage.getItem(key);
|
||||
STG.removeItem(key);
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
|
||||
function sread(key, al) {
|
||||
try {
|
||||
var ret = STG.getItem(key);
|
||||
return (!al || has(al, ret)) ? ret : null;
|
||||
}
|
||||
catch (e) {
|
||||
return null;
|
||||
@@ -878,9 +960,9 @@ function sread(key) {
|
||||
function swrite(key, val) {
|
||||
try {
|
||||
if (val === undefined || val === null)
|
||||
localStorage.removeItem(key);
|
||||
STG.removeItem(key);
|
||||
else
|
||||
localStorage.setItem(key, val);
|
||||
STG.setItem(key, val);
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
@@ -890,7 +972,13 @@ function jread(key, fb) {
|
||||
if (!str)
|
||||
return fb;
|
||||
|
||||
return JSON.parse(str);
|
||||
try {
|
||||
// '' throws, null is ok, sasuga
|
||||
return JSON.parse(str);
|
||||
}
|
||||
catch (e) {
|
||||
return fb;
|
||||
}
|
||||
}
|
||||
|
||||
function jwrite(key, val) {
|
||||
@@ -909,7 +997,7 @@ function fcfg_get(name, defval) {
|
||||
val = parseFloat(sread(name));
|
||||
|
||||
if (!isNum(val))
|
||||
return parseFloat(o ? o.value : defval);
|
||||
return parseFloat(o && o.value !== '' ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
@@ -954,13 +1042,14 @@ function bcfg_set(name, val) {
|
||||
function bcfg_upd_ui(name, val) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return;
|
||||
return val;
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o) {
|
||||
clmod(o, 'on', val);
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||
@@ -1034,7 +1123,7 @@ function dl_file(url) {
|
||||
|
||||
function cliptxt(txt, ok) {
|
||||
var fb = function () {
|
||||
console.log('fb');
|
||||
console.log('clip-fb');
|
||||
var o = mknod('input');
|
||||
o.value = txt;
|
||||
document.body.appendChild(o);
|
||||
@@ -1051,10 +1140,13 @@ function cliptxt(txt, ok) {
|
||||
}
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
function Debounce(delay) {
|
||||
var r = this;
|
||||
r.delay = delay;
|
||||
r.timer = 0;
|
||||
r.t_hit = 0;
|
||||
r.t_run = 0;
|
||||
r.q = [];
|
||||
r.last = 0;
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
@@ -1068,7 +1160,67 @@ var timer = (function () {
|
||||
apop(r.q, fun);
|
||||
};
|
||||
|
||||
function doevents() {
|
||||
r.run = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
r.t_run = Date.now();
|
||||
|
||||
var q = r.q.slice(0);
|
||||
for (var a = 0; a < q.length; a++)
|
||||
q[a]();
|
||||
};
|
||||
|
||||
r.hit = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
var now = Date.now(),
|
||||
td_hit = now - r.t_hit,
|
||||
td_run = now - r.t_run;
|
||||
|
||||
if (td_run >= r.delay * 2)
|
||||
r.t_run = now;
|
||||
|
||||
if (td_run >= r.delay && td_run <= r.delay * 2) {
|
||||
// r.delay is also deadline
|
||||
clearTimeout(r.timer);
|
||||
return r.run();
|
||||
}
|
||||
|
||||
if (td_hit < r.delay / 5)
|
||||
return;
|
||||
|
||||
clearTimeout(r.timer);
|
||||
r.timer = setTimeout(r.run, r.delay);
|
||||
r.t_hit = now;
|
||||
};
|
||||
};
|
||||
|
||||
var onresize100 = new Debounce(100);
|
||||
window.addEventListener('resize', onresize100.hit);
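Debounce (defined above) coalesces bursts of events but also enforces a deadline, so a steady stream still flushes roughly once per delay. A usage sketch with the same constructor; relayout is a hypothetical handler, not part of copyparty:

// usage sketch for the Debounce helper above
var onscroll250 = new Debounce(250);
function relayout() {
    console.log('scroll settled');
}
onscroll250.add(relayout, true);                      // register and run once immediately
window.addEventListener('scroll', onscroll250.hit);   // coalesce bursts into one run
// onscroll250.rm(relayout) unregisters it again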
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
r.q = [];
|
||||
r.last = 0;
|
||||
r.fs = 0;
|
||||
r.fc = 0;
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
r.q.push(fun);
|
||||
|
||||
if (run)
|
||||
fun();
|
||||
};
|
||||
|
||||
r.rm = function (fun) {
|
||||
apop(r.q, fun);
|
||||
};
|
||||
|
||||
var doevents = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
@@ -1080,6 +1232,7 @@ var timer = (function () {
|
||||
q[a]();
|
||||
|
||||
r.last = Date.now();
|
||||
//r.fc++; if (r.last - r.fs >= 2000) { console.log(r.last - r.fs, r.fc); r.fs = r.last; r.fc = 0; }
|
||||
}
|
||||
setInterval(doevents, 100);
|
||||
|
||||
@@ -1104,7 +1257,7 @@ var tt = (function () {
|
||||
var prev = null;
|
||||
r.cshow = function () {
|
||||
if (this !== prev)
|
||||
r.show.bind(this)();
|
||||
r.show.call(this);
|
||||
|
||||
prev = this;
|
||||
};
|
||||
@@ -1116,7 +1269,7 @@ var tt = (function () {
|
||||
return;
|
||||
|
||||
if (Date.now() - r.lvis < 400)
|
||||
return r.show.bind(this)();
|
||||
return r.show.call(this);
|
||||
|
||||
tev = setTimeout(r.show.bind(this), 800);
|
||||
if (TOUCH)
|
||||
@@ -1264,6 +1417,14 @@ function lf2br(txt) {
|
||||
}
|
||||
|
||||
|
||||
function hunpre(txt) {
|
||||
return ('' + txt).replace(/^<pre>/, '');
|
||||
}
|
||||
function unpre(txt) {
|
||||
return esc(hunpre(txt));
|
||||
}
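The helpers above strip the server's leading "<pre>" and, in unpre, HTML-escape the remainder before it is injected into a toast. A small usage sketch relying on hunpre/unpre/esc as defined above; the sample reply text is made up:

// usage sketch for the helpers above
var reply = '<pre>404 not found\nno such file';
console.log(hunpre(reply)); // "404 not found\nno such file" (tag stripped, not escaped)
console.log(unpre(reply));  // same text, HTML-escaped via esc() so it is safe for innerHTML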
|
||||
|
||||
|
||||
var toast = (function () {
|
||||
var r = {},
|
||||
te = null,
|
||||
@@ -1274,8 +1435,11 @@ var toast = (function () {
|
||||
r.visible = false;
|
||||
r.txt = null;
|
||||
r.tag = obj; // filler value (null is scary)
|
||||
r.p_txt = '';
|
||||
r.p_sec = 0;
|
||||
r.p_t = 0;
|
||||
|
||||
function scrollchk() {
|
||||
var scrollchk = function () {
|
||||
if (scrolling)
|
||||
return;
|
||||
|
||||
@@ -1290,26 +1454,47 @@ var toast = (function () {
|
||||
scrolling = true;
|
||||
}
|
||||
|
||||
function unscroll() {
|
||||
var unscroll = function () {
|
||||
timer.rm(scrollchk);
|
||||
clmod(obj, 'scroll');
|
||||
scrolling = false;
|
||||
}
|
||||
|
||||
r.hide = function (e) {
|
||||
ev(e);
|
||||
if (this === ebi('toastc'))
|
||||
ev(e);
|
||||
|
||||
unscroll();
|
||||
clearTimeout(te);
|
||||
clmod(obj, 'vis');
|
||||
r.visible = false;
|
||||
r.tag = obj;
|
||||
if (!window.WebAssembly)
|
||||
te = setTimeout(function () {
|
||||
obj.className = 'hide';
|
||||
}, 500);
|
||||
};
|
||||
|
||||
r.show = function (cl, sec, txt, tag) {
|
||||
txt = (txt + '').slice(0, 16384);
|
||||
|
||||
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
||||
delta = Date.now() - r.p_t;
|
||||
|
||||
if (same && delta < 100)
|
||||
return;
|
||||
|
||||
r.p_txt = txt;
|
||||
r.p_sec = sec;
|
||||
r.p_t = Date.now();
|
||||
|
||||
clearTimeout(te);
|
||||
if (sec)
|
||||
te = setTimeout(r.hide, sec * 1000);
|
||||
|
||||
if (same && delta < 1000)
|
||||
return;
|
||||
|
||||
if (txt.indexOf('<body>') + 1)
|
||||
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||
|
||||
@@ -1360,6 +1545,7 @@ var modal = (function () {
|
||||
r.load();
|
||||
|
||||
r.busy = false;
|
||||
r.nofocus = 0;
|
||||
|
||||
r.show = function (html) {
|
||||
o = mknod('div', 'modal');
|
||||
@@ -1373,6 +1559,7 @@ var modal = (function () {
|
||||
a.onclick = ng;
|
||||
|
||||
a = ebi('modal-ok');
|
||||
a.addEventListener('blur', onblur);
|
||||
a.onclick = ok;
|
||||
|
||||
var inp = ebi('modali');
|
||||
@@ -1383,6 +1570,7 @@ var modal = (function () {
|
||||
}, 0);
|
||||
|
||||
document.addEventListener('focus', onfocus);
|
||||
document.addEventListener('selectionchange', onselch);
|
||||
timer.add(onfocus);
|
||||
if (cb_up)
|
||||
setTimeout(cb_up, 1);
|
||||
@@ -1390,13 +1578,18 @@ var modal = (function () {
|
||||
|
||||
r.hide = function () {
|
||||
timer.rm(onfocus);
|
||||
try {
|
||||
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||
}
|
||||
catch (ex) { }
|
||||
document.removeEventListener('selectionchange', onselch);
|
||||
document.removeEventListener('focus', onfocus);
|
||||
document.removeEventListener('keydown', onkey);
|
||||
o.parentNode.removeChild(o);
|
||||
r.busy = false;
|
||||
setTimeout(next, 50);
|
||||
};
|
||||
function ok(e) {
|
||||
var ok = function (e) {
|
||||
ev(e);
|
||||
var v = ebi('modali');
|
||||
v = v ? v.value : true;
|
||||
@@ -1404,27 +1597,45 @@ var modal = (function () {
|
||||
if (cb_ok)
|
||||
cb_ok(v);
|
||||
}
|
||||
function ng(e) {
|
||||
var ng = function (e) {
|
||||
ev(e);
|
||||
r.hide();
|
||||
if (cb_ng)
|
||||
cb_ng(null);
|
||||
}
|
||||
|
||||
function onfocus(e) {
|
||||
var onselch = function () {
|
||||
try {
|
||||
if (window.getSelection() + '')
|
||||
r.nofocus = 15;
|
||||
}
|
||||
catch (ex) { }
|
||||
};
|
||||
|
||||
var onblur = function () {
|
||||
r.nofocus = 3;
|
||||
};
|
||||
|
||||
var onfocus = function (e) {
|
||||
if (MOBILE)
|
||||
return;
|
||||
|
||||
var ctr = ebi('modalc');
|
||||
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
||||
return;
|
||||
|
||||
setTimeout(function () {
|
||||
if (--r.nofocus >= 0)
|
||||
return;
|
||||
|
||||
if (ctr = ebi('modal-ok'))
|
||||
ctr.focus();
|
||||
}, 20);
|
||||
ev(e);
|
||||
}
|
||||
};
|
||||
|
||||
function onkey(e) {
|
||||
var k = e.code,
|
||||
var onkey = function (e) {
|
||||
var k = (e.code || e.key) + '',
|
||||
eok = ebi('modal-ok'),
|
||||
eng = ebi('modal-ng'),
|
||||
ae = document.activeElement;
|
||||
@@ -1432,21 +1643,21 @@ var modal = (function () {
|
||||
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
||||
k = 'Enter';
|
||||
|
||||
if (k == 'Enter') {
|
||||
if (k.endsWith('Enter')) {
|
||||
if (ae && ae == eng)
|
||||
return ng();
|
||||
return ng(e);
|
||||
|
||||
return ok();
|
||||
return ok(e);
|
||||
}
|
||||
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight' || k == 'Left' || k == 'Right') && eng && (ae == eok || ae == eng))
|
||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||
|
||||
if (k == 'Escape')
|
||||
return ng();
|
||||
if (k == 'Escape' || k == 'Esc')
|
||||
return ng(e);
|
||||
}
|
||||
|
||||
function next() {
|
||||
var next = function () {
|
||||
if (!r.busy && q.length)
|
||||
q.shift()();
|
||||
}
|
||||
@@ -1457,7 +1668,7 @@ var modal = (function () {
});
next();
};
function _alert(html, cb, fun) {
var _alert = function (html, cb, fun) {
cb_ok = cb_ng = cb;
cb_up = fun;
html += '<div id="modalb"><a href="#" id="modal-ok">OK</a></div>';
@@ -1470,7 +1681,7 @@ var modal = (function () {
});
next();
}
function _confirm(html, cok, cng, fun, btns) {
var _confirm = function (html, cok, cng, fun, btns) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : cng;
cb_up = fun;
@@ -1484,11 +1695,11 @@ var modal = (function () {
});
next();
}
function _prompt(html, v, cok, cng, fun) {
var _prompt = function (html, v, cok, cng, fun) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
r.show(html);

ebi('modali').value = v || '';
@@ -1520,7 +1731,7 @@ function repl_load() {
ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
'console.hist.slice(-10).join("\\n")'
'console.hist.slice(-50).join("\\n")'
];

ipre.innerHTML = '<option value=""></option>';
@@ -1576,6 +1787,8 @@ function repl(e) {
if (!cmd)
return toast.inf(3, 'eval aborted');

cmd = unsmart(cmd);

if (cmd.startsWith(',')) {
evalex_fatal = true;
return modal.alert(esc(eval(cmd.slice(1)) + ''));
@@ -1614,7 +1827,7 @@ function load_md_plug(md_text, plug_type, defer) {
return md_text;

var find = '\n```copyparty_' + plug_type + '\n',
md = md_text.replace(/\r/g, ''),
md = '\n' + md_text.replace(/\r/g, '') + '\n',
ofs = md.indexOf(find),
ofs2 = md.indexOf('\n```', ofs + 1);

@@ -1701,7 +1914,7 @@ var favico = (function () {
r.en = true;
r.tag = null;

function gx(txt) {
var gx = function (txt) {
return (svg_decl +
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
@@ -1718,21 +1931,17 @@ var favico = (function () {
var b64;
try {
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
//console.log('f1');
}
catch (e1) {
try {
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
function x(m, v) { return String.fromCharCode('0x' + v); })));
//console.log('f2');
}
catch (e2) {
try {
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
//console.log('f3');
}
catch (e3) {
//console.log('fe');
return;
}
}
@@ -1786,15 +1995,24 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
if (xhr.status < 400 && xhr.status >= 200)
return true;

var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
if (tag === undefined)
tag = prefix;

var errtxt = ((xhr.response && xhr.response.err) || xhr.responseText) || '',
suf = '',
fun = toast[lvl || 'err'],
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);

if (errtxt.startsWith('<pre>'))
suf = '\n\nerror-details: «' + unpre(errtxt).split('\n')[0].trim() + '»';
else
errtxt = esc(errtxt).slice(0, 32768);

if (xhr.status == 403 && !is_cf)
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out") + suf, tag);

if (xhr.status == 404)
return toast.err(0, prefix + e404, tag);
return toast.err(0, prefix + e404 + suf, tag);

if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
var now = Date.now(), td = now - cf_cha_t;
@@ -13,6 +13,9 @@

# other stuff

## [`TODO.md`](TODO.md)
* planned features / fixes / changes

## [`example.conf`](example.conf)
* example config file for `-c`

18 docs/TODO.md Normal file
@@ -0,0 +1,18 @@
a living list of upcoming features / fixes / changes, very roughly in order of priority

* download accelerator
  * definitely download chunks in parallel (a rough sketch of the general technique follows after this list)
  * maybe resumable downloads (chrome-only, jank api)
  * maybe checksum validation (return sha512 of requested range in responses, and probably also warks)

* [github issue #64](https://github.com/9001/copyparty/issues/64) - dirkeys 2nd season
  * popular feature request, finally time to refactor browser.js i suppose...

* [github issue #37](https://github.com/9001/copyparty/issues/37) - upload PWA
  * or [maybe not](https://arstechnica.com/tech-policy/2024/02/apple-under-fire-for-disabling-iphone-web-apps-eu-asks-developers-to-weigh-in/), or [maybe](https://arstechnica.com/gadgets/2024/03/apple-changes-course-will-keep-iphone-eu-web-apps-how-they-are-in-ios-17-4/)

* [github issue #57](https://github.com/9001/copyparty/issues/57) - config GUI
  * configs given to -c can be ordered with numerical prefix
  * autorevert settings if it fails to apply
  * countdown until session invalidates in settings gui, with refresh-button
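the "download chunks in parallel" item above boils down to issuing several HTTP range requests at once and stitching the pieces back together; below is a minimal browser-side sketch of that general technique, not copyparty's actual implementation (the function name, URL, and chunk count are made up, and retries/resume/checksums are left out):

```js
// parallel range-download sketch: fetch one file as N concurrent byte-range requests,
// then reassemble the parts in order; assumes the server honors HTTP Range headers
async function accel_dl(url, nchunks) {
    var head = await fetch(url, { method: 'HEAD' }),
        size = parseInt(head.headers.get('content-length'), 10),
        csz = Math.ceil(size / nchunks),
        jobs = [];

    for (var a = 0; a < nchunks; a++) {
        var ofs = a * csz,
            end = Math.min(size, ofs + csz) - 1;

        // each chunk downloads concurrently with its own Range header
        jobs.push(fetch(url, { headers: { 'Range': 'bytes=' + ofs + '-' + end } })
            .then(function (r) { return r.blob(); }));
    }

    // reassemble in request order; retries, resume, and checksum validation omitted
    return new Blob(await Promise.all(jobs));
}

// usage: accel_dl('/some/big.file', 4).then(function (b) { console.log(b.size, 'bytes'); });
```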
1711 docs/changelog.md (file diff suppressed because it is too large)
@@ -125,8 +125,14 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?b` | list files/folders at URL as simplified HTML |
| GET | `?tree=.` | list one level of subdirectories inside URL |
| GET | `?tree` | list one level of subdirectories for each level until URL |
| GET | `?tar` | download everything below URL as a tar file |
| GET | `?zip=utf-8` | download everything below URL as a zip file |
| GET | `?tar` | download everything below URL as a gnu-tar file |
| GET | `?tar=gz:9` | ...as a gzip-level-9 gnu-tar file |
| GET | `?tar=xz:9` | ...as an xz-level-9 gnu-tar file |
| GET | `?tar=pax` | ...as a pax-tar file |
| GET | `?tar=pax,xz` | ...as an xz-level-1 pax-tar file |
| GET | `?zip=utf-8` | ...as a zip file |
| GET | `?zip` | ...as a WinXP-compatible zip file |
| GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
| GET | `?mime=foo` | specify return mimetype `foo` |
@@ -156,8 +162,8 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| PUT | | (binary data) | upload into file at URL |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| POST | `?delete` | | delete URL recursively |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
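to make the API table above concrete, here is a minimal browser-side sketch using `fetch`; the hostname, password, and folder paths are placeholder values, and error handling is left out:

```js
var base = 'https://example.com', pw = 'hunter2';  // placeholders

// download everything below /music/ as a gzip-level-9 gnu-tar, authenticating with the pw url param
fetch(base + '/music/?tar=gz:9&pw=' + pw)
    .then(function (r) { return r.blob(); })
    .then(function (b) { console.log('got', b.size, 'bytes'); });

// upload a File object into the folder /inc/ as a multipart POST (act=bput, f=FILE),
// asking for a json reply with ?j
function upload(file) {
    var fd = new FormData();
    fd.append('act', 'bput');
    fd.append('f', file);
    return fetch(base + '/inc/?j&pw=' + pw, { method: 'POST', body: fd })
        .then(function (r) { return r.json(); });
}
```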
@@ -212,7 +218,7 @@ if you don't need all the features, you can repack the sfx and save a bunch of s
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`

the features you can opt to drop are
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `cm`/easymde, the "fancy" markdown editor, saves ~89k
* `hl`, prism, the syntax hilighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
@@ -236,6 +242,7 @@ python3 -m venv .venv
pip install jinja2 strip_hints # MANDATORY
pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install partftpy # tftp server
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install pyvips # faster thumbnails

32 docs/examples/docker/basic-docker-compose/copyparty.conf Normal file
@@ -0,0 +1,32 @@
# not actually YAML but lets pretend:
# -*- mode: yaml -*-
# vim: ft=yaml:


[global]
  e2dsa  # enable file indexing and filesystem scanning
  e2ts   # enable multimedia indexing
  ansi   # enable colors in log messages

  # q, lo: /cfg/log/%Y-%m%d.log  # log to file instead of docker

  # p: 3939          # listen on another port
  # ipa: 10.89.      # only allow connections from 10.89.*
  # df: 16           # stop accepting uploads if less than 16 GB free disk space
  # ver              # show copyparty version in the controlpanel
  # grid             # show thumbnails/grid-view by default
  # theme: 2         # monokai
  # name: datasaver  # change the server-name that's displayed in the browser
  # stats, nos-dup   # enable the prometheus endpoint, but disable the dupes counter (too slow)
  # no-robots, force-js  # make it harder for search engines to read your server


[accounts]
  ed: wark  # username: password


[/]          # create a volume at "/" (the webroot), which will
  /w         # share /w (the docker data volume)
  accs:
    rw: *      # everyone gets read-write access, but
    rwmda: ed  # the user "ed" gets read-write-move-delete-admin
20 docs/examples/docker/basic-docker-compose/docker-compose.yml Normal file
@@ -0,0 +1,20 @@
version: '3'
services:

  copyparty:
    image: copyparty/ac:latest
    container_name: copyparty
    user: "1000:1000"
    ports:
      - 3923:3923
    volumes:
      - ./:/cfg:z
      - /path/to/your/fileshare/top/folder:/w:z

    stop_grace_period: 15s  # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
    healthcheck:
      test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
      interval: 1m
      timeout: 2s
      retries: 5
      start_period: 15s
50 docs/examples/docker/idp-authelia-traefik/README.md Normal file
@@ -0,0 +1,50 @@
> [!WARNING]
> I am unable to guarantee the quality, safety, and security of anything in this folder; it is a combination of examples I found online. Please submit corrections or improvements 🙏

to try this out with minimal adjustments:
* specify what filesystem-path to share with copyparty, replacing the default/example value `/srv/pub` in `docker-compose.yml`
* add `127.0.0.1 fs.example.com traefik.example.com authelia.example.com` to your `/etc/hosts`
* `sudo docker-compose up`
* login to https://fs.example.com/ with username `authelia` password `authelia`

to use this in a safe and secure manner:
* follow a guide on setting up authelia properly (TODO:link) and use the copyparty-specific parts of this folder as inspiration for your own config; namely the `cpp` subfolder and the `copyparty` service in `docker-compose.yml`

this folder is based on:
* https://github.com/authelia/authelia/tree/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite

incomplete list of modifications made:
* support for running with podman as root on fedora (`:z` volumes, `label:disable`)
* explicitly using authelia `v4.38.0-beta3` because config syntax changed since last stable release
* disabled automatic letsencrypt certificate signing
* reduced logging from debug to info
* added a warning that traefik is given access to the docker socket (as recommended by traefik docs) which means traefik is able to break out of the container and has full root access on the host machine


# security

there is probably/definitely room for improvement in this example setup. Some ideas taken from [github issue #62](https://github.com/9001/copyparty/issues/62):

* Add in a redis password to limit attacker lateral movement in the system
* Move redis to a private network shared with just authelia
* Pin to image hashes (or go all in on updates and add `watchtower`)
* Drop bridge networking for just exposing traefik's public ports
* Configure docker for non-root access to docker socket and then move traefik to use [non-root perms](https://docs.docker.com/engine/security/rootless/)

if you manage to improve on any of this, especially in a way that might be useful for other people, consider sending a PR :>


# performance

currently **not optimal,** at least when compared to running the python sfx outside of docker... some numbers from my laptop (ryzen4500u/fedora39):

| req/s | https D/L | http D/L | approach |
| -----:| ----------:|:--------:| -------- |
| 5200 | 1294 MiB/s | 5+ GiB/s | [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) running on host |
| 4370 | 725 MiB/s | 4+ GiB/s | `docker run copyparty/ac` |
| 2420 | 694 MiB/s | n/a | `copyparty/ac` behind traefik |
| 75 | 694 MiB/s | n/a | traefik and authelia **(you are here)** |

authelia is behaving strangely, handling 340 requests per second for a while, but then it suddenly drops to 75 and stays there...

I'm assuming all of the performance issues are due to a misconfiguration of authelia/traefik/docker on my end, but I don't really know where to start
@@ -0,0 +1,66 @@
# based on https://github.com/authelia/authelia/blob/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite/authelia/configuration.yml

# Authelia configuration

# This secret can also be set using the env variables AUTHELIA_JWT_SECRET_FILE
jwt_secret: a_very_important_secret

server:
  address: 'tcp://:9091'

log:
  level: info  # debug

totp:
  issuer: authelia.com

authentication_backend:
  file:
    path: /config/users_database.yml

access_control:
  default_policy: deny
  rules:
    # Rules applied to everyone
    - domain: traefik.example.com
      policy: one_factor
    - domain: fs.example.com
      policy: one_factor

session:
  # This secret can also be set using the env variables AUTHELIA_SESSION_SECRET_FILE
  secret: unsecure_session_secret

  cookies:
    - name: authelia_session
      domain: example.com  # Should match whatever your root protected domain is
      default_redirection_url: https://fs.example.com
      authelia_url: https://authelia.example.com/
      expiration: 3600  # 1 hour
      inactivity: 300  # 5 minutes

  redis:
    host: redis
    port: 6379
    # This secret can also be set using the env variables AUTHELIA_SESSION_REDIS_PASSWORD_FILE
    # password: authelia

regulation:
  max_retries: 3
  find_time: 120
  ban_time: 300

storage:
  encryption_key: you_must_generate_a_random_string_of_more_than_twenty_chars_and_configure_this
  local:
    path: /config/db.sqlite3

notifier:
  disable_startup_check: true
  smtp:
    username: test
    # This secret can also be set using the env variables AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE
    password: password
    host: mail.example.com
    port: 25
    sender: admin@example.com
@@ -0,0 +1,18 @@
# based on https://github.com/authelia/authelia/blob/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite/authelia/users_database.yml

# Users Database

# This file can be used if you do not have an LDAP set up.

# List of users
users:
  authelia:
    disabled: false
    displayname: "Authelia User"
    # Password is authelia
    password: "$6$rounds=50000$BpLnfgDsc2WD8F2q$Zis.ixdg9s/UOJYrs56b5QEZFiZECu0qZVNsIYxBaNJ7ucIL.nlxVCT5tqh8KHG8X4tlwCFm5r6NTOZZ5qRFN/"
    email: authelia@authelia.com
    groups:
      - admins
      - dev
      - su
Some files were not shown because too many files have changed in this diff