Compare commits
432 Commits

.gitignore (vendored): 1 change

@@ -12,6 +12,7 @@ copyparty.egg-info/
 /dist/
 /py2/
 /sfx*
+/pyz/
 /unt/
 /log/
 

.vscode/launch.json (vendored): 7 changes

@@ -9,14 +9,17 @@
 "console": "integratedTerminal",
 "cwd": "${workspaceFolder}",
 "justMyCode": false,
+"env": {
+"PYDEVD_DISABLE_FILE_VALIDATION": "1",
+"PYTHONWARNINGS": "always", //error
+},
 "args": [
 //"-nw",
 "-ed",
 "-emp",
 "-e2dsa",
 "-e2ts",
-"-mtp",
-".bpm=f,bin/mtag/audio-bpm.py",
+"-mtp=.bpm=f,bin/mtag/audio-bpm.py",
 "-aed:wark",
 "-vsrv::r:rw,ed:c,dupe",
 "-vdist:dist:r"

.vscode/launch.py (vendored): 2 changes

@@ -41,7 +41,7 @@ if sfx:
 argv = [sys.executable, sfx] + argv
 sp.check_call(argv)
 elif re.search(" -j ?[0-9]", " ".join(argv)):
-argv = [sys.executable, "-m", "copyparty"] + argv
+argv = [sys.executable, "-Wa", "-m", "copyparty"] + argv
 sp.check_call(argv)
 else:
 sys.path.insert(0, os.getcwd())
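
As an illustrative aside: combining the args from launch.json above with the new `-Wa` and warnings-related env settings gives roughly the dev invocation sketched below. Treat it as a sketch only; the exact wiring between launch.py and launch.json is not shown in this diff, and the flag list is simply copied from the config above.

```bash
# warnings enabled both ways: -Wa on the interpreter and PYTHONWARNINGS in the env
PYDEVD_DISABLE_FILE_VALIDATION=1 PYTHONWARNINGS=always \
python3 -Wa -m copyparty \
    -ed -emp -e2dsa -e2ts \
    -mtp=.bpm=f,bin/mtag/audio-bpm.py \
    -aed:wark \
    -vsrv::r:rw,ed:c,dupe \
    -vdist:dist:r
```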

.vscode/tasks.json (vendored): 1 change

@@ -11,6 +11,7 @@
 "type": "shell",
 "command": "${config:python.pythonPath}",
 "args": [
+"-Wa", //-We
 ".vscode/launch.py"
 ]
 }

@@ -1,3 +1,43 @@
 * do something cool
 
-really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
+really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍
+
+but to be more specific,
+
+
+# contribution ideas
+
+
+## documentation
+
+I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.
+
+
+## crazy ideas & features
+
+assuming they won't cause too much problems or side-effects :>
+
+i think someone was working on a way to list directories over DNS for example...
+
+if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍
+
+
+## others
+
+aside from documentation and ideas, some other things that would be cool to have some help with is:
+
+* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
+
+* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
+
+* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
+
+* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
+
+* [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)
+
+* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
+
+* [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
+
+* [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :>

LICENSE: 2 changes

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019 ed
+Copyright (c) 2019 ed <oss@ocv.me>
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

README.md: 456 changes

@@ -3,30 +3,33 @@
 turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser
 
 * server only needs Python (2 or 3), all dependencies optional
-* 🔌 protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
+* 🔌 protocols: [http](#the-browser) // [webdav](#webdav-server) // [ftp](#ftp-server) // [tftp](#tftp-server) // [smb/cifs](#smb-server)
 * 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)
 
 👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
 
 📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
 
+🎬 **videos:** [upload](https://a.ocv.me/pub/demo/pics-vids/up2k.webm) // [cli-upload](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm) // [race-the-beam](https://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm)
+
 
 ## readme toc
 
 * top
 * [quickstart](#quickstart) - just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
+* [at home](#at-home) - make it accessible over the internet
 * [on servers](#on-servers) - you may also want these, especially on servers
-* [features](#features)
+* [features](#features) - also see [comparison to similar software](./docs/versus.md)
 * [testimonials](#testimonials) - small collection of user feedback
 * [motivations](#motivations) - project goals / philosophy
 * [notes](#notes) - general notes
-* [bugs](#bugs)
-* [general bugs](#general-bugs)
-* [not my bugs](#not-my-bugs)
+* [bugs](#bugs) - roughly sorted by chance of encounter
+* [not my bugs](#not-my-bugs) - same order here too
 * [breaking changes](#breaking-changes) - upgrade notes
 * [FAQ](#FAQ) - "frequently" asked questions
 * [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
 * [shadowing](#shadowing) - hiding specific subfolders
+* [dotfiles](#dotfiles) - unix-style hidden files/folders
 * [the browser](#the-browser) - accessing a copyparty server using a web-browser
 * [tabs](#tabs) - the main tabs in the ui
 * [hotkeys](#hotkeys) - the browser has the following hotkeys

@@ -37,12 +40,14 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
 * [unpost](#unpost) - undo/delete accidental uploads
 * [self-destruct](#self-destruct) - uploads can be given a lifetime
+* [race the beam](#race-the-beam) - download files while they're still uploading ([demo video](http://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm))
 * [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
 * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
 * [media player](#media-player) - plays almost every audio format there is
-* [audio equalizer](#audio-equalizer) - bass boosted
+* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
 * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
 * [markdown viewer](#markdown-viewer) - and there are *two* editors
+* [markdown vars](#markdown-vars) - dynamic docs with serverside variable expansion
 * [other tricks](#other-tricks)
 * [searching](#searching) - search by size, date, path/name, mp3-tags, ...
 * [server config](#server-config) - using arguments or config files, or a mix of both

@@ -53,7 +58,10 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [ftp server](#ftp-server) - an FTP server can be started using `--ftp 3921`
 * [webdav server](#webdav-server) - with read-write support
 * [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
+* [tftp server](#tftp-server) - a TFTP server (read/write) can be started using `--tftp 3969`
 * [smb server](#smb-server) - unsafe, slow, not recommended for wan
+* [browser ux](#browser-ux) - tweaking the ui
+* [opengraph](#opengraph) - discord and social-media embeds
 * [file indexing](#file-indexing) - enables dedup and music search ++
 * [exclude-patterns](#exclude-patterns) - to save some time
 * [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems

@@ -67,14 +75,19 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
 * [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
 * [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
+* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
+* [using the cloud as storage](#using-the-cloud-as-storage) - connecting to an aws s3 bucket and similar
 * [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
 * [themes](#themes)
 * [complete examples](#complete-examples)
 * [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
+* [real-ip](#real-ip) - teaching copyparty how to see client IPs
 * [prometheus](#prometheus) - metrics/stats can be enabled
+* [other extremely specific features](#other-extremely-specific-features) - you'll never find a use for these
+* [custom mimetypes](#custom-mimetypes) - change the association of a file extension
 * [packages](#packages) - the party might be closer than you think
 * [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
-* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
+* [fedora package](#fedora-package) - does not exist yet
 * [nix package](#nix-package) - `nix profile install github:9001/copyparty`
 * [nixos module](#nixos-module)
 * [browser support](#browser-support) - TLDR: yes

@@ -89,6 +102,7 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [gotchas](#gotchas) - behavior that might be unexpected
 * [cors](#cors) - cross-site request config
 * [filekeys](#filekeys) - prevent filename bruteforcing
+* [dirkeys](#dirkeys) - share specific folders in a volume
 * [password hashing](#password-hashing) - you can hash passwords
 * [https](#https) - both HTTP and HTTPS are accepted
 * [recovering from crashes](#recovering-from-crashes)

@@ -98,10 +112,11 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [dependencies](#dependencies) - mandatory deps
 * [optional dependencies](#optional-dependencies) - install these to enable bonus features
 * [optional gpl stuff](#optional-gpl-stuff)
-* [sfx](#sfx) - the self-contained "binary"
+* [sfx](#sfx) - the self-contained "binary" (recommended!)
 * [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
+* [zipapp](#zipapp) - another emergency alternative, [copyparty.pyz](https://github.com/9001/copyparty/releases/latest/download/copyparty.pyz)
 * [install on android](#install-on-android)
-* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
+* [reporting bugs](#reporting-bugs) - ideas for context to include, and where to submit them
 * [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
 
 

@@ -111,16 +126,17 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
 
 * or install through pypi: `python3 -m pip install --user -U copyparty`
 * or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
-* or install [on arch](#arch-package) ╱ [on fedora](#fedora-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
+* or install [on arch](#arch-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
 * or if you are on android, [install copyparty in termux](#install-on-android)
+* or if your computer is messed up and nothing else works, [try the pyz](#zipapp)
 * or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
 * docker has all deps built-in, so skip this step:
 
 enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
 
 * **Alpine:** `apk add py3-pillow ffmpeg`
-* **Debian:** `apt install python3-pil ffmpeg`
-* **Fedora:** `dnf install python3-pillow ffmpeg`
+* **Debian:** `apt install --no-install-recommends python3-pil ffmpeg`
+* **Fedora:** rpmfusion + `dnf install python3-pillow ffmpeg --allowerasing`
 * **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
 * **MacOS:** `port install py-Pillow ffmpeg`
 * **MacOS** (alternative): `brew install pillow ffmpeg`

@@ -141,34 +157,49 @@ some recommended options:
 * see [accounts and volumes](#accounts-and-volumes) (or `--help-accounts`) for the syntax and other permissions
 
 
+### at home
+
+make it accessible over the internet by starting a [cloudflare quicktunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/do-more-with-tunnels/trycloudflare/) like so:
+
+first download [cloudflared](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/) and then start the tunnel with `cloudflared tunnel --url http://127.0.0.1:3923`
+
+as the tunnel starts, it will show a URL which you can share to let anyone browse your stash or upload files to you
+
+since people will be connecting through cloudflare, run copyparty with `--xff-hdr cf-connecting-ip` to detect client IPs correctly
+
+
 ### on servers
 
 you may also want these, especially on servers:
 
 * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
 * [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
+* [contrib/openrc/copyparty](contrib/openrc/copyparty) to run copyparty on Alpine / Gentoo
 * [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
-* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
 * [nixos module](#nixos-module) to run copyparty on NixOS hosts
+* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
 
 and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
 ```
 firewall-cmd --permanent --add-port={80,443,3921,3923,3945,3990}/tcp # --zone=libvirt
-firewall-cmd --permanent --add-port=12000-12099/tcp --permanent # --zone=libvirt
-firewall-cmd --permanent --add-port={1900,5353}/udp # --zone=libvirt
+firewall-cmd --permanent --add-port=12000-12099/tcp # --zone=libvirt
+firewall-cmd --permanent --add-port={69,1900,3969,5353}/udp # --zone=libvirt
 firewall-cmd --reload
 ```
-(1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp)
+(69:tftp, 1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3969:tftp, 3990:ftps, 5353:mdns, 12000:passive-ftp)
 
 
 ## features
 
+also see [comparison to similar software](./docs/versus.md)
+
 * backend stuff
 * ☑ IPv6
 * ☑ [multiprocessing](#performance) (actual multithreading)
 * ☑ volumes (mountpoints)
 * ☑ [accounts](#accounts-and-volumes)
 * ☑ [ftp server](#ftp-server)
+* ☑ [tftp server](#tftp-server)
 * ☑ [webdav server](#webdav-server)
 * ☑ [smb/cifs server](#smb-server)
 * ☑ [qr-code](#qr-code) for quick access
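
As an illustrative aside on the "at home" section added above: a minimal sketch of that quicktunnel workflow. The port and the `--xff-hdr` flag come from the text itself; everything else (how you obtained the binaries, running them in separate terminals) is assumed.

```bash
# terminal 1: start copyparty locally; cf-connecting-ip is the header
# cloudflare uses to pass along the visitor's real address
python3 copyparty-sfx.py --xff-hdr cf-connecting-ip

# terminal 2: expose it through a quicktunnel; cloudflared prints a public
# trycloudflare.com URL that you can share
cloudflared tunnel --url http://127.0.0.1:3923
```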

@@ -178,12 +209,13 @@ firewall-cmd --reload
 * upload
 * ☑ basic: plain multipart, ie6 support
 * ☑ [up2k](#uploading): js, resumable, multithreaded
-* unaffected by cloudflare's max-upload-size (100 MiB)
+* **no filesize limit!** ...unless you use Cloudflare, then it's 383.9 GiB
 * ☑ stash: simple PUT filedropper
 * ☑ filename randomizer
 * ☑ write-only folders
 * ☑ [unpost](#unpost): undo/delete accidental uploads
 * ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
+* ☑ [race the beam](#race-the-beam) (almost like peer-to-peer)
 * ☑ symlink/discard duplicates (content-matching)
 * download
 * ☑ single files in browser

@@ -192,7 +224,8 @@ firewall-cmd --reload
 * browser
 * ☑ [navpane](#navpane) (directory tree sidebar)
 * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
-* ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus transcoding)
+* ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus/mp3 transcoding)
+* ☑ play video files as audio (converted on server)
 * ☑ image gallery with webm player
 * ☑ textfile browser with syntax hilighting
 * ☑ [thumbnails](#thumbnails)

@@ -200,6 +233,7 @@ firewall-cmd --reload
 * ☑ ...of videos using FFmpeg
 * ☑ ...of audio (spectrograms) using FFmpeg
 * ☑ cache eviction (max-age; maybe max-size eventually)
+* ☑ multilingual UI (english, norwegian, [add your own](./docs/rice/#translations)))
 * ☑ SPA (browse while uploading)
 * server indexing
 * ☑ [locate files by contents](#file-search)

@@ -208,9 +242,11 @@ firewall-cmd --reload
 * client support
 * ☑ [folder sync](#folder-sync)
 * ☑ [curl-friendly](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png)
+* ☑ [opengraph](#opengraph) (discord embeds)
 * markdown
 * ☑ [viewer](#markdown-viewer)
 * ☑ editor (sure why not)
+* ☑ [variables](#markdown-vars)
 
 PS: something missing? post any crazy ideas you've got as a [feature request](https://github.com/9001/copyparty/issues/new?assignees=9001&labels=enhancement&template=feature_request.md) or [discussion](https://github.com/9001/copyparty/discussions/new?category=ideas) 🤙
 

@@ -263,20 +299,31 @@ server notes:
 
 # bugs
 
-* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
-* Windows: python 2.7 cannot handle filenames with mojibake
-* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
-* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
+roughly sorted by chance of encounter
 
-## general bugs
+* general:
+* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
+* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
+* if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
+* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
+* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
+* probably more, pls let me know
+
+* python 3.4 and older (including 2.7):
+* many rare and exciting edge-cases because [python didn't handle EINTR yet](https://peps.python.org/pep-0475/)
+* downloads from copyparty may suddenly fail, but uploads *should* be fine
+
+* python 2.7 on Windows:
+* cannot index non-ascii filenames with `-e2d`
+* cannot handle filenames with mojibake
+
+if you have a new exciting bug to share, see [reporting bugs](#reporting-bugs)
 
-* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
-* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
-* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
-* probably more, pls let me know
 
 ## not my bugs
 
+same order here too
+
 * [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
 
 * [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)

@@ -310,6 +357,8 @@ server notes:
 
 upgrade notes
 
+* `1.9.16` (2023-11-04):
+* `--stats`/prometheus: `cpp_bans` renamed to `cpp_active_bans`, and that + `cpp_uptime` are gauges
 * `1.6.0` (2023-01-29):
 * http-api: delete/move is now `POST` instead of `GET`
 * everything other than `GET` and `HEAD` must pass [cors validation](#cors)

@@ -326,11 +375,26 @@ upgrade notes
 * yes, using the [`g` permission](#accounts-and-volumes), see the examples there
 * you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
 
+* can I link someone to a password-protected volume/file by including the password in the URL?
+* yes, by adding `?pw=hunter2` to the end; replace `?` with `&` if there are parameters in the URL already, meaning it contains a `?` near the end
+
+* how do I stop `.hist` folders from appearing everywhere on my HDD?
+* by default, a `.hist` folder is created inside each volume for the filesystem index, thumbnails, audio transcodes, and markdown document history. Use the `--hist` global-option or the `hist` volflag to move it somewhere else; see [database location](#database-location)
+
 * can I make copyparty download a file to my server if I give it a URL?
 * yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
 
+* firefox refuses to connect over https, saying "Secure Connection Failed" or "SEC_ERROR_BAD_SIGNATURE", but the usual button to "Accept the Risk and Continue" is not shown
+* firefox has corrupted its certstore; fix this by exiting firefox, then find and delete the file named `cert9.db` somewhere in your firefox profile folder
+
+* the server keeps saying `thank you for playing` when I try to access the website
+* you've gotten banned for malicious traffic! if this happens by mistake, and you're running a reverse-proxy and/or something like cloudflare, see [real-ip](#real-ip) on how to fix this
+
+* copyparty seems to think I am using http, even though the URL is https
+* your reverse-proxy is not sending the `X-Forwarded-Proto: https` header; this could be because your reverse-proxy itself is confused. Ensure that none of the intermediates (such as cloudflare) are terminating https before the traffic hits your entrypoint
+
 * i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
-```bash
+* ```bash
 _| _ __ _ _|_
 (_| (_) | | (_) |_
 ```
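
As a hedged illustration of the `?pw=` FAQ answer added above: the hostname, paths, filenames and password below are placeholders, and the download targets are chosen arbitrarily.

```bash
# password-protected file, no other URL parameters, so ?pw= works as-is
curl -o report.pdf "https://example.com/stuff/report.pdf?pw=hunter2"

# this URL already contains a ? (the tar parameter), so the password is added with &
curl -o music.tar "https://example.com/music/?tar&pw=hunter2"
```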

@@ -352,14 +416,16 @@ configuring accounts/volumes with arguments:
 `-v .::r,usr1,usr2:rw,usr3,usr4` = usr1/2 read-only, 3/4 read-write
 
 permissions:
-* `r` (read): browse folder contents, download files, download as zip/tar
+* `r` (read): browse folder contents, download files, download as zip/tar, see filekeys/dirkeys
 * `w` (write): upload files, move files *into* this folder
 * `m` (move): move files/folders *from* this folder
 * `d` (delete): delete files/folders
+* `.` (dots): user can ask to show dotfiles in directory listings
 * `g` (get): only download files, cannot see folder contents or zip/tar
 * `G` (upget): same as `g` except uploaders get to see their own [filekeys](#filekeys) (see `fk` in examples below)
 * `h` (html): same as `g` except folders return their index.html, and filekeys are not necessary for index.html
-* `a` (admin): can see uploader IPs, config-reload
+* `a` (admin): can see upload time, uploader IPs, config-reload
+* `A` ("all"): same as `rwmda.` (read/write/move/delete/admin/dotfiles)
 
 examples:
 * add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
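
A small sketch tying the `-a` / `-v` syntax and the permission letters from this hunk together; the account name, password and paths are made up for illustration.

```bash
# one account "ed" (password "wark"); /srv/music is shared at /music,
# readable by everyone and read-write for ed; /srv/inbox is a write-only
# dropbox for anonymous users, with full read/write/move/delete for ed
python3 copyparty-sfx.py \
    -a ed:wark \
    -v /srv/music:music:r:rw,ed \
    -v /srv/inbox:inbox:w:rwmd,ed
```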

@@ -387,6 +453,17 @@ hiding specific subfolders by mounting another volume on top of them
 for example `-v /mnt::r -v /var/empty:web/certs:r` mounts the server folder `/mnt` as the webroot, but another volume is mounted at `/web/certs` -- so visitors can only see the contents of `/mnt` and `/mnt/web` (at URLs `/` and `/web`), but not `/mnt/web/certs` because URL `/web/certs` is mapped to `/var/empty`
 
+
+## dotfiles
+
+unix-style hidden files/folders by starting the name with a dot
+
+anyone can access these if they know the name, but they normally don't appear in directory listings
+
+a client can request to see dotfiles in directory listings if global option `-ed` is specified, or the volume has volflag `dots`, or the user has permission `.`
+
+dotfiles do not appear in search results unless one of the above is true, **and** the global option / volflag `dotsrch` is set
+
 
 # the browser
 
 accessing a copyparty server using a web-browser

@@ -499,13 +576,15 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
 audio files are covnerted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
 
 images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
-* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
+* the order is significant, so if both `cover.png` and `folder.jpg` exist in a folder, it will pick the first matching `--th-covers` entry (`folder.jpg`)
+* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)
 
 in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
 * indicated by the audio files having the ▶ icon instead of 💾
 
 enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
 * `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
+* the `sel` option can be made default globally with `--gsel` or per-volume with volflag `gsel`
 
 
 ## zip downloads

@@ -527,7 +606,7 @@ select which type of archive you want in the `[⚙️] config` tab:
 * gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
 * xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
 * bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
-* hidden files (dotfiles) are excluded unless `-ed`
+* hidden files ([dotfiles](#dotfiles)) are excluded unless account is allowed to list them
 * `up2k.db` and `dir.txt` is always excluded
 * bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
 * good, because copyparty's zip is faster than tar on small files
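
A few hedged examples of the archive url-params from this section (and the `?tar&opus` transcoding trick described just below); the hostname and folder names are placeholders.

```bash
# stream a folder as a zip and unpack it on the fly (the bsdtar tip above)
curl "https://example.com/pics/?zip=utf8" | bsdtar -xv

# download the folder as a gzipped tar at the strongest compression level
curl -o pics.tar.gz "https://example.com/pics/?tar=gz:9"

# download a music folder as a tar with all audio transcoded to opus on the way out
curl -o music.tar "https://example.com/music/?tar&opus"
```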
@@ -539,21 +618,27 @@ you can also zip a selection of files or folders by clicking them in the browser
|
|||||||
|
|
||||||

|

|
||||||
|
|
||||||
cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
|
cool trick: download a folder by appending url-params `?tar&opus` or `?tar&mp3` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus/mp3 before they're added to the archive
|
||||||
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
|
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
|
||||||
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
|
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images (`&p` for audio waveforms)
|
||||||
* can also be used to pregenerate thumbnails; combine with `--th-maxage=9999999` or `--th-clean=0`
|
* can also be used to pregenerate thumbnails; combine with `--th-maxage=9999999` or `--th-clean=0`
|
||||||
|
|
||||||
|
|
||||||
## uploading

drag files/folders into the web-browser to upload

dragdrop is the recommended way, but you may also:

* select some files (not folders) in your file explorer and press CTRL-V inside the browser window
* use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy)
* upload using [curl or sharex](#client-examples)

when uploading files through dragdrop or CTRL-V, this initiates an upload using `up2k`; there are two browser-based uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
* `[🚀] up2k`, the good / fancy one

NB: you can undo/delete your own uploads with `[🧯]` [unpost](#unpost) (and this is also where you abort unfinished uploads, but you have to refresh the page first)

up2k has several advantages:
* you can drop folders into the browser (files are added recursively)

@@ -562,13 +647,14 @@ up2k has several advantages:
* uploads resume if you reboot your browser or pc, just upload the same files again
* server detects any corruption; the client reuploads affected chunks
* the client doesn't upload anything that already exists on the server
* no filesize limit unless imposed by a proxy, for example Cloudflare, which blocks uploads over 383.9 GiB
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved

> it is perfectly safe to restart / upgrade copyparty while someone is uploading to it!
> all known up2k clients will resume just fine 💪

see [up2k](./docs/devnotes.md#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)

![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc615c-fa27-4bb4-9e5e-619f302ebd94.png)

@@ -580,7 +666,8 @@ the up2k UI is the epitome of polished inutitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[🥔]` shows a simpler UI for faster uploads from slow devices
* `[🎲]` generate random filenames during upload
* `[📅]` preserve last-modified timestamps; server times will match yours
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser

@@ -628,11 +715,18 @@ uploads can be given a lifetime, afer which they expire / self-destruct

the feature must be enabled per-volume with the `lifetime` [upload rule](#upload-rules) which sets the upper limit for how long a file gets to stay on the server

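as a rough sketch (the volume layout is made up, and it assumes the `lifetime` rule takes a number of seconds), a write-only dropbox where uploads self-destruct after an hour could look like:

```bash
python copyparty-sfx.py -v srv/inc:inc:w:c,lifetime=3600
```
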
clients can specify a shorter expiration time using the [up2k ui](#uploading) -- the relevant options become visible upon navigating into a folder with `lifetimes` enabled -- or by using the `life` [upload modifier](./docs/devnotes.md#write)

specifying a custom expiration time client-side will affect the timespan in which unposts are permitted, so keep an eye on the estimates in the up2k ui

### race the beam

download files while they're still uploading ([demo video](http://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm)) -- it's almost like peer-to-peer

requires the file to be uploaded using up2k (which is the default drag-and-drop uploader), alternatively the command-line program

## file manager

cut/paste, rename, and delete files/folders (if you have permission)

@@ -708,11 +802,13 @@ some hilights:
* OS integration; control playback from your phone's lockscreen ([windows](https://user-images.githubusercontent.com/241032/233213022-298a98ba-721a-4cf1-a3d4-f62634bc53d5.png) // [iOS](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) // [android](https://user-images.githubusercontent.com/241032/233212311-a7368590-08c7-4f9f-a1af-48ccf3f36fad.png))
* shows the audio waveform in the seekbar
* not perfectly gapless but can get really close (see settings + eq below); good enough to enjoy gapless albums as intended
* videos can be played as audio, without wasting bandwidth on the video

click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)

open the `[🎺]` media-player-settings tab to configure it,
* "switches":
* `[🔀]` shuffles the files inside each folder
* `[preload]` starts loading the next track when it's about to end, reduces the silence between songs
* `[full]` does a full preload by downloading the entire next file; good for unreliable connections, bad for slow connections
* `[~s]` toggles the seekbar waveform display

@@ -722,20 +818,22 @@ open the `[🎺]` media-player-settings tab to configure it,
* `[art]` shows album art on the lockscreen
* `[🎯]` keeps the playing song scrolled into view (good when using the player as a taskbar dock)
* `[⟎]` shrinks the playback controls
* "buttons":
* `[uncache]` may fix songs that won't play correctly due to bad files in browser cache
* "at end of folder":
* `[loop]` keeps looping the folder
* `[next]` plays into the next folder
* "transcode":
* `[flac]` converts `flac` and `wav` files into opus (if supported by browser) or mp3
* `[aac]` converts `aac` and `m4a` files into opus (if supported by browser) or mp3
* `[oth]` converts all other known formats into opus (if supported by browser) or mp3
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
* "tint" reduces the contrast of the playback bar

### audio equalizer

and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)

can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)

@@ -765,6 +863,13 @@ other notes,
* the document preview has a max-width which is the same as an A4 paper when printed

### markdown vars

dynamic docs with serverside variable expansion to replace stuff like `{{self.ip}}` with the client's IP, or `{{srv.htime}}` with the current time on the server

see [./srv/expand/](./srv/expand/) for usage and examples

## other tricks

* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`

@@ -779,8 +884,12 @@ other notes,

* files named `.prologue.html` / `.epilogue.html` will be rendered before/after directory listings unless `--no-logues`

* files named `descript.ion` / `DESCRIPT.ION` are parsed and displayed in the file listing, or as the epilogue if nonstandard

* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)

* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`

## searching

@@ -809,6 +918,11 @@ using arguments or config files, or a mix of both:
* or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
* changes to the `[global]` config section require a restart to take effect

**NB:** as humongous as this readme is, there are also a lot of undocumented features. Run copyparty with `--help` to see all available global options; all of those can be used in the `[global]` section of config files, and everything listed in `--help-flags` can be used in volumes as volflags.
* if running in docker/podman, try this: `docker run --rm -it copyparty/ac --help`
* or see this (probably outdated): https://ocv.me/copyparty/helptext.html
* or if you prefer plaintext, https://ocv.me/copyparty/helptext.txt

## zeroconf

@@ -876,7 +990,7 @@ some recommended FTP / FTPS clients; `wark` = example password:

## webdav server

with read-write support, supports winXP and later, macos, nautilus/gvfs ... a great way to [access copyparty straight from the file explorer in your OS](#mount-as-drive)

click the [connect](http://127.0.0.1:3923/?hc) button in the control-panel to see connection instructions for windows, linux, macos

@@ -908,6 +1022,35 @@ known client bugs:
* latin-1 is fine, hiragana is not (not even as shift-jis on japanese xp)

## tftp server

a TFTP server (read/write) can be started using `--tftp 3969` (you probably want [ftp](#ftp-server) instead unless you are *actually* communicating with hardware from the 90s (in which case we should definitely hang some time))

> that makes this the first RTX DECT Base that has been updated using copyparty 🎉

* based on [partftpy](https://github.com/9001/partftpy)
* no accounts; read from world-readable folders, write to world-writable, overwrite in world-deletable
* needs a dedicated port (cannot share with the HTTP/HTTPS API)
* run as root (or see below) to use the spec-recommended port `69` (nice)
* can reply from a predefined portrange (good for firewalls)
* only supports the binary/octet/image transfer mode (no netascii)
* [RFC 7440](https://datatracker.ietf.org/doc/html/rfc7440) is **not** supported, so will be extremely slow over WAN
* assuming default blksize (512), expect 1100 KiB/s over 100BASE-T, 400-500 KiB/s over wifi, 200 on bad wifi

most clients expect to find TFTP on port 69, but on linux and macos you need to be root to listen on that. Alternatively, listen on 3969 and use NAT on the server to forward 69 to that port;
* on linux: `iptables -t nat -A PREROUTING -i eth0 -p udp --dport 69 -j REDIRECT --to-port 3969`

some recommended TFTP clients:
* curl (cross-platform, read/write)
* get: `curl --tftp-blksize 1428 tftp://127.0.0.1:3969/firmware.bin`
* put: `curl --tftp-blksize 1428 -T firmware.bin tftp://127.0.0.1:3969/`
* windows: `tftp.exe` (you probably already have it)
* `tftp -i 127.0.0.1 put firmware.bin`
* linux: `tftp-hpa`, `atftp`
* `atftp --option "blksize 1428" 127.0.0.1 3969 -p -l firmware.bin -r firmware.bin`
* `tftp -v -m binary 127.0.0.1 3969 -c put firmware.bin`

## smb server

unsafe, slow, not recommended for wan, enable with `--smb` for read-only or `--smbw` for read-write

@@ -938,7 +1081,7 @@ known client bugs:
* however smb1 is buggy and is not enabled by default on win10 onwards
* windows cannot access folders which contain filenames with invalid unicode or forbidden characters (`<>:"/\|?*`), or names ending with `.`

the smb protocol listens on TCP port 445, which is a privileged port on linux and macos and would require running copyparty as root. However, this can be avoided by listening on another port using `--smb-port 3945` and then using NAT on the server to forward the traffic from 445 to there;
* on linux: `iptables -t nat -A PREROUTING -i eth0 -p tcp --dport 445 -j REDIRECT --to-port 3945`

authenticate with one of the following:

@@ -946,6 +1089,33 @@ authenticate with one of the following:
* username `$password`, password `k`

## browser ux

tweaking the ui

* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort (see the example after this list)
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Cirle tags/.tn tags/Artist tags/Title`
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Cirle,tags/.tn,tags/Artist,tags/Title,href`
* to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`

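for example, combining the flags above into one command (the volume is the read-only music share used elsewhere in this readme):

```bash
# show the upload-time column and sort the listing by it
python copyparty-sfx.py -e2d -mte +.up_at --sort tags/.up_at -v /mnt/nas/music:/music:r
```
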
see [./docs/rice](./docs/rice) for more, including how to add stuff (css/`<meta>`/...) to the html `<head>` tag, or to add your own translation

## opengraph

discord and social-media embeds

can be enabled globally with `--og` or per-volume with volflag `og`

note that this disables hotlinking because the opengraph spec demands it; to sneak past this intentional limitation, you can enable opengraph selectively by user-agent, for example `--og-ua '(Discord|Twitter|Slack)bot'` (or volflag `og_ua`)

you can also hotlink files regardless by appending `?raw` to the url

NOTE: because discord (and maybe others) strip query args such as `?raw` in opengraph tags, any links which require a filekey or dirkey will not work

if you want to entirely replace the copyparty response with your own jinja2 template, give the template filepath to `--og-tpl` or volflag `og_tpl` (all members of `HttpCli` are available through the `this` object)

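a sketch of how those flags combine on the commandline (the volume spec is made up; the bot-regex is the one from the text above):

```bash
# opengraph embeds for social-media bots only, so regular browsers can still hotlink
python copyparty-sfx.py --og --og-ua '(Discord|Twitter|Slack)bot' -v srv/pub:pub:r
```
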
## file indexing

enables dedup and music search ++

@@ -972,6 +1142,7 @@ the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`,
* `-v ~/music::r:c,d2ts` same except only affecting tags

note:
* upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)

@@ -985,6 +1156,8 @@ to save some time, you can provide a regex pattern for filepaths to only index

similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`

* when running on macos, all the usual apple metadata files are excluded by default

if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`

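for example, two ways to keep iso-files out of the index (the paths are made up; the regex is the one from the text above):

```bash
# skip indexing iso-files in every volume
python copyparty-sfx.py -e2dsa --no-idx '\.iso$' -v /mnt/nas/stuff:stuff:r

# same thing, but only for this one volume
python copyparty-sfx.py -e2dsa -v '/mnt/nas/stuff:stuff:r:c,noidx=\.iso$'
```
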
### filesystem guards

@@ -1169,6 +1342,32 @@ redefine behavior with plugins ([examples](./bin/handlers/))
replace 404 and 403 errors with something completely different (that's it for now)

## identity providers

replace copyparty passwords with oauth and such

you can disable the built-in password-based login system, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions

a popular choice is [Authelia](https://www.authelia.com/) (config-file based), another one is [authentik](https://goauthentik.io/) (GUI-based, more complex)

there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik) which is hopefully a good starting point (alternatively see [./docs/idp.md](./docs/idp.md) if you're the DIY type)

a more complete example of the copyparty configuration options [looks like this](./docs/examples/docker/idp/copyparty.conf)

## using the cloud as storage

connecting to an aws s3 bucket and similar

there is no built-in support for this, but you can use FUSE-software such as [rclone](https://rclone.org/) / [geesefs](https://github.com/yandex-cloud/geesefs) / [JuiceFS](https://juicefs.com/en/) to first mount your cloud storage as a local disk, and then let copyparty use (a folder in) that disk as a volume

you may experience poor upload performance this way, but that can sometimes be fixed by specifying the volflag `sparse` to force the use of sparse files; this has improved the upload speeds from `1.5 MiB/s` to over `80 MiB/s` in one case, but note that you are also more likely to discover funny bugs in your FUSE software this way, so buckle up

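a rough sketch of the rclone variant (the remote name, bucket, mountpoint, and volume layout are all made up; `sparse` is the volflag mentioned above):

```bash
# mount the bucket with rclone, then let copyparty serve a folder inside it
rclone mount mys3:mybucket /mnt/s3 --vfs-cache-mode writes --daemon
python copyparty-sfx.py -v /mnt/s3/share:share:rw:c,sparse
```
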
someone has also tested geesefs in combination with [gocryptfs](https://nuetzlich.net/gocryptfs/) with surprisingly good results, getting 60 MiB/s upload speeds on a gbit line, but JuiceFS won with 80 MiB/s using its built-in encryption

you may improve performance by specifying larger values for `--iobuf` / `--s-rd-sz` / `--s-wr-sz`

## hiding from google

tell search engines you don't wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:

@@ -1202,6 +1401,8 @@ the classname of the HTML tag is set according to the selected theme, which is u

see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where the color variables are set, and there's layout-specific stuff near the bottom

if you want to change the fonts, see [./docs/rice/](./docs/rice/)

## complete examples

@@ -1228,8 +1429,8 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
* anyone can upload, and receive "secret" links for each upload they do:
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`

* anyone can browse (`r`), only `kevin` (password `okgo`) can upload/move/delete (`A`) files:
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:A,kevin`

* read-only music server:
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`

@@ -1253,8 +1454,9 @@ you can either:
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below

some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which *could* be a nice speed boost, depending on a lot of factors
* **warning:** nginx-QUIC (HTTP/3) is still experimental and can make uploads much slower, so HTTP/1.1 is recommended for now
* depending on server/client, HTTP/1.1 can also be 5x faster than HTTP/2

example webserver configs:

@@ -1262,6 +1464,15 @@ example webserver configs:
* [apache2 config](contrib/apache/copyparty.conf) -- location-based

### real-ip

teaching copyparty how to see client IPs when running behind a reverse-proxy, or a WAF, or another protection service such as cloudflare

if you (and maybe everybody else) keep getting a message that says `thank you for playing`, then you've gotten banned for malicious traffic. This ban applies to the IP address that copyparty *thinks* identifies the shady client -- so, depending on your setup, you might have to tell copyparty where to find the correct IP

for most common setups, there should be a helpful message in the server-log explaining what to do, but see [docs/xff.md](docs/xff.md) if you want to learn more, including a quick hack to **just make it work** (which is **not** recommended, but hey...)

## prometheus

metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)

@@ -1283,8 +1494,23 @@ scrape_configs:
```

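to spot-check the endpoint before pointing prometheus at it (assuming the stats are enabled, the example password `wark`, and that your account is allowed to see them):

```bash
curl -H 'pw: wark' http://127.0.0.1:3923/.cpr/metrics | head -n 20
```
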
currently the following metrics are available,
* `cpp_uptime_seconds` time since last copyparty restart
* `cpp_boot_unixtime_seconds` same but as an absolute timestamp
* `cpp_http_conns` number of open http(s) connections
* `cpp_http_reqs` number of http(s) requests handled
* `cpp_sus_reqs` number of 403/422/malicious requests
* `cpp_active_bans` number of currently banned IPs
* `cpp_total_bans` number of IPs banned since last restart

these are available unless `--nos-vst` is specified:
* `cpp_db_idle_seconds` time since last database activity (upload/rename/delete)
* `cpp_db_act_seconds` same but as an absolute timestamp
* `cpp_idle_vols` number of volumes which are idle / ready
* `cpp_busy_vols` number of volumes which are busy / indexing
* `cpp_offline_vols` number of volumes which are offline / unavailable
* `cpp_hashing_files` number of files queued for hashing / indexing
* `cpp_tagq_files` number of files queued for metadata scanning
* `cpp_mtpq_files` number of files queued for plugin-based analysis

and these are available per-volume only:
* `cpp_disk_size_bytes` total HDD size

@@ -1303,31 +1529,52 @@ some of the metrics have additional requirements to function correctly,
the following options are available to disable some of the metrics:
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
* `--nos-vst` disables volume state, reducing the worst-case prometheus query time by 0.5 sec
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
* `--nos-unf` disables `cpp_unf_*` for no particular purpose

note: the following metrics are counted incorrectly if multiprocessing is enabled with `-j`: `cpp_http_conns`, `cpp_http_reqs`, `cpp_sus_reqs`, `cpp_active_bans`, `cpp_total_bans`

## other extremely specific features

you'll never find a use for these:

### custom mimetypes

change the association of a file extension

using commandline args, you can do something like `--mime gif=image/jif` and `--mime ts=text/x.typescript` (can be specified multiple times)

in a config-file, this is the same as:

```yaml
[global]
mime: gif=image/jif
mime: ts=text/x.typescript
```

run copyparty with `--mimes` to list all the default mappings

# packages

the party might be closer than you think

if your distro/OS is not mentioned below, there might be some hints in the [«on servers»](#on-servers) section

## arch package

now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)

it comes with a [systemd service](./contrib/package/arch/copyparty.service) and expects to find one or more [config files](./docs/example.conf) in `/etc/copyparty.d/`

## fedora package

does not exist yet; using the [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) builds is **NOT recommended** because updates can be delayed by [several months](https://github.com/fedora-copr/copr/issues/3056)

## nix package

@@ -1457,15 +1704,16 @@ TLDR: yes
| navpane | - | yep | yep | yep | yep | yep | yep | yep |
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
| video player | - | yep | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | `*2` | `*2` | yep | yep | yep | yep |
| markdown viewer | - | `*2` | `*2` | `*2` | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |

* internet explorer 6 through 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*2` only able to do plaintext documents (no markdown rendering)
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server

quick summary of more eccentric web-browsers trying to view a directory index:

@@ -1491,10 +1739,12 @@ interact with copyparty using non-browser clients
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
`post movie.mkv` (gives HTML in return)
* `post(){ curl -F f=@"$1" 'http://127.0.0.1:3923/?want=url&pw=wark';}`
`post movie.mkv` (gives hotlink in return)
* `post(){ curl -H pw:wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
`post movie.mkv` (randomized filename)
* `post(){ wget --header='pw: wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
`post movie.mkv`
* `chunk(){ curl -H pw:wark -T- http://127.0.0.1:3923/;}`

@@ -1516,6 +1766,10 @@ interact with copyparty using non-browser clients

* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)

* contextlet (web browser integration); see [contrib contextlet](contrib/#send-to-cppcontextletjson)

* [igloo irc](https://iglooirc.com/): Method: `post` Host: `https://you.com/up/?want=url&pw=hunter2` Multipart: `yes` File parameter: `f`

copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:

b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}

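for example, after defining the function above, compare its output against the checksum the server returned:

```bash
b512 <movie.mkv   # should print the same 44-char base64 string as the upload response
```
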
@@ -1534,7 +1788,7 @@ the commandline uploader [u2c.py](https://github.com/9001/copyparty/tree/hovudst

alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare

* starting from rclone v1.63, rclone is faster than u2c.py on low-latency connections

## mount as drive

@@ -1543,16 +1797,18 @@ a remote copyparty server as a local filesystem; go to the control-panel and cl

alternatively, some other options roughly sorted by speed (unreproducible benchmark), best first:

* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE (rclone v1.63 or later; see the sketch after this list)
* [rclone-http](./docs/rclone.md) (26s), read-only
* [partyfuse.py](./bin/#partyfusepy) (35s), read-only
* [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
* davfs2 (103s), read/WRITE
* [win10-webdav](#webdav-server) (138s), read/WRITE
* [win10-smb2](#smb-server) (387s), read/WRITE

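a minimal sketch of the fastest option (it assumes you have already configured a webdav remote named `cpp` as described in [./docs/rclone.md](./docs/rclone.md); the mountpoint is made up):

```bash
mkdir -p ~/party
rclone mount cpp: ~/party --vfs-cache-mode writes
```
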
most clients will fail to mount the root of a copyparty server unless there is a root volume (so you get the admin-panel instead of a browser when accessing it) -- in that case, mount a specific volume instead

if you have volumes that are accessible without a password, then some webdav clients (such as davfs2) require the global-option `--dav-auth` to access any password-protected areas

# android app

@@ -1581,16 +1837,19 @@ defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload

below are some tweaks roughly ordered by usefulness:

* disabling HTTP/2 and HTTP/3 can make uploads 5x faster, depending on server/client software
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* and also makes thumbnails load faster, regardless of e2d/e2t
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* if your volumes are on a network-disk such as NFS / SMB / s3, specifying larger values for `--iobuf` and/or `--s-rd-sz` and/or `--s-wr-sz` may help; try setting all of them to `524288` or `1048576` or `4194304` (see the sketch after this list)
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
* lots of connections (many users or heavy clients)
* simultaneous downloads and uploads saturating a 20gbps connection
* if `-e2d` is enabled, `-j2` gives 4x performance for directory listings; `-j4` gives 16x

...however it also increases the server/filesystem/HDD load during uploads, and adds an overhead to internal communication, so it is usually a better idea to don't
* using [pypy](https://www.pypy.org/) instead of [cpython](https://www.python.org/) *can* be 70% faster for some workloads, but slower for many others
* and pypy can sometimes crash on startup with `-j0` (TODO make issue)

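for example, a sketch of the network-disk buffer tweak, reusing the music volume from earlier (the values are the ones suggested in the list above):

```bash
python copyparty-sfx.py --iobuf 4194304 --s-rd-sz 1048576 --s-wr-sz 1048576 -v /mnt/nas/music:/music:r
```
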
@@ -1599,7 +1858,7 @@ below are some tweaks roughly ordered by usefulness:

when uploading files,

* chrome is recommended (unfortunately), at least compared to firefox:
* up to 90% faster when hashing, especially on SSDs
* up to 40% faster when uploading over extremely fast internets
* but [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) can be 40% faster than chrome again

@@ -1635,8 +1894,6 @@ safety profiles:
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
* however note if you edit one file it will also affect the other copies
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
* `--nih` removes the server hostname from directory listings

* option `-sss` is a shortcut for the above plus:

@@ -1682,12 +1939,29 @@ cors can be configured with `--acao` and `--acam`, or the protections entirely d

prevent filename bruteforcing

volflag `fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404

by default, filekeys are generated based on salt (`--fk-salt`) + filesystem-path + file-size + inode (if not windows); add volflag `fka` to generate slightly weaker filekeys which will not be invalidated if the file is edited (only salt + path)

permissions `wG` (write + upget) lets users upload files and receive their own filekeys, still without being able to see other uploads

### dirkeys

share specific folders in a volume without giving away full read-access to the rest -- the visitor only needs the `g` (get) permission to view the link

volflag `dk` generates dirkeys (per-directory accesskeys) for all folders, granting read-access to that folder; by default only that folder itself, no subfolders

volflag `dky` disables the actual key-check, meaning anyone can see the contents of a folder where they have `g` access, but not its subdirectories

* `dk` + `dky` gives the same behavior as if all users with `g` access have full read-access, but subfolders are hidden files (as if their names start with a dot), so `dky` is an alternative to renaming all the folders for that purpose, maybe just for some users

volflag `dks` lets people enter subfolders as well, and also enables download-as-zip/tar

dirkeys are generated based on another salt (`--dk-salt`) + filesystem-path and have a few limitations:
* the key does not change if the contents of the folder are modified
* if you need a new dirkey, either change the salt or rename the folder
* linking to a textfile (so it opens in the textfile viewer) is not possible if recipient doesn't have read-access

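a hypothetical way to combine these (the account, volume layout, and filekey-length are made up): `ed` keeps full read-access while everyone else only gets `g` plus file/dirkeys:

```bash
python copyparty-sfx.py -a ed:wark -v 'srv/pub:pub:g:r,ed:c,fk=8,dk'
```
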
## password hashing

@@ -1704,7 +1978,7 @@ the default configs take about 0.4 sec and 256 MiB RAM to process a new password

both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)

copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well -- but note that HTTP/1 is usually faster than both HTTP/2 and HTTP/3

if [cfssl](https://github.com/cloudflare/cfssl/releases/latest) is installed, copyparty will automatically create a CA and server-cert on startup
* the certs are written to `--crt-dir` for distribution, see `--help` for the other `--crt` options

@@ -1779,7 +2053,7 @@ these are standalone programs and will never be imported / evaluated by copypart

# sfx

the self-contained "binary" (recommended!) [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course

you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](./docs/devnotes.md#sfx-repack)

@@ -1803,7 +2077,17 @@ can be convenient on machines where installing python is problematic, however is

meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date

then again, if you are already into downloading shady binaries from the internet, you may also want my [minimal builds](./scripts/pyinstaller#ffmpeg) of [ffmpeg](https://ocv.me/stuff/bin/ffmpeg.exe) and [ffprobe](https://ocv.me/stuff/bin/ffprobe.exe) which enable copyparty to extract multimedia-info, do audio-transcoding, and thumbnails/spectrograms/waveforms; however, it's much better to instead grab a [recent official build](https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z) every once in a while if you can afford the size

## zipapp

another emergency alternative, [copyparty.pyz](https://github.com/9001/copyparty/releases/latest/download/copyparty.pyz) has fewer features, requires python 3.7 or newer, worse compression, and more importantly is unable to benefit from more recent versions of jinja2 and such (which makes it less secure)... lots of drawbacks with this one really -- but it *may* just work if the regular sfx fails to start because the computer is messed up in certain funky ways, so it's worth a shot if all else fails

run it by doubleclicking it, or try typing `python copyparty.pyz` in your terminal/console/commandline/telex if that fails

it is a python [zipapp](https://docs.python.org/3/library/zipapp.html) meaning it doesn't have to unpack its own python code anywhere to run, so if the filesystem is busted it has a better chance of getting somewhere
* but note that it currently still needs to extract the web-resources somewhere (they'll land in the default TEMP-folder of your OS)

# install on android

@@ -1823,7 +2107,12 @@ if you want thumbnails (photos+videos) and you're okay with spending another 132

# reporting bugs

ideas for context to include, and where to submit them

please get in touch using any of the following URLs:
* https://github.com/9001/copyparty/ **(primary)**
* https://gitlab.com/9001/copyparty/ *(mirror)*
* https://codeberg.org/9001/copyparty *(mirror)*

in general, commandline arguments (and config file if any)

@@ -1838,3 +2127,6 @@ if there's a wall of base64 in the log (thread stacks) then please include that,
# devnotes

for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)

see [./docs/TODO.md](./docs/TODO.md) for planned features / fixes / changes

@@ -207,7 +207,7 @@ def examples():

def main():
    global NC, BY_PATH  # pylint: disable=global-statement
    os.system("")
    print()

@@ -282,7 +282,8 @@ def main():
    if ver == "corrupt":
        die("{} database appears to be corrupt, sorry")

    iver = int(ver)
    if iver < DB_VER1 or iver > DB_VER2:
        m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
        die(m)

@@ -13,6 +13,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
|
|||||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||||
|
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
|
||||||
|
|
||||||
|
|
||||||
# upload batches
|
# upload batches
|
||||||
@@ -27,3 +28,5 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin
|
|||||||
|
|
||||||
# on message
|
# on message
|
||||||
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
||||||
|
* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
|
||||||
|
* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
|
||||||
|
|||||||
@@ -12,19 +12,28 @@ announces a new upload on discord
|
|||||||
example usage as global config:
|
example usage as global config:
|
||||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xau = execute after upload
|
||||||
|
f = fork; don't delay other hooks while this is running
|
||||||
|
t5 = timeout if it's still running after 5 sec
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
|
||||||
example usage as a volflag (per-volume config):
|
example usage as a volflag (per-volume config):
|
||||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
(share filesystem-path srv/inc as volume /inc,
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
readable by everyone, read-write for user 'ed',
|
readable by everyone, read-write for user 'ed',
|
||||||
running this plugin on all uploads with the params listed below)
|
running this plugin on all uploads with the params explained above)
|
||||||
|
|
||||||
parameters explained,
|
example usage as a volflag in a copyparty config file:
|
||||||
xbu = execute after upload
|
[/inc]
|
||||||
f = fork; don't wait for it to finish
|
srv/inc
|
||||||
t5 = timeout if it's still running after 5 sec
|
accs:
|
||||||
j = provide upload information as json; not just the filename
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xau: f,t5,j,bin/hooks/discord-announce.py
|
||||||
|
|
||||||
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
||||||
|
|
||||||
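for reference, a stripped-down sketch of what an `xau` hook invoked with the `j` flag boils down to; the webhook URL is a placeholder and the real discord-announce.py does more than this:

```python
#!/usr/bin/env python3
# minimal xau-hook sketch (assumption: "requests" is installed); copyparty passes
# the upload info as a json blob in argv[1] because of the ,j flag
import sys
import json

import requests

WEBHOOK = "https://discord.com/api/webhooks/CHANGE-ME"  # placeholder, not a real URL


def main():
    inf = json.loads(sys.argv[1])  # includes fields such as "vp" (virtual path) and "sz" (size)
    msg = "new upload: /%s (%d bytes)" % (inf["vp"], inf["sz"])
    requests.post(WEBHOOK, json={"content": msg}, timeout=10)


if __name__ == "__main__":
    main()
```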
|
|||||||
140
bin/hooks/into-the-cache-it-goes.py
Normal file
140
bin/hooks/into-the-cache-it-goes.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import platform
|
||||||
|
import subprocess as sp
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
try to avoid race conditions in caching proxies
|
||||||
|
(primarily cloudflare, but probably others too)
|
||||||
|
by means of the most obvious solution possible:
|
||||||
|
|
||||||
|
just as each file has finished uploading, use
|
||||||
|
the server's external URL to download the file
|
||||||
|
so that it ends up in the cache, warm and snug
|
||||||
|
|
||||||
|
this intentionally delays the upload response
|
||||||
|
as it waits for the file to finish downloading
|
||||||
|
before copyparty is allowed to return the URL
|
||||||
|
|
||||||
|
NOTE: you must edit this script before use,
|
||||||
|
replacing https://example.com with your URL
|
||||||
|
|
||||||
|
NOTE: if the files are only accessible with a
|
||||||
|
password and/or filekey, you must also add
|
||||||
|
a cromulent password in the PASSWORD field
|
||||||
|
|
||||||
|
NOTE: needs either wget, curl, or "requests":
|
||||||
|
python3 -m pip install --user -U requests
|
||||||
|
|
||||||
|
|
||||||
|
example usage as global config:
|
||||||
|
--xau j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xau = execute after upload
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
t10 = abort download and continue if it takes longer than 10sec
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config):
|
||||||
|
-v srv/inc:inc:r:rw,ed:xau=j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
|
readable by everyone, read-write for user 'ed',
|
||||||
|
running this plugin on all uploads with params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/inc]
|
||||||
|
srv/inc
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xau: j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# replace this with your site's external URL
|
||||||
|
# (including the :portnumber if necessary)
|
||||||
|
SITE_URL = "https://example.com"
|
||||||
|
|
||||||
|
# if downloading is protected by passwords or filekeys,
|
||||||
|
# specify a valid password between the quotes below:
|
||||||
|
PASSWORD = ""
|
||||||
|
|
||||||
|
# if file is larger than this, skip download
|
||||||
|
MAX_MEGABYTES = 8
|
||||||
|
|
||||||
|
# =============== END OF CONFIG ===============
|
||||||
|
|
||||||
|
|
||||||
|
WINDOWS = platform.system() == "Windows"
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
fun = download_with_python
|
||||||
|
if shutil.which("curl"):
|
||||||
|
fun = download_with_curl
|
||||||
|
elif shutil.which("wget"):
|
||||||
|
fun = download_with_wget
|
||||||
|
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
|
||||||
|
if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
|
||||||
|
print("[into-the-cache] file is too large; will not download")
|
||||||
|
return
|
||||||
|
|
||||||
|
file_url = "/"
|
||||||
|
if inf["vp"]:
|
||||||
|
file_url += inf["vp"] + "/"
|
||||||
|
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
|
||||||
|
file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")
|
||||||
|
|
||||||
|
print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
|
||||||
|
fun(file_url, PASSWORD.strip())
|
||||||
|
|
||||||
|
print("[into-the-cache] Download OK")
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_curl(url, pw):
|
||||||
|
cmd = ["curl"]
|
||||||
|
|
||||||
|
if pw:
|
||||||
|
cmd += ["-HPW:%s" % (pw,)]
|
||||||
|
|
||||||
|
nah = sp.DEVNULL
|
||||||
|
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_wget(url, pw):
|
||||||
|
cmd = ["wget", "-O"]
|
||||||
|
|
||||||
|
cmd += ["nul" if WINDOWS else "/dev/null"]
|
||||||
|
|
||||||
|
if pw:
|
||||||
|
cmd += ["--header=PW:%s" % (pw,)]
|
||||||
|
|
||||||
|
nah = sp.DEVNULL
|
||||||
|
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_python(url, pw):
|
||||||
|
import requests
|
||||||
|
|
||||||
|
headers = {}
|
||||||
|
if pw:
|
||||||
|
headers["PW"] = pw
|
||||||
|
|
||||||
|
with requests.get(url, headers=headers, stream=True) as r:
|
||||||
|
r.raise_for_status()
|
||||||
|
for _ in r.iter_content(chunk_size=1024 * 256):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
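a quick way to smoke-test the hook by hand (assuming SITE_URL has been edited to something reachable); the json fields mirror what the script actually reads -- sz, vp and ap:

```python
# fake an upload event and run the hook the same way copyparty would (json in argv[1])
import json
import subprocess

fake_upload = {"sz": 1234, "vp": "inc", "ap": "/srv/inc/example.jpg"}  # made-up values
subprocess.run(
    ["python3", "bin/hooks/into-the-cache-it-goes.py", json.dumps(fake_upload)],
    check=True,
)
```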
@@ -7,22 +7,39 @@ import json
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from datetime import datetime
|
|
||||||
|
try:
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
except:
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
"""
|
_ = r"""
|
||||||
use copyparty as a dumb messaging server / guestbook thing;
|
use copyparty as a dumb messaging server / guestbook thing;
|
||||||
|
accepts guestbook entries from 📟 (message-to-server-log) in the web-ui
|
||||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||||
|
|
||||||
Sample usage:
|
example usage as global config:
|
||||||
|
|
||||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||||
|
|
||||||
Where:
|
parameters explained,
|
||||||
|
xm = execute on message (📟)
|
||||||
|
j = this hook needs message information as json (not just the message-text)
|
||||||
|
|
||||||
xm = execute on message-to-server-log
|
example usage as a volflag (per-volume config):
|
||||||
j = provide message information as json; not just the text - this script REQUIRES json
|
python copyparty-sfx.py -v srv/log:log:r:c,xm=j,bin/hooks/msg-log.py
|
||||||
t10 = timeout and kill download after 10 secs
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/log as volume /log, readable by everyone,
|
||||||
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/log]
|
||||||
|
srv/log
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
flags:
|
||||||
|
xm: j,bin/hooks/msg-log.py
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
@@ -96,7 +113,11 @@ def main(argv=None):
|
|||||||
msg_info = json.loads(sys.argv[1])
|
msg_info = json.loads(sys.argv[1])
|
||||||
# print(msg_info)
|
# print(msg_info)
|
||||||
|
|
||||||
dt = datetime.utcfromtimestamp(msg_info["at"])
|
try:
|
||||||
|
dt = datetime.fromtimestamp(msg_info["at"], timezone.utc)
|
||||||
|
except:
|
||||||
|
dt = datetime.utcfromtimestamp(msg_info["at"])
|
||||||
|
|
||||||
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
||||||
|
|
||||||
msg_text = TEMPLATE % msg_info
|
msg_text = TEMPLATE % msg_info
|
||||||
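side-note on the datetime churn in this changeset: `datetime.utcfromtimestamp` returns a naive datetime and is deprecated as of python 3.12, so the hooks now prefer the timezone-aware form and fall back to the old call on interpreters without `datetime.timezone`; roughly:

```python
# the pattern repeated across these files (a sketch, not copied verbatim from any of them)
from datetime import datetime

try:
    from datetime import timezone
    UTC = timezone.utc
except ImportError:  # ancient interpreters without datetime.timezone
    UTC = None


def utc_str(ts):
    # an aware utc datetime formats to the same string the old naive call produced
    dt = datetime.fromtimestamp(ts, UTC) if UTC else datetime.utcfromtimestamp(ts)
    return dt.strftime("%Y-%m-%d %H:%M:%S")


print(utc_str(0))  # 1970-01-01 00:00:00
```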
|
|||||||
@@ -4,7 +4,7 @@ import json
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from plyer import notification
|
from plyer import notification
|
||||||
|
|
||||||
|
|
||||||
@@ -43,7 +43,8 @@ def main():
|
|||||||
fp = inf["ap"]
|
fp = inf["ap"]
|
||||||
sz = humansize(inf["sz"])
|
sz = humansize(inf["sz"])
|
||||||
dp, fn = os.path.split(fp)
|
dp, fn = os.path.split(fp)
|
||||||
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
|
dt = datetime.fromtimestamp(inf["mt"], timezone.utc)
|
||||||
|
mt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||||
title = "File received"
|
title = "File received"
|
||||||
|
|||||||
127
bin/hooks/qbittorrent-magnet.py
Executable file
127
bin/hooks/qbittorrent-magnet.py
Executable file
@@ -0,0 +1,127 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import subprocess as sp
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
start downloading a torrent by POSTing a magnet URL to copyparty,
|
||||||
|
for example using 📟 (message-to-server-log) in the web-ui
|
||||||
|
|
||||||
|
by default it will download the torrent to the folder you were in
|
||||||
|
when you pasted the magnet into the message-to-server-log field
|
||||||
|
|
||||||
|
you can optionally specify another location by adding a space
|
||||||
|
after the magnet URL followed by the name of the subfolder to DL into,
|
||||||
|
for example, "anime/airing" would download to /srv/media/anime/airing
|
||||||
|
because the keyword "anime" is in the DESTS config below
|
||||||
|
|
||||||
|
needs python3
|
||||||
|
|
||||||
|
example usage as global config (not a good idea):
|
||||||
|
python copyparty-sfx.py --xm f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xm = execute on message (📟)
|
||||||
|
f = fork; don't delay other hooks while this is running
|
||||||
|
j = provide message information as json (not just the text)
|
||||||
|
t60 = abort if qbittorrent has to think about it for more than 1 min
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config, much better):
|
||||||
|
-v srv/qb:qb:A,ed:c,xm=f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
|
||||||
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/qb]
|
||||||
|
srv/qb
|
||||||
|
accs:
|
||||||
|
A: ed
|
||||||
|
flags:
|
||||||
|
xm: f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
|
||||||
|
the volflag examples only kicks in if you send the torrent magnet
|
||||||
|
while you're in the /qb folder (or any folder below there)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# list of usernames to allow
|
||||||
|
ALLOWLIST = [ "ed", "morpheus" ]
|
||||||
|
|
||||||
|
|
||||||
|
# list of destination aliases to translate into full filesystem
|
||||||
|
# paths; takes effect if the first folder component in the
|
||||||
|
# custom download location matches anything in this dict
|
||||||
|
DESTS = {
|
||||||
|
"iso": "/srv/pub/linux-isos",
|
||||||
|
"anime": "/srv/media/anime",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
url = inf["txt"]
|
||||||
|
if not url.lower().startswith("magnet:?"):
|
||||||
|
# not a magnet, abort
|
||||||
|
return
|
||||||
|
|
||||||
|
if inf["user"] not in ALLOWLIST:
|
||||||
|
print("🧲 denied for user", inf["user"])
|
||||||
|
return
|
||||||
|
|
||||||
|
# might as well run the command inside the filesystem folder
|
||||||
|
# which matches the URL that the magnet message was sent to
|
||||||
|
os.chdir(inf["ap"])
|
||||||
|
|
||||||
|
# is there a custom download location in the url?
|
||||||
|
dst = ""
|
||||||
|
if " " in url:
|
||||||
|
url, dst = url.split(" ", 1)
|
||||||
|
|
||||||
|
# is the location in the predefined list of locations?
|
||||||
|
parts = dst.replace("\\", "/").split("/")
|
||||||
|
if parts[0] in DESTS:
|
||||||
|
dst = os.path.join(DESTS[parts[0]], *(parts[1:]))
|
||||||
|
|
||||||
|
else:
|
||||||
|
# nope, so download to the current folder instead;
|
||||||
|
# comment the dst line below to instead use the default
|
||||||
|
# download location from your qbittorrent settings
|
||||||
|
dst = inf["ap"]
|
||||||
|
pass
|
||||||
|
|
||||||
|
# archlinux has a -nox suffix for qbittorrent if headless
|
||||||
|
# so check if we should be using that
|
||||||
|
if shutil.which("qbittorrent-nox"):
|
||||||
|
torrent_bin = "qbittorrent-nox"
|
||||||
|
else:
|
||||||
|
torrent_bin = "qbittorrent"
|
||||||
|
|
||||||
|
# the command to add a new torrent, adjust if necessary
|
||||||
|
cmd = [torrent_bin, url]
|
||||||
|
if dst:
|
||||||
|
cmd += ["--save-path=%s" % (dst,)]
|
||||||
|
|
||||||
|
# if copyparty and qbittorrent are running as different users
|
||||||
|
# you may have to do something like the following
|
||||||
|
# (assuming qbittorrent* is nopasswd-allowed in sudoers):
|
||||||
|
#
|
||||||
|
# cmd = ["sudo", "-u", "qbitter"] + cmd
|
||||||
|
|
||||||
|
print("🧲", cmd)
|
||||||
|
|
||||||
|
try:
|
||||||
|
sp.check_call(cmd)
|
||||||
|
except:
|
||||||
|
print("🧲 FAILED TO ADD", url)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
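to make the DESTS aliasing concrete, this is roughly how a message like `magnet:?xt=... anime/airing` gets mapped to a download folder (same logic as the dst-resolution in the script above, using its example DESTS values):

```python
# illustration only; mirrors the dst-resolution in qbittorrent-magnet.py
import os

DESTS = {"iso": "/srv/pub/linux-isos", "anime": "/srv/media/anime"}

txt = "magnet:?xt=urn:btih:0123456789abcdef anime/airing"  # hypothetical message
url, dst = txt.split(" ", 1)
parts = dst.replace("\\", "/").split("/")
if parts[0] in DESTS:
    dst = os.path.join(DESTS[parts[0]], *parts[1:])

print(url)  # the magnet link that gets handed to qbittorrent
print(dst)  # /srv/media/anime/airing
```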
@@ -9,25 +9,37 @@ import subprocess as sp
|
|||||||
_ = r"""
|
_ = r"""
|
||||||
use copyparty as a file downloader by POSTing URLs as
|
use copyparty as a file downloader by POSTing URLs as
|
||||||
application/x-www-form-urlencoded (for example using the
|
application/x-www-form-urlencoded (for example using the
|
||||||
message/pager function on the website)
|
📟 message-to-server-log in the web-ui)
|
||||||
|
|
||||||
example usage as global config:
|
example usage as global config:
|
||||||
--xm f,j,t3600,bin/hooks/wget.py
|
--xm f,j,t3600,bin/hooks/wget.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xm = execute on message-to-server-log
|
||||||
|
f = fork; don't delay other hooks while this is running
|
||||||
|
j = provide message information as json (not just the text)
|
||||||
|
c3 = mute all output
|
||||||
|
t3600 = timeout and abort download after 1 hour
|
||||||
|
|
||||||
example usage as a volflag (per-volume config):
|
example usage as a volflag (per-volume config):
|
||||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
(share filesystem-path srv/inc as volume /inc,
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
readable by everyone, read-write for user 'ed',
|
readable by everyone, read-write for user 'ed',
|
||||||
running this plugin on all messages with the params listed below)
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
parameters explained,
|
example usage as a volflag in a copyparty config file:
|
||||||
xm = execute on message-to-server-log
|
[/inc]
|
||||||
f = fork so it doesn't block uploads
|
srv/inc
|
||||||
j = provide message information as json; not just the text
|
accs:
|
||||||
c3 = mute all output
|
r: *
|
||||||
t3600 = timeout and kill download after 1 hour
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xm: f,j,t3600,bin/hooks/wget.py
|
||||||
|
|
||||||
|
the volflag examples only kick in if you send the message
|
||||||
|
while you're in the /inc folder (or any folder below there)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
|
||||||
_ = r"""
|
_ = r"""
|
||||||
@@ -43,8 +43,11 @@ except:
|
|||||||
return p
|
return p
|
||||||
|
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
def humantime(ts):
|
def humantime(ts):
|
||||||
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
|
return datetime.fromtimestamp(ts, UTC).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
|
|
||||||
def find_files_root(inf):
|
def find_files_root(inf):
|
||||||
@@ -96,7 +99,7 @@ def main():
|
|||||||
|
|
||||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||||
ret.append("")
|
ret.append("")
|
||||||
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
|
ftime = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S.%f")
|
||||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||||
with open(fsenc(fp), "wb") as f:
|
with open(fsenc(fp), "wb") as f:
|
||||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||||
|
|||||||
@@ -223,12 +223,15 @@ install_vamp() {
|
|||||||
# use msys2 in mingw-w64 mode
|
# use msys2 in mingw-w64 mode
|
||||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||||
|
|
||||||
$pybin -m pip install --user vamp
|
$pybin -m pip install --user vamp || {
|
||||||
|
printf '\n\033[7malright, trying something else...\033[0m\n'
|
||||||
|
$pybin -m pip install --user --no-build-isolation vamp
|
||||||
|
}
|
||||||
|
|
||||||
cd "$td"
|
cd "$td"
|
||||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
|
(dl_files yolo https://ocv.me/mirror/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||||
sha512sum -c <(
|
sha512sum -c <(
|
||||||
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
||||||
) <vamp-plugin-sdk-2.10.0.tar.gz
|
) <vamp-plugin-sdk-2.10.0.tar.gz
|
||||||
@@ -244,7 +247,7 @@ install_vamp() {
|
|||||||
cd "$td"
|
cd "$td"
|
||||||
have_beatroot || {
|
have_beatroot || {
|
||||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
(dl_files yolo https://ocv.me/mirror/beatroot-vamp-v1.0.tar.gz)
|
||||||
sha512sum -c <(
|
sha512sum -c <(
|
||||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||||
) <beatroot-vamp-v1.0.tar.gz
|
) <beatroot-vamp-v1.0.tar.gz
|
||||||
|
|||||||
@@ -46,13 +46,20 @@ import traceback
|
|||||||
import http.client # py2: httplib
|
import http.client # py2: httplib
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import calendar
|
import calendar
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
WINDOWS = sys.platform == "win32"
|
WINDOWS = sys.platform == "win32"
|
||||||
MACOS = platform.system() == "Darwin"
|
MACOS = platform.system() == "Darwin"
|
||||||
info = log = dbg = None
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
def print(*args, **kwargs):
|
||||||
|
try:
|
||||||
|
builtins.print(*list(args), **kwargs)
|
||||||
|
except:
|
||||||
|
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
print(
|
print(
|
||||||
@@ -64,6 +71,13 @@ print(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def null_log(msg):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
info = log = dbg = null_log
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from fuse import FUSE, FuseOSError, Operations
|
from fuse import FUSE, FuseOSError, Operations
|
||||||
except:
|
except:
|
||||||
@@ -83,13 +97,6 @@ except:
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
def print(*args, **kwargs):
|
|
||||||
try:
|
|
||||||
builtins.print(*list(args), **kwargs)
|
|
||||||
except:
|
|
||||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def termsafe(txt):
|
def termsafe(txt):
|
||||||
try:
|
try:
|
||||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||||
@@ -118,10 +125,6 @@ def fancy_log(msg):
|
|||||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||||
|
|
||||||
|
|
||||||
def null_log(msg):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def hexler(binary):
|
def hexler(binary):
|
||||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||||
@@ -176,7 +179,7 @@ class RecentLog(object):
|
|||||||
def put(self, msg):
|
def put(self, msg):
|
||||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||||
if self.f:
|
if self.f:
|
||||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
fmsg = " ".join([datetime.now(UTC).strftime("%H%M%S.%f"), str(msg)])
|
||||||
self.f.write(fmsg.encode("utf-8"))
|
self.f.write(fmsg.encode("utf-8"))
|
||||||
|
|
||||||
with self.mtx:
|
with self.mtx:
|
||||||
|
|||||||
@@ -20,12 +20,13 @@ import sys
|
|||||||
import base64
|
import base64
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import argparse
|
import argparse
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
|
|
||||||
FS_ENCODING = sys.getfilesystemencoding()
|
FS_ENCODING = sys.getfilesystemencoding()
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||||
@@ -155,11 +156,10 @@ th {
|
|||||||
link = txt.decode("utf-8")[4:]
|
link = txt.decode("utf-8")[4:]
|
||||||
|
|
||||||
sz = "{:,}".format(sz)
|
sz = "{:,}".format(sz)
|
||||||
|
dt = datetime.fromtimestamp(at if at > 0 else mt, UTC)
|
||||||
v = [
|
v = [
|
||||||
w[:16],
|
w[:16],
|
||||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
dt.strftime("%Y-%m-%d %H:%M:%S"),
|
||||||
"%Y-%m-%d %H:%M:%S"
|
|
||||||
),
|
|
||||||
sz,
|
sz,
|
||||||
imap.get(ip, ip),
|
imap.get(ip, ip),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -12,13 +12,13 @@ done
|
|||||||
help() { cat <<'EOF'
|
help() { cat <<'EOF'
|
||||||
|
|
||||||
usage:
|
usage:
|
||||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||||
|
|
||||||
example:
|
example:
|
||||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||||
|
|
||||||
example for running straight from source (instead of using an sfx):
|
example for running straight from source (instead of using an sfx):
|
||||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||||
|
|
||||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||||
you should add /home/foo/.local as a VOLDIR
|
you should add /home/foo/.local as a VOLDIR
|
||||||
@@ -28,6 +28,16 @@ exit 1
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
errs=
|
||||||
|
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
|
||||||
|
command -v $c >/dev/null || {
|
||||||
|
echo ERROR: command not found: $c
|
||||||
|
errs=1
|
||||||
|
}
|
||||||
|
done
|
||||||
|
[ $errs ] && exit 1
|
||||||
|
|
||||||
|
|
||||||
# read arguments
|
# read arguments
|
||||||
trap help EXIT
|
trap help EXIT
|
||||||
jail="$(realpath "$1")"; shift
|
jail="$(realpath "$1")"; shift
|
||||||
@@ -58,11 +68,18 @@ cpp="$1"; shift
|
|||||||
}
|
}
|
||||||
trap - EXIT
|
trap - EXIT
|
||||||
|
|
||||||
|
usr="$(getent passwd $uid | cut -d: -f1)"
|
||||||
|
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
|
||||||
|
uid="$(getent passwd $uid | cut -d: -f3)"
|
||||||
|
|
||||||
|
grp="$(getent group $gid | cut -d: -f1)"
|
||||||
|
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
|
||||||
|
gid="$(getent group $gid | cut -d: -f3)"
|
||||||
|
|
||||||
# debug/vis
|
# debug/vis
|
||||||
echo
|
echo
|
||||||
echo "chroot-dir = $jail"
|
echo "chroot-dir = $jail"
|
||||||
echo "user:group = $uid:$gid"
|
echo "user:group = $uid:$gid ($usr:$grp)"
|
||||||
echo " copyparty = $cpp"
|
echo " copyparty = $cpp"
|
||||||
echo
|
echo
|
||||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||||
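side-note on the new username/groupname support above: `getent` accepts either a name or a numeric id, so the script can resolve both forms; a rough python equivalent of that lookup (illustration only, the real script stays in shell):

```python
# accept "cpp" or "1000" and resolve to (name, numeric id); unix-only stdlib modules
import grp
import pwd


def resolve_user(u):
    ent = pwd.getpwuid(int(u)) if str(u).isdigit() else pwd.getpwnam(str(u))
    return ent.pw_name, ent.pw_uid


def resolve_group(g):
    ent = grp.getgrgid(int(g)) if str(g).isdigit() else grp.getgrnam(str(g))
    return ent.gr_name, ent.gr_gid


print(resolve_user("0"))      # ('root', 0) on most systems
print(resolve_group("root"))  # ('root', 0) on linux
```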
@@ -80,34 +97,39 @@ jail="${jail%/}"
|
|||||||
|
|
||||||
|
|
||||||
# bind-mount system directories and volumes
|
# bind-mount system directories and volumes
|
||||||
|
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||||
while IFS= read -r v; do
|
while IFS= read -r v; do
|
||||||
[ -e "$v" ] || {
|
[ -e "$v" ] || {
|
||||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
|
||||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
|
||||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
|
||||||
[ $i1 = $i2 ] && continue
|
[ $i1 = $i2 ] && continue
|
||||||
|
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
|
||||||
mkdir -p "$jail$v"
|
mkdir -p "$jail$v"
|
||||||
mount --bind "$v" "$jail$v"
|
mount --bind "$v" "$jail$v"
|
||||||
done
|
done
|
||||||
|
rmdir "$jail/.prisonlock" || true
|
||||||
|
|
||||||
|
|
||||||
cln() {
|
cln() {
|
||||||
rv=$?
|
trap - EXIT
|
||||||
wait -f -p rv $p || true
|
wait -f -n $p && rv=0 || rv=$?
|
||||||
cd /
|
cd /
|
||||||
echo "stopping chroot..."
|
echo "stopping chroot..."
|
||||||
lsof "$jail" | grep -F "$jail" &&
|
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||||
|
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
|
||||||
echo "chroot is in use; will not unmount" ||
|
echo "chroot is in use; will not unmount" ||
|
||||||
{
|
{
|
||||||
mount | grep -F " on $jail" |
|
mount | grep -F " on $jail" |
|
||||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
LC_ALL=C sort -r | while IFS= read -r v; do
|
||||||
|
umount "$v" && echo "umount OK: $v"
|
||||||
|
done
|
||||||
}
|
}
|
||||||
|
rmdir "$jail/.prisonlock" || true
|
||||||
exit $rv
|
exit $rv
|
||||||
}
|
}
|
||||||
trap cln EXIT
|
trap cln EXIT
|
||||||
@@ -128,8 +150,8 @@ chmod 777 "$jail/tmp"
|
|||||||
|
|
||||||
|
|
||||||
# run copyparty
|
# run copyparty
|
||||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
export HOME="$(getent passwd $uid | cut -d: -f6)"
|
||||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
export USER="$usr"
|
||||||
export LOGNAME="$USER"
|
export LOGNAME="$USER"
|
||||||
#echo "pybin [$pybin]"
|
#echo "pybin [$pybin]"
|
||||||
#echo "pyarg [$pyarg]"
|
#echo "pyarg [$pyarg]"
|
||||||
@@ -137,5 +159,5 @@ export LOGNAME="$USER"
|
|||||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||||
p=$!
|
p=$!
|
||||||
trap 'kill -USR1 $p' USR1
|
trap 'kill -USR1 $p' USR1
|
||||||
trap 'kill $p' INT TERM
|
trap 'trap - INT TERM; kill $p' INT TERM
|
||||||
wait
|
wait
|
||||||
|
|||||||
279
bin/u2c.py
279
bin/u2c.py
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
S_VERSION = "1.10"
|
S_VERSION = "1.20"
|
||||||
S_BUILD_DT = "2023-08-15"
|
S_BUILD_DT = "2024-07-22"
|
||||||
|
|
||||||
"""
|
"""
|
||||||
u2c.py: upload to copyparty
|
u2c.py: upload to copyparty
|
||||||
@@ -20,6 +20,7 @@ import sys
|
|||||||
import stat
|
import stat
|
||||||
import math
|
import math
|
||||||
import time
|
import time
|
||||||
|
import json
|
||||||
import atexit
|
import atexit
|
||||||
import signal
|
import signal
|
||||||
import socket
|
import socket
|
||||||
@@ -29,7 +30,7 @@ import platform
|
|||||||
import threading
|
import threading
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
EXE = sys.executable.endswith("exe")
|
EXE = bool(getattr(sys, "frozen", False))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import argparse
|
import argparse
|
||||||
@@ -80,11 +81,20 @@ req_ses = requests.Session()
|
|||||||
|
|
||||||
class Daemon(threading.Thread):
|
class Daemon(threading.Thread):
|
||||||
def __init__(self, target, name=None, a=None):
|
def __init__(self, target, name=None, a=None):
|
||||||
# type: (Any, Any, Any) -> None
|
threading.Thread.__init__(self, name=name)
|
||||||
threading.Thread.__init__(self, target=target, args=a or (), name=name)
|
self.a = a or ()
|
||||||
|
self.fun = target
|
||||||
self.daemon = True
|
self.daemon = True
|
||||||
self.start()
|
self.start()
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
try:
|
||||||
|
signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGINT, signal.SIGTERM])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.fun(*self.a)
|
||||||
|
|
||||||
|
|
||||||
class File(object):
|
class File(object):
|
||||||
"""an up2k upload task; represents a single file"""
|
"""an up2k upload task; represents a single file"""
|
||||||
@@ -101,16 +111,22 @@ class File(object):
|
|||||||
# set by get_hashlist
|
# set by get_hashlist
|
||||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||||
|
self.t_hash = 0.0 # type: float
|
||||||
|
|
||||||
# set by handshake
|
# set by handshake
|
||||||
self.recheck = False # duplicate; redo handshake after all files done
|
self.recheck = False # duplicate; redo handshake after all files done
|
||||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||||
self.wark = None # type: str
|
self.wark = "" # type: str
|
||||||
self.url = None # type: str
|
self.url = "" # type: str
|
||||||
|
self.nhs = 0 # type: int
|
||||||
|
|
||||||
# set by upload
|
# set by upload
|
||||||
|
self.t0_up = 0.0 # type: float
|
||||||
|
self.t1_up = 0.0 # type: float
|
||||||
|
self.nojoin = 0 # type: int
|
||||||
self.up_b = 0 # type: int
|
self.up_b = 0 # type: int
|
||||||
self.up_c = 0 # type: int
|
self.up_c = 0 # type: int
|
||||||
|
self.cd = 0 # type: int
|
||||||
|
|
||||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||||
@@ -119,10 +135,20 @@ class File(object):
|
|||||||
class FileSlice(object):
|
class FileSlice(object):
|
||||||
"""file-like object providing a fixed window into a file"""
|
"""file-like object providing a fixed window into a file"""
|
||||||
|
|
||||||
def __init__(self, file, cid):
|
def __init__(self, file, cids):
|
||||||
# type: (File, str) -> None
|
# type: (File, str) -> None
|
||||||
|
|
||||||
self.car, self.len = file.kchunks[cid]
|
self.file = file
|
||||||
|
self.cids = cids
|
||||||
|
|
||||||
|
self.car, tlen = file.kchunks[cids[0]]
|
||||||
|
for cid in cids[1:]:
|
||||||
|
ofs, clen = file.kchunks[cid]
|
||||||
|
if ofs != self.car + tlen:
|
||||||
|
raise Exception(9)
|
||||||
|
tlen += clen
|
||||||
|
|
||||||
|
self.len = tlen
|
||||||
self.cdr = self.car + self.len
|
self.cdr = self.car + self.len
|
||||||
self.ofs = 0 # type: int
|
self.ofs = 0 # type: int
|
||||||
self.f = open(file.abs, "rb", 512 * 1024)
|
self.f = open(file.abs, "rb", 512 * 1024)
|
||||||
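context for the FileSlice change above: a slice now covers one *or more* chunk-hashes, as long as those chunks sit back-to-back in the file, so several chunks can go out in a single POST (capped by the new `--sz` option); the joining idea, boiled down:

```python
# sketch of the contiguity check used when merging chunks into one upload range
def join_contiguous(kchunks, cids, max_n):
    """kchunks: {hash: (offset, size)}; cids: chunk-hashes still to upload"""
    car, tlen = kchunks[cids[0]]
    n = 1
    for cid in cids[1:max_n]:
        ofs, clen = kchunks[cid]
        if ofs != car + tlen:
            break  # next chunk is not adjacent on disk; stop joining here
        tlen += clen
        n += 1
    return cids[:n], car, tlen  # hashes covered, start offset, total byte length


kchunks = {"a": (0, 4), "b": (4, 4), "c": (12, 4)}
print(join_contiguous(kchunks, ["a", "b", "c"], 64))  # (['a', 'b'], 0, 8)
```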
@@ -221,6 +247,7 @@ class MTHash(object):
|
|||||||
|
|
||||||
def hash_at(self, nch):
|
def hash_at(self, nch):
|
||||||
f = self.f
|
f = self.f
|
||||||
|
assert f
|
||||||
ofs = ofs0 = nch * self.csz
|
ofs = ofs0 = nch * self.csz
|
||||||
hashobj = hashlib.sha512()
|
hashobj = hashlib.sha512()
|
||||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||||
@@ -345,7 +372,7 @@ def undns(url):
|
|||||||
usp = urlsplit(url)
|
usp = urlsplit(url)
|
||||||
hn = usp.hostname
|
hn = usp.hostname
|
||||||
gai = None
|
gai = None
|
||||||
eprint("resolving host [{0}] ...".format(hn), end="")
|
eprint("resolving host [%s] ..." % (hn,))
|
||||||
try:
|
try:
|
||||||
gai = socket.getaddrinfo(hn, None)
|
gai = socket.getaddrinfo(hn, None)
|
||||||
hn = gai[0][4][0]
|
hn = gai[0][4][0]
|
||||||
@@ -363,7 +390,7 @@ def undns(url):
|
|||||||
|
|
||||||
usp = usp._replace(netloc=hn)
|
usp = usp._replace(netloc=hn)
|
||||||
url = urlunsplit(usp)
|
url = urlunsplit(usp)
|
||||||
eprint(" {0}".format(url))
|
eprint(" %s\n" % (url,))
|
||||||
return url
|
return url
|
||||||
|
|
||||||
|
|
||||||
@@ -433,7 +460,7 @@ def walkdirs(err, tops, excl):
|
|||||||
za = [x.replace(b"/", b"\\") for x in za]
|
za = [x.replace(b"/", b"\\") for x in za]
|
||||||
tops = za
|
tops = za
|
||||||
|
|
||||||
ptn = re.compile(excl.encode("utf-8") or b"\n")
|
ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
|
||||||
|
|
||||||
for top in tops:
|
for top in tops:
|
||||||
isdir = os.path.isdir(top)
|
isdir = os.path.isdir(top)
|
||||||
@@ -461,7 +488,7 @@ def quotep(btxt):
|
|||||||
if not PY2:
|
if not PY2:
|
||||||
quot1 = quot1.encode("ascii")
|
quot1 = quot1.encode("ascii")
|
||||||
|
|
||||||
return quot1.replace(b" ", b"+")
|
return quot1.replace(b" ", b"+") # type: ignore
|
||||||
|
|
||||||
|
|
||||||
# from copyparty/util.py
|
# from copyparty/util.py
|
||||||
@@ -498,7 +525,7 @@ def up2k_chunksize(filesize):
|
|||||||
|
|
||||||
# mostly from copyparty/up2k.py
|
# mostly from copyparty/up2k.py
|
||||||
def get_hashlist(file, pcb, mth):
|
def get_hashlist(file, pcb, mth):
|
||||||
# type: (File, any, any) -> None
|
# type: (File, Any, Any) -> None
|
||||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||||
|
|
||||||
chunk_sz = up2k_chunksize(file.size)
|
chunk_sz = up2k_chunksize(file.size)
|
||||||
@@ -506,6 +533,8 @@ def get_hashlist(file, pcb, mth):
|
|||||||
file_ofs = 0
|
file_ofs = 0
|
||||||
ret = []
|
ret = []
|
||||||
with open(file.abs, "rb", 512 * 1024) as f:
|
with open(file.abs, "rb", 512 * 1024) as f:
|
||||||
|
t0 = time.time()
|
||||||
|
|
||||||
if mth and file.size >= 1024 * 512:
|
if mth and file.size >= 1024 * 512:
|
||||||
ret = mth.hash(f, file.size, chunk_sz, pcb, file)
|
ret = mth.hash(f, file.size, chunk_sz, pcb, file)
|
||||||
file_rem = 0
|
file_rem = 0
|
||||||
@@ -532,10 +561,12 @@ def get_hashlist(file, pcb, mth):
|
|||||||
if pcb:
|
if pcb:
|
||||||
pcb(file, file_ofs)
|
pcb(file, file_ofs)
|
||||||
|
|
||||||
|
file.t_hash = time.time() - t0
|
||||||
file.cids = ret
|
file.cids = ret
|
||||||
file.kchunks = {}
|
file.kchunks = {}
|
||||||
for k, v1, v2 in ret:
|
for k, v1, v2 in ret:
|
||||||
file.kchunks[k] = [v1, v2]
|
if k not in file.kchunks:
|
||||||
|
file.kchunks[k] = [v1, v2]
|
||||||
|
|
||||||
|
|
||||||
def handshake(ar, file, search):
|
def handshake(ar, file, search):
|
||||||
@@ -557,8 +588,11 @@ def handshake(ar, file, search):
|
|||||||
}
|
}
|
||||||
if search:
|
if search:
|
||||||
req["srch"] = 1
|
req["srch"] = 1
|
||||||
elif ar.dr:
|
else:
|
||||||
req["replace"] = True
|
if ar.touch:
|
||||||
|
req["umod"] = True
|
||||||
|
if ar.ow:
|
||||||
|
req["replace"] = True
|
||||||
|
|
||||||
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
||||||
if pw:
|
if pw:
|
||||||
@@ -574,7 +608,8 @@ def handshake(ar, file, search):
|
|||||||
sc = 600
|
sc = 600
|
||||||
txt = ""
|
txt = ""
|
||||||
try:
|
try:
|
||||||
r = req_ses.post(url, headers=headers, json=req)
|
zs = json.dumps(req, separators=(",\n", ": "))
|
||||||
|
r = req_ses.post(url, headers=headers, data=zs)
|
||||||
sc = r.status_code
|
sc = r.status_code
|
||||||
txt = r.text
|
txt = r.text
|
||||||
if sc < 400:
|
if sc < 400:
|
||||||
@@ -598,7 +633,7 @@ def handshake(ar, file, search):
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||||
time.sleep(1)
|
time.sleep(ar.cd)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = r.json()
|
r = r.json()
|
||||||
@@ -621,13 +656,13 @@ def handshake(ar, file, search):
|
|||||||
return r["hash"], r["sprs"]
|
return r["hash"], r["sprs"]
|
||||||
|
|
||||||
|
|
||||||
def upload(file, cid, pw, stats):
|
def upload(fsl, pw, stats):
|
||||||
# type: (File, str, str, str) -> None
|
# type: (FileSlice, str, str) -> None
|
||||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
"""upload a range of file data, defined by one or more `cid` (chunk-hash)"""
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
"X-Up2k-Hash": cid,
|
"X-Up2k-Hash": ",".join(fsl.cids),
|
||||||
"X-Up2k-Wark": file.wark,
|
"X-Up2k-Wark": fsl.file.wark,
|
||||||
"Content-Type": "application/octet-stream",
|
"Content-Type": "application/octet-stream",
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -637,15 +672,24 @@ def upload(file, cid, pw, stats):
|
|||||||
if pw:
|
if pw:
|
||||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
f = FileSlice(file, cid)
|
|
||||||
try:
|
try:
|
||||||
r = req_ses.post(file.url, headers=headers, data=f)
|
r = req_ses.post(fsl.file.url, headers=headers, data=fsl)
|
||||||
|
|
||||||
|
if r.status_code == 400:
|
||||||
|
txt = r.text
|
||||||
|
if (
|
||||||
|
"already being written" in txt
|
||||||
|
or "already got that" in txt
|
||||||
|
or "only sibling chunks" in txt
|
||||||
|
):
|
||||||
|
fsl.file.nojoin = 1
|
||||||
|
|
||||||
if not r:
|
if not r:
|
||||||
raise Exception(repr(r))
|
raise Exception(repr(r))
|
||||||
|
|
||||||
_ = r.content
|
_ = r.content
|
||||||
finally:
|
finally:
|
||||||
f.f.close()
|
fsl.f.close()
|
||||||
|
|
||||||
|
|
||||||
class Ctl(object):
|
class Ctl(object):
|
||||||
@@ -689,6 +733,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
def __init__(self, ar, stats=None):
|
def __init__(self, ar, stats=None):
|
||||||
self.ok = False
|
self.ok = False
|
||||||
|
self.errs = 0
|
||||||
self.ar = ar
|
self.ar = ar
|
||||||
self.stats = stats or self._scan()
|
self.stats = stats or self._scan()
|
||||||
if not self.stats:
|
if not self.stats:
|
||||||
@@ -708,6 +753,8 @@ class Ctl(object):
|
|||||||
if ar.safe:
|
if ar.safe:
|
||||||
self._safe()
|
self._safe()
|
||||||
else:
|
else:
|
||||||
|
self.at_hash = 0.0
|
||||||
|
self.at_up = 0.0
|
||||||
self.hash_f = 0
|
self.hash_f = 0
|
||||||
self.hash_c = 0
|
self.hash_c = 0
|
||||||
self.hash_b = 0
|
self.hash_b = 0
|
||||||
@@ -727,7 +774,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.q_handshake = Queue() # type: Queue[File]
|
self.q_handshake = Queue() # type: Queue[File]
|
||||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
self.q_upload = Queue() # type: Queue[FileSlice]
|
||||||
|
|
||||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
@@ -736,7 +783,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
self._fancy()
|
self._fancy()
|
||||||
|
|
||||||
self.ok = True
|
self.ok = not self.errs
|
||||||
|
|
||||||
def _safe(self):
|
def _safe(self):
|
||||||
"""minimal basic slow boring fallback codepath"""
|
"""minimal basic slow boring fallback codepath"""
|
||||||
@@ -772,7 +819,8 @@ class Ctl(object):
|
|||||||
for nc, cid in enumerate(hs):
|
for nc, cid in enumerate(hs):
|
||||||
print(" {0} up {1}".format(ncs - nc, cid))
|
print(" {0} up {1}".format(ncs - nc, cid))
|
||||||
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
|
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
|
||||||
upload(file, cid, self.ar.a, stats)
|
fslice = FileSlice(file, [cid])
|
||||||
|
upload(fslice, self.ar.a, stats)
|
||||||
|
|
||||||
print(" ok!")
|
print(" ok!")
|
||||||
if file.recheck:
|
if file.recheck:
|
||||||
@@ -781,7 +829,7 @@ class Ctl(object):
|
|||||||
if not self.recheck:
|
if not self.recheck:
|
||||||
return
|
return
|
||||||
|
|
||||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
|
||||||
for file in self.recheck:
|
for file in self.recheck:
|
||||||
handshake(self.ar, file, search)
|
handshake(self.ar, file, search)
|
||||||
|
|
||||||
@@ -839,12 +887,12 @@ class Ctl(object):
|
|||||||
txt = " "
|
txt = " "
|
||||||
|
|
||||||
if not self.up_br:
|
if not self.up_br:
|
||||||
spd = self.hash_b / (time.time() - self.t0)
|
spd = self.hash_b / ((time.time() - self.t0) or 1)
|
||||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
eta = (self.nbytes - self.hash_b) / (spd or 1)
|
||||||
else:
|
else:
|
||||||
spd = self.up_br / (time.time() - self.t0_up)
|
spd = self.up_br / ((time.time() - self.t0_up) or 1)
|
||||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
eta = (self.nbytes - self.up_b) / (spd or 1)
|
||||||
|
|
||||||
spd = humansize(spd)
|
spd = humansize(spd)
|
||||||
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
||||||
@@ -855,10 +903,16 @@ class Ctl(object):
|
|||||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
|
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
|
||||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||||
|
|
||||||
|
spd = humansize(self.hash_b / self.at_hash)
|
||||||
|
eprint("\nhasher: %.2f sec, %s/s\n" % (self.at_hash, spd))
|
||||||
|
if self.up_b and self.at_up:
|
||||||
|
spd = humansize(self.up_b / self.at_up)
|
||||||
|
eprint("upload: %.2f sec, %s/s\n" % (self.at_up, spd))
|
||||||
|
|
||||||
if not self.recheck:
|
if not self.recheck:
|
||||||
return
|
return
|
||||||
|
|
||||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
|
||||||
for file in self.recheck:
|
for file in self.recheck:
|
||||||
handshake(self.ar, file, False)
|
handshake(self.ar, file, False)
|
||||||
|
|
||||||
@@ -870,6 +924,8 @@ class Ctl(object):
|
|||||||
self.st_hash = [file, ofs]
|
self.st_hash = [file, ofs]
|
||||||
|
|
||||||
def hasher(self):
|
def hasher(self):
|
||||||
|
ptn = re.compile(self.ar.x.encode("utf-8"), re.I) if self.ar.x else None
|
||||||
|
sep = "{0}".format(os.sep).encode("ascii")
|
||||||
prd = None
|
prd = None
|
||||||
ls = {}
|
ls = {}
|
||||||
for top, rel, inf in self.filegen:
|
for top, rel, inf in self.filegen:
|
||||||
@@ -902,13 +958,29 @@ class Ctl(object):
|
|||||||
if self.ar.drd:
|
if self.ar.drd:
|
||||||
dp = os.path.join(top, rd)
|
dp = os.path.join(top, rd)
|
||||||
lnodes = set(os.listdir(dp))
|
lnodes = set(os.listdir(dp))
|
||||||
bnames = [x for x in ls if x not in lnodes]
|
if ptn:
|
||||||
if bnames:
|
zs = dp.replace(sep, b"/").rstrip(b"/") + b"/"
|
||||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
zls = [zs + x for x in lnodes]
|
||||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
zls = [x for x in zls if not ptn.match(x)]
|
||||||
locs = [vpath + srd + "/" + x for x in names]
|
lnodes = [x.split(b"/")[-1] for x in zls]
|
||||||
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
bnames = [x for x in ls if x not in lnodes and x != b".hist"]
|
||||||
req_ses.post(self.ar.url + "?delete", json=locs)
|
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||||
|
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||||
|
locs = [vpath + srd + "/" + x for x in names]
|
||||||
|
while locs:
|
||||||
|
req = locs
|
||||||
|
while req:
|
||||||
|
print("DELETING ~%s/#%s" % (srd, len(req)))
|
||||||
|
r = req_ses.post(self.ar.url + "?delete", json=req)
|
||||||
|
if r.status_code == 413 and "json 2big" in r.text:
|
||||||
|
print(" (delete request too big; slicing...)")
|
||||||
|
req = req[: len(req) // 2]
|
||||||
|
continue
|
||||||
|
elif not r:
|
||||||
|
t = "delete request failed: %r %s"
|
||||||
|
raise Exception(t % (r, r.text))
|
||||||
|
break
|
||||||
|
locs = locs[len(req) :]
|
||||||
|
|
||||||
if isdir:
|
if isdir:
|
||||||
continue
|
continue
|
||||||
@@ -961,13 +1033,22 @@ class Ctl(object):
|
|||||||
self.q_upload.put(None)
|
self.q_upload.put(None)
|
||||||
break
|
break
|
||||||
|
|
||||||
with self.mutex:
|
|
||||||
self.handshaker_busy += 1
|
|
||||||
|
|
||||||
upath = file.abs.decode("utf-8", "replace")
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
if not VT100:
|
if not VT100:
|
||||||
upath = upath.lstrip("\\?")
|
upath = upath.lstrip("\\?")
|
||||||
|
|
||||||
|
file.nhs += 1
|
||||||
|
if file.nhs > 32:
|
||||||
|
print("ERROR: giving up on file %s" % (upath))
|
||||||
|
self.errs += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.handshaker_busy += 1
|
||||||
|
|
||||||
|
while time.time() < file.cd:
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
hs, sprs = handshake(self.ar, file, search)
|
hs, sprs = handshake(self.ar, file, search)
|
||||||
if search:
|
if search:
|
||||||
if hs:
|
if hs:
|
||||||
@@ -1017,48 +1098,87 @@ class Ctl(object):
|
|||||||
self.handshaker_busy -= 1
|
self.handshaker_busy -= 1
|
||||||
|
|
||||||
if not hs:
|
if not hs:
|
||||||
kw = "uploaded" if file.up_b else " found"
|
self.at_hash += file.t_hash
|
||||||
print("{0} {1}".format(kw, upath))
|
if file.up_b:
|
||||||
for cid in hs:
|
t_up = file.t1_up - file.t0_up
|
||||||
self.q_upload.put([file, cid])
|
self.at_up += t_up
|
||||||
|
|
||||||
|
if self.ar.spd:
|
||||||
|
if VT100:
|
||||||
|
c1 = "\033[36m"
|
||||||
|
c2 = "\033[0m"
|
||||||
|
else:
|
||||||
|
c1 = c2 = ""
|
||||||
|
|
||||||
|
spd_h = humansize(file.size / file.t_hash, True)
|
||||||
|
if file.up_b:
|
||||||
|
spd_u = humansize(file.size / t_up, True)
|
||||||
|
|
||||||
|
t = "uploaded %s %s(h:%.2fs,%s/s,up:%.2fs,%s/s)%s"
|
||||||
|
print(t % (upath, c1, file.t_hash, spd_h, t_up, spd_u, c2))
|
||||||
|
else:
|
||||||
|
t = " found %s %s(%.2fs,%s/s)%s"
|
||||||
|
print(t % (upath, c1, file.t_hash, spd_h, c2))
|
||||||
|
else:
|
||||||
|
kw = "uploaded" if file.up_b else " found"
|
||||||
|
print("{0} {1}".format(kw, upath))
|
||||||
|
|
||||||
|
cs = hs[:]
|
||||||
|
while cs:
|
||||||
|
fsl = FileSlice(file, cs[:1])
|
||||||
|
try:
|
||||||
|
if file.nojoin:
|
||||||
|
raise Exception()
|
||||||
|
for n in range(2, min(len(cs), self.ar.sz) + 1):
|
||||||
|
fsl = FileSlice(file, cs[:n])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
cs = cs[len(fsl.cids) :]
|
||||||
|
self.q_upload.put(fsl)
|
||||||
|
|
||||||
def uploader(self):
|
def uploader(self):
|
||||||
while True:
|
while True:
|
||||||
task = self.q_upload.get()
|
fsl = self.q_upload.get()
|
||||||
if not task:
|
if not fsl:
|
||||||
self.st_up = [None, "(finished)"]
|
self.st_up = [None, "(finished)"]
|
||||||
break
|
break
|
||||||
|
|
||||||
|
file = fsl.file
|
||||||
|
cids = fsl.cids
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.uploader_busy += 1
|
self.uploader_busy += 1
|
||||||
self.t0_up = self.t0_up or time.time()
|
if not file.t0_up:
|
||||||
|
file.t0_up = time.time()
|
||||||
|
if not self.t0_up:
|
||||||
|
self.t0_up = file.t0_up
|
||||||
|
|
||||||
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
|
stats = "%d/%d/%d/%d %d/%d %s" % (
|
||||||
stats = zs.format(
|
|
||||||
self.up_f,
|
self.up_f,
|
||||||
len(self.recheck),
|
len(self.recheck),
|
||||||
self.uploader_busy,
|
self.uploader_busy,
|
||||||
self.nfiles - self.up_f,
|
self.nfiles - self.up_f,
|
||||||
int(self.nbytes / (1024 * 1024)),
|
self.nbytes // (1024 * 1024),
|
||||||
int((self.nbytes - self.up_b) / (1024 * 1024)),
|
(self.nbytes - self.up_b) // (1024 * 1024),
|
||||||
self.eta,
|
self.eta,
|
||||||
)
|
)
|
||||||
|
|
||||||
file, cid = task
|
|
||||||
try:
|
try:
|
||||||
upload(file, cid, self.ar.a, stats)
|
upload(fsl, self.ar.a, stats)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
t = "upload failed, retrying: %s #%s+%d (%s)\n"
|
||||||
eprint(t.format(file.name, cid[:8], ex))
|
eprint(t % (file.name, cids[0][:8], len(cids) - 1, ex))
|
||||||
|
file.cd = time.time() + self.ar.cd
|
||||||
# handshake will fix it
|
# handshake will fix it
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
sz = file.kchunks[cid][1]
|
sz = fsl.len
|
||||||
file.ucids = [x for x in file.ucids if x != cid]
|
file.ucids = [x for x in file.ucids if x not in cids]
|
||||||
if not file.ucids:
|
if not file.ucids:
|
||||||
|
file.t1_up = time.time()
|
||||||
self.q_handshake.put(file)
|
self.q_handshake.put(file)
|
||||||
|
|
||||||
self.st_up = [file, cid]
|
self.st_up = [file, cids[0]]
|
||||||
file.up_b += sz
|
file.up_b += sz
|
||||||
self.up_b += sz
|
self.up_b += sz
|
||||||
self.up_br += sz
|
self.up_br += sz
|
||||||
@@ -1101,10 +1221,13 @@ source file/folder selection uses rsync syntax, meaning that:
     ap.add_argument("url", type=unicode, help="server url, including destination folder")
     ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
     ap.add_argument("-v", action="store_true", help="verbose")
-    ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
+    ap.add_argument("-a", metavar="PASSWD", help="password or $filepath")
     ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
-    ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
+    ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
     ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
+    ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
+    ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
+    ap.add_argument("--spd", action="store_true", help="print speeds for each file")
     ap.add_argument("--version", action="store_true", help="show version and exit")

     ap = app.add_argument_group("compatibility")
@@ -1113,19 +1236,21 @@ source file/folder selection uses rsync syntax, meaning that:

     ap = app.add_argument_group("folder sync")
     ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
-    ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally")
+    ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally (implies --ow)")
     ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")

     ap = app.add_argument_group("performance tweaks")
-    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
-    ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
+    ap.add_argument("-j", type=int, metavar="CONNS", default=2, help="parallel connections")
+    ap.add_argument("-J", type=int, metavar="CORES", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
+    ap.add_argument("--sz", type=int, metavar="MiB", default=64, help="try to make each POST this big")
     ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
     ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
+    ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
     ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
     ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

     ap = app.add_argument_group("tls")
-    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
+    ap.add_argument("-te", metavar="PATH", help="path to ca.pem or cert.pem to expect/verify")
     ap.add_argument("-td", action="store_true", help="disable certificate check")
     # fmt: on

@@ -1142,6 +1267,9 @@ source file/folder selection uses rsync syntax, meaning that:
     if ar.drd:
         ar.dr = True

+    if ar.dr:
+        ar.ow = True
+
     for k in "dl dr drd".split():
         errs = []
         if ar.safe and getattr(ar, k):
@@ -1160,6 +1288,14 @@ source file/folder selection uses rsync syntax, meaning that:
     if "://" not in ar.url:
         ar.url = "http://" + ar.url

+    if "https://" in ar.url.lower():
+        try:
+            import ssl, zipfile
+        except:
+            t = "ERROR: https is not available for some reason; please use http"
+            print("\n\n %s\n\n" % (t,))
+            raise
+
     if ar.a and ar.a.startswith("$"):
         fn = ar.a[1:]
         print("reading password from file [{0}]".format(fn))
@@ -1187,6 +1323,9 @@ source file/folder selection uses rsync syntax, meaning that:
     ar.z = True
     ctl = Ctl(ar, ctl.stats)

+    if ctl.errs:
+        print("WARNING: %d errors" % (ctl.errs))
+
     sys.exit(0 if ctl.ok else 1)
@@ -66,7 +66,7 @@ def main():
             ofs = ln.find("{")
             j = json.loads(ln[ofs:])
         except:
-            pass
+            continue

         w = j["wark"]
         if db.execute("select w from up where w = ?", (w,)).fetchone():
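The hunk above skips log lines that carry no JSON payload and then looks up the upload's wark (the content-hash id) in the `up` table. A hedged sketch of that loop as a standalone script; the table/column name and the "wark" key are taken from the hunk, everything else (paths, function name) is hypothetical:

    import json
    import sqlite3

    def already_uploaded(db_path, log_path):
        """yield warks from a copyparty-style log that already exist in the db"""
        db = sqlite3.connect(db_path)
        with open(log_path, "r", encoding="utf-8", errors="replace") as f:
            for ln in f:
                ofs = ln.find("{")
                if ofs < 0:
                    continue
                try:
                    j = json.loads(ln[ofs:])
                except:
                    continue  # not a json line; skip it instead of crashing
                w = j.get("wark")
                if w and db.execute("select w from up where w = ?", (w,)).fetchone():
                    yield w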
@@ -16,11 +16,13 @@
 * sharex config file to upload screenshots and grab the URL
 * `RequestURL`: full URL to the target folder
 * `pw`: password (remove the `pw` line if anon-write)
+* the `act:bput` thing is optional since copyparty v1.9.29
+* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)

-however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
-* `RequestURL`: full URL to the target folder
-* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
-* `pw`: password (remove `Parameters` if anon-write)
+### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
+* browser integration, kind of? custom rightclick actions and stuff
+* rightclick a pic and send it to copyparty straight from your browser
+* for the [contextlet](https://addons.mozilla.org/en-US/firefox/addon/contextlets/) firefox extension

 ### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
 * disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
@@ -11,9 +11,17 @@
 # (5'000 requests per second, or 20gbps upload/download in parallel)
 #
 # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
+#
+# if you are behind cloudflare (or another protection service),
+# remember to reject all connections which are not coming from your
+# protection service -- for cloudflare in particular, you can
+# generate the list of permitted IP ranges like so:
+# (curl -s https://www.cloudflare.com/ips-v{4,6} | sed 's/^/allow /; s/$/;/'; echo; echo "deny all;") > /etc/nginx/cloudflare-only.conf
+#
+# and then enable it below by uncommenting the cloudflare-only.conf line

 upstream cpp {
-    server 127.0.0.1:3923;
+    server 127.0.0.1:3923 fail_timeout=1s;
     keepalive 1;
 }
 server {
@@ -21,7 +29,10 @@ server {
     listen [::]:443 ssl;

     server_name fs.example.com;

+    # uncomment the following line to reject non-cloudflare connections, ensuring client IPs cannot be spoofed:
+    #include /etc/nginx/cloudflare-only.conf;
+
     location / {
         proxy_pass http://cpp;
         proxy_redirect off;
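The nginx comments above generate /etc/nginx/cloudflare-only.conf with a shell one-liner. For anyone who prefers a script, here is a rough Python equivalent; the output path and the "allow ...; / deny all;" format follow the comment, everything else is an assumption, not part of copyparty:

    # write "allow <range>;" lines for cloudflare's published ranges, then "deny all;"
    import urllib.request

    def write_cloudflare_allowlist(dst="/etc/nginx/cloudflare-only.conf"):
        urls = ("https://www.cloudflare.com/ips-v4", "https://www.cloudflare.com/ips-v6")
        rules = []
        for url in urls:
            with urllib.request.urlopen(url) as r:
                txt = r.read().decode("utf-8")
            rules += ["allow %s;" % (ln.strip(),) for ln in txt.splitlines() if ln.strip()]
        rules.append("deny all;")
        with open(dst, "w") as f:
            f.write("\n".join(rules) + "\n")

    if __name__ == "__main__":
        write_cloudflare_allowlist()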
@@ -1,14 +1,15 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.9.9"
+pkgver="1.13.4"
 pkgrel=1
-pkgdesc="Portable file sharing hub"
+pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
 arch=("any")
 url="https://github.com/9001/${pkgname}"
 license=('MIT')
 depends=("python" "lsof" "python-jinja")
 makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
 optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
+            "cfssl: generate TLS certificates on startup (pointless when reverse-proxied)"
             "python-mutagen: music tags (alternative)"
             "python-pillow: thumbnails for images"
             "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
@@ -20,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
 )
 source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("756bb13d136dcd8629a6449a9fcb6c3949ae499bcfb2b8efdb48e5b12c5c1ca1")
+sha256sums=("b7039feca555e8f08ee7bd980ff37190f26d9bf544fc88ca3dcbadc19a13912e")

 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
@@ -1,11 +1,11 @@
 # this will start `/usr/bin/copyparty-sfx.py`
-# in a chroot, preventing accidental access elsewhere
-# and read config from `/etc/copyparty.d/*.conf`
+# in a chroot, preventing accidental access elsewhere,
+# and read copyparty config from `/etc/copyparty.d/*.conf`
 #
 # expose additional filesystem locations to copyparty
-# by listing them between the last `1000` and `--`
+# by listing them between the last `cpp` and `--`
 #
-# `1000 1000` = what user to run copyparty as
+# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
 #
 # unless you add -q to disable logging, you may want to remove the
 # following line to allow buffering (slightly better performance):
@@ -24,7 +24,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
 ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

 # run copyparty
-ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
+ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
+    /etc/copyparty.d \
+    -- \
     /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init

 [Install]
@@ -3,6 +3,9 @@
   # use argon2id-hashed passwords in config files (sha2 is always available)
   withHashedPasswords ? true,

+  # generate TLS certificates on startup (pointless when reverse-proxied)
+  withCertgen ? false,
+
   # create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
   withThumbnails ? true,

@@ -34,6 +37,7 @@ let
     ]
     ++ lib.optional withSMB impacket
     ++ lib.optional withFTPS pyopenssl
+    ++ lib.optional withCertgen cfssl
     ++ lib.optional withThumbnails pillow
     ++ lib.optional withFastThumbnails pyvips
     ++ lib.optional withMediaProcessing ffmpeg
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.9.9/copyparty-sfx.py",
-  "version": "1.9.9",
-  "hash": "sha256-z8CvPYBwh8Nt0z/JtzKjJpVGul+rJ3zmWcqltUCnsdc="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.13.4/copyparty-sfx.py",
+  "version": "1.13.4",
+  "hash": "sha256-KqT6mtbLbPwoQKiDbkcBb0EvgZ+VDXB3A0BWiIPpbqs="
 }
@@ -10,7 +10,7 @@ name="copyparty"
 rcvar="copyparty_enable"
 copyparty_user="copyparty"
 copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
-copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
+copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
 pidfile="/var/run/copyparty/${name}.pid"
 command="/usr/sbin/daemon"
 command_args="-P ${pidfile} -r -f ${copyparty_command}"
contrib/send-to-cpp.contextlet.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+{
+    "code": "// https://addons.mozilla.org/en-US/firefox/addon/contextlets/\n// https://github.com/davidmhammond/contextlets\n\nvar url = 'http://partybox.local:3923/';\nvar pw = 'wark';\n\nvar xhr = new XMLHttpRequest();\nxhr.msg = this.info.linkUrl || this.info.srcUrl;\nxhr.open('POST', url, true);\nxhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');\nxhr.setRequestHeader('PW', pw);\nxhr.send('msg=' + xhr.msg);\n",
+    "contexts": [
+        "link"
+    ],
+    "icons": null,
+    "patterns": "",
+    "scope": "background",
+    "title": "send to cpp",
+    "type": "normal"
+}
(deleted file)
@@ -1,19 +0,0 @@
-{
-  "Version": "13.5.0",
-  "Name": "copyparty-html",
-  "DestinationType": "ImageUploader",
-  "RequestMethod": "POST",
-  "RequestURL": "http://127.0.0.1:3923/sharex",
-  "Parameters": {
-    "pw": "wark"
-  },
-  "Body": "MultipartFormData",
-  "Arguments": {
-    "act": "bput"
-  },
-  "FileFormName": "f",
-  "RegexList": [
-    "bytes // <a href=\"/([^\"]+)\""
-  ],
-  "URL": "http://127.0.0.1:3923/$regex:1|1$"
-}
@@ -1,17 +1,19 @@
 {
-  "Version": "13.5.0",
+  "Version": "15.0.0",
   "Name": "copyparty",
   "DestinationType": "ImageUploader",
   "RequestMethod": "POST",
   "RequestURL": "http://127.0.0.1:3923/sharex",
   "Parameters": {
-    "pw": "wark",
     "j": null
   },
+  "Headers": {
+    "pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
+  },
   "Body": "MultipartFormData",
   "Arguments": {
     "act": "bput"
   },
   "FileFormName": "f",
-  "URL": "$json:files[0].url$"
+  "URL": "{json:files[0].url}"
 }
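The sharex configs above describe the upload request itself: a multipart POST with the file in form-field "f", an "act=bput" argument, and the password in a "pw" header. A hedged Python sketch of the same request, handy for testing outside sharex; the URL and password are placeholders, and the requests library is assumed to be installed (it is not part of copyparty):

    import requests  # assumed available

    def bput(path, url="http://127.0.0.1:3923/sharex", pw="PUT_YOUR_PASSWORD_HERE_MY_DUDE"):
        with open(path, "rb") as f:
            r = requests.post(
                url,
                headers={"pw": pw, "accept": "url"},  # "accept: url" asks for a plaintext link back
                data={"act": "bput"},                  # same form argument as in the sxcu files
                files={"f": (path.split("/")[-1], f)},
            )
        r.raise_for_status()
        return r.text.strip()

    print(bput("screenshot.png"))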
contrib/sharex12.sxcu (new file, 13 lines)
@@ -0,0 +1,13 @@
+{
+  "Name": "copyparty",
+  "DestinationType": "ImageUploader, TextUploader, FileUploader",
+  "RequestURL": "http://127.0.0.1:3923/sharex",
+  "FileFormName": "f",
+  "Arguments": {
+    "act": "bput"
+  },
+  "Headers": {
+    "accept": "url",
+    "pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
+  }
+}
contrib/systemd/copyparty.conf (new file, 42 lines)
@@ -0,0 +1,42 @@
+# not actually YAML but lets pretend:
+# -*- mode: yaml -*-
+# vim: ft=yaml:
+
+
+# put this file in /etc/
+
+
+[global]
+  e2dsa  # enable file indexing and filesystem scanning
+  e2ts   # and enable multimedia indexing
+  ansi   # and colors in log messages
+
+  # disable logging to stdout/journalctl and log to a file instead;
+  # $LOGS_DIRECTORY is usually /var/log/copyparty (comes from systemd)
+  # and copyparty replaces %Y-%m%d with Year-MonthDay, so the
+  # full path will be something like /var/log/copyparty/2023-1130.txt
+  # (note: enable compression by adding .xz at the end)
+  q, lo: $LOGS_DIRECTORY/%Y-%m%d.log
+
+  # p: 80,443,3923       # listen on 80/443 as well (requires CAP_NET_BIND_SERVICE)
+  # i: 127.0.0.1         # only allow connections from localhost (reverse-proxies)
+  # ftp: 3921            # enable ftp server on port 3921
+  # p: 3939              # listen on another port
+  # df: 16               # stop accepting uploads if less than 16 GB free disk space
+  # ver                  # show copyparty version in the controlpanel
+  # grid                 # show thumbnails/grid-view by default
+  # theme: 2             # monokai
+  # name: datasaver      # change the server-name that's displayed in the browser
+  # stats, nos-dup       # enable the prometheus endpoint, but disable the dupes counter (too slow)
+  # no-robots, force-js  # make it harder for search engines to read your server
+
+
+[accounts]
+  ed: wark   # username: password
+
+
+[/]          # create a volume at "/" (the webroot), which will
+  /mnt       # share the contents of the "/mnt" folder
+  accs:
+    rw: *        # everyone gets read-write access, but
+    rwmda: ed    # the user "ed" gets read-write-move-delete-admin
@@ -1,28 +1,34 @@
-# this will start `/usr/local/bin/copyparty-sfx.py`
-# and share '/mnt' with anonymous read+write
+# this will start `/usr/local/bin/copyparty-sfx.py` and
+# read copyparty config from `/etc/copyparty.conf`, for example:
+# https://github.com/9001/copyparty/blob/hovudstraum/contrib/systemd/copyparty.conf
 #
 # installation:
 #   wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
-#   cp -pv copyparty.service /etc/systemd/system/
-#   restorecon -vr /etc/systemd/system/copyparty.service  # on fedora/rhel
-#   firewall-cmd --permanent --add-port={80,443,3923}/tcp  # --zone=libvirt
+#   useradd -r -s /sbin/nologin -m -d /var/lib/copyparty copyparty
+#   firewall-cmd --permanent --add-port=3923/tcp  # --zone=libvirt
 #   firewall-cmd --reload
+#   cp -pv copyparty.service /etc/systemd/system/
+#   cp -pv copyparty.conf /etc/
+#   restorecon -vr /etc/systemd/system/copyparty.service  # on fedora/rhel
 #   systemctl daemon-reload && systemctl enable --now copyparty
 #
+# every time you edit this file, you must "systemctl daemon-reload"
+# for the changes to take effect and then "systemctl restart copyparty"
+#
 # if it fails to start, first check this: systemctl status copyparty
-# then try starting it while viewing logs: journalctl -fan 100
+# then try starting it while viewing logs:
+#   journalctl -fan 100
+#   tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
+#
+# if you run into any issues, for example thumbnails not working,
+# try removing the "some quick hardening" section and then please
+# let me know if that actually helped so we can look into it
 #
 # you may want to:
-#   change "User=cpp" and "/home/cpp/" to another user
-#   remove the nft lines to only listen on port 3923
+#  - change "User=copyparty" and "/var/lib/copyparty/" to another user
+#  - edit /etc/copyparty.conf to configure copyparty
 # and in the ExecStart= line:
-#   change '/usr/bin/python3' to another interpreter
-#   change '/mnt::rw' to another location or permission-set
-#   add '-q' to disable logging on busy servers
-#   add '-i 127.0.0.1' to only allow local connections
-#   add '-e2dsa' to enable filesystem scanning + indexing
-#   add '-e2ts' to enable metadata indexing
-#   remove '--ansi' to disable colored logs
+#  - change '/usr/bin/python3' to another interpreter
 #
 # with `Type=notify`, copyparty will signal systemd when it is ready to
 # accept connections; correctly delaying units depending on copyparty.
@@ -30,11 +36,9 @@
 # python disabling line-buffering, so messages are out-of-order:
 # https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
 #
-# unless you add -q to disable logging, you may want to remove the
-# following line to allow buffering (slightly better performance):
-# Environment=PYTHONUNBUFFERED=x
-#
-# keep ExecStartPre before ExecStart, at least on rhel8
+########################################################################
+########################################################################

 [Unit]
 Description=copyparty file server
@@ -44,23 +48,52 @@ Type=notify
 SyslogIdentifier=copyparty
 Environment=PYTHONUNBUFFERED=x
 ExecReload=/bin/kill -s USR1 $MAINPID
+PermissionsStartOnly=true
+
-# user to run as + where the TLS certificate is (if any)
-User=cpp
-Environment=XDG_CONFIG_HOME=/home/cpp/.config
+## user to run as + where the TLS certificate is (if any)
+##
+User=copyparty
+Group=copyparty
+WorkingDirectory=/var/lib/copyparty
+Environment=XDG_CONFIG_HOME=/var/lib/copyparty/.config

-# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
-ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
-ExecStartPre=+nft add table ip nat
-ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
-ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
-ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
+## OPTIONAL: allow copyparty to listen on low ports (like 80/443);
+## you need to uncomment the "p: 80,443,3923" in the config too
+## ------------------------------------------------------------
+## a slightly safer alternative is to enable partyalone.service
+## which does portforwarding with nftables instead, but an even
+## better option is to use a reverse-proxy (nginx/caddy/...)
+##
+AmbientCapabilities=CAP_NET_BIND_SERVICE

-# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
-ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
+## some quick hardening; TODO port more from the nixos package
+##
+MemoryMax=50%
+MemorySwapMax=50%
+ProtectClock=true
+ProtectControlGroups=true
+ProtectHostname=true
+ProtectKernelLogs=true
+ProtectKernelModules=true
+ProtectKernelTunables=true
+ProtectProc=invisible
+RemoveIPC=true
+RestrictNamespaces=true
+RestrictRealtime=true
+RestrictSUIDSGID=true

-# copyparty settings
-ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
+## create a directory for logfiles;
+## this defines $LOGS_DIRECTORY which is used in copyparty.conf
+##
+LogsDirectory=copyparty
+
+## finally, start copyparty and give it the config file:
+##
+ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -c /etc/copyparty.conf
+
+# NOTE: if you installed copyparty from an OS package repo (nice)
+# then you probably want something like this instead:
+#ExecStart=/usr/bin/copyparty -c /etc/copyparty.conf

 [Install]
 WantedBy=multi-user.target
@@ -1,5 +1,5 @@
 # this will start `/usr/local/bin/copyparty-sfx.py`
-# in a chroot, preventing accidental access elsewhere
+# in a chroot, preventing accidental access elsewhere,
 # and share '/mnt' with anonymous read+write
 #
 # installation:
@@ -7,9 +7,9 @@
 # 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
 #
 # expose additional filesystem locations to copyparty
-# by listing them between the last `1000` and `--`
+# by listing them between the last `cpp` and `--`
 #
-# `1000 1000` = what user to run copyparty as
+# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
 #
 # you may want to:
 #   change '/mnt::rw' to another location or permission-set
@@ -32,7 +32,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
 ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

 # run copyparty
-ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
+ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail cpp cpp \
+    /mnt \
+    -- \
     /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw

 [Install]
@@ -23,7 +23,7 @@ if not PY2:
     unicode: Callable[[Any], str] = str
 else:
     sys.dont_write_bytecode = True
-    unicode = unicode  # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
+    unicode = unicode  # type: ignore

 WINDOWS: Any = (
     [int(x) for x in platform.version().split(".")]
@@ -56,7 +56,6 @@ class EnvParams(object):
         self.t0 = time.time()
         self.mod = ""
         self.cfg = ""
-        self.ox = getattr(sys, "oxidized", None)


 E = EnvParams()
copyparty/__main__.py (655 changed lines, executable file → normal file): file diff suppressed because it is too large.

@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 9, 10)
-CODENAME = "prometheable"
-BUILD_DT = (2023, 10, 8)
+VERSION = (1, 13, 5)
+CODENAME = "race the beam"
+BUILD_DT = (2024, 7, 22)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
(another file diff suppressed because it is too large)

@@ -43,6 +43,10 @@ def open(p: str, *a, **ka) -> int:
     return os.open(fsenc(p), *a, **ka)


+def readlink(p: str) -> str:
+    return fsdec(os.readlink(fsenc(p)))
+
+
 def rename(src: str, dst: str) -> None:
     return os.rename(fsenc(src), fsenc(dst))

@@ -46,8 +46,8 @@ class BrokerMp(object):
         self.num_workers = self.args.j or CORES
         self.log("broker", "booting {} subprocesses".format(self.num_workers))
         for n in range(1, self.num_workers + 1):
-            q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
-            q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
+            q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)  # type: ignore
+            q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)  # type: ignore

             proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
             Daemon(self.collector, "mp-sink-{}".format(n), (proc,))
@@ -57,11 +57,8 @@ class BrokerMp(object):
     def shutdown(self) -> None:
         self.log("broker", "shutting down")
         for n, proc in enumerate(self.procs):
-            thr = threading.Thread(
-                target=proc.q_pend.put((0, "shutdown", [])),
-                name="mp-shutdown-{}-{}".format(n, len(self.procs)),
-            )
-            thr.start()
+            name = "mp-shut-%d-%d" % (n, len(self.procs))
+            Daemon(proc.q_pend.put, name, ((0, "shutdown", []),))

         with self.mutex:
             procs = self.procs
@@ -76,7 +76,7 @@ class MpWorker(BrokerCli):
         pass

     def logw(self, msg: str, c: Union[int, str] = 0) -> None:
-        self.log("mp{}".format(self.n), msg, c)
+        self.log("mp%d" % (self.n,), msg, c)

     def main(self) -> None:
         while True:
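The shutdown change above broadcasts the "shutdown" message to each worker from a daemonized thread (copyparty's Daemon helper), so one stuck queue cannot hang the whole shutdown; it also appears to fix the old code, which called q_pend.put(...) immediately and handed its return value to the thread as the target. A hedged sketch of the same pattern using plain threading.Thread in place of the Daemon helper:

    import threading

    def broadcast_shutdown(procs):
        """push a shutdown message to every worker without blocking the caller"""
        for n, proc in enumerate(procs):
            t = threading.Thread(
                target=proc.q_pend.put,           # the callable itself,
                args=((0, "shutdown", []),),      # not q_pend.put(...) already evaluated
                name="mp-shut-%d-%d" % (n, len(procs)),
            )
            t.daemon = True
            t.start()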
@@ -28,7 +28,7 @@ class ExceptionalQueue(Queue, object):
             if rv[1] == "pebkac":
                 raise Pebkac(*rv[2:])
             else:
-                raise Exception(rv[2])
+                raise rv[2]

         return rv

@@ -65,8 +65,8 @@ def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:

         return ["exception", "pebkac", ex.code, str(ex)]

-    except:
+    except Exception as ex:
         if not want_retval:
             raise

-        return ["exception", "stack", traceback.format_exc()]
+        return ["exception", "stack", ex]
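The change above forwards the original exception object through the broker queue and re-raises it on the other side, instead of flattening it into a traceback string. A minimal, self-contained sketch of that round-trip, assuming a plain queue.Queue (the real code uses copyparty's ExceptionalQueue and try_exec, and a cross-process queue would additionally require the exception to be picklable):

    import queue

    def run_and_report(q, func, *args):
        """worker side: push either the result or the exception object"""
        try:
            q.put(("ok", func(*args)))
        except Exception as ex:
            q.put(("exception", ex))

    def get_result(q):
        """caller side: re-raise the original exception, keeping its type"""
        kind, val = q.get()
        if kind == "exception":
            raise val  # the actual exception object, not a stringified traceback
        return val

    q = queue.Queue()
    run_and_report(q, int, "not-a-number")
    try:
        get_result(q)
    except ValueError as ex:
        print("caught original exception type:", type(ex).__name__, "-", ex)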
@@ -6,12 +6,19 @@ import os
 import shutil
 import time

-from .util import Netdev, runcmd
+from .__init__ import ANYWIN
+from .util import Netdev, runcmd, wrename, wunlink

 HAVE_CFSSL = True

 if True:  # pylint: disable=using-constant-test
-    from .util import RootLogger
+    from .util import NamedLogger, RootLogger


+if ANYWIN:
+    VF = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}
+else:
+    VF = {"mv_re_t": 0, "rm_re_t": 0}
+
+
 def ensure_cert(log: "RootLogger", args) -> None:
@@ -76,6 +83,8 @@ def _read_crt(args, fn):


 def _gen_ca(log: "RootLogger", args):
+    nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-ca", msg, c)
+
     expiry = _read_crt(args, "ca.pem")[0]
     if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
         return
@@ -105,13 +114,19 @@ def _gen_ca(log: "RootLogger", args):
         raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)

     bname = os.path.join(args.crt_dir, "ca")
-    os.rename(bname + "-key.pem", bname + ".key")
-    os.unlink(bname + ".csr")
+    try:
+        wunlink(nlog, bname + ".key", VF)
+    except:
+        pass
+    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    wunlink(nlog, bname + ".csr", VF)

     log("cert", "new ca OK", 2)


 def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
+    nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-srv", msg, c)
+
     names = args.crt_ns.split(",") if args.crt_ns else []
     if not args.crt_exact:
         for n in names[:]:
@@ -132,7 +147,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):

     try:
         expiry, inf = _read_crt(args, "srv.pem")
-        expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
+        if "sans" not in inf:
+            raise Exception("no useable cert found")
+
+        expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
         cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
         for n in names:
             if n not in inf["sans"]:
@@ -182,11 +200,11 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):

     bname = os.path.join(args.crt_dir, "srv")
     try:
-        os.unlink(bname + ".key")
+        wunlink(nlog, bname + ".key", VF)
     except:
         pass
-    os.rename(bname + "-key.pem", bname + ".key")
-    os.unlink(bname + ".csr")
+    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    wunlink(nlog, bname + ".csr", VF)

     with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
         ca = f.read()
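The cert.py changes above swap bare os.rename/os.unlink for wrename/wunlink, which (per the VF dict) retry for a few seconds on Windows where antivirus or indexers often hold files open briefly. A hedged, standalone sketch of that retry idea; the function names and timings here are illustrative, not copyparty's actual wrename/wunlink implementation:

    import os
    import time

    def retried(op, *paths, timeout=5.0, interval=0.1):
        """keep retrying a filesystem op until it succeeds or the timeout expires"""
        t0 = time.time()
        while True:
            try:
                return op(*paths)
            except OSError:
                if time.time() - t0 >= timeout:
                    raise
                time.sleep(interval)

    # usage sketch: rename a freshly-written key file, retrying on transient locks
    # retried(os.rename, "ca-key.pem", "ca.key")
    # retried(os.unlink, "ca.csr")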
@@ -9,27 +9,40 @@ onedash = set(zs.split())
 def vf_bmap() -> dict[str, str]:
     """argv-to-volflag: simple bools"""
     ret = {
+        "dav_auth": "davauth",
+        "dav_rt": "davrt",
+        "ed": "dots",
         "never_symlink": "neversymlink",
         "no_dedup": "copydupes",
         "no_dupe": "nodupe",
         "no_forget": "noforget",
-        "th_no_crop": "nocrop",
-        "dav_auth": "davauth",
-        "dav_rt": "davrt",
+        "no_pipe": "nopipe",
+        "no_robots": "norobots",
+        "no_thumb": "dthumb",
+        "no_vthumb": "dvthumb",
+        "no_athumb": "dathumb",
     }
     for k in (
         "dotsrch",
+        "e2d",
+        "e2ds",
+        "e2dsa",
         "e2t",
         "e2ts",
         "e2tsr",
         "e2v",
         "e2vu",
         "e2vp",
+        "exp",
         "grid",
+        "gsel",
         "hardlink",
         "magic",
         "no_sb_md",
         "no_sb_lg",
+        "og",
+        "og_no_head",
+        "og_s_title",
         "rand",
         "xdev",
         "xlink",
@@ -41,8 +54,38 @@ def vf_bmap() -> dict[str, str]:

 def vf_vmap() -> dict[str, str]:
     """argv-to-volflag: simple values"""
-    ret = {"th_convt": "convt", "th_size": "thsize"}
-    for k in ("dbd", "lg_sbf", "md_sbf", "nrand", "unlist"):
+    ret = {
+        "no_hash": "nohash",
+        "no_idx": "noidx",
+        "re_maxage": "scan",
+        "th_convt": "convt",
+        "th_size": "thsize",
+        "th_crop": "crop",
+        "th_x3": "th3x",
+    }
+    for k in (
+        "dbd",
+        "html_head",
+        "lg_sbf",
+        "md_sbf",
+        "nrand",
+        "og_desc",
+        "og_site",
+        "og_th",
+        "og_title",
+        "og_title_a",
+        "og_title_v",
+        "og_title_i",
+        "og_tpl",
+        "og_ua",
+        "mv_retry",
+        "rm_retry",
+        "sort",
+        "tcolor",
+        "unlist",
+        "u2abort",
+        "u2ts",
+    ):
         ret[k] = k
     return ret

@@ -50,7 +93,22 @@ def vf_vmap() -> dict[str, str]:
 def vf_cmap() -> dict[str, str]:
     """argv-to-volflag: complex/lists"""
     ret = {}
-    for k in ("html_head", "mte", "mth"):
+    for k in (
+        "exp_lg",
+        "exp_md",
+        "mte",
+        "mth",
+        "mtp",
+        "xad",
+        "xar",
+        "xau",
+        "xban",
+        "xbd",
+        "xbr",
+        "xbu",
+        "xiu",
+        "xm",
+    ):
         ret[k] = k
     return ret

@@ -60,10 +118,12 @@ permdescs = {
     "w": 'write; upload files; need "r" to see the uploads',
     "m": 'move; move files and folders; need "w" at destination',
     "d": "delete; permanently delete files and folders",
+    ".": "dots; user can ask to show dotfiles in listings",
     "g": "get; download files, but cannot see folder contents",
     "G": 'upget; same as "g" but can see filekeys of their own uploads',
     "h": 'html; same as "g" but folders return their index.html',
     "a": "admin; can see uploader IPs, config-reload",
+    "A": "all; same as 'rwmda.' (read/write/move/delete/dotfiles)",
 }


@@ -73,6 +133,7 @@ flagcats = {
     "hardlink": "does dedup with hardlinks instead of symlinks",
     "neversymlink": "disables symlink fallback; full copy instead",
     "copydupes": "disables dedup, always saves full copies of dupes",
+    "sparse": "force use of sparse files, mainly for s3-backed storage",
     "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
     "nosub": "forces all uploads into the top folder of the vfs",
     "magic": "enables filetype detection for nameless uploads",
@@ -84,8 +145,11 @@ flagcats = {
     "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
     "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
     "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
+    "medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
     "rand": "force randomized filenames, 9 chars long by default",
     "nrand=N": "randomized filenames are N chars long",
+    "u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
+    "u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
     "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
     "df=1g": "ensure 1 GiB free disk space",
 },
@@ -128,8 +192,10 @@ flagcats = {
     "dvthumb": "disables video thumbnails",
     "dathumb": "disables audio thumbnails (spectrograms)",
     "dithumb": "disables image thumbnails",
+    "pngquant": "compress audio waveforms 33% better",
     "thsize": "thumbnail res; WxH",
-    "nocrop": "disable center-cropping",
+    "crop": "center-cropping (y/n/fy/fn)",
+    "th3x": "3x resolution (y/n/fy/fn)",
     "convt": "conversion timeout in seconds",
 },
 "handlers\n(better explained in --help-handlers)": {
@@ -149,8 +215,10 @@ flagcats = {
 },
 "client and ux": {
     "grid": "show grid/thumbnails by default",
+    "gsel": "select files in grid by ctrl-click",
+    "sort": "default sort order",
     "unlist": "dont list files matching REGEX",
-    "html_head=TXT": "includes TXT in the <head>",
+    "html_head=TXT": "includes TXT in the <head>, or @PATH for file at PATH",
     "robots": "allows indexing by search engines (default)",
     "norobots": "kindly asks search engines to leave",
     "no_sb_md": "disable js sandbox for markdown files",
@@ -162,8 +230,11 @@ flagcats = {
     "nohtml": "return html and markdown as text/html",
 },
 "others": {
+    "dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
     "fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
     "fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
+    "mv_retry": "ms-windows: timeout for renaming busy files",
+    "rm_retry": "ms-windows: timeout for deleting busy files",
     "davauth": "ask webdav clients to login for all folders",
     "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
 },
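The vf_bmap/vf_vmap/vf_cmap tables above translate global argv switches (for example --no-robots, --th-size) into per-volume flag names. A hedged sketch of how such a mapping can be applied to a volume's flag dict; the args namespace and volume dict are made-up stand-ins, and the real copyparty plumbing in authsrv differs:

    from argparse import Namespace

    def apply_defaults(args, bmap, vmap, flags):
        """copy global argv values into a volume flag-dict unless already set"""
        for argv_name, flag_name in bmap.items():
            if getattr(args, argv_name, False) and flag_name not in flags:
                flags[flag_name] = True
        for argv_name, flag_name in vmap.items():
            v = getattr(args, argv_name, None)
            if v is not None and flag_name not in flags:
                flags[flag_name] = v
        return flags

    args = Namespace(no_robots=True, th_size="320x256", no_hash=None)
    flags = apply_defaults(args, {"no_robots": "norobots"}, {"th_size": "thsize"}, {})
    print(flags)  # {'norobots': True, 'thsize': '320x256'}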
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

+import argparse
 import os
 import re
 import time
@@ -17,20 +18,26 @@ if True:  # pylint: disable=using-constant-test


 class Fstab(object):
-    def __init__(self, log: "RootLogger"):
+    def __init__(self, log: "RootLogger", args: argparse.Namespace):
         self.log_func = log

+        self.warned = False
         self.trusted = False
         self.tab: Optional[VFS] = None
+        self.oldtab: Optional[VFS] = None
+        self.srctab = "a"
         self.cache: dict[str, str] = {}
         self.age = 0.0
+        self.maxage = args.mtab_age

     def log(self, msg: str, c: Union[int, str] = 0) -> None:
         self.log_func("fstab", msg, c)

     def get(self, path: str) -> str:
-        if len(self.cache) > 9000:
-            self.age = time.time()
+        now = time.time()
+        if now - self.age > self.maxage or len(self.cache) > 9000:
+            self.age = now
+            self.oldtab = self.tab or self.oldtab
             self.tab = None
             self.cache = {}

@@ -75,7 +82,7 @@ class Fstab(object):
         self.trusted = False

     def build_tab(self) -> None:
-        self.log("building tab")
+        self.log("inspecting mtab for changes")

         sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
         if MACOS:
@@ -84,6 +91,7 @@ class Fstab(object):
         ptn = re.compile(sptn)
         so, _ = chkcmd(["mount"])
         tab1: list[tuple[str, str]] = []
+        atab = []
         for ln in so.split("\n"):
             m = ptn.match(ln)
             if not m:
@@ -91,6 +99,15 @@ class Fstab(object):

             zs1, zs2 = m.groups()
             tab1.append((str(zs1), str(zs2)))
+            atab.append(ln)
+
+        # keep empirically-correct values if mounttab unchanged
+        srctab = "\n".join(sorted(atab))
+        if srctab == self.srctab:
+            self.tab = self.oldtab
+            return
+
+        self.log("mtab has changed; reevaluating support for sparse files")

         tab1.sort(key=lambda x: (len(x[0]), x[0]))
         path1, fs1 = tab1[0]
@@ -99,6 +116,7 @@ class Fstab(object):
             tab.add(fs, path.lstrip("/"))

         self.tab = tab
+        self.srctab = srctab

     def relabel(self, path: str, nval: str) -> None:
         assert self.tab
@@ -133,7 +151,9 @@ class Fstab(object):
             self.trusted = True
         except:
             # prisonparty or other restrictive environment
-            self.log("failed to build tab:\n{}".format(min_ex()), 3)
+            if not self.warned:
+                self.warned = True
+                self.log("failed to build tab:\n{}".format(min_ex()), 3)
             self.build_fallback()

         assert self.tab
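The Fstab change above ages out the cached mount table after args.mtab_age seconds, but only rebuilds the parsed table when the sorted output of `mount` actually differs from last time, so remounts get noticed without paying the rebuild cost on every expiry. A hedged, standalone sketch of that expire-then-compare pattern (class and parsing are illustrative, not copyparty's Fstab):

    import subprocess
    import time

    class MountCache(object):
        """re-read "mount" after max_age seconds, rebuild only if its output changed"""

        def __init__(self, max_age=180):
            self.max_age = max_age
            self.age = 0.0
            self.src = ""    # sorted raw mount output that produced self.tab
            self.tab = None

        def get(self):
            now = time.time()
            if self.tab is not None and now - self.age < self.max_age:
                return self.tab

            self.age = now
            out = subprocess.check_output(["mount"]).decode("utf-8", "replace")
            src = "\n".join(sorted(out.splitlines()))
            if src != self.src or self.tab is None:
                self.src = src
                self.tab = self._parse(out)  # expensive rebuild only when changed
            return self.tab

        def _parse(self, out):
            # trivial for the sketch: mountpoint -> filesystem type
            ret = {}
            for ln in out.splitlines():
                parts = ln.split()
                if " on " in ln and " type " in ln and len(parts) >= 5:
                    ret[parts[2]] = parts[4]
            return ret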
@@ -9,23 +9,19 @@ import stat
 import sys
 import time
 
-from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
-
-try:
-    import asynchat
-except:
-    sys.path.append(os.path.join(E.mod, "vend"))
-
 from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
 from pyftpdlib.filesystems import AbstractedFS, FilesystemError
 from pyftpdlib.handlers import FTPHandler
 from pyftpdlib.ioloop import IOLoop
 from pyftpdlib.servers import FTPServer
 
+from .__init__ import PY2, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
 from .util import (
+    VF_CAREFUL,
     Daemon,
+    ODict,
     Pebkac,
     exclude_dotfiles,
     fsenc,
@@ -35,6 +31,7 @@ from .util import (
     runhook,
     sanitize_fn,
     vjoin,
+    wunlink,
 )
 
 if TYPE_CHECKING:
@@ -42,7 +39,7 @@ if TYPE_CHECKING:
 
 if True:  # pylint: disable=using-constant-test
     import typing
-    from typing import Any, Optional
+    from typing import Any, Optional, Union
 
 
 class FSE(FilesystemError):
@@ -79,6 +76,7 @@ class FtpAuth(DummyAuthorizer):
         asrv = self.hub.asrv
         uname = "*"
         if username != "anonymous":
+            uname = ""
             for zs in (password, username):
                 zs = asrv.iacct.get(asrv.ah.hash(zs), "")
                 if zs:
@@ -92,6 +90,12 @@ class FtpAuth(DummyAuthorizer):
             if bonk:
                 logging.warning("client banned: invalid passwords")
                 bans[ip] = bonk
+                try:
+                    # only possible if multiprocessing disabled
+                    self.hub.broker.httpsrv.bans[ip] = bonk  # type: ignore
+                    self.hub.broker.httpsrv.nban += 1  # type: ignore
+                except:
+                    pass
 
             raise AuthenticationFailed("Authentication failed.")
 
@@ -132,11 +136,14 @@ class FtpFs(AbstractedFS):
 
         self.can_read = self.can_write = self.can_move = False
         self.can_delete = self.can_get = self.can_upget = False
-        self.can_admin = False
+        self.can_admin = self.can_dot = False
 
         self.listdirinfo = self.listdir
         self.chdir(".")
 
+    def log(self, msg: str, c: Union[int, str] = 0) -> None:
+        self.hub.log("ftpd", msg, c)
+
     def v2a(
         self,
         vpath: str,
@@ -148,7 +155,7 @@ class FtpFs(AbstractedFS):
         try:
             vpath = vpath.replace("\\", "/").strip("/")
             rd, fn = os.path.split(vpath)
-            if ANYWIN and relchk(rd):
+            if relchk(rd):
                 logging.warning("malicious vpath: %s", vpath)
                 t = "Unsupported characters in [{}]"
                 raise FSE(t.format(vpath), 1)
@@ -167,7 +174,7 @@ class FtpFs(AbstractedFS):
         if not avfs:
             raise FSE(t.format(vpath), 1)
 
-        cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
+        cr, cw, cm, cd, _, _, _, _ = avfs.can_access("", self.h.uname)
         if r and not cr or w and not cw or m and not cm or d and not cd:
             raise FSE(t.format(vpath), 1)
 
@@ -205,18 +212,38 @@ class FtpFs(AbstractedFS):
         w = "w" in mode or "a" in mode or "+" in mode
 
         ap = self.rv2a(filename, r, w)[0]
+        self.validpath(ap)
         if w:
             try:
                 st = bos.stat(ap)
                 td = time.time() - st.st_mtime
+                need_unlink = True
             except:
+                need_unlink = False
                 td = 0
 
-            if td < -1 or td > self.args.ftp_wt:
-                raise FSE("Cannot open existing file for writing")
+            if w and need_unlink:
+                if td >= -1 and td <= self.args.ftp_wt:
+                    # within permitted timeframe; unlink and accept
+                    do_it = True
+                elif self.args.no_del or self.args.ftp_no_ow:
+                    # file too old, or overwrite not allowed; reject
+                    do_it = False
+                else:
+                    # allow overwrite if user has delete permission
+                    # (avoids win2000 freaking out and deleting the server copy without uploading its own)
+                    try:
+                        self.rv2a(filename, False, True, False, True)
+                        do_it = True
+                    except:
+                        do_it = False
 
-        self.validpath(ap)
-        return open(fsenc(ap), mode)
+                if not do_it:
+                    raise FSE("File already exists")
+
+                wunlink(self.log, ap, VF_CAREFUL)
+
+        return open(fsenc(ap), mode, self.args.iobuf)
 
     def chdir(self, path: str) -> None:
         nwd = join(self.cwd, path)
@@ -243,6 +270,7 @@ class FtpFs(AbstractedFS):
             self.can_get,
             self.can_upget,
             self.can_admin,
+            self.can_dot,
         ) = avfs.can_access("", self.h.uname)
 
     def mkdir(self, path: str) -> None:
@@ -265,7 +293,7 @@ class FtpFs(AbstractedFS):
             vfs_ls = [x[0] for x in vfs_ls1]
             vfs_ls.extend(vfs_virt.keys())
 
-            if not self.args.ed:
+            if not self.can_dot:
                 vfs_ls = exclude_dotfiles(vfs_ls)
 
             vfs_ls.sort()
@@ -279,9 +307,20 @@ class FtpFs(AbstractedFS):
                 # display write-only folders as empty
                 return []
 
-            # return list of volumes
-            r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
-            return list(sorted(list(r.keys())))
+            # return list of accessible volumes
+            ret = []
+            for vn in self.hub.asrv.vfs.all_vols.values():
+                if "/" in vn.vpath or not vn.vpath:
+                    continue  # only include toplevel-mounted vols
+
+                try:
+                    self.hub.asrv.vfs.get(vn.vpath, self.uname, True, False)
+                    ret.append(vn.vpath)
+                except:
+                    pass
+
+            ret.sort()
+            return ret
 
     def rmdir(self, path: str) -> None:
         ap = self.rv2a(path, d=True)[0]
@@ -297,7 +336,7 @@ class FtpFs(AbstractedFS):
 
         vp = join(self.cwd, path).lstrip("/")
         try:
-            self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
+            self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False, False)
         except Exception as ex:
             raise FSE(str(ex))
 
@@ -404,7 +443,16 @@ class FtpHandler(FTPHandler):
             super(FtpHandler, self).__init__(conn, server, ioloop)
 
         cip = self.remote_ip
-        self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
+        if cip.startswith("::ffff:"):
+            cip = cip[7:]
+
+        if self.args.ftp_ipa_nm and not self.args.ftp_ipa_nm.map(cip):
+            logging.warning("client rejected (--ftp-ipa): %s", cip)
+            self.connected = False
+            conn.close()
+            return
+
+        self.cli_ip = cip
 
         # abspath->vpath mapping to resolve log_transfer paths
         self.vfs_map: dict[str, str] = {}
@@ -422,9 +470,10 @@ class FtpHandler(FTPHandler):
             None,
             xbu,
             ap,
-            vfs.canonical(rem),
+            vp,
             "",
             self.uname,
+            self.hub.asrv.vfs.get_perms(vp, self.uname),
            0,
            0,
            self.cli_ip,
@@ -534,6 +583,8 @@ class Ftpd(object):
         if self.args.ftp4:
             ips = [x for x in ips if ":" not in x]
 
+        ips = list(ODict.fromkeys(ips))  # dedup
+
         ioloop = IOLoop()
         for ip in ips:
             for h, lp in hs:
copyparty/httpcli.py: 1596 lines changed (diff suppressed because it is too large)
@@ -23,7 +23,7 @@ from .mtag import HAVE_FFMPEG
 from .th_cli import ThumbCli
 from .th_srv import HAVE_PIL, HAVE_VIPS
 from .u2idx import U2idx
-from .util import HMaccas, shut_socket
+from .util import HMaccas, NetMap, shut_socket
 
 if True:  # pylint: disable=using-constant-test
     from typing import Optional, Pattern, Union
@@ -50,11 +50,15 @@ class HttpConn(object):
         self.addr = addr
         self.hsrv = hsrv
 
-        self.mutex: threading.Lock = hsrv.mutex  # mypy404
+        self.u2mutex: threading.Lock = hsrv.u2mutex  # mypy404
         self.args: argparse.Namespace = hsrv.args  # mypy404
         self.E: EnvParams = self.args.E
         self.asrv: AuthSrv = hsrv.asrv  # mypy404
         self.u2fh: Util.FHC = hsrv.u2fh  # mypy404
+        self.pipes: Util.CachedDict = hsrv.pipes  # mypy404
+        self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
+        self.xff_nm: Optional[NetMap] = hsrv.xff_nm
+        self.xff_lan: NetMap = hsrv.xff_lan  # type: ignore
         self.iphash: HMaccas = hsrv.broker.iphash
         self.bans: dict[str, int] = hsrv.bans
         self.aclose: dict[str, int] = hsrv.aclose
@@ -93,7 +97,7 @@ class HttpConn(object):
             self.rproxy = ip
 
         self.ip = ip
-        self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
+        self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
         return self.log_src
 
     def respath(self, res_name: str) -> str:
@@ -112,32 +116,30 @@ class HttpConn(object):
         return self.u2idx
 
     def _detect_https(self) -> bool:
-        method = None
-        if True:
-            try:
-                method = self.s.recv(4, socket.MSG_PEEK)
-            except socket.timeout:
-                return False
-            except AttributeError:
-                # jython does not support msg_peek; forget about https
-                method = self.s.recv(4)
-                self.sr = Util.Unrecv(self.s, self.log)
-                self.sr.buf = method
+        try:
+            method = self.s.recv(4, socket.MSG_PEEK)
+        except socket.timeout:
+            return False
+        except AttributeError:
+            # jython does not support msg_peek; forget about https
+            method = self.s.recv(4)
+            self.sr = Util.Unrecv(self.s, self.log)
+            self.sr.buf = method
 
             # jython used to do this, they stopped since it's broken
             # but reimplementing sendall is out of scope for now
             if not getattr(self.s, "sendall", None):
                 self.s.sendall = self.s.send  # type: ignore
 
         if len(method) != 4:
             err = "need at least 4 bytes in the first packet; got {}".format(
                 len(method)
             )
             if method:
                 self.log(err)
 
             self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
             return False
 
         return not method or not bool(PTN_HTTP.match(method))
 
@@ -178,7 +180,7 @@ class HttpConn(object):
 
             self.s = ctx.wrap_socket(self.s, server_side=True)
             msg = [
-                "\033[1;3{:d}m{}".format(c, s)
+                "\033[1;3%dm%s" % (c, s)
                 for c, s in zip([0, 5, 0], self.s.cipher())  # type: ignore
             ]
             self.log(" ".join(msg) + "\033[0m")
@@ -12,7 +12,7 @@ import time
 
 import queue
 
-from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
+from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
 
 try:
     MNFE = ModuleNotFoundError
@@ -61,12 +61,14 @@ from .u2idx import U2idx
 from .util import (
     E_SCK,
     FHC,
+    CachedDict,
     Daemon,
     Garda,
     Magician,
     Netdev,
     NetMap,
     absreal,
+    build_netmap,
     ipnorm,
     min_ex,
     shut_socket,
@@ -103,12 +105,13 @@ class HttpSrv(object):
         self.t0 = time.time()
         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
         self.magician = Magician()
-        self.nm = NetMap([], {})
+        self.nm = NetMap([], [])
         self.ssdp: Optional["SSDPr"] = None
         self.gpwd = Garda(self.args.ban_pw)
         self.g404 = Garda(self.args.ban_404)
         self.g403 = Garda(self.args.ban_403)
         self.g422 = Garda(self.args.ban_422, False)
+        self.gmal = Garda(self.args.ban_422)
         self.gurl = Garda(self.args.ban_url)
         self.bans: dict[str, int] = {}
         self.aclose: dict[str, int] = {}
@@ -116,6 +119,7 @@ class HttpSrv(object):
         self.bound: set[tuple[str, int]] = set()
         self.name = "hsrv" + nsuf
         self.mutex = threading.Lock()
+        self.u2mutex = threading.Lock()
         self.stopping = False
 
         self.tp_nthr = 0  # actual
@@ -127,7 +131,11 @@ class HttpSrv(object):
         self.t_periodic: Optional[threading.Thread] = None
 
         self.u2fh = FHC()
+        self.pipes = CachedDict(0.2)
         self.metrics = Metrics(self)
+        self.nreq = 0
+        self.nsus = 0
+        self.nban = 0
         self.srvs: list[socket.socket] = []
         self.ncli = 0  # exact
         self.clients: set[HttpConn] = set()  # laggy
@@ -145,10 +153,15 @@ class HttpSrv(object):
         zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
         self.prism = os.path.exists(zs)
 
+        self.ipa_nm = build_netmap(self.args.ipa)
+        self.xff_nm = build_netmap(self.args.xff_src)
+        self.xff_lan = build_netmap("lan")
+
         self.statics: set[str] = set()
         self._build_statics()
 
         self.ptn_cc = re.compile(r"[\x00-\x1f]")
+        self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
 
         self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
         if not self.args.no_dav:
@@ -170,7 +183,7 @@ class HttpSrv(object):
         if self.args.log_thrs:
             start_log_thrs(self.log, self.args.log_thrs, nid)
 
-        self.th_cfg: dict[str, Any] = {}
+        self.th_cfg: dict[str, set[str]] = {}
         Daemon(self.post_init, "hsrv-init2")
 
     def post_init(self) -> None:
@@ -185,7 +198,7 @@ class HttpSrv(object):
             for fn in df:
                 ap = absreal(os.path.join(dp, fn))
                 self.statics.add(ap)
-                if ap.endswith(".gz") or ap.endswith(".br"):
+                if ap.endswith(".gz"):
                     self.statics.add(ap[:-3])
 
    def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
@@ -193,7 +206,7 @@ class HttpSrv(object):
        for ip, _ in self.bound:
            ips.add(ip)
 
-        self.nm = NetMap(list(ips), netdevs)
+        self.nm = NetMap(list(ips), list(netdevs))
 
    def start_threads(self, n: int) -> None:
        self.tp_nthr += n
@@ -215,7 +228,7 @@ class HttpSrv(object):
    def periodic(self) -> None:
        while True:
            time.sleep(2 if self.tp_ncli or self.ncli else 10)
-            with self.mutex:
+            with self.u2mutex, self.mutex:
                self.u2fh.clean()
                if self.tp_q:
                    self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
@@ -253,10 +266,7 @@ class HttpSrv(object):
        msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
        self.log(self.name, msg)
 
-        def fun() -> None:
-            self.broker.say("cb_httpsrv_up")
-
-        threading.Thread(target=fun, name="sig-hsrv-up1").start()
+        Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
 
        while not self.stopping:
            if self.args.log_conn:
@@ -325,11 +335,11 @@ class HttpSrv(object):
 
            try:
                sck, saddr = srv_sck.accept()
-                cip, cport = saddr[:2]
+                cip = unicode(saddr[0])
                if cip.startswith("::ffff:"):
                    cip = cip[7:]
 
-                addr = (cip, cport)
+                addr = (cip, saddr[1])
            except (OSError, socket.error) as ex:
                if self.stopping:
                    break
@@ -361,7 +371,7 @@ class HttpSrv(object):
            if not self.t_periodic:
                name = "hsrv-pt"
                if self.nid:
-                    name += "-{}".format(self.nid)
+                    name += "-%d" % (self.nid,)
 
                self.t_periodic = Daemon(self.periodic, name)
 
@@ -380,7 +390,7 @@ class HttpSrv(object):
 
        Daemon(
            self.thr_client,
-            "httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
+            "httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1]),
            (sck, addr),
        )
 
@@ -397,9 +407,7 @@ class HttpSrv(object):
            try:
                sck, addr = task
                me = threading.current_thread()
-                me.name = "httpconn-{}-{}".format(
-                    addr[0].split(".", 2)[-1][-6:], addr[1]
-                )
+                me.name = "httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1])
                self.thr_client(sck, addr)
                me.name = self.name + "-poolw"
            except Exception as ex:
@@ -4,10 +4,11 @@ from __future__ import print_function, unicode_literals
 import argparse  # typechk
 import colorsys
 import hashlib
+import re
 
 from .__init__ import PY2
-from .th_srv import HAVE_PIL
-from .util import BytesIO
+from .th_srv import HAVE_PIL, HAVE_PILF
+from .util import BytesIO, html_escape  # type: ignore
 
 
 class Ico(object):
@@ -21,27 +22,52 @@ class Ico(object):
         ext = bext.decode("utf-8")
         zb = hashlib.sha1(bext).digest()[2:4]
         if PY2:
-            zb = [ord(x) for x in zb]
+            zb = [ord(x) for x in zb]  # type: ignore
 
         c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
-        c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
+        c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
         ci = [int(x * 255) for x in list(c1) + list(c2)]
-        c = "".join(["{:02x}".format(x) for x in ci])
+        c = "".join(["%02x" % (x,) for x in ci])
 
         w = 100
         h = 30
-        if not self.args.th_no_crop and as_thumb:
+        if as_thumb:
             sw, sh = self.args.th_size.split("x")
-            h = int(100 / (float(sw) / float(sh)))
-            w = 100
+            h = int(100.0 / (float(sw) / float(sh)))
 
         if chrome:
             # cannot handle more than ~2000 unique SVGs
+            if HAVE_PILF:
+                # pillow 10.1 made this the default font;
+                # svg: 3.7s, this: 36s
+                try:
+                    from PIL import Image, ImageDraw
+
+                    # [.lt] are hard to see lowercase / unspaced
+                    ext2 = re.sub("(.)", "\\1 ", ext).upper()
+
+                    h = int(128.0 * h / w)
+                    w = 128
+                    img = Image.new("RGB", (w, h), "#" + c[:6])
+                    pb = ImageDraw.Draw(img)
+                    _, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
+                    xy = (int((w - tw) / 2), int((h - th) / 2))
+                    pb.text(xy, ext2, fill="#" + c[6:], font_size=16)
+
+                    img = img.resize((w * 2, h * 2), Image.NEAREST)
+
+                    buf = BytesIO()
+                    img.save(buf, format="PNG", compress_level=1)
+                    return "image/png", buf.getvalue()
+
+                except:
+                    pass
+
             if HAVE_PIL:
                 # svg: 3s, cache: 6s, this: 8s
                 from PIL import Image, ImageDraw
 
-                h = int(64 * h / w)
+                h = int(64.0 * h / w)
                 w = 64
                 img = Image.new("RGB", (w, h), "#" + c[:6])
                 pb = ImageDraw.Draw(img)
@@ -64,20 +90,6 @@ class Ico(object):
                 img.save(buf, format="PNG", compress_level=1)
                 return "image/png", buf.getvalue()
 
-        elif False:
-            # 48s, too slow
-            import pyvips
-
-            h = int(192 * h / w)
-            w = 192
-            img = pyvips.Image.text(
-                ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
-            )
-            img = img.ifthenelse(ci[3:], ci[:3], blend=True)
-            # i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
-            buf = img.write_to_buffer(".png[compression=1]")
-            return "image/png", buf
-
         svg = """\
 <?xml version="1.0" encoding="UTF-8"?>
 <svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
@@ -86,6 +98,6 @@ class Ico(object):
 fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
 </g></svg>
 """
-        svg = svg.format(h, c[:6], c[6:], ext)
+        svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))
 
         return "image/svg+xml", svg.encode("utf-8")
@@ -292,6 +292,22 @@ class MDNS(MCast):
     def run2(self) -> None:
         last_hop = time.time()
         ihop = self.args.mc_hop
+
+        try:
+            if self.args.no_poll:
+                raise Exception()
+            fd2sck = {}
+            srvpoll = select.poll()
+            for sck in self.srv:
+                fd = sck.fileno()
+                fd2sck[fd] = sck
+                srvpoll.register(fd, select.POLLIN)
+        except Exception as ex:
+            srvpoll = None
+            if not self.args.no_poll:
+                t = "WARNING: failed to poll(), will use select() instead: %r"
+                self.log(t % (ex,), 3)
+
         while self.running:
             timeout = (
                 0.02 + random.random() * 0.07
@@ -300,8 +316,13 @@ class MDNS(MCast):
                 if self.unsolicited
                 else (last_hop + ihop if ihop else 180)
             )
-            rdy = select.select(self.srv, [], [], timeout)
-            rx: list[socket.socket] = rdy[0]  # type: ignore
+            if srvpoll:
+                pr = srvpoll.poll(timeout * 1000)
+                rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
+            else:
+                rdy = select.select(self.srv, [], [], timeout)
+                rx: list[socket.socket] = rdy[0]  # type: ignore
+
             self.rx4.cln()
             self.rx6.cln()
             buf = b""
@@ -340,7 +361,7 @@ class MDNS(MCast):
             except:
                 pass
 
-        self.srv = {}
+        self.srv.clear()
 
     def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
         cip = addr[0]
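The run2() change above (and the matching SSDP change further down) swaps a bare select.select() loop for select.poll() with a select() fallback. As a standalone sketch of that pattern — the wait_readable helper and the socks argument are invented for illustration, not copyparty API:

import select
import socket

def wait_readable(socks: list, timeout: float) -> list:
    # prefer poll(): unlike select(), it is not limited by FD_SETSIZE
    try:
        poller = select.poll()
        fd2sck = {s.fileno(): s for s in socks}
        for fd in fd2sck:
            poller.register(fd, select.POLLIN)
        events = poller.poll(timeout * 1000)  # poll() takes milliseconds
        return [fd2sck[fd] for fd, ev in events if ev & select.POLLIN]
    except AttributeError:
        # platforms without poll() (notably Windows): fall back to select()
        rdy, _, _ = select.select(socks, [], [], timeout)
        return rdy

The AttributeError path mirrors the diff's behavior of quietly degrading to select() when poll() is unavailable or disabled.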
@@ -34,14 +34,23 @@ class Metrics(object):
 
         ret: list[str] = []
 
-        def addc(k: str, unit: str, v: str, desc: str) -> None:
-            if unit:
-                k += "_" + unit
-                zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
-                ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
-            else:
-                zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
-                ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
+        def addc(k: str, v: str, desc: str) -> None:
+            zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
+            ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
+
+        def adduc(k: str, unit: str, v: str, desc: str) -> None:
+            k += "_" + unit
+            zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
+            ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
+
+        def addg(k: str, v: str, desc: str) -> None:
+            zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
+            ret.append(zs % (k, k, desc, k, v))
+
+        def addug(k: str, unit: str, v: str, desc: str) -> None:
+            k += "_" + unit
+            zs = "# TYPE %s gauge\n# UNIT %s %s\n# HELP %s %s\n%s %s"
+            ret.append(zs % (k, k, unit, k, desc, k, v))
 
         def addh(k: str, typ: str, desc: str) -> None:
             zs = "# TYPE %s %s\n# HELP %s %s"
@@ -54,17 +63,75 @@ class Metrics(object):
         def addv(k: str, v: str) -> None:
             ret.append("%s %s" % (k, v))
 
+        t = "time since last copyparty restart"
         v = "{:.3f}".format(time.time() - self.hsrv.t0)
-        addc("cpp_uptime", "seconds", v, "time since last server restart")
+        addug("cpp_uptime", "seconds", v, t)
+
+        # timestamps are gauges because initial value is not zero
+        t = "unixtime of last copyparty restart"
+        v = "{:.3f}".format(self.hsrv.t0)
+        addug("cpp_boot_unixtime", "seconds", v, t)
+
+        t = "number of open http(s) client connections"
+        addg("cpp_http_conns", str(self.hsrv.ncli), t)
+
+        t = "number of http(s) requests since last restart"
+        addc("cpp_http_reqs", str(self.hsrv.nreq), t)
+
+        t = "number of 403/422/malicious reqs since restart"
+        addc("cpp_sus_reqs", str(self.hsrv.nsus), t)
 
         v = str(len(conn.bans or []))
-        addc("cpp_bans", "", v, "number of banned IPs")
+        addg("cpp_active_bans", v, "number of currently banned IPs")
 
+        t = "number of IPs banned since last restart"
+        addg("cpp_total_bans", str(self.hsrv.nban), t)
+
+        if not args.nos_vst:
+            x = self.hsrv.broker.ask("up2k.get_state")
+            vs = json.loads(x.get())
+
+            nvidle = 0
+            nvbusy = 0
+            nvoffline = 0
+            for v in vs["volstate"].values():
+                if v == "online, idle":
+                    nvidle += 1
+                elif "OFFLINE" in v:
+                    nvoffline += 1
+                else:
+                    nvbusy += 1
+
+            addg("cpp_idle_vols", str(nvidle), "number of idle/ready volumes")
+            addg("cpp_busy_vols", str(nvbusy), "number of busy/indexing volumes")
+            addg("cpp_offline_vols", str(nvoffline), "number of offline volumes")
+
+            t = "time since last database activity (upload/rename/delete)"
+            addug("cpp_db_idle", "seconds", str(vs["dbwt"]), t)
+
+            t = "unixtime of last database activity (upload/rename/delete)"
+            addug("cpp_db_act", "seconds", str(vs["dbwu"]), t)
+
+            t = "number of files queued for hashing/indexing"
+            addg("cpp_hashing_files", str(vs["hashq"]), t)
+
+            t = "number of files queued for metadata scanning"
+            addg("cpp_tagq_files", str(vs["tagq"]), t)
+
+            try:
+                t = "number of files queued for plugin-based analysis"
+                addg("cpp_mtpq_files", str(int(vs["mtpq"])), t)
+            except:
+                pass
+
         if not args.nos_hdd:
             addbh("cpp_disk_size_bytes", "total HDD size of volume")
             addbh("cpp_disk_free_bytes", "free HDD space in volume")
             for vpath, vol in allvols:
                 free, total = get_df(vol.realpath)
+                if free is None or total is None:
+                    continue
+
                 addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
                 addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
 
@@ -112,7 +179,7 @@ class Metrics(object):
         tnbytes = 0
         tnfiles = 0
         for vpath, vol in allvols:
-            cur = idx.get_cur(vol.realpath)
+            cur = idx.get_cur(vol)
             if not cur:
                 continue
 
@@ -139,6 +206,9 @@ class Metrics(object):
         try:
             x = self.hsrv.broker.ask("up2k.get_unfinished")
             xs = x.get()
+            if not xs:
+                raise Exception("up2k mutex acquisition timed out")
+
             xj = json.loads(xs)
             for ptop, (nbytes, nfiles) in xj.items():
                 tnbytes += nbytes
@@ -161,5 +231,6 @@ class Metrics(object):
         ret.append("# EOF")
 
         mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
+        mime = cli.uparam.get("mime") or mime
         cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
         return True
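The addc/addg/addug helpers in the hunk above build OpenMetrics exposition text by string formatting. A self-contained sketch of that same text format — the metric names, values, and the render_openmetrics wrapper are invented for illustration only:

import time

def render_openmetrics(ncli: int, nreq: int, t0: float) -> str:
    lines = []

    def addg(k: str, v: str, desc: str) -> None:
        # gauge: just the current value
        lines.append("# TYPE %s gauge\n# HELP %s %s\n%s %s" % (k, k, desc, k, v))

    def addc(k: str, v: str, desc: str) -> None:
        # counter: _created marks the start of observation, _total is the count
        zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
        lines.append(zs % (k, k, desc, k, int(t0), k, v))

    addg("app_http_conns", str(ncli), "number of open http(s) client connections")
    addc("app_http_reqs", str(nreq), "number of http(s) requests since last restart")
    lines.append("# EOF")
    return "\n".join(lines)

print(render_openmetrics(3, 1234, time.time() - 60))

Keeping the emitters as tiny closures over a shared list, as the diff does, avoids pulling in a metrics library for what is ultimately plain text.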
@@ -7,12 +7,15 @@ import os
 import shutil
 import subprocess as sp
 import sys
+import tempfile
 
 from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
+from .authsrv import VFS
 from .bos import bos
 from .util import (
     FFMPEG_URL,
     REKOBO_LKEY,
+    VF_CAREFUL,
     fsenc,
     min_ex,
     pybin,
@@ -20,12 +23,13 @@ from .util import (
     runcmd,
     sfsenc,
     uncyg,
+    wunlink,
 )
 
 if True:  # pylint: disable=using-constant-test
-    from typing import Any, Union
+    from typing import Any, Optional, Union
 
-    from .util import RootLogger
+    from .util import NamedLogger, RootLogger
 
 
 def have_ff(scmd: str) -> bool:
@@ -107,6 +111,56 @@ class MParser(object):
         raise Exception()
 
 
+def au_unpk(
+    log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
+) -> str:
+    ret = ""
+    try:
+        ext = abspath.split(".")[-1].lower()
+        au, pk = fmt_map[ext].split(".")
+
+        fd, ret = tempfile.mkstemp("." + au)
+
+        if pk == "gz":
+            import gzip
+
+            fi = gzip.GzipFile(abspath, mode="rb")
+
+        elif pk == "xz":
+            import lzma
+
+            fi = lzma.open(abspath, "rb")
+
+        elif pk == "zip":
+            import zipfile
+
+            zf = zipfile.ZipFile(abspath, "r")
+            zil = zf.infolist()
+            zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
+            fi = zf.open(zil[0])
+
+        else:
+            raise Exception("unknown compression %s" % (pk,))
+
+        with os.fdopen(fd, "wb") as fo:
+            while True:
+                buf = fi.read(32768)
+                if not buf:
+                    break
+
+                fo.write(buf)
+
+        return ret
+
+    except Exception as ex:
+        if ret:
+            t = "failed to decompress audio file [%s]: %r"
+            log(t % (abspath, ex))
+            wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
+
+        return abspath
+
+
 def ffprobe(
     abspath: str, timeout: int = 60
 ) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
@@ -118,7 +172,7 @@ def ffprobe(
         b"--",
         fsenc(abspath),
     ]
-    rc, so, se = runcmd(cmd, timeout=timeout)
+    rc, so, se = runcmd(cmd, timeout=timeout, nice=True, oom=200)
     retchk(rc, cmd, se)
     return parse_ffprobe(so)
 
@@ -240,7 +294,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
         if "/" in fps:
             fa, fb = fps.split("/")
             try:
-                fps = int(fa) * 1.0 / int(fb)
+                fps = float(fa) / float(fb)
             except:
                 fps = 9001
 
@@ -261,7 +315,8 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
     if ".resw" in ret and ".resh" in ret:
         ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
 
-    zd = {k: (0, v) for k, v in ret.items()}
+    zero = int("0")
+    zd = {k: (zero, v) for k, v in ret.items()}
 
     return zd, md
 
@@ -280,7 +335,7 @@ class MTag(object):
         or_ffprobe = " or FFprobe"
 
         if self.backend == "mutagen":
-            self.get = self.get_mutagen
+            self._get = self.get_mutagen
             try:
                 from mutagen import version  # noqa: F401
             except:
@@ -289,7 +344,7 @@ class MTag(object):
 
         if self.backend == "ffprobe":
             self.usable = self.can_ffprobe
-            self.get = self.get_ffprobe
+            self._get = self.get_ffprobe
             self.prefer_mt = True
 
             if not HAVE_FFPROBE:
@@ -459,6 +514,17 @@ class MTag(object):
 
         return r1
 
+    def get(self, abspath: str) -> dict[str, Union[str, float]]:
+        ext = abspath.split(".")[-1].lower()
+        if ext not in self.args.au_unpk:
+            return self._get(abspath)
+
+        ap = au_unpk(self.log, self.args.au_unpk, abspath)
+        ret = self._get(ap)
+        if ap != abspath:
+            wunlink(self.log, ap, VF_CAREFUL)
+        return ret
+
     def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
         ret: dict[str, tuple[int, Any]] = {}
 
@@ -550,18 +616,25 @@ class MTag(object):
                 pypath = str(os.pathsep.join(zsl))
                 env["PYTHONPATH"] = pypath
             except:
-                if not E.ox and not EXE:
-                    raise
+                raise  # might be expected outside cpython
+
+        ext = abspath.split(".")[-1].lower()
+        if ext in self.args.au_unpk:
+            ap = au_unpk(self.log, self.args.au_unpk, abspath)
+        else:
+            ap = abspath
 
         ret: dict[str, Any] = {}
         for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
             try:
-                cmd = [parser.bin, abspath]
+                cmd = [parser.bin, ap]
                 if parser.bin.endswith(".py"):
                     cmd = [pybin] + cmd
 
                 args = {
                     "env": env,
+                    "nice": True,
+                    "oom": 300,
                     "timeout": parser.timeout,
                     "kill": parser.kill,
                     "capture": parser.capture,
@@ -572,11 +645,6 @@ class MTag(object):
                 zd.update(ret)
                 args["sin"] = json.dumps(zd).encode("utf-8", "replace")
 
-                if WINDOWS:
-                    args["creationflags"] = 0x4000
-                else:
-                    cmd = ["nice"] + cmd
-
                 bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
                 rc, v, err = runcmd(bcmd, **args)  # type: ignore
                 retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
@@ -596,4 +664,7 @@ class MTag(object):
                 t = "mtag error: tagname {}, parser {}, file {} => {}"
                 self.log(t.format(tagname, parser.bin, abspath, min_ex()))
 
+        if ap != abspath:
+            wunlink(self.log, ap, VF_CAREFUL)
+
         return ret
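The au_unpk function added above stages a gz/xz/zip-wrapped audio file into a temporary copy before tagging, then the caller deletes it. The core move — stream-copying from a compression reader into a mkstemp handle — works on its own; unpack_gz and the 32 KiB chunk size below are illustrative choices, not copyparty API:

import gzip
import os
import tempfile

def unpack_gz(abspath: str) -> str:
    # returns the path of a decompressed temp copy; the caller is
    # responsible for deleting it when done (mirroring au_unpk)
    fd, tmp = tempfile.mkstemp(".bin")
    with gzip.GzipFile(abspath, mode="rb") as fi, os.fdopen(fd, "wb") as fo:
        while True:
            buf = fi.read(32768)
            if not buf:
                break
            fo.write(buf)
    return tmp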
@@ -110,7 +110,7 @@ class MCast(object):
         )
 
         ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
-        ips = find_prefix(ips, netdevs)
+        ips = find_prefix(ips, list(netdevs))
 
         on = self.on[:]
         off = self.off[:]
@@ -206,6 +206,7 @@ class MCast(object):
             except:
                 t = "announce failed on {} [{}]:\n{}"
                 self.log(t.format(netdev, ip, min_ex()), 3)
+                sck.close()
 
         if self.args.zm_msub:
             for s1 in self.srv.values():
@@ -136,8 +136,12 @@ class PWHash(object):
         import getpass
 
         while True:
-            p1 = getpass.getpass("password> ")
-            p2 = getpass.getpass("again or just hit ENTER> ")
+            try:
+                p1 = getpass.getpass("password> ")
+                p2 = getpass.getpass("again or just hit ENTER> ")
+            except EOFError:
+                return
 
             if p2 and p1 != p2:
                 print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
                 continue
@@ -127,7 +127,7 @@ class SMB(object):
         self.log("smb", msg, c)
 
     def start(self) -> None:
-        Daemon(self.srv.start)
+        Daemon(self.srv.start, "smbd")
 
     def _auth_cb(self, *a, **ka):
         debug("auth-result: %s %s", a, ka)
@@ -162,6 +162,7 @@ class SMB(object):
                 if "connData" in cl:
                     return cl["connData"]["partygoer"]
                 cf = cf.f_back
+            raise Exception()
         except:
             warning(
                 "nyoron... %s <<-- %s <<-- %s <<-- %s",
@@ -239,7 +240,7 @@ class SMB(object):
 
         xbu = vfs.flags.get("xbu")
         if xbu and not runhook(
-            self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
+            self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "1.7.6.2", 0, ""
         ):
             yeet("blocked by xbu server config: " + vpath)
 
@@ -339,7 +340,7 @@ class SMB(object):
             yeet("blocked delete (no-del-acc): " + vpath)
 
         vpath = vpath.replace("\\", "/").lstrip("/")
-        self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False)
+        self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False, False)
 
     def _utime(self, vpath: str, times: tuple[float, float]) -> None:
         if not self.args.smbw:
@@ -405,6 +406,7 @@ class SMB(object):
 
         smbserver.os.path.abspath = self._hook
         smbserver.os.path.expanduser = self._hook
+        smbserver.os.path.expandvars = self._hook
         smbserver.os.path.getatime = self._hook
         smbserver.os.path.getctime = self._hook
         smbserver.os.path.getmtime = self._hook
@@ -141,9 +141,29 @@ class SSDPd(MCast):
         self.log("stopped", 2)
 
     def run2(self) -> None:
+        try:
+            if self.args.no_poll:
+                raise Exception()
+            fd2sck = {}
+            srvpoll = select.poll()
+            for sck in self.srv:
+                fd = sck.fileno()
+                fd2sck[fd] = sck
+                srvpoll.register(fd, select.POLLIN)
+        except Exception as ex:
+            srvpoll = None
+            if not self.args.no_poll:
+                t = "WARNING: failed to poll(), will use select() instead: %r"
+                self.log(t % (ex,), 3)
+
         while self.running:
-            rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
-            rx: list[socket.socket] = rdy[0]  # type: ignore
+            if srvpoll:
+                pr = srvpoll.poll((self.args.z_chk or 180) * 1000)
+                rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
+            else:
+                rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
+                rx: list[socket.socket] = rdy[0]  # type: ignore
+
             self.rxc.cln()
             buf = b""
             addr = ("0", 0)
@@ -168,7 +188,7 @@ class SSDPd(MCast):
             except:
                 pass
 
-        self.srv = {}
+        self.srv.clear()
 
     def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
         cip = addr[0]
@@ -7,6 +7,7 @@ import tarfile
 
 from queue import Queue
 
+from .authsrv import AuthSrv
 from .bos import bos
 from .sutil import StreamArc, errdesc
 from .util import Daemon, fsenc, min_ex
@@ -44,11 +45,12 @@ class StreamTar(StreamArc):
     def __init__(
         self,
         log: "NamedLogger",
+        asrv: AuthSrv,
         fgen: Generator[dict[str, Any], None, None],
         cmp: str = "",
         **kwargs: Any
     ):
-        super(StreamTar, self).__init__(log, fgen)
+        super(StreamTar, self).__init__(log, asrv, fgen)
 
         self.ci = 0
         self.co = 0
@@ -65,21 +67,21 @@ class StreamTar(StreamArc):
         cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
 
         try:
-            cmp, lv = cmp.replace(":", ",").split(",")
-            lv = int(lv)
+            cmp, zs = cmp.replace(":", ",").split(",")
+            lv = int(zs)
         except:
-            lv = None
+            lv = -1
 
         arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
         if cmp == "gz":
             fun = tarfile.TarFile.gzopen
-            arg["compresslevel"] = lv if lv is not None else 3
+            arg["compresslevel"] = lv if lv >= 0 else 3
         elif cmp == "bz2":
             fun = tarfile.TarFile.bz2open
-            arg["compresslevel"] = lv if lv is not None else 2
+            arg["compresslevel"] = lv if lv >= 0 else 2
         elif cmp == "xz":
             fun = tarfile.TarFile.xzopen
-            arg["preset"] = lv if lv is not None else 1
+            arg["preset"] = lv if lv >= 0 else 1
         else:
             fun = tarfile.open
             arg["mode"] = "w|"
@@ -126,7 +128,7 @@ class StreamTar(StreamArc):
         inf.gid = 0
 
         self.ci += inf.size
-        with open(fsenc(src), "rb", 512 * 1024) as fo:
+        with open(fsenc(src), "rb", self.args.iobuf) as fo:
             self.tar.addfile(inf, fo)
 
     def _gen(self) -> None:
@@ -146,7 +148,7 @@ class StreamTar(StreamArc):
                 errors.append((f["vp"], ex))
 
         if errors:
-            self.errf, txt = errdesc(errors)
+            self.errf, txt = errdesc(self.asrv.vfs, errors)
             self.log("\n".join(([repr(self.errf)] + txt[1:])))
             self.ser(self.errf)
 
@@ -61,7 +61,7 @@ class Adapter(object):
     )
 
 
-if True:
+if True:  # pylint: disable=using-constant-test
     # Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
     _IPv4Address = str
 
@@ -6,9 +6,10 @@ import tempfile
 from datetime import datetime
 
 from .__init__ import CORES
+from .authsrv import AuthSrv, VFS
 from .bos import bos
 from .th_cli import ThumbCli
-from .util import vjoin
+from .util import UTC, vjoin, vol_san
 
 if True:  # pylint: disable=using-constant-test
     from typing import Any, Generator, Optional
@@ -20,10 +21,13 @@ class StreamArc(object):
     def __init__(
         self,
         log: "NamedLogger",
+        asrv: AuthSrv,
         fgen: Generator[dict[str, Any], None, None],
         **kwargs: Any
     ):
         self.log = log
+        self.asrv = asrv
+        self.args = asrv.args
         self.fgen = fgen
         self.stopped = False
 
@@ -78,7 +82,9 @@ def enthumb(
 ) -> dict[str, Any]:
     rem = f["vp"]
     ext = rem.rsplit(".", 1)[-1].lower()
-    if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
+    if (fmt == "mp3" and ext == "mp3") or (
+        fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|")
+    ):
         raise Exception()
 
     vp = vjoin(vtop, rem.split("/", 1)[1])
@@ -98,17 +104,22 @@ def enthumb(
     return f
 
 
-def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
+def errdesc(
+    vfs: VFS, errors: list[tuple[str, str]]
+) -> tuple[dict[str, Any], list[str]]:
     report = ["copyparty failed to add the following files to the archive:", ""]
 
     for fn, err in errors:
         report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
 
+    btxt = "\r\n".join(report).encode("utf-8", "replace")
+    btxt = vol_san(list(vfs.all_vols.values()), btxt)
+
     with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
         tf_path = tf.name
-        tf.write("\r\n".join(report).encode("utf-8", "replace"))
+        tf.write(btxt)
 
-    dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
+    dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
 
     bos.chmod(tf_path, 0o444)
     return {
@@ -28,7 +28,7 @@ if True: # pylint: disable=using-constant-test
|
|||||||
import typing
|
import typing
|
||||||
from typing import Any, Optional, Union
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, E, EnvParams, unicode
|
||||||
from .authsrv import BAD_CFG, AuthSrv
|
from .authsrv import BAD_CFG, AuthSrv
|
||||||
from .cert import ensure_cert
|
from .cert import ensure_cert
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||||
@@ -36,16 +36,23 @@ from .tcpsrv import TcpSrv
|
|||||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
from .util import (
|
from .util import (
|
||||||
|
DEF_EXP,
|
||||||
|
DEF_MTE,
|
||||||
|
DEF_MTH,
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
|
UTC,
|
||||||
VERSIONS,
|
VERSIONS,
|
||||||
Daemon,
|
Daemon,
|
||||||
Garda,
|
Garda,
|
||||||
HLog,
|
HLog,
|
||||||
HMaccas,
|
HMaccas,
|
||||||
|
ODict,
|
||||||
alltrace,
|
alltrace,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
|
build_netmap,
|
||||||
min_ex,
|
min_ex,
|
||||||
mp,
|
mp,
|
||||||
|
odfusion,
|
||||||
pybin,
|
pybin,
|
||||||
start_log_thrs,
|
start_log_thrs,
|
||||||
start_stackmon,
|
start_stackmon,
|
||||||
@@ -88,7 +95,7 @@ class SvcHub(object):
|
|||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.stopped = False
|
self.stopped = False
|
||||||
self.reload_req = False
|
self.reload_req = False
|
||||||
self.reloading = False
|
self.reloading = 0
|
||||||
self.stop_cond = threading.Condition()
|
self.stop_cond = threading.Condition()
|
||||||
self.nsigs = 3
|
self.nsigs = 3
|
||||||
self.retcode = 0
|
self.retcode = 0
|
||||||
@@ -115,8 +122,6 @@ class SvcHub(object):
|
|||||||
args.no_mv = True
|
args.no_mv = True
|
||||||
args.hardlink = True
|
args.hardlink = True
|
||||||
args.vague_403 = True
|
args.vague_403 = True
|
||||||
args.ban_404 = "50,60,1440"
|
|
||||||
args.turbo = -1
|
|
||||||
args.nih = True
|
args.nih = True
|
||||||
|
|
||||||
if args.s:
|
if args.s:
|
||||||
@@ -129,12 +134,13 @@ class SvcHub(object):
         if not self._process_config():
             raise Exception(BAD_CFG)

-        # for non-http clients (ftp)
+        # for non-http clients (ftp, tftp)
         self.bans: dict[str, int] = {}
         self.gpwd = Garda(self.args.ban_pw)
         self.g404 = Garda(self.args.ban_404)
         self.g403 = Garda(self.args.ban_403)
-        self.g422 = Garda(self.args.ban_422)
+        self.g422 = Garda(self.args.ban_422, False)
+        self.gmal = Garda(self.args.ban_422)
         self.gurl = Garda(self.args.ban_url)

         self.log_div = 10 ** (6 - args.log_tdec)
@@ -149,6 +155,8 @@ class SvcHub(object):
             lg.handlers = [lh]
             lg.setLevel(logging.DEBUG)

+        self._check_env()
+
         if args.stackmon:
             start_stackmon(args.stackmon, 0)

@@ -165,6 +173,26 @@ class SvcHub(object):
                 self.log("root", t.format(args.j), c=3)
                 args.no_fpool = True

+        for name, arg in (
+            ("iobuf", "iobuf"),
+            ("s-rd-sz", "s_rd_sz"),
+            ("s-wr-sz", "s_wr_sz"),
+        ):
+            zi = getattr(args, arg)
+            if zi < 32768:
+                t = "WARNING: expect very poor performance because you specified a very low value (%d) for --%s"
+                self.log("root", t % (zi, name), 3)
+            zi = 2
+            zi2 = 2 ** (zi - 1).bit_length()
+            if zi != zi2:
+                zi3 = 2 ** ((zi - 1).bit_length() - 1)
+                t = "WARNING: expect poor performance because --%s is not a power-of-two; consider using %d or %d instead of %d"
+                self.log("root", t % (name, zi2, zi3, zi), 3)
+
+        if args.s_rd_sz > args.iobuf:
+            t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
+            self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
+
         bri = "zy"[args.theme % 2 :][:1]
         ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
         args.theme = "{0}{1} {0} {1}".format(ch, bri)
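For reference, the bit_length() arithmetic behind the power-of-two warning above rounds a buffer size to its neighbouring powers of two. A standalone sketch (the function name is made up):

def nearest_powers_of_two(n: int) -> tuple[int, int]:
    # next power of two at or above n, and the one just below it;
    # mirrors the (n - 1).bit_length() math used in the warning
    hi = 2 ** (n - 1).bit_length()
    lo = 2 ** ((n - 1).bit_length() - 1)
    return hi, lo

print(nearest_powers_of_two(49152))  # (65536, 32768) -> warn, suggest either
print(nearest_powers_of_two(65536))  # (65536, 32768) -> already a power of two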
@@ -212,6 +240,10 @@ class SvcHub(object):
         if not HAVE_FFMPEG or not HAVE_FFPROBE:
             decs.pop("ff", None)

+        # compressed formats; "s3z=s3m.zip, s3gz=s3m.gz, ..."
+        zlss = [x.strip().lower().split("=", 1) for x in args.au_unpk.split(",")]
+        args.au_unpk = {x[0]: x[1] for x in zlss}
+
         self.args.th_dec = list(decs.keys())
         self.thumbsrv = None
         want_ff = False
@@ -248,6 +280,13 @@ class SvcHub(object):
                 if want_ff and ANYWIN:
                     self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)

+            if not args.no_acode:
+                if not re.match("^(0|[qv][0-9]|[0-9]{2,3}k)$", args.q_mp3.lower()):
+                    t = "invalid mp3 transcoding quality [%s] specified; only supports [0] to disable, a CBR value such as [192k], or a CQ/CRF value such as [v2]"
+                    raise Exception(t % (args.q_mp3,))
+        else:
+            args.au_unpk = {}
+
         args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)

         zms = ""
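A quick check of the --q-mp3 validation pattern introduced above, run outside copyparty with made-up sample values:

import re

ptn = re.compile(r"^(0|[qv][0-9]|[0-9]{2,3}k)$")

for q in ("0", "v2", "q4", "192k", "64k", "320", "vbr2"):
    print(q, bool(ptn.match(q)))
# "0", "v2", "q4", "192k" and "64k" are accepted; "320" and "vbr2" are rejected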
@@ -260,9 +299,16 @@ class SvcHub(object):
             from .ftpd import Ftpd

             self.ftpd: Optional[Ftpd] = None
-            Daemon(self.start_ftpd, "start_ftpd")
             zms += "f" if args.ftp else "F"

+        if args.tftp:
+            from .tftpd import Tftpd
+
+            self.tftpd: Optional[Tftpd] = None
+
+        if args.ftp or args.ftps or args.tftp:
+            Daemon(self.start_ftpd, "start_tftpd")
+
         if args.smb:
             # impacket.dcerpc is noisy about listen timeouts
             sto = socket.getdefaulttimeout()
@@ -292,10 +338,12 @@ class SvcHub(object):
     def start_ftpd(self) -> None:
         time.sleep(30)
-        if self.ftpd:
-            return

-        self.restart_ftpd()
+        if hasattr(self, "ftpd") and not self.ftpd:
+            self.restart_ftpd()
+
+        if hasattr(self, "tftpd") and not self.tftpd:
+            self.restart_tftpd()

     def restart_ftpd(self) -> None:
         if not hasattr(self, "ftpd"):
@@ -312,6 +360,17 @@ class SvcHub(object):
         self.ftpd = Ftpd(self)
         self.log("root", "started FTPd")

+    def restart_tftpd(self) -> None:
+        if not hasattr(self, "tftpd"):
+            return
+
+        from .tftpd import Tftpd
+
+        if self.tftpd:
+            return  # todo
+
+        self.tftpd = Tftpd(self)
+
     def thr_httpsrv_up(self) -> None:
         time.sleep(1 if self.args.ign_ebind_all else 5)
         expected = self.broker.num_workers * self.tcpsrv.nsrv
@@ -336,7 +395,7 @@ class SvcHub(object):
             self.sigterm()

     def sigterm(self) -> None:
-        os.kill(os.getpid(), signal.SIGTERM)
+        self.signal_handler(signal.SIGTERM, None)

     def cb_httpsrv_up(self) -> None:
         self.httpsrv_up += 1
@@ -361,6 +420,23 @@ class SvcHub(object):

         Daemon(self.sd_notify, "sd-notify")

+    def _check_env(self) -> None:
+        try:
+            files = os.listdir(E.cfg)
+        except:
+            files = []
+
+        hits = [x for x in files if x.lower().endswith(".conf")]
+        if hits:
+            t = "WARNING: found config files in [%s]: %s\n config files are not expected here, and will NOT be loaded (unless your setup is intentionally hella funky)"
+            self.log("root", t % (E.cfg, ", ".join(hits)), 3)
+
+        if self.args.no_bauth:
+            t = "WARNING: --no-bauth disables support for the Android app; you may want to use --bauth-last instead"
+            self.log("root", t, 3)
+            if self.args.bauth_last:
+                self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
+
     def _process_config(self) -> bool:
         al = self.args

@@ -400,20 +476,27 @@ class SvcHub(object):
         if al.rsp_jtr:
             al.rsp_slp = 0.000001

-        al.th_covers = set(al.th_covers.split(","))
+        zsl = al.th_covers.split(",")
+        zsl = [x.strip() for x in zsl]
+        zsl = [x for x in zsl if x]
+        al.th_covers = zsl
+        al.th_coversd = zsl + ["." + x for x in zsl]
+        al.th_covers_set = set(al.th_covers)
+        al.th_coversd_set = set(al.th_coversd)

         for k in "c".split(" "):
             vl = getattr(al, k)
             if not vl:
                 continue

-            vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
+            vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
             setattr(al, k, vl)

         for k in "lo hist ssl_log".split(" "):
             vs = getattr(al, k)
-            if vs and vs.startswith("~"):
-                setattr(al, k, os.path.expanduser(vs))
+            if vs:
+                vs = os.path.expandvars(os.path.expanduser(vs))
+                setattr(al, k, vs)

         for k in "sus_urls nonsus_urls".split(" "):
             vs = getattr(al, k)
@@ -422,25 +505,86 @@ class SvcHub(object):
             else:
                 setattr(al, k, re.compile(vs))

+        for k in "tftp_lsf".split(" "):
+            vs = getattr(al, k)
+            if not vs or vs == "no":
+                setattr(al, k, None)
+            else:
+                setattr(al, k, re.compile("^" + vs + "$"))
+
         if not al.sus_urls:
             al.ban_url = "no"
         elif al.ban_url == "no":
             al.sus_urls = None

-        if al.xff_src in ("any", "0", ""):
-            al.xff_re = None
-        else:
-            zs = al.xff_src.replace(" ", "").replace(".", "\\.").replace(",", "|")
-            al.xff_re = re.compile("^(?:" + zs + ")")
+        al.xff_hdr = al.xff_hdr.lower()
+        al.idp_h_usr = al.idp_h_usr.lower()
+        al.idp_h_grp = al.idp_h_grp.lower()
+        al.idp_h_key = al.idp_h_key.lower()
+
+        al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
+        al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
+
+        mte = ODict.fromkeys(DEF_MTE.split(","), True)
+        al.mte = odfusion(mte, al.mte)
+
+        mth = ODict.fromkeys(DEF_MTH.split(","), True)
+        al.mth = odfusion(mth, al.mth)
+
+        exp = ODict.fromkeys(DEF_EXP.split(" "), True)
+        al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
+        al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
+
+        for k in ["no_hash", "no_idx", "og_ua"]:
+            ptn = getattr(self.args, k)
+            if ptn:
+                setattr(self.args, k, re.compile(ptn))
+
+        for k in ["idp_gsep"]:
+            ptn = getattr(self.args, k)
+            if "]" in ptn:
+                ptn = "]" + ptn.replace("]", "")
+            if "[" in ptn:
+                ptn = ptn.replace("[", "") + "["
+            if "-" in ptn:
+                ptn = ptn.replace("-", "") + "-"
+
+            ptn = ptn.replace("\\", "\\\\").replace("^", "\\^")
+            setattr(self.args, k, re.compile("[%s]" % (ptn,)))
+
+        try:
+            zf1, zf2 = self.args.rm_retry.split("/")
+            self.args.rm_re_t = float(zf1)
+            self.args.rm_re_r = float(zf2)
+        except:
+            raise Exception("invalid --rm-retry [%s]" % (self.args.rm_retry,))
+
+        try:
+            zf1, zf2 = self.args.mv_retry.split("/")
+            self.args.mv_re_t = float(zf1)
+            self.args.mv_re_r = float(zf2)
+        except:
+            raise Exception("invalid --mv-retry [%s]" % (self.args.mv_retry,))
+
+        al.tcolor = al.tcolor.lstrip("#")
+        if len(al.tcolor) == 3:  # fc5 => ffcc55
+            al.tcolor = "".join([x * 2 for x in al.tcolor])

         return True

+    def _ipa2re(self, txt) -> Optional[re.Pattern]:
+        if txt in ("any", "0", ""):
+            return None
+
+        zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
+        return re.compile("^(?:" + zs + ")")
+
     def _setlimits(self) -> None:
         try:
             import resource

             soft, hard = [
-                x if x > 0 else 1024 * 1024
+                int(x) if x > 0 else 1024 * 1024
                 for x in list(resource.getrlimit(resource.RLIMIT_NOFILE))
             ]
         except:
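Two of the small transforms above, shown in isolation: the _ipa2re-style prefix regex and the 3-digit tcolor expansion (fc5 becomes ffcc55). Sample inputs are made up:

import re

def ipa2re(txt: str):
    # same comma-separated-prefix to anchored-alternation transform as _ipa2re above
    if txt in ("any", "0", ""):
        return None
    zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
    return re.compile("^(?:" + zs + ")")

ptn = ipa2re("10.89., 192.168.1.")
print(bool(ptn.match("10.89.4.2")))     # True
print(bool(ptn.match("192.168.10.1")))  # False (prefix must match literally)

tcolor = "fc5"
print("".join([x * 2 for x in tcolor]))  # ffcc55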
@@ -476,7 +620,7 @@ class SvcHub(object):
             self.args.nc = min(self.args.nc, soft // 2)

     def _logname(self) -> str:
-        dt = datetime.utcnow()
+        dt = datetime.now(UTC)
         fn = str(self.args.lo)
         for fs in "YmdHMS":
             fs = "%" + fs
@@ -497,12 +641,17 @@ class SvcHub(object):
                 sel_fn = "{}.{}".format(fn, ctr)

         fn = sel_fn
+        try:
+            os.makedirs(os.path.dirname(fn))
+        except:
+            pass

         try:
             if do_xz:
                 import lzma

                 lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
+                self.args.no_logflush = True
             else:
                 lh = open(fn, "wt", encoding="utf-8", errors="replace")
         except:
@@ -589,21 +738,37 @@ class SvcHub(object):
             self.log("root", "ssdp startup failed;\n" + min_ex(), 3)

     def reload(self) -> str:
-        if self.reloading:
-            return "cannot reload; already in progress"
+        with self.up2k.mutex:
+            if self.reloading:
+                return "cannot reload; already in progress"
+            self.reloading = 1

-        self.reloading = True
         Daemon(self._reload, "reloading")
         return "reload initiated"

-    def _reload(self) -> None:
-        self.log("root", "reload scheduled")
+    def _reload(self, rescan_all_vols: bool = True) -> None:
         with self.up2k.mutex:
+            if self.reloading != 1:
+                return
+            self.reloading = 2
+            self.log("root", "reloading config")
             self.asrv.reload()
-            self.up2k.reload()
+            self.up2k.reload(rescan_all_vols)
             self.broker.reload()
+            self.reloading = 0

-        self.reloading = False
+    def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
+        while True:
+            with self.up2k.mutex:
+                if self.reloading < 2:
+                    self.reloading = 1
+                    break
+            time.sleep(0.05)
+
+        # try to handle multiple pending IdP reloads at once:
+        time.sleep(0.2)
+
+        self._reload(rescan_all_vols=rescan_all_vols)

     def stop_thr(self) -> None:
         while not self.stop_req:
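One reading of the new integer reloading counter (0 idle, 1 scheduled, 2 running), as a minimal self-contained sketch; this is an interpretation of the diff above, not code taken from it:

import threading
import time

class ReloadGate:
    """0 = idle, 1 = a reload thread is scheduled, 2 = reload work in progress."""

    def __init__(self) -> None:
        self.mutex = threading.Lock()
        self.reloading = 0

    def request(self) -> str:
        with self.mutex:
            if self.reloading:
                return "cannot reload; already in progress"
            self.reloading = 1
        threading.Thread(target=self._run, daemon=True).start()
        return "reload initiated"

    def _run(self) -> None:
        with self.mutex:
            if self.reloading != 1:
                return
            self.reloading = 2
            time.sleep(0.1)  # stand-in for the asrv/up2k/broker reload work
            self.reloading = 0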
@@ -725,17 +890,34 @@ class SvcHub(object):
             return

         with self.log_mutex:
-            zd = datetime.utcnow()
+            zd = datetime.now(UTC)
             ts = self.log_dfmt % (
                 zd.year,
                 zd.month * 100 + zd.day,
                 (zd.hour * 100 + zd.minute) * 100 + zd.second,
                 zd.microsecond // self.log_div,
             )
-            self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
+
+            if c and not self.args.no_ansi:
+                if isinstance(c, int):
+                    msg = "\033[3%sm%s\033[0m" % (c, msg)
+                elif "\033" not in c:
+                    msg = "\033[%sm%s\033[0m" % (c, msg)
+                else:
+                    msg = "%s%s\033[0m" % (c, msg)
+
+            if "\033" in src:
+                src += "\033[0m"
+
+            if "\033" in msg:
+                msg += "\033[0m"
+
+            self.logf.write("@%s [%-21s] %s\n" % (ts, src, msg))
+            if not self.args.no_logflush:
+                self.logf.flush()

             now = time.time()
-            if now >= self.next_day:
+            if int(now) >= self.next_day:
                 self._set_next_day()

     def _set_next_day(self) -> None:
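The colour handling added to the logfile writer follows the same int / SGR-string / pre-escaped convention as the console logger further down; a standalone sketch of that decision tree:

def colorize(msg: str, c, no_ansi: bool = False) -> str:
    # int -> foreground colour 30+c; plain string -> raw SGR params; already-escaped -> append reset
    if not c or no_ansi:
        return msg
    if isinstance(c, int):
        return "\033[3%sm%s\033[0m" % (c, msg)
    if "\033" not in c:
        return "\033[%sm%s\033[0m" % (c, msg)
    return "%s%s\033[0m" % (c, msg)

print(repr(colorize("hello", 3)))       # '\x1b[33mhello\x1b[0m' (yellow)
print(repr(colorize("hello", "1;31")))  # '\x1b[1;31mhello\x1b[0m' (bold red)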
@@ -743,7 +925,7 @@ class SvcHub(object):
             self.logf.close()
             self._setup_logfile("")

-        dt = datetime.utcnow()
+        dt = datetime.now(UTC)

         # unix timestamp of next 00:00:00 (leap-seconds safe)
         day_now = dt.day
@@ -751,14 +933,20 @@ class SvcHub(object):
             dt += timedelta(hours=12)

         dt = dt.replace(hour=0, minute=0, second=0)
-        self.next_day = calendar.timegm(dt.utctimetuple())
+        try:
+            tt = dt.utctimetuple()
+        except:
+            # still makes me hella uncomfortable
+            tt = dt.timetuple()
+
+        self.next_day = calendar.timegm(tt)

     def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
         """handles logging from all components"""
         with self.log_mutex:
             now = time.time()
-            if now >= self.next_day:
-                dt = datetime.utcfromtimestamp(now)
+            if int(now) >= self.next_day:
+                dt = datetime.fromtimestamp(now, UTC)
                 zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
                 zs = zs.format(dt.strftime("%Y-%m-%d"))
                 print(zs, end="")
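A standalone sketch of the "next UTC midnight" computation that _set_next_day performs with calendar.timegm; the loop is a simplification of the surrounding code, not a copy of it:

import calendar
from datetime import datetime, timedelta, timezone

dt = datetime.now(timezone.utc)

# step forward in 12-hour hops until the (UTC) day changes, then clamp to 00:00:00
day_now = dt.day
while dt.day == day_now:
    dt += timedelta(hours=12)
dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)

next_day = calendar.timegm(dt.utctimetuple())
print(next_day)  # unix timestamp of the next UTC midnight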
@@ -781,7 +969,7 @@ class SvcHub(object):
             else:
                 msg = "%s%s\033[0m" % (c, msg)

-            zd = datetime.utcfromtimestamp(now)
+            zd = datetime.fromtimestamp(now, UTC)
             ts = self.log_efmt % (
                 zd.hour,
                 zd.minute,
@@ -802,6 +990,8 @@ class SvcHub(object):

         if self.logf:
             self.logf.write(msg)
+            if not self.args.no_logflush:
+                self.logf.flush()

     def pr(self, *a: Any, **ka: Any) -> None:
         try:
@@ -6,6 +6,7 @@ import stat
 import time
 import zlib

+from .authsrv import AuthSrv
 from .bos import bos
 from .sutil import StreamArc, errdesc
 from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
@@ -218,12 +219,13 @@ class StreamZip(StreamArc):
     def __init__(
         self,
         log: "NamedLogger",
+        asrv: AuthSrv,
         fgen: Generator[dict[str, Any], None, None],
         utf8: bool = False,
         pre_crc: bool = False,
         **kwargs: Any
     ) -> None:
-        super(StreamZip, self).__init__(log, fgen)
+        super(StreamZip, self).__init__(log, asrv, fgen)

         self.utf8 = utf8
         self.pre_crc = pre_crc
@@ -248,7 +250,7 @@ class StreamZip(StreamArc):

         crc = 0
         if self.pre_crc:
-            for buf in yieldfile(src):
+            for buf in yieldfile(src, self.args.iobuf):
                 crc = zlib.crc32(buf, crc)

             crc &= 0xFFFFFFFF
@@ -257,7 +259,7 @@ class StreamZip(StreamArc):
         buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
         yield self._ct(buf)

-        for buf in yieldfile(src):
+        for buf in yieldfile(src, self.args.iobuf):
             if not self.pre_crc:
                 crc = zlib.crc32(buf, crc)

@@ -300,7 +302,7 @@ class StreamZip(StreamArc):
             mbuf = b""

         if errors:
-            errf, txt = errdesc(errors)
+            errf, txt = errdesc(self.asrv.vfs, errors)
             self.log("\n".join(([repr(errf)] + txt[1:])))
             for x in self.ser(errf):
                 yield x
@@ -15,6 +15,7 @@ from .util import (
     E_ADDR_IN_USE,
     E_ADDR_NOT_AVAIL,
     E_UNREACH,
+    HAVE_IPV6,
     IP6ALL,
     Netdev,
     min_ex,
@@ -111,8 +112,10 @@ class TcpSrv(object):

         eps = {
             "127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
-            "::1": Netdev("::1", 0, "", "local only"),
         }
+        if HAVE_IPV6:
+            eps["::1"] = Netdev("::1", 0, "", "local only")
+
         nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
         if nonlocals:
             try:
@@ -241,6 +244,11 @@ class TcpSrv(object):
                 raise OSError(E_ADDR_IN_USE[0], "")
             self.srv.append(srv)
         except (OSError, socket.error) as ex:
+            try:
+                srv.close()
+            except:
+                pass
+
             if ex.errno in E_ADDR_IN_USE:
                 e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
             elif ex.errno in E_ADDR_NOT_AVAIL:
@@ -304,6 +312,7 @@ class TcpSrv(object):
             self.hub.start_zeroconf()
             gencert(self.log, self.args, self.netdevs)
             self.hub.restart_ftpd()
+            self.hub.restart_tftpd()

     def shutdown(self) -> None:
         self.stopping = True
@@ -457,6 +466,12 @@ class TcpSrv(object):
         sys.stderr.flush()

     def _qr(self, t1: dict[str, list[int]], t2: dict[str, list[int]]) -> str:
+        t2c = {zs: zli for zs, zli in t2.items() if zs in ("127.0.0.1", "::1")}
+        t2b = {zs: zli for zs, zli in t2.items() if ":" in zs and zs not in t2c}
+        t2 = {zs: zli for zs, zli in t2.items() if zs not in t2b and zs not in t2c}
+        t2.update(t2b)  # first ipv4, then ipv6...
+        t2.update(t2c)  # ...and finally localhost
+
         ip = None
         ips = list(t1) + list(t2)
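The _qr reordering above relies on Python dicts preserving insertion order; a small demo with made-up addresses (3923 is just a placeholder port list):

t2 = {
    "::1": [3923],
    "192.168.1.5": [3923],
    "fd00::5": [3923],
    "127.0.0.1": [3923],
}

t2c = {k: v for k, v in t2.items() if k in ("127.0.0.1", "::1")}
t2b = {k: v for k, v in t2.items() if ":" in k and k not in t2c}
t2 = {k: v for k, v in t2.items() if k not in t2b and k not in t2c}
t2.update(t2b)  # first ipv4, then ipv6...
t2.update(t2c)  # ...and finally localhost

print(list(t2))  # ['192.168.1.5', 'fd00::5', '::1', '127.0.0.1']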
qri = self.args.qri
|
qri = self.args.qri
|
||||||
|
|||||||
429
copyparty/tftpd.py
Normal file
429
copyparty/tftpd.py
Normal file
@@ -0,0 +1,429 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
try:
|
||||||
|
from types import SimpleNamespace
|
||||||
|
except:
|
||||||
|
|
||||||
|
class SimpleNamespace(object):
|
||||||
|
def __init__(self, **attr):
|
||||||
|
self.__dict__.update(attr)
|
||||||
|
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import socket
|
||||||
|
import stat
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
try:
|
||||||
|
import inspect
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
from partftpy import (
|
||||||
|
TftpContexts,
|
||||||
|
TftpPacketFactory,
|
||||||
|
TftpPacketTypes,
|
||||||
|
TftpServer,
|
||||||
|
TftpStates,
|
||||||
|
)
|
||||||
|
from partftpy.TftpShared import TftpException
|
||||||
|
|
||||||
|
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||||
|
from .authsrv import VFS
|
||||||
|
from .bos import bos
|
||||||
|
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||||
|
|
||||||
|
if True: # pylint: disable=using-constant-test
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
|
||||||
|
lg = logging.getLogger("tftp")
|
||||||
|
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||||
|
|
||||||
|
|
||||||
|
def noop(*a, **ka) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _serverInitial(self, pkt: Any, raddress: str, rport: int) -> bool:
|
||||||
|
info("connection from %s:%s", raddress, rport)
|
||||||
|
ret = _sinitial[0](self, pkt, raddress, rport)
|
||||||
|
nm = _hub[0].args.tftp_ipa_nm
|
||||||
|
if nm and not nm.map(raddress):
|
||||||
|
yeet("client rejected (--tftp-ipa): %s" % (raddress,))
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
# patch ipa-check into partftpd (part 1/2)
|
||||||
|
_hub: list["SvcHub"] = []
|
||||||
|
_sinitial: list[Any] = []
|
||||||
|
|
||||||
|
|
||||||
|
class Tftpd(object):
|
||||||
|
def __init__(self, hub: "SvcHub") -> None:
|
||||||
|
self.hub = hub
|
||||||
|
self.args = hub.args
|
||||||
|
self.asrv = hub.asrv
|
||||||
|
self.log = hub.log
|
||||||
|
self.mutex = threading.Lock()
|
||||||
|
|
||||||
|
_hub[:] = []
|
||||||
|
_hub.append(hub)
|
||||||
|
|
||||||
|
lg.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||||
|
for x in ["partftpy", "partftpy.TftpStates", "partftpy.TftpServer"]:
|
||||||
|
lgr = logging.getLogger(x)
|
||||||
|
lgr.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||||
|
|
||||||
|
if not self.args.tftpv and not self.args.tftpvv:
|
||||||
|
# contexts -> states -> packettypes -> shared
|
||||||
|
# contexts -> packetfactory
|
||||||
|
# packetfactory -> packettypes
|
||||||
|
Cs = [
|
||||||
|
TftpPacketTypes,
|
||||||
|
TftpPacketFactory,
|
||||||
|
TftpStates,
|
||||||
|
TftpContexts,
|
||||||
|
TftpServer,
|
||||||
|
]
|
||||||
|
cbak = []
|
||||||
|
if not self.args.tftp_no_fast and not EXE and not PY2:
|
||||||
|
try:
|
||||||
|
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
||||||
|
for C in Cs:
|
||||||
|
cbak.append(C.__dict__)
|
||||||
|
src1 = inspect.getsource(C).split("\n")
|
||||||
|
src2 = "\n".join([ptn.sub("\\1pass", ln) for ln in src1])
|
||||||
|
cfn = C.__spec__.origin
|
||||||
|
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
||||||
|
except Exception:
|
||||||
|
t = "failed to optimize tftp code; run with --tftp-no-fast if there are issues:\n"
|
||||||
|
self.log("tftp", t + min_ex(), 3)
|
||||||
|
for n, zd in enumerate(cbak):
|
||||||
|
Cs[n].__dict__ = zd
|
||||||
|
|
||||||
|
for C in Cs:
|
||||||
|
C.log.debug = noop
|
||||||
|
|
||||||
|
# patch ipa-check into partftpd (part 2/2)
|
||||||
|
_sinitial[:] = []
|
||||||
|
_sinitial.append(TftpStates.TftpServerState.serverInitial)
|
||||||
|
TftpStates.TftpServerState.serverInitial = _serverInitial
|
||||||
|
|
||||||
|
# patch vfs into partftpy
|
||||||
|
TftpContexts.open = self._open
|
||||||
|
TftpStates.open = self._open
|
||||||
|
|
||||||
|
fos = SimpleNamespace()
|
||||||
|
for k in os.__dict__:
|
||||||
|
try:
|
||||||
|
setattr(fos, k, getattr(os, k))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
fos.access = self._access
|
||||||
|
fos.mkdir = self._mkdir
|
||||||
|
fos.unlink = self._unlink
|
||||||
|
fos.sep = "/"
|
||||||
|
TftpContexts.os = fos
|
||||||
|
TftpServer.os = fos
|
||||||
|
TftpStates.os = fos
|
||||||
|
|
||||||
|
fop = SimpleNamespace()
|
||||||
|
for k in os.path.__dict__:
|
||||||
|
try:
|
||||||
|
setattr(fop, k, getattr(os.path, k))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
fop.abspath = self._p_abspath
|
||||||
|
fop.exists = self._p_exists
|
||||||
|
fop.isdir = self._p_isdir
|
||||||
|
fop.normpath = self._p_normpath
|
||||||
|
fos.path = fop
|
||||||
|
|
||||||
|
self._disarm(fos)
|
||||||
|
|
||||||
|
self.port = int(self.args.tftp)
|
||||||
|
self.srv = []
|
||||||
|
self.ips = []
|
||||||
|
|
||||||
|
ports = []
|
||||||
|
if self.args.tftp_pr:
|
||||||
|
p1, p2 = [int(x) for x in self.args.tftp_pr.split("-")]
|
||||||
|
ports = list(range(p1, p2 + 1))
|
||||||
|
|
||||||
|
ips = self.args.i
|
||||||
|
if "::" in ips:
|
||||||
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
|
if self.args.tftp4:
|
||||||
|
ips = [x for x in ips if ":" not in x]
|
||||||
|
|
||||||
|
ips = list(ODict.fromkeys(ips)) # dedup
|
||||||
|
|
||||||
|
for ip in ips:
|
||||||
|
name = "tftp_%s" % (ip,)
|
||||||
|
Daemon(self._start, name, [ip, ports])
|
||||||
|
time.sleep(0.2) # give dualstack a chance
|
||||||
|
|
||||||
|
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
|
self.log("tftp", msg, c)
|
||||||
|
|
||||||
|
def _start(self, ip, ports):
|
||||||
|
fam = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
||||||
|
have_been_alive = False
|
||||||
|
while True:
|
||||||
|
srv = TftpServer.TftpServer("/", self._ls)
|
||||||
|
with self.mutex:
|
||||||
|
self.srv.append(srv)
|
||||||
|
self.ips.append(ip)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# this is the listen loop; it should block forever
|
||||||
|
srv.listen(ip, self.port, af_family=fam, ports=ports)
|
||||||
|
except:
|
||||||
|
with self.mutex:
|
||||||
|
self.srv.remove(srv)
|
||||||
|
self.ips.remove(ip)
|
||||||
|
|
||||||
|
try:
|
||||||
|
srv.sock.close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
bound = bool(srv.listenport)
|
||||||
|
except:
|
||||||
|
bound = False
|
||||||
|
|
||||||
|
if bound:
|
||||||
|
# this instance has managed to bind at least once
|
||||||
|
have_been_alive = True
|
||||||
|
|
||||||
|
if have_been_alive:
|
||||||
|
t = "tftp server [%s]:%d crashed; restarting in 3 sec:\n%s"
|
||||||
|
error(t, ip, self.port, min_ex())
|
||||||
|
time.sleep(3)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# server failed to start; could be due to dualstack (ipv6 managed to bind and this is ipv4)
|
||||||
|
if ip != "0.0.0.0" or "::" not in self.ips:
|
||||||
|
# nope, it's fatal
|
||||||
|
t = "tftp server [%s]:%d failed to start:\n%s"
|
||||||
|
error(t, ip, self.port, min_ex())
|
||||||
|
|
||||||
|
# yep; ignore
|
||||||
|
# (TODO: move the "listening @ ..." infolog in partftpy to
|
||||||
|
# after the bind attempt so it doesn't print twice)
|
||||||
|
return
|
||||||
|
|
||||||
|
info("tftp server [%s]:%d terminated", ip, self.port)
|
||||||
|
break
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
with self.mutex:
|
||||||
|
srvs = self.srv[:]
|
||||||
|
|
||||||
|
for srv in srvs:
|
||||||
|
srv.stop()
|
||||||
|
|
||||||
|
def _v2a(self, caller: str, vpath: str, perms: list, *a: Any) -> tuple[VFS, str]:
|
||||||
|
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||||
|
if not perms:
|
||||||
|
perms = [True, True]
|
||||||
|
|
||||||
|
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||||
|
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||||
|
return vfs, vfs.canonical(rem)
|
||||||
|
|
||||||
|
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||||
|
# generate file listing if vpath is dir.txt and return as file object
|
||||||
|
if not force:
|
||||||
|
vpath, fn = os.path.split(vpath.replace("\\", "/"))
|
||||||
|
ptn = self.args.tftp_lsf
|
||||||
|
if not ptn or not ptn.match(fn.lower()):
|
||||||
|
return None
|
||||||
|
|
||||||
|
vn, rem = self.asrv.vfs.get(vpath, "*", True, False)
|
||||||
|
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||||
|
rem,
|
||||||
|
"*",
|
||||||
|
not self.args.no_scandir,
|
||||||
|
[[True, False]],
|
||||||
|
)
|
||||||
|
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||||
|
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||||
|
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||||
|
real1 = dirs1 + fils1
|
||||||
|
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
||||||
|
reals = [
|
||||||
|
(
|
||||||
|
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||||
|
% (
|
||||||
|
zd.year,
|
||||||
|
zd.month,
|
||||||
|
zd.day,
|
||||||
|
zd.hour,
|
||||||
|
zd.minute,
|
||||||
|
zd.second,
|
||||||
|
),
|
||||||
|
sz,
|
||||||
|
fn,
|
||||||
|
)
|
||||||
|
for zd, sz, fn in realt
|
||||||
|
]
|
||||||
|
virs = [("????-??-?? ??:??:??", 0, k + "/") for k in vfs_virt.keys()]
|
||||||
|
ls = virs + reals
|
||||||
|
|
||||||
|
if "*" not in vn.axs.udot:
|
||||||
|
names = set(exclude_dotfiles([x[2] for x in ls]))
|
||||||
|
ls = [x for x in ls if x[2] in names]
|
||||||
|
|
||||||
|
try:
|
||||||
|
biggest = max([x[1] for x in ls])
|
||||||
|
except:
|
||||||
|
biggest = 0
|
||||||
|
|
||||||
|
perms = []
|
||||||
|
if "*" in vn.axs.uread:
|
||||||
|
perms.append("read")
|
||||||
|
if "*" in vn.axs.udot:
|
||||||
|
perms.append("hidden")
|
||||||
|
if "*" in vn.axs.uwrite:
|
||||||
|
if "*" in vn.axs.udel:
|
||||||
|
perms.append("overwrite")
|
||||||
|
else:
|
||||||
|
perms.append("write")
|
||||||
|
|
||||||
|
fmt = "{{}} {{:{},}} {{}}"
|
||||||
|
fmt = fmt.format(len("{:,}".format(biggest)))
|
||||||
|
retl = ["# permissions: %s" % (", ".join(perms),)]
|
||||||
|
retl += [fmt.format(*x) for x in ls]
|
||||||
|
ret = "\n".join(retl).encode("utf-8", "replace")
|
||||||
|
return BytesIO(ret + b"\n")
|
||||||
|
|
||||||
|
def _open(self, vpath: str, mode: str, *a: Any, **ka: Any) -> Any:
|
||||||
|
rd = wr = False
|
||||||
|
if mode == "rb":
|
||||||
|
rd = True
|
||||||
|
elif mode == "wb":
|
||||||
|
wr = True
|
||||||
|
else:
|
||||||
|
raise Exception("bad mode %s" % (mode,))
|
||||||
|
|
||||||
|
vfs, ap = self._v2a("open", vpath, [rd, wr])
|
||||||
|
if wr:
|
||||||
|
if "*" not in vfs.axs.uwrite:
|
||||||
|
yeet("blocked write; folder not world-writable: /%s" % (vpath,))
|
||||||
|
|
||||||
|
if bos.path.exists(ap) and "*" not in vfs.axs.udel:
|
||||||
|
yeet("blocked write; folder not world-deletable: /%s" % (vpath,))
|
||||||
|
|
||||||
|
xbu = vfs.flags.get("xbu")
|
||||||
|
if xbu and not runhook(
|
||||||
|
self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "8.3.8.7", 0, ""
|
||||||
|
):
|
||||||
|
yeet("blocked by xbu server config: " + vpath)
|
||||||
|
|
||||||
|
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||||
|
return self._ls(vpath, "", 0, True)
|
||||||
|
|
||||||
|
if not a:
|
||||||
|
a = [self.args.iobuf]
|
||||||
|
|
||||||
|
return open(ap, mode, *a, **ka)
|
||||||
|
|
||||||
|
def _mkdir(self, vpath: str, *a) -> None:
|
||||||
|
vfs, ap = self._v2a("mkdir", vpath, [])
|
||||||
|
if "*" not in vfs.axs.uwrite:
|
||||||
|
yeet("blocked mkdir; folder not world-writable: /%s" % (vpath,))
|
||||||
|
|
||||||
|
return bos.mkdir(ap)
|
||||||
|
|
||||||
|
def _unlink(self, vpath: str) -> None:
|
||||||
|
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
|
||||||
|
vfs, ap = self._v2a("delete", vpath, [True, False, False, True])
|
||||||
|
|
||||||
|
try:
|
||||||
|
inf = bos.stat(ap)
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
if not stat.S_ISREG(inf.st_mode) or inf.st_size:
|
||||||
|
yeet("attempted delete of non-empty file")
|
||||||
|
|
||||||
|
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||||
|
self.hub.up2k.handle_rm("*", "8.3.8.7", [vpath], [], False, False)
|
||||||
|
|
||||||
|
def _access(self, *a: Any) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _p_abspath(self, vpath: str) -> str:
|
||||||
|
return "/" + undot(vpath)
|
||||||
|
|
||||||
|
def _p_normpath(self, *a: Any) -> str:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def _p_exists(self, vpath: str) -> bool:
|
||||||
|
try:
|
||||||
|
ap = self._v2a("p.exists", vpath, [False, False])[1]
|
||||||
|
bos.stat(ap)
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _p_isdir(self, vpath: str) -> bool:
|
||||||
|
try:
|
||||||
|
st = bos.stat(self._v2a("p.isdir", vpath, [False, False])[1])
|
||||||
|
ret = stat.S_ISDIR(st.st_mode)
|
||||||
|
return ret
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||||
|
src = inspect.currentframe().f_back.f_code.co_name
|
||||||
|
error("\033[31m%s:hook(%s)\033[0m", src, a)
|
||||||
|
raise Exception("nope")
|
||||||
|
|
||||||
|
def _disarm(self, fos: SimpleNamespace) -> None:
|
||||||
|
fos.chmod = self._hook
|
||||||
|
fos.chown = self._hook
|
||||||
|
fos.close = self._hook
|
||||||
|
fos.ftruncate = self._hook
|
||||||
|
fos.lchown = self._hook
|
||||||
|
fos.link = self._hook
|
||||||
|
fos.listdir = self._hook
|
||||||
|
fos.lstat = self._hook
|
||||||
|
fos.open = self._hook
|
||||||
|
fos.remove = self._hook
|
||||||
|
fos.rename = self._hook
|
||||||
|
fos.replace = self._hook
|
||||||
|
fos.scandir = self._hook
|
||||||
|
fos.stat = self._hook
|
||||||
|
fos.symlink = self._hook
|
||||||
|
fos.truncate = self._hook
|
||||||
|
fos.utime = self._hook
|
||||||
|
fos.walk = self._hook
|
||||||
|
|
||||||
|
fos.path.expanduser = self._hook
|
||||||
|
fos.path.expandvars = self._hook
|
||||||
|
fos.path.getatime = self._hook
|
||||||
|
fos.path.getctime = self._hook
|
||||||
|
fos.path.getmtime = self._hook
|
||||||
|
fos.path.getsize = self._hook
|
||||||
|
fos.path.isabs = self._hook
|
||||||
|
fos.path.isfile = self._hook
|
||||||
|
fos.path.islink = self._hook
|
||||||
|
fos.path.realpath = self._hook
|
||||||
|
|
||||||
|
|
||||||
|
def yeet(msg: str) -> None:
|
||||||
|
warning(msg)
|
||||||
|
raise TftpException(msg)
|
||||||
@@ -31,7 +31,7 @@ class ThumbCli(object):
             if not c:
                 raise Exception()
         except:
-            c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
+            c = {k: set() for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}

         self.thumbable = c["thumbable"]
         self.fmt_pil = c["pil"]
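The fallback fix above matters because an empty {} literal builds a dict, not a set; a two-line illustration:

c = {k: {} for k in ["thumbable", "pil"]}
print(type(c["pil"]))      # <class 'dict'>

c = {k: set() for k in ["thumbable", "pil"]}
print(type(c["pil"]))      # <class 'set'>
print("jpg" in c["pil"])   # membership tests now behave like the populated case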
@@ -57,9 +57,10 @@ class ThumbCli(object):
         if is_vid and "dvthumb" in dbv.flags:
             return None

-        want_opus = fmt in ("opus", "caf")
+        want_opus = fmt in ("opus", "caf", "mp3")
         is_au = ext in self.fmt_ffa
-        if is_au:
+        is_vau = want_opus and ext in self.fmt_ffv
+        if is_au or is_vau:
             if want_opus:
                 if self.args.no_acode:
                     return None
@@ -78,23 +79,46 @@ class ThumbCli(object):
|
|||||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
|
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
|
||||||
return os.path.join(ptop, rem)
|
return os.path.join(ptop, rem)
|
||||||
|
|
||||||
if fmt == "j" and self.args.th_no_jpg:
|
if fmt[:1] in "jw":
|
||||||
fmt = "w"
|
sfmt = fmt[:1]
|
||||||
|
|
||||||
if fmt == "w":
|
if sfmt == "j" and self.args.th_no_jpg:
|
||||||
if (
|
sfmt = "w"
|
||||||
self.args.th_no_webp
|
|
||||||
or (is_img and not self.can_webp)
|
if sfmt == "w":
|
||||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
if (
|
||||||
):
|
self.args.th_no_webp
|
||||||
fmt = "j"
|
or (is_img and not self.can_webp)
|
||||||
|
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||||
|
):
|
||||||
|
sfmt = "j"
|
||||||
|
|
||||||
|
vf_crop = dbv.flags["crop"]
|
||||||
|
vf_th3x = dbv.flags["th3x"]
|
||||||
|
|
||||||
|
if "f" in vf_crop:
|
||||||
|
sfmt += "f" if "n" in vf_crop else ""
|
||||||
|
else:
|
||||||
|
sfmt += "f" if "f" in fmt else ""
|
||||||
|
|
||||||
|
if "f" in vf_th3x:
|
||||||
|
sfmt += "3" if "y" in vf_th3x else ""
|
||||||
|
else:
|
||||||
|
sfmt += "3" if "3" in fmt else ""
|
||||||
|
|
||||||
|
fmt = sfmt
|
||||||
|
|
||||||
|
elif fmt[:1] == "p" and not is_au and not is_vid:
|
||||||
|
t = "cannot thumbnail [%s]: png only allowed for waveforms"
|
||||||
|
self.log(t % (rem), 6)
|
||||||
|
return None
|
||||||
|
|
||||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
if not histpath:
|
if not histpath:
|
||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
tpaths = [tpath]
|
tpaths = [tpath]
|
||||||
if fmt == "w":
|
if fmt == "w":
|
||||||
# also check for jpg (maybe webp is unavailable)
|
# also check for jpg (maybe webp is unavailable)
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ from queue import Queue
 from .__init__ import ANYWIN, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
-from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
+from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
+from .util import BytesIO  # type: ignore
 from .util import (
     FFMPEG_URL,
-    BytesIO,
     Cooldown,
     Daemon,
     Pebkac,
@@ -28,6 +28,8 @@ from .util import (
     runcmd,
     statdir,
     vsplit,
+    wrename,
+    wunlink,
 )

 if True:  # pylint: disable=using-constant-test
@@ -37,14 +39,21 @@ if TYPE_CHECKING:
     from .svchub import SvcHub

 HAVE_PIL = False
+HAVE_PILF = False
 HAVE_HEIF = False
 HAVE_AVIF = False
 HAVE_WEBP = False

 try:
-    from PIL import ExifTags, Image, ImageOps
+    from PIL import ExifTags, Image, ImageFont, ImageOps

     HAVE_PIL = True
+    try:
+        ImageFont.load_default(size=16)
+        HAVE_PILF = True
+    except:
+        pass
+
     try:
         Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
         HAVE_WEBP = True
@@ -79,29 +88,36 @@ except:
     HAVE_VIPS = False


-def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
+def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
     # base16 = 16 = 256
     # b64-lc = 38 = 1444
     # base64 = 64 = 4096
     rd, fn = vsplit(rem)
-    if rd:
-        h = hashlib.sha512(afsenc(rd)).digest()
-        b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
-        rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
-    else:
-        rd = "top"
+    if not rd:
+        rd = "\ntop"
+
+    # spectrograms are never cropped; strip fullsize flag
+    ext = rem.split(".")[-1].lower()
+    if ext in ffa and fmt[:2] in ("wf", "jf"):
+        fmt = fmt.replace("f", "")
+
+    rd += "\n" + fmt
+    h = hashlib.sha512(afsenc(rd)).digest()
+    b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
+    rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64

     # could keep original filenames but this is safer re pathlen
     h = hashlib.sha512(afsenc(fn)).digest()
     fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

-    if fmt in ("opus", "caf"):
+    if fmt in ("opus", "caf", "mp3"):
         cat = "ac"
     else:
-        fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
+        fc = fmt[:1]
+        fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
         cat = "th"

-    return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
+    return "%s/%s/%s/%s.%x.%s" % (histpath, cat, rd, fn, int(mtime), fmt)


 class ThumbSrv(object):
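A sketch of how the reworked thumb_path derives its cache location, with afsenc replaced by plain UTF-8 encoding for the demo and made-up names and mtime:

import base64
import hashlib

def b64hash(txt: str) -> str:
    # afsenc() in copyparty encodes for the filesystem; plain UTF-8 is close enough here
    h = hashlib.sha512(txt.encode("utf-8")).digest()
    return base64.urlsafe_b64encode(h).decode("ascii")[:24]

# directory key: relative dir (or "\ntop") plus "\n" plus the thumbnail format,
# then sharded into two 2-character directory levels
rd = b64hash("music/album\nw")
rd = ("%s/%s/" % (rd[:2], rd[2:4])).lower() + rd

fn = b64hash("track01.flac")  # file key: just the hashed filename
mtime = 1707000000            # made-up

print("%s/%s/%s.%x.%s" % ("th", rd, fn, int(mtime), "webp"))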
@@ -115,10 +131,12 @@ class ThumbSrv(object):

         self.mutex = threading.Lock()
         self.busy: dict[str, list[threading.Condition]] = {}
+        self.ram: dict[str, float] = {}
+        self.memcond = threading.Condition(self.mutex)
         self.stopping = False
         self.nthr = max(1, self.args.th_mt)

-        self.q: Queue[Optional[tuple[str, str, VFS]]] = Queue(self.nthr * 4)
+        self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
         for n in range(self.nthr):
             Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))

@@ -183,9 +201,10 @@ class ThumbSrv(object):
         with self.mutex:
             return not self.nthr

-    def getres(self, vn: VFS) -> tuple[int, int]:
+    def getres(self, vn: VFS, fmt: str) -> tuple[int, int]:
+        mul = 3 if "3" in fmt else 1
         w, h = vn.flags["thsize"].split("x")
-        return int(w), int(h)
+        return int(w) * mul, int(h) * mul

     def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
         histpath = self.asrv.vfs.histtab.get(ptop)
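The new fmt parameter lets a trailing "3" in the format string triple the configured thumbnail resolution; a tiny sketch, with "320x256" standing in for whatever thsize a volume is configured with:

def getres(thsize: str, fmt: str) -> tuple[int, int]:
    # "3" in the format string requests 3x resolution (the th3x behaviour above)
    mul = 3 if "3" in fmt else 1
    w, h = thsize.split("x")
    return int(w) * mul, int(h) * mul

print(getres("320x256", "w"))   # (320, 256)
print(getres("320x256", "w3"))  # (960, 768)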
@@ -193,14 +212,14 @@ class ThumbSrv(object):
             self.log("no histpath for [{}]".format(ptop))
             return None

-        tpath = thumb_path(histpath, rem, mtime, fmt)
+        tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
         abspath = os.path.join(ptop, rem)
         cond = threading.Condition(self.mutex)
         do_conv = False
         with self.mutex:
             try:
                 self.busy[tpath].append(cond)
-                self.log("wait {}".format(tpath))
+                self.log("joined waiting room for %s" % (tpath,))
             except:
                 thdir = os.path.dirname(tpath)
                 bos.makedirs(os.path.join(thdir, "w"))
@@ -220,8 +239,8 @@ class ThumbSrv(object):
                 self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
                 vn = self.asrv.vfs.all_aps[0][1]

-            self.q.put((abspath, tpath, vn))
-            self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
+            self.q.put((abspath, tpath, fmt, vn))
+            self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)

         while not self.stopping:
             with self.mutex:
@@ -251,46 +270,79 @@ class ThumbSrv(object):
|
|||||||
"ffa": self.fmt_ffa,
|
"ffa": self.fmt_ffa,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def wait4ram(self, need: float, ttpath: str) -> None:
|
||||||
|
ram = self.args.th_ram_max
|
||||||
|
if need > ram * 0.99:
|
||||||
|
t = "file too big; need %.2f GiB RAM, but --th-ram-max is only %.1f"
|
||||||
|
raise Exception(t % (need, ram))
|
||||||
|
|
||||||
|
while True:
|
||||||
|
with self.mutex:
|
||||||
|
used = sum([v for k, v in self.ram.items() if k != ttpath]) + need
|
||||||
|
if used < ram:
|
||||||
|
# self.log("XXX self.ram: %s" % (self.ram,), 5)
|
||||||
|
self.ram[ttpath] = need
|
||||||
|
return
|
||||||
|
with self.memcond:
|
||||||
|
# self.log("at RAM limit; used %.2f GiB, need %.2f more" % (used-need, need), 1)
|
||||||
|
self.memcond.wait(3)
|
||||||
|
|
||||||
def worker(self) -> None:
|
def worker(self) -> None:
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
task = self.q.get()
|
task = self.q.get()
|
||||||
if not task:
|
if not task:
|
||||||
break
|
break
|
||||||
|
|
||||||
abspath, tpath, vn = task
|
abspath, tpath, fmt, vn = task
|
||||||
ext = abspath.split(".")[-1].lower()
|
ext = abspath.split(".")[-1].lower()
|
||||||
png_ok = False
|
png_ok = False
|
||||||
funs = []
|
funs = []
|
||||||
|
|
||||||
|
if ext in self.args.au_unpk:
|
||||||
|
ap_unpk = au_unpk(self.log, self.args.au_unpk, abspath, vn)
|
||||||
|
else:
|
||||||
|
ap_unpk = abspath
|
||||||
|
|
||||||
if not bos.path.exists(tpath):
|
if not bos.path.exists(tpath):
|
||||||
|
want_mp3 = tpath.endswith(".mp3")
|
||||||
|
want_opus = tpath.endswith(".opus") or tpath.endswith(".caf")
|
||||||
|
want_png = tpath.endswith(".png")
|
||||||
|
want_au = want_mp3 or want_opus
|
||||||
for lib in self.args.th_dec:
|
for lib in self.args.th_dec:
|
||||||
|
can_au = lib == "ff" and (
|
||||||
|
ext in self.fmt_ffa or ext in self.fmt_ffv
|
||||||
|
)
|
||||||
|
|
||||||
if lib == "pil" and ext in self.fmt_pil:
|
if lib == "pil" and ext in self.fmt_pil:
|
||||||
funs.append(self.conv_pil)
|
funs.append(self.conv_pil)
|
||||||
elif lib == "vips" and ext in self.fmt_vips:
|
elif lib == "vips" and ext in self.fmt_vips:
|
||||||
funs.append(self.conv_vips)
|
funs.append(self.conv_vips)
|
||||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
elif can_au and (want_png or want_au):
|
||||||
funs.append(self.conv_ffmpeg)
|
if want_opus:
|
||||||
elif lib == "ff" and ext in self.fmt_ffa:
|
|
||||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
|
||||||
funs.append(self.conv_opus)
|
funs.append(self.conv_opus)
|
||||||
elif tpath.endswith(".png"):
|
elif want_mp3:
|
||||||
|
funs.append(self.conv_mp3)
|
||||||
|
elif want_png:
|
||||||
funs.append(self.conv_waves)
|
funs.append(self.conv_waves)
|
||||||
png_ok = True
|
png_ok = True
|
||||||
else:
|
elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
|
||||||
funs.append(self.conv_spec)
|
funs.append(self.conv_ffmpeg)
|
||||||
|
elif lib == "ff" and ext in self.fmt_ffa and not want_au:
|
||||||
if not png_ok and tpath.endswith(".png"):
|
funs.append(self.conv_spec)
|
||||||
raise Pebkac(400, "png only allowed for waveforms")
|
|
||||||
|
|
||||||
tdir, tfn = os.path.split(tpath)
|
tdir, tfn = os.path.split(tpath)
|
||||||
ttpath = os.path.join(tdir, "w", tfn)
|
ttpath = os.path.join(tdir, "w", tfn)
|
||||||
try:
|
try:
|
||||||
bos.unlink(ttpath)
|
wunlink(self.log, ttpath, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
for fun in funs:
|
for fun in funs:
|
||||||
try:
|
try:
|
||||||
fun(abspath, ttpath, vn)
|
if not png_ok and tpath.endswith(".png"):
|
||||||
|
raise Exception("png only allowed for waveforms")
|
||||||
|
|
||||||
|
fun(ap_unpk, ttpath, fmt, vn)
|
||||||
break
|
break
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
msg = "{} could not create thumbnail of {}\n{}"
|
msg = "{} could not create thumbnail of {}\n{}"
|
||||||
@@ -304,29 +356,36 @@ class ThumbSrv(object):
|
|||||||
else:
|
else:
|
||||||
# ffmpeg may spawn empty files on windows
|
# ffmpeg may spawn empty files on windows
|
||||||
try:
|
try:
|
||||||
os.unlink(ttpath)
|
wunlink(self.log, ttpath, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
if abspath != ap_unpk:
|
||||||
|
wunlink(self.log, ap_unpk, vn.flags)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
bos.rename(ttpath, tpath)
|
wrename(self.log, ttpath, tpath, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
subs = self.busy[tpath]
|
subs = self.busy[tpath]
|
||||||
del self.busy[tpath]
|
del self.busy[tpath]
|
||||||
|
self.ram.pop(ttpath, None)
|
||||||
|
|
||||||
for x in subs:
|
for x in subs:
|
||||||
with x:
|
with x:
|
||||||
x.notify_all()
|
x.notify_all()
|
||||||
|
|
||||||
|
with self.memcond:
|
||||||
|
self.memcond.notify_all()
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.nthr -= 1
|
self.nthr -= 1
|
||||||
|
|
||||||
def fancy_pillow(self, im: "Image.Image", vn: VFS) -> "Image.Image":
|
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
|
||||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||||
res = self.getres(vn)
|
res = self.getres(vn, fmt)
|
||||||
r = max(*res) * 2
|
r = max(*res) * 2
|
||||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||||
try:
|
try:
|
||||||
@@ -341,7 +400,7 @@ class ThumbSrv(object):
             if rot in rots:
                 im = im.transpose(rots[rot])

-        if "nocrop" in vn.flags:
+        if "f" in fmt:
             im.thumbnail(res, resample=Image.LANCZOS)
         else:
             iw, ih = im.size
@@ -351,13 +410,14 @@ class ThumbSrv(object):

         return im

-    def conv_pil(self, abspath: str, tpath: str, vn: VFS) -> None:
+    def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
+        self.wait4ram(0.2, tpath)
         with Image.open(fsenc(abspath)) as im:
             try:
-                im = self.fancy_pillow(im, vn)
+                im = self.fancy_pillow(im, fmt, vn)
             except Exception as ex:
                 self.log("fancy_pillow {}".format(ex), "90")
-                im.thumbnail(self.getres(vn))
+                im.thumbnail(self.getres(vn, fmt))

             fmts = ["RGB", "L"]
             args = {"quality": 40}
@@ -368,7 +428,7 @@ class ThumbSrv(object):
                 # method 0 = pillow-default, fast
                 # method 4 = ffmpeg-default
                 # method 6 = max, slow
-                fmts += ["RGBA", "LA"]
+                fmts.extend(("RGBA", "LA"))
                 args["method"] = 6
             else:
                 # default q = 75
@@ -380,12 +440,13 @@ class ThumbSrv(object):

             im.save(tpath, **args)

-    def conv_vips(self, abspath: str, tpath: str, vn: VFS) -> None:
+    def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
+        self.wait4ram(0.2, tpath)
         crops = ["centre", "none"]
-        if "nocrop" in vn.flags:
+        if "f" in fmt:
             crops = ["none"]

-        w, h = self.getres(vn)
+        w, h = self.getres(vn, fmt)
         kw = {"height": h, "size": "down", "intent": "relative"}

         for c in crops:
@@ -397,9 +458,11 @@ class ThumbSrv(object):
                 if c == crops[-1]:
                     raise

+        assert img  # type: ignore
         img.write_to_file(tpath, Q=40)

-    def conv_ffmpeg(self, abspath: str, tpath: str, vn: VFS) -> None:
+    def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
+        self.wait4ram(0.2, tpath)
         ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
         if not ret:
             return
@@ -412,12 +475,12 @@ class ThumbSrv(object):
|
|||||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||||
|
|
||||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||||
if "nocrop" in vn.flags:
|
if "f" in fmt:
|
||||||
scale += "decrease,setsar=1:1"
|
scale += "decrease,setsar=1:1"
|
||||||
else:
|
else:
|
||||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||||
|
|
||||||
res = self.getres(vn)
|
res = self.getres(vn, fmt)
|
||||||
bscale = scale.format(*list(res)).encode("utf-8")
|
bscale = scale.format(*list(res)).encode("utf-8")
|
||||||
# fmt: off
|
# fmt: off
|
||||||
cmd = [
|
cmd = [
|
||||||
@@ -452,9 +515,9 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
def _run_ff(self, cmd: list[bytes], vn: VFS, oom: int = 400) -> None:
|
||||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"])
|
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=oom)
|
||||||
if not ret:
|
if not ret:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -497,13 +560,26 @@ class ThumbSrv(object):
|
|||||||
self.log(t + txt, c=c)
|
self.log(t + txt, c=c)
|
||||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||||
|
|
||||||
def conv_waves(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_waves(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in ret:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
|
|
||||||
flt = (
|
# jt_versi.xm: 405M/839s
|
||||||
b"[0:a:0]"
|
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||||
|
need = 0.2 + dur / 3000
|
||||||
|
speedup = b""
|
||||||
|
if need > self.args.th_ram_max * 0.7:
|
||||||
|
self.log("waves too big (need %.2f GiB); trying to optimize" % (need,))
|
||||||
|
need = 0.2 + dur / 4200 # only helps about this much...
|
||||||
|
speedup = b"aresample=8000,"
|
||||||
|
if need > self.args.th_ram_max * 0.96:
|
||||||
|
raise Exception("file too big; cannot waves")
|
||||||
|
|
||||||
|
self.wait4ram(need, tpath)
|
||||||
|
|
||||||
|
flt = b"[0:a:0]" + speedup
|
||||||
|
flt += (
|
||||||
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
|
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
|
||||||
b",volume=2"
|
b",volume=2"
|
||||||
b",showwavespic=s=2048x64:colors=white"
|
b",showwavespic=s=2048x64:colors=white"
|
||||||
@@ -525,12 +601,44 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def conv_spec(self, abspath: str, tpath: str, vn: VFS) -> None:
|
if "pngquant" in vn.flags:
|
||||||
|
wtpath = tpath + ".png"
|
||||||
|
cmd = [
|
||||||
|
b"pngquant",
|
||||||
|
b"--strip",
|
||||||
|
b"--nofs",
|
||||||
|
b"--output",
|
||||||
|
fsenc(wtpath),
|
||||||
|
fsenc(tpath),
|
||||||
|
]
|
||||||
|
ret = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=400)[0]
|
||||||
|
if ret:
|
||||||
|
try:
|
||||||
|
wunlink(self.log, wtpath, vn.flags)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
wrename(self.log, wtpath, tpath, vn.flags)
|
||||||
|
|
||||||
|
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in ret:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
|
|
||||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
# https://trac.ffmpeg.org/ticket/10797
|
||||||
|
# expect 1 GiB every 600 seconds when duration is tricky;
|
||||||
|
# simple filetypes are generally safer so let's special-case those
|
||||||
|
safe = ("flac", "wav", "aif", "aiff", "opus")
|
||||||
|
coeff = 1800 if abspath.split(".")[-1].lower() in safe else 600
|
||||||
|
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||||
|
need = 0.2 + dur / coeff
|
||||||
|
self.wait4ram(need, tpath)
|
||||||
|
|
||||||
|
fc = "[0:a:0]aresample=48000{},showspectrumpic=s="
|
||||||
|
if "3" in fmt:
|
||||||
|
fc += "1280x1024,crop=1420:1056:70:48[o]"
|
||||||
|
else:
|
||||||
|
fc += "640x512,crop=780:544:70:48[o]"
|
||||||
|
|
||||||
if self.args.th_ff_swr:
|
if self.args.th_ff_swr:
|
||||||
fco = ":filter_size=128:cutoff=0.877"
|
fco = ":filter_size=128:cutoff=0.877"
|
||||||
@@ -568,30 +676,71 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def conv_opus(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_mp3(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
if self.args.no_acode:
|
quality = self.args.q_mp3.lower()
|
||||||
|
if self.args.no_acode or not quality:
|
||||||
raise Exception("disabled in server config")
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
self.wait4ram(0.2, tpath)
|
||||||
if "ac" not in ret:
|
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
|
if "ac" not in tags:
|
||||||
|
raise Exception("not audio")
|
||||||
|
|
||||||
|
if quality.endswith("k"):
|
||||||
|
qk = b"-b:a"
|
||||||
|
qv = quality.encode("ascii")
|
||||||
|
else:
|
||||||
|
qk = b"-q:a"
|
||||||
|
qv = quality[1:].encode("ascii")
|
||||||
|
|
||||||
|
# extremely conservative choices for output format
|
||||||
|
# (always 2ch 44k1) because if a device is old enough
|
||||||
|
# to not support opus then it's probably also super picky
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-v", b"error",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-i", fsenc(abspath),
|
||||||
|
] + self.big_tags(rawtags) + [
|
||||||
|
b"-map", b"0:a:0",
|
||||||
|
b"-ar", b"44100",
|
||||||
|
b"-ac", b"2",
|
||||||
|
b"-c:a", b"libmp3lame",
|
||||||
|
qk, qv,
|
||||||
|
fsenc(tpath)
|
||||||
|
]
|
||||||
|
# fmt: on
|
||||||
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
|
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
|
if self.args.no_acode or not self.args.q_opus:
|
||||||
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
|
self.wait4ram(0.2, tpath)
|
||||||
|
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
|
if "ac" not in tags:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
dur = ret[".dur"][1]
|
dur = tags[".dur"][1]
|
||||||
except:
|
except:
|
||||||
dur = 0
|
dur = 0
|
||||||
|
|
||||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
src_opus = abspath.lower().endswith(".opus") or tags["ac"][1] == "opus"
|
||||||
want_caf = tpath.endswith(".caf")
|
want_caf = tpath.endswith(".caf")
|
||||||
tmp_opus = tpath
|
tmp_opus = tpath
|
||||||
if want_caf:
|
if want_caf:
|
||||||
tmp_opus = tpath + ".opus"
|
tmp_opus = tpath + ".opus"
|
||||||
try:
|
try:
|
||||||
bos.unlink(tmp_opus)
|
wunlink(self.log, tmp_opus, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
caf_src = abspath if src_opus else tmp_opus
|
caf_src = abspath if src_opus else tmp_opus
|
||||||
|
bq = ("%dk" % (self.args.q_opus,)).encode("ascii")
|
||||||
|
|
||||||
if not want_caf or not src_opus:
|
if not want_caf or not src_opus:
|
||||||
# fmt: off
|
# fmt: off
|
||||||
@@ -601,14 +750,14 @@ class ThumbSrv(object):
|
|||||||
b"-v", b"error",
|
b"-v", b"error",
|
||||||
b"-hide_banner",
|
b"-hide_banner",
|
||||||
b"-i", fsenc(abspath),
|
b"-i", fsenc(abspath),
|
||||||
b"-map_metadata", b"-1",
|
] + self.big_tags(rawtags) + [
|
||||||
b"-map", b"0:a:0",
|
b"-map", b"0:a:0",
|
||||||
b"-c:a", b"libopus",
|
b"-c:a", b"libopus",
|
||||||
b"-b:a", b"128k",
|
b"-b:a", bq,
|
||||||
fsenc(tmp_opus)
|
fsenc(tmp_opus)
|
||||||
]
|
]
|
||||||
# fmt: on
|
# fmt: on
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
# iOS fails to play some "insufficiently complex" files
|
# iOS fails to play some "insufficiently complex" files
|
||||||
# (average file shorter than 8 seconds), so of course we
|
# (average file shorter than 8 seconds), so of course we
|
||||||
@@ -627,12 +776,12 @@ class ThumbSrv(object):
|
|||||||
b"-map_metadata", b"-1",
|
b"-map_metadata", b"-1",
|
||||||
b"-ac", b"2",
|
b"-ac", b"2",
|
||||||
b"-c:a", b"libopus",
|
b"-c:a", b"libopus",
|
||||||
b"-b:a", b"128k",
|
b"-b:a", bq,
|
||||||
b"-f", b"caf",
|
b"-f", b"caf",
|
||||||
fsenc(tpath)
|
fsenc(tpath)
|
||||||
]
|
]
|
||||||
# fmt: on
|
# fmt: on
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
elif want_caf:
|
elif want_caf:
|
||||||
# simple remux should be safe
|
# simple remux should be safe
|
||||||
@@ -650,14 +799,24 @@ class ThumbSrv(object):
|
|||||||
fsenc(tpath)
|
fsenc(tpath)
|
||||||
]
|
]
|
||||||
# fmt: on
|
# fmt: on
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
if tmp_opus != tpath:
|
if tmp_opus != tpath:
|
||||||
try:
|
try:
|
||||||
bos.unlink(tmp_opus)
|
wunlink(self.log, tmp_opus, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def big_tags(self, raw_tags: dict[str, list[str]]) -> list[bytes]:
|
||||||
|
ret = []
|
||||||
|
for k, vs in raw_tags.items():
|
||||||
|
for v in vs:
|
||||||
|
if len(str(v)) >= 1024:
|
||||||
|
bv = k.encode("utf-8", "replace")
|
||||||
|
ret += [b"-metadata", bv + b"="]
|
||||||
|
break
|
||||||
|
return ret
|
||||||
|
|
||||||
def poke(self, tdir: str) -> None:
|
def poke(self, tdir: str) -> None:
|
||||||
if not self.poke_cd.poke(tdir):
|
if not self.poke_cd.poke(tdir):
|
||||||
return
|
return
|
||||||
@@ -681,7 +840,10 @@ class ThumbSrv(object):
|
|||||||
else:
|
else:
|
||||||
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
|
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
|
||||||
|
|
||||||
ndirs += self.clean(histpath)
|
try:
|
||||||
|
ndirs += self.clean(histpath)
|
||||||
|
except Exception as ex:
|
||||||
|
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
|
||||||
|
|
||||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||||
|
|
||||||
@@ -698,7 +860,7 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
def _clean(self, cat: str, thumbpath: str) -> int:
|
def _clean(self, cat: str, thumbpath: str) -> int:
|
||||||
# self.log("cln {}".format(thumbpath))
|
# self.log("cln {}".format(thumbpath))
|
||||||
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf"]
|
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf", "mp3"]
|
||||||
maxage = getattr(self.args, cat + "_maxage")
|
maxage = getattr(self.args, cat + "_maxage")
|
||||||
now = time.time()
|
now = time.time()
|
||||||
prev_b64 = None
|
prev_b64 = None
|
||||||
|
|||||||
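For reference, a minimal standalone sketch of how the new conv_mp3 above appears to choose its ffmpeg quality arguments, assuming q_mp3 values such as "128k" (fixed bitrate) or "q2" (libmp3lame VBR level); the helper name is hypothetical and not part of the changeset:

    # illustrative sketch only; mirrors the quality branch in conv_mp3 above
    def mp3_quality_args(quality: str) -> list[bytes]:
        quality = quality.lower()
        if quality.endswith("k"):
            # e.g. "128k" -> constant bitrate
            return [b"-b:a", quality.encode("ascii")]
        # e.g. "q2" -> VBR quality level 2
        return [b"-q:a", quality[1:].encode("ascii")]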
@@ -9,6 +9,7 @@ import time
from operator import itemgetter

from .__init__ import ANYWIN, TYPE_CHECKING, unicode
+ from .authsrv import LEELOO_DALLAS, VFS
from .bos import bos
from .up2k import up2k_wark_from_hashlist
from .util import (
@@ -20,6 +21,7 @@ from .util import (
min_ex,
quotep,
s3dec,
+ vjoin,
)

if HAVE_SQLITE3:
@@ -60,8 +62,19 @@ class U2idx(object):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("u2idx", msg, c)

+ def shutdown(self) -> None:
+ for cur in self.cur.values():
+ db = cur.connection
+ try:
+ db.interrupt()
+ except:
+ pass

+ cur.close()
+ db.close()

def fsearch(
- self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
+ self, uname: str, vols: list[VFS], body: dict[str, Any]
) -> list[dict[str, Any]]:
"""search by up2k hashlist"""
if not HAVE_SQLITE3:
@@ -75,18 +88,22 @@ class U2idx(object):
uv: list[Union[str, int]] = [wark[:16], wark]

try:
- return self.run_query(vols, uq, uv, True, False, 99999)[0]
+ return self.run_query(uname, vols, uq, uv, False, 99999)[0]
except:
raise Pebkac(500, min_ex())

- def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
+ def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
if not HAVE_SQLITE3:
return None

- cur = self.cur.get(ptop)
+ cur = self.cur.get(vn.realpath)
if cur:
return cur

+ if "e2d" not in vn.flags:
+ return None

+ ptop = vn.realpath
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
@@ -101,7 +118,7 @@ class U2idx(object):
uri = ""
try:
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
- db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
+ db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
cur = db.cursor()
cur.execute('pragma table_info("up")').fetchone()
self.log("ro: {}".format(db_path))
@@ -113,14 +130,14 @@ class U2idx(object):
if not cur:
# on windows, this steals the write-lock from up2k.deferred_init --
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
- cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
+ cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
self.log("opened {}".format(db_path))

self.cur[ptop] = cur
return cur

def search(
- self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
+ self, uname: str, vols: list[VFS], uq: str, lim: int
) -> tuple[list[dict[str, Any]], list[str], bool]:
"""search by query params"""
if not HAVE_SQLITE3:
@@ -129,7 +146,6 @@ class U2idx(object):
q = ""
v: Union[str, int] = ""
va: list[Union[str, int]] = []
- have_up = False  # query has up.* operands
have_mt = False
is_key = True
is_size = False
@@ -174,21 +190,21 @@ class U2idx(object):
if v == "size":
v = "up.sz"
is_size = True
- have_up = True

elif v == "date":
v = "up.mt"
is_date = True
- have_up = True
+ elif v == "up_at":
+ v = "up.at"
+ is_date = True

elif v == "path":
v = "trim(?||up.rd,'/')"
va.append("\nrd")
- have_up = True

elif v == "name":
v = "up.fn"
- have_up = True

elif v == "tags" or ptn_mt.match(v):
have_mt = True
@@ -264,19 +280,24 @@ class U2idx(object):
q += " lower({}) {} ? ) ".format(field, oper)

try:
- return self.run_query(vols, q, va, have_up, have_mt, lim)
+ return self.run_query(uname, vols, q, va, have_mt, lim)
except Exception as ex:
raise Pebkac(500, repr(ex))

def run_query(
self,
- vols: list[tuple[str, str, dict[str, Any]]],
+ uname: str,
+ vols: list[VFS],
uq: str,
uv: list[Union[str, int]],
- have_up: bool,
have_mt: bool,
lim: int,
) -> tuple[list[dict[str, Any]], list[str], bool]:
+ if self.args.srch_dbg:
+ t = "searching across all %s volumes in which the user has 'r' (full read access):\n  %s"
+ zs = "\n  ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
+ self.log(t % (len(vols), zs), 5)

done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
@@ -295,13 +316,35 @@ class U2idx(object):

ret = []
seen_rps: set[str] = set()
- lim = min(lim, int(self.args.srch_hits))
+ clamp = int(self.args.srch_hits)
+ if lim >= clamp:
+ lim = clamp
+ clamped = True
+ else:
+ clamped = False

taglist = {}
- for (vtop, ptop, flags) in vols:
+ for vol in vols:
- cur = self.get_cur(ptop)
+ if lim < 0:
+ break

+ vtop = vol.vpath
+ ptop = vol.realpath
+ flags = vol.flags

+ cur = self.get_cur(vol)
if not cur:
continue

+ excl = []
+ for vp2 in self.asrv.vfs.all_vols.keys():
+ if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
+ excl.append(vp2[len(vtop) :].lstrip("/"))

+ if self.args.srch_dbg:
+ t = "searching in volume /%s (%s), excludelist %s"
+ self.log(t % (vtop, ptop, excl), 5)

self.active_cur = cur

vuv = []
@@ -313,7 +356,7 @@ class U2idx(object):

sret = []
fk = flags.get("fk")
- dots = flags.get("dotsrch")
+ dots = flags.get("dotsrch") and uname in vol.axs.udot
fk_alg = 2 if "fka" in flags else 1
c = cur.execute(uq, tuple(vuv))
for hit in c:
@@ -322,6 +365,13 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)

+ if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
+ if self.args.srch_dbg:
+ zs = vjoin(vjoin(vtop, rd), fn)
+ t = "database inconsistency in volume '/%s'; ignoring: %s"
+ self.log(t % (vtop, zs), 1)
+ continue

rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
if not dots and "/." in ("/" + rp):
continue
@@ -350,6 +400,19 @@ class U2idx(object):
if lim < 0:
break

+ if self.args.srch_dbg:
+ t = "in volume '/%s': hit: %s"
+ self.log(t % (vtop, rp), 5)

+ zs = vjoin(vtop, rp)
+ chk_vn, _ = self.asrv.vfs.get(zs, LEELOO_DALLAS, True, False)
+ chk_vn = chk_vn.dbv or chk_vn
+ if chk_vn.vpath != vtop:
+ raise Exception(
+ "database inconsistency! in volume '/%s' (%s), found file [%s] which belongs to volume '/%s' (%s)"
+ % (vtop, ptop, zs, chk_vn.vpath, chk_vn.realpath)
+ )

seen_rps.add(rp)
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})

@@ -367,12 +430,16 @@ class U2idx(object):
ret.extend(sret)
# print("[{}] {}".format(ptop, sret))

+ if self.args.srch_dbg:
+ t = "in volume '/%s': got %d hits, %d total so far"
+ self.log(t % (vtop, len(sret), len(ret)), 5)

done_flag.append(True)
self.active_id = ""

ret.sort(key=itemgetter("rp"))

- return ret, list(taglist.keys()), lim < 0
+ return ret, list(taglist.keys()), lim < 0 and not clamped

def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout):
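For reference, a minimal sketch of the sub-volume exclusion that run_query above appears to perform before accepting a database hit; the helper name and its flat list-of-vpaths argument are hypothetical, not part of the changeset:

    # illustrative sketch only; mirrors the excl logic in run_query above
    def is_excluded(rd: str, vtop: str, all_vpaths: list[str]) -> bool:
        # vpaths of volumes mounted below vtop, expressed relative to vtop
        excl = [
            vp[len(vtop):].lstrip("/")
            for vp in all_vpaths
            if vp.startswith((vtop + "/").lstrip("/")) and vp != vtop
        ]
        # a hit whose path falls inside a nested volume is treated as a
        # stale/inconsistent row and skipped
        return rd in excl or any(rd.startswith(x + "/") for x in excl)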
1293  copyparty/up2k.py
File diff suppressed because it is too large
@@ -17,8 +17,10 @@ window.baguetteBox = (function () {
titleTag: false,
async: false,
preload: 2,
+ refocus: true,
afterShow: null,
afterHide: null,
+ duringHide: null,
onChange: null,
},
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
@@ -27,6 +29,8 @@ window.baguetteBox = (function () {
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
+ scrollCSS = ['', ''],
+ scrollTimer = 0,
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
anims = ['slideIn', 'fadeIn', 'none'],
@@ -89,6 +93,30 @@ window.baguetteBox = (function () {
touchendHandler();
};

+ var overlayWheelHandler = function (e) {
+ if (!options.noScrollbars || anymod(e))
+ return;

+ ev(e);

+ var x = e.deltaX,
+ y = e.deltaY,
+ d = Math.abs(x) > Math.abs(y) ? x : y;

+ if (e.deltaMode)
+ d *= 10;

+ if (Date.now() - scrollTimer < (Math.abs(d) > 20 ? 100 : 300))
+ return;

+ scrollTimer = Date.now();

+ if (d > 0)
+ showNextImage();
+ else
+ showPreviousImage();
+ };

var trapFocusInsideOverlay = function (e) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
e.stopPropagation();
@@ -144,7 +172,7 @@ window.baguetteBox = (function () {
selectorData.galleries.push(gallery);
});

- return selectorData.galleries;
+ return [selectorData.galleries, options];
}

function clearCachedData() {
@@ -255,19 +283,19 @@ window.baguetteBox = (function () {
if (anymod(e, true))
return;

- var k = e.code + '', v = vid(), pos = -1;
+ var k = (e.code || e.key) + '', v = vid(), pos = -1;

if (k == "BracketLeft")
setloop(1);
else if (k == "BracketRight")
setloop(2);
- else if (e.shiftKey && k != 'KeyR')
+ else if (e.shiftKey && k != "KeyR" && k != "R")
return;
- else if (k == "ArrowLeft" || k == "KeyJ")
+ else if (k == "ArrowLeft" || k == "KeyJ" || k == "Left" || k == "j")
showPreviousImage();
- else if (k == "ArrowRight" || k == "KeyL")
+ else if (k == "ArrowRight" || k == "KeyL" || k == "Right" || k == "l")
showNextImage();
- else if (k == "Escape")
+ else if (k == "Escape" || k == "Esc")
hideOverlay();
else if (k == "Home")
showFirstImage(e);
@@ -295,9 +323,9 @@ window.baguetteBox = (function () {
}
else if (k == "KeyF")
tglfull();
- else if (k == "KeyS")
+ else if (k == "KeyS" || k == "s")
tglsel();
- else if (k == "KeyR")
+ else if (k == "KeyR" || k == "r" || k == "R")
rotn(e.shiftKey ? -1 : 1);
else if (k == "KeyY")
dlpic();
@@ -392,8 +420,7 @@ window.baguetteBox = (function () {
}

function dlpic() {
- var url = findfile()[3].href;
+ var url = addq(findfile()[3].href, 'cache');
- url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
dl_file(url);
}

@@ -450,6 +477,7 @@ window.baguetteBox = (function () {
bind(document, 'keyup', keyUpHandler);
bind(document, 'fullscreenchange', onFSC);
bind(overlay, 'click', overlayClickHandler);
+ bind(overlay, 'wheel', overlayWheelHandler);
bind(btnPrev, 'click', showPreviousImage);
bind(btnNext, 'click', showNextImage);
bind(btnClose, 'click', hideOverlay);
@@ -472,6 +500,7 @@ window.baguetteBox = (function () {
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
unbind(overlay, 'click', overlayClickHandler);
+ unbind(overlay, 'wheel', overlayWheelHandler);
unbind(btnPrev, 'click', showPreviousImage);
unbind(btnNext, 'click', showNextImage);
unbind(btnClose, 'click', hideOverlay);
@@ -539,6 +568,12 @@ window.baguetteBox = (function () {

function showOverlay(chosenImageIndex) {
if (options.noScrollbars) {
+ var a = document.documentElement.style.overflowY,
+ b = document.body.style.overflowY;

+ if (a != 'hidden' || b != 'scroll')
+ scrollCSS = [a, b];

document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
@@ -582,24 +617,30 @@ window.baguetteBox = (function () {
isOverlayVisible = true;
}

- function hideOverlay(e) {
+ function hideOverlay(e, dtor) {
ev(e);
playvid(false);
removeFromCache('#files');
if (options.noScrollbars) {
- document.documentElement.style.overflowY = 'auto';
+ document.documentElement.style.overflowY = scrollCSS[0];
- document.body.style.overflowY = 'auto';
+ document.body.style.overflowY = scrollCSS[1];
}
- if (overlay.style.display === 'none')
+ try {
+ if (document.fullscreenElement)
+ document.exitFullscreen();
+ }
+ catch (ex) { }
+ isFullscreen = false;

+ if (dtor || overlay.style.display === 'none')
return;

+ if (options.duringHide)
+ options.duringHide();

sethash('');
unbindEvents();
- try {
- document.exitFullscreen();
- isFullscreen = false;
- }
- catch (ex) { }

// Fade out and hide the overlay
overlay.className = '';
@@ -613,9 +654,45 @@ window.baguetteBox = (function () {
if (options.afterHide)
options.afterHide();

- documentLastFocus && documentLastFocus.focus();
+ options.refocus && documentLastFocus && documentLastFocus.focus();
isOverlayVisible = false;
- }, 500);
+ unvid();
+ unfig();
+ }, 250);
+ }

+ function unvid(keep) {
+ var vids = QSA('#bbox-overlay video');
+ for (var a = vids.length - 1; a >= 0; a--) {
+ var v = vids[a];
+ if (v == keep)
+ continue;

+ v.src = '';
+ v.load();

+ var p = v.parentNode;
+ p.removeChild(v);
+ p.parentNode.removeChild(p);
+ }
+ }

+ function unfig(keep) {
+ var figs = QSA('#bbox-overlay figure'),
+ npre = options.preload || 0,
+ k = [];

+ if (keep === undefined)
+ keep = -9;

+ for (var a = keep - npre; a <= keep + npre; a++)
+ k.push('bbox-figure-' + a);

+ for (var a = figs.length - 1; a >= 0; a--) {
+ var f = figs[a];
+ if (!has(k, f.getAttribute('id')))
+ f.parentNode.removeChild(f);
+ }
}

function loadImage(index, callback) {
@@ -641,7 +718,7 @@ window.baguetteBox = (function () {
options.captions.call(currentGallery, imageElement) :
imageElement.getAttribute('data-caption') || imageElement.title;

- imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
+ imageSrc = addq(imageSrc, 'cache');

if (is_vid && index != currentIndex)
return; // no preload
@@ -670,8 +747,11 @@ window.baguetteBox = (function () {
});
image.setAttribute('src', imageSrc);
if (is_vid) {
+ image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
image.setAttribute('controls', 'controls');
image.onended = vidEnd;
+ image.onplay = function () { show_buttons(1); };
+ image.onpause = function () { show_buttons(); };
}
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption)
@@ -679,6 +759,9 @@ window.baguetteBox = (function () {

figure.appendChild(image);

+ if (is_vid && window.afilt)
+ afilt.apply(undefined, image);

if (options.async && callback)
callback();
}
@@ -708,6 +791,7 @@ window.baguetteBox = (function () {
}

function show(index, gallery) {
+ gallery = gallery || currentGallery;
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
prepareOverlay(gallery, options);
showOverlay(index);
@@ -720,12 +804,10 @@ window.baguetteBox = (function () {
if (index >= imagesElements.length)
return bounceAnimation('right');

- var v = vid();
+ try {
- if (v) {
+ vid().pause();
- v.src = '';
- v.load();
- v.parentNode.removeChild(v);
}
+ catch (ex) { }

currentIndex = index;
loadImage(currentIndex, function () {
@@ -734,6 +816,15 @@ window.baguetteBox = (function () {
});
updateOffset();

+ if (options.animation == 'none')
+ unvid(vid());
+ else
+ setTimeout(function () {
+ unvid(vid());
+ }, 100);

+ unfig(index);

if (options.onChange)
options.onChange(currentIndex, imagesElements.length);

@@ -906,6 +997,12 @@ window.baguetteBox = (function () {
}
}

+ function show_buttons(v) {
+ clmod(ebi('bbox-btns'), 'off', v);
+ clmod(btnPrev, 'off', v);
+ clmod(btnNext, 'off', v);
+ }

function bounceAnimation(direction) {
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
setTimeout(function () {
@@ -969,10 +1066,8 @@ window.baguetteBox = (function () {
if (fx > 0.7)
return showNextImage();

- clmod(ebi('bbox-btns'), 'off', 't');
+ show_buttons('t');
- clmod(btnPrev, 'off', 't');
- clmod(btnNext, 'off', 't');

if (Date.now() - ctime <= 500 && !IPHONE)
tglfull();

@@ -1013,6 +1108,7 @@ window.baguetteBox = (function () {
}

function destroyPlugin() {
+ hideOverlay(undefined, true);
unbindEvents();
clearCachedData();
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
@@ -28,6 +28,8 @@
--row-alt: #282828;

--scroll: #eb0;
+ --sel-fg: var(--bg-d1);
+ --sel-bg: var(--fg);

--a: #fc5;
--a-b: #c90;
@@ -267,6 +269,7 @@ html.bz {
--btn-bg: #202231;
--btn-h-bg: #2d2f45;
--btn-1-bg: #ba2959;
+ --btn-1-is: #f59;
--btn-1-fg: #fff;
--btn-1h-fg: #000;
--txt-sh: a;
@@ -330,6 +333,8 @@ html.c {
}
html.cz {
--bgg: var(--bg-u2);
+ --sel-bg: var(--bg-u5);
+ --sel-fg: var(--fg);
--srv-3: #fff;
--u2-tab-b1: var(--bg-d3);
}
@@ -343,6 +348,8 @@ html.cy {
--bg-d3: #f77;
--bg-d2: #ff0;

+ --sel-bg: #f77;

--a: #fff;
--a-hil: #fff;
--a-h-bg: #000;
@@ -494,6 +501,7 @@ html.dz {

text-shadow: none;
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
html.dy {
--fg: #000;
@@ -587,8 +595,8 @@ html.dy {
line-height: 1.2em;
}
::selection {
- color: var(--bg-d1);
+ color: var(--sel-fg);
- background: var(--fg);
+ background: var(--sel-bg);
text-shadow: none;
}
html,body,tr,th,td,#files,a {
@@ -603,6 +611,7 @@ html {
color: var(--fg);
background: var(--bgg);
font-family: sans-serif;
+ font-family: var(--font-main), sans-serif;
text-shadow: 1px 1px 0px var(--bg-max);
}
html, body {
@@ -611,6 +620,7 @@ html, body {
}
pre, code, tt, #doc, #doc>code {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
.ayjump {
position: fixed;
@@ -696,12 +706,12 @@ a:hover {
.s0:after,
.s1:after {
content: '⌄';
- margin-left: -.1em;
+ margin-left: -.15em;
}
.s0r:after,
.s1r:after {
content: '⌃';
- margin-left: -.1em;
+ margin-left: -.15em;
}
.s0:after,
.s0r:after {
@@ -712,7 +722,7 @@ a:hover {
color: var(--sort-2);
}
#files thead th:after {
- margin-right: -.7em;
+ margin-right: -.5em;
}
#files tbody tr:hover td,
#files tbody tr:hover td+td {
@@ -741,6 +751,15 @@ html #files.hhpick thead th {
word-wrap: break-word;
overflow: hidden;
}
+ #files tr.fade a {
+ color: #999;
+ color: rgba(255, 255, 255, 0.4);
+ font-style: italic;
+ }
+ html.y #files tr.fade a {
+ color: #999;
+ color: rgba(0, 0, 0, 0.4);
+ }
#files tr:nth-child(2n) td {
background: var(--row-alt);
}
@@ -759,6 +778,7 @@ html #files.hhpick thead th {
}
#files tbody td:nth-child(3) {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
text-align: right;
padding-right: 1em;
white-space: nowrap;
@@ -818,6 +838,11 @@ html.y #path a:hover {
.logue:empty {
display: none;
}
+ .logue.raw {
+ white-space: pre;
+ font-family: 'scp', 'consolas', monospace;
+ font-family: var(--font-mono), 'scp', 'consolas', monospace;
+ }
#doc>iframe,
.logue>iframe {
background: var(--bgg);
@@ -981,6 +1006,10 @@ html.y #path a:hover {
margin: 0 auto;
display: block;
}
+ #ggrid.nocrop>a img {
+ max-height: 20em;
+ max-height: calc(var(--grid-sz)*2);
+ }
#ggrid>a.dir:before {
content: '📂';
}
@@ -1147,9 +1176,6 @@ html.y #widget.open {
@keyframes spin {
100% {transform: rotate(360deg)}
}
- @media (prefers-reduced-motion) {
- @keyframes spin { }
- }
@keyframes fadein {
0% {opacity: 0}
100% {opacity: 1}
@@ -1243,6 +1269,13 @@ html.y #widget.open {
0% {opacity:0}
100% {opacity:1}
}
+ #ggrid>a.glow {
+ animation: gexit .6s ease-out;
+ }
+ @keyframes gexit {
+ 0% {box-shadow: 0 0 0 2em var(--a)}
+ 100% {box-shadow: 0 0 0em 0em var(--a)}
+ }
#wzip a {
font-size: .4em;
margin: -.3em .1em;
@@ -1405,6 +1438,7 @@ input[type="checkbox"]:checked+label {
}
html.dz input {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
.opwide div>span>input+label {
padding: .3em 0 .3em .3em;
@@ -1414,14 +1448,17 @@ html.dz input {
.opview input.i {
width: calc(100% - 16.2em);
}
+ input.drc_v,
input.eq_gain {
width: 3em;
text-align: center;
margin: 0 .6em;
}
+ #audio_drc table,
#audio_eq table {
border-collapse: collapse;
}
+ #audio_drc td,
#audio_eq td {
text-align: center;
}
@@ -1430,11 +1467,15 @@ input.eq_gain {
display: block;
padding: 0;
}
+ #au_drc,
#au_eq {
display: block;
margin-top: .5em;
padding: 1.3em .3em;
}
+ #au_drc {
+ padding: .4em .3em;
+ }
#ico1 {
cursor: pointer;
}
@@ -1475,6 +1516,8 @@ input.eq_gain {
width: calc(100% - 2em);
margin: .3em 0 0 1.4em;
}
+ @media (max-width: 130em) { #srch_form.tags #tq_raw { width: calc(100% - 34em) } }
+ @media (max-width: 95em) { #srch_form.tags #tq_raw { width: calc(100% - 2em) } }
#tq_raw td+td {
width: 100%;
}
@@ -1573,6 +1616,7 @@ html {
padding: .2em .4em;
font-size: 1.2em;
margin: .2em;
+ display: inline-block;
white-space: pre;
position: relative;
top: -.12em;
@@ -1644,7 +1688,9 @@ html.cz .tgl.btn.on {
color: var(--fg-max);
}
#tree ul a.hl {
+ color: #fff;
color: var(--btn-1-fg);
+ background: #000;
background: var(--btn-1-bg);
text-shadow: none;
}
@@ -1679,6 +1725,7 @@ html.y #tree.nowrap .ntree a+a:hover {
}
.ntree a:first-child {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
font-size: 1.2em;
line-height: 0;
}
@@ -1706,6 +1753,7 @@ html.y #tree.nowrap .ntree a+a:hover {
}
#files th span {
position: relative;
+ white-space: nowrap;
}
#files>thead>tr>th.min,
#files td.min {
@@ -1743,9 +1791,6 @@ html.y #tree.nowrap .ntree a+a:hover {
margin: .7em 0 .7em .5em;
padding-left: .5em;
}
- .opwide>div>div>a {
- line-height: 2em;
- }
.opwide>div>h3 {
color: var(--fg-weak);
margin: 0 .4em;
@@ -1760,6 +1805,7 @@ html.y #tree.nowrap .ntree a+a:hover {
padding: 0;
}
#thumbs,
+ #au_prescan,
#au_fullpre,
#au_os_seek,
#au_osd_cv,
@@ -1767,7 +1813,8 @@ html.y #tree.nowrap .ntree a+a:hover {
opacity: .3;
}
#griden.on+#thumbs,
- #au_preload.on+#au_fullpre,
+ #au_preload.on+#au_prescan,
+ #au_preload.on+#au_prescan+#au_fullpre,
#au_os_ctl.on+#au_os_seek,
#au_os_ctl.on+#au_os_seek+#au_osd_cv,
#u2turbo.on+#u2tdate {
@@ -1777,6 +1824,7 @@ html.y #tree.nowrap .ntree a+a:hover {
display: none;
}
.ghead {
+ background: #fff;
background: var(--bg-u2);
border-radius: .3em;
padding: .2em .5em;
@@ -1806,7 +1854,12 @@ html.y #tree.nowrap .ntree a+a:hover {
margin: 0;
padding: 0;
}
+ #unpost td:nth-child(3),
+ #unpost td:nth-child(4) {
+ text-align: right;
+ }
#rui {
+ background: #fff;
background: var(--bg);
position: fixed;
top: 0;
@@ -1832,6 +1885,7 @@ html.y #tree.nowrap .ntree a+a:hover {
}
#rn_vadv input {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
#rui td+td,
#rui td input[type="text"] {
@@ -1863,6 +1917,7 @@ html.y #tree.nowrap .ntree a+a:hover {
}
#doc {
overflow: visible;
+ background: #fff;
background: var(--bg);
margin: -1em 0 .5em 0;
padding: 1em 0 1em 0;
@@ -1879,6 +1934,10 @@ html.y #doc {
text-align: center;
padding: .5em;
}
+ #docul li.bn span {
+ font-weight: bold;
+ color: var(--fg-max);
+ }
#doc.prism {
padding-left: 3em;
}
@@ -1890,6 +1949,7 @@ html.y #doc {
#doc.mdo {
white-space: normal;
font-family: sans-serif;
+ font-family: var(--font-main), sans-serif;
}
#doc.prism * {
line-height: 1.5em;
@@ -1949,6 +2009,7 @@ a.btn,
}
#hkhelp td:first-child {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
html.noscroll,
html.noscroll .sbar {
@@ -2158,6 +2219,7 @@ html.y #bbox-overlay figcaption a {
}
#bbox-halp {
color: var(--fg-max);
+ background: #fff;
background: var(--bg);
position: absolute;
top: 0;
@@ -2457,6 +2519,7 @@ html.y #bbox-overlay figcaption a {
}
#op_up2k.srch td.prog {
font-family: sans-serif;
+ font-family: var(--font-main), sans-serif;
font-size: 1em;
width: auto;
}
@@ -2471,6 +2534,7 @@ html.y #bbox-overlay figcaption a {
white-space: nowrap;
display: inline-block;
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
#u2etas.o {
width: 20em;
@@ -2499,14 +2563,14 @@ html.y #bbox-overlay figcaption a {
min-width: 24em;
}
#u2cards.w {
- width: 44em;
+ width: 48em;
text-align: left;
}
#u2cards.ww {
display: inline-block;
}
#u2etaw.w {
- width: 52em;
+ width: 55em;
text-align: right;
margin: 2em auto -2.7em auto;
}
@@ -2540,6 +2604,7 @@ html.y #bbox-overlay figcaption a {
#u2cards span {
color: var(--fg-max);
font-family: 'scp', monospace;
+ font-family: var(--font-mono), 'scp', monospace;
}
#u2cards > a:nth-child(4) > span {
display: inline-block;
@@ -2551,10 +2616,10 @@ html.y #bbox-overlay figcaption a {
width: 30em;
}
#u2conf.w {
- width: 48em;
+ width: 51em;
}
#u2conf.ww {
- width: 78em;
+ width: 82em;
}
#u2conf.ww #u2c3w {
width: 29em;
@@ -2705,6 +2770,7 @@ html.b #u2conf a.b:hover {
}
.prog {
font-family: 'scp', monospace, monospace;
+ font-family: var(--font-mono), 'scp', monospace, monospace;
}
#u2tab span.inf,
#u2tab span.ok,
@@ -2995,6 +3061,14 @@ html.b #ggrid>a {
html.b .btn {
top: -.1em;
}
+ html.b .btn,
+ html.b #u2conf a.b,
+ html.b #u2conf input[type="checkbox"]:not(:checked)+label {
+ box-shadow: 0 .05em 0 var(--bg-d3) inset;
+ }
+ html.b .tgl.btn.on {
+ box-shadow: 0 .05em 0 var(--btn-1-is) inset;
+ }
html.b #op_up2k.srch sup {
color: #fc0;
}
@@ -3051,6 +3125,16 @@ html.d #treepar {


+ @media (max-width: 32em) {
+ #u2conf {
+ font-size: .9em;
+ }
+ }
+ @media (max-width: 28em) {
+ #u2conf {
+ font-size: .8em;
+ }
+ }
@media (min-width: 70em) {
#barpos,
#barbuf {
@@ -3103,7 +3187,7 @@ html.d #treepar {
margin-top: 1.7em;
}
}
- @supports (display: grid) {
+ @supports (display: grid) and (gap: 1em) {
#ggrid {
display: grid;
margin: 0em 0.25em;
@@ -3128,3 +3212,24 @@ html.d #treepar {
padding: 0.2em;
}
}


+ @media (prefers-reduced-motion) {
+ @keyframes spin { }
+ @keyframes gexit { }
+ @keyframes bounce { }
+ @keyframes bounceFromLeft { }
+ @keyframes bounceFromRight { }

+ #ggrid>a:before,
+ #widget.anim,
+ #u2tabw,
+ .dropdesc,
+ .dropdesc b,
+ .dropdesc>div>div {
+ transition: none;
+ }
+ }
@@ -6,10 +6,10 @@
 <title>{{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
+{{ html_head }}
 {%- if css %}
 <link rel="stylesheet" media="screen" href="{{ css }}_={{ ts }}">
 {%- endif %}
@@ -135,38 +135,21 @@

 <script>
 var SR = {{ r|tojson }},
+CGV = {{ cgv|tojson }},
 TS = "{{ ts }}",
-acct = "{{ acct }}",
-perms = {{ perms }},
-dgrid = {{ dgrid|tojson }},
-themes = {{ themes }},
 dtheme = "{{ dtheme }}",
 srvinf = "{{ srv_info }}",
 s_name = "{{ s_name }}",
 lang = "{{ lang }}",
 dfavico = "{{ favico }}",
-def_hcols = {{ def_hcols|tojson }},
-have_up2k_idx = {{ have_up2k_idx|tojson }},
 have_tags_idx = {{ have_tags_idx|tojson }},
-have_acode = {{ have_acode|tojson }},
-have_mv = {{ have_mv|tojson }},
-have_del = {{ have_del|tojson }},
-have_unpost = {{ have_unpost }},
-have_zip = {{ have_zip|tojson }},
-sb_md = "{{ sb_md }}",
 sb_lg = "{{ sb_lg }}",
-lifetime = {{ lifetime }},
-turbolvl = {{ turbolvl }},
-idxh = {{ idxh }},
-frand = {{ frand|tojson }},
-u2sort = "{{ u2sort }}",
-have_emp = {{ have_emp|tojson }},
 txt_ext = "{{ txt_ext }}",
 logues = {{ logues|tojson if sb_lg else "[]" }},
-readme = {{ readme|tojson }},
 ls0 = {{ ls0|tojson }};

-document.documentElement.className = localStorage.cpp_thm || dtheme;
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || dtheme;
 </script>
 <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
@@ -178,3 +161,4 @@
 </body>

 </html>
+
File diff suppressed because it is too large
@@ -6,12 +6,12 @@
 <title>{{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-{{ html_head }}
 <style>
 html{font-family:sans-serif}
 td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
 a{display:block}
 </style>
+{{ html_head }}
 </head>

 <body>
@@ -61,3 +61,4 @@

 </body>
 </html>
+

@@ -25,3 +25,4 @@
 </body>

 </html>
+

@@ -2,6 +2,7 @@ html, body {
 color: #333;
 background: #eee;
 font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
 line-height: 1.5em;
 }
 html.y #helpbox a {
@@ -67,6 +68,7 @@ a {
 position: relative;
 display: inline-block;
 font-family: 'scp', monospace, monospace;
+font-family: var(--font-mono), 'scp', monospace, monospace;
 font-weight: bold;
 font-size: 1.3em;
 line-height: .1em;
@@ -3,13 +3,13 @@
 <title>📝 {{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
 {%- if edit %}
 <link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
 {%- endif %}
+{{ html_head }}
 </head>
 <body>
 <div id="mn"></div>
@@ -139,16 +139,15 @@ var md_opt = {
 };

 (function () {
-var l = localStorage,
+var l = window.localStorage,
-drk = l.light != 1,
+drk = (l && l.light) != 1,
 btn = document.getElementById("lightswitch"),
 f = function (e) {
 if (e) { e.preventDefault(); drk = !drk; }
 document.documentElement.className = drk? "z":"y";
 btn.innerHTML = "go " + (drk ? "light":"dark");
-l.light = drk? 0:1;
+try { l.light = drk? 0:1; } catch (ex) { }
 };

 btn.onclick = f;
 f();
 })();
@@ -161,3 +160,4 @@ l.light = drk? 0:1;
 <script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
 {%- endif %}
 </body></html>
+
@@ -216,6 +216,11 @@ function convert_markdown(md_text, dest_dom) {
 md_html = DOMPurify.sanitize(md_html);
 }
 catch (ex) {
+if (IE) {
+dest_dom.innerHTML = 'IE cannot into markdown ;_;';
+return;
+}
+
 if (ext)
 md_plug_err(ex, ext[1]);

@@ -507,13 +512,6 @@ dom_navtgl.onclick = function () {
 redraw();
 };

-if (!HTTPS && location.hostname != '127.0.0.1') try {
-ebi('edit2').onclick = function (e) {
-toast.err(0, "the fancy editor is only available over https");
-return ev(e);
-}
-} catch (ex) { }
-
 if (sread('hidenav') == 1)
 dom_navtgl.onclick();
@@ -9,7 +9,7 @@
 width: calc(100% - 56em);
 }
 #mw {
-left: calc(100% - 55em);
+left: max(0em, calc(100% - 55em));
 overflow-y: auto;
 position: fixed;
 bottom: 0;
@@ -56,6 +56,7 @@
 padding: 0;
 margin: 0;
 font-family: 'scp', monospace, monospace;
+font-family: var(--font-mono), 'scp', monospace, monospace;
 white-space: pre-wrap;
 word-break: break-word;
 overflow-wrap: break-word;
@@ -163,7 +163,7 @@ redraw = (function () {
 dom_sbs.onclick = setsbs;
 dom_nsbs.onclick = modetoggle;

-onresize();
+(IE ? modetoggle : onresize)();
 return onresize;
 })();

@@ -368,14 +368,14 @@ function save(e) {

 function save_cb() {
 if (this.status !== 200)
-return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));

 var r;
 try {
 r = JSON.parse(this.responseText);
 }
 catch (ex) {
-return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
+return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
 }

 if (!r.ok) {
@@ -418,7 +418,7 @@ function run_savechk(lastmod, txt, btn, ntry) {

 function savechk_cb() {
 if (this.status !== 200)
-return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));

 var doc1 = this.txt.replace(/\r\n/g, "\n");
 var doc2 = this.responseText.replace(/\r\n/g, "\n");
@@ -607,10 +607,10 @@ function md_newline() {
 var s = linebounds(true),
 ln = s.md.substring(s.n1, s.n2),
 m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
-m2 = /^[ \t>+-]*(\* )?/.exec(ln),
+m2 = /^[ \t]*[>+*-]{0,2}[ \t]/.exec(ln),
 drop = dom_src.selectionEnd - dom_src.selectionStart;

-var pre = m2[0];
+var pre = m2 ? m2[0] : '';
 if (m1 !== null)
 pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];

@@ -931,7 +931,13 @@ var set_lno = (function () {
 // hotkeys / toolbar
 (function () {
 var keydown = function (ev) {
-ev = ev || window.event;
+if (!ev && window.event) {
+ev = window.event;
+if (dev_fbw == 1) {
+toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
+console.error('using window.event');
+}
+}
 var kc = ev.code || ev.keyCode || ev.which,
 editing = document.activeElement == dom_src;

@@ -1003,7 +1009,7 @@ var set_lno = (function () {
 md_home(ev.shiftKey);
 return false;
 }
-if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
+if (!ev.shiftKey && ((ev.code + '').endsWith("Enter") || kc == 13)) {
 return md_newline();
 }
 if (!ev.shiftKey && kc == 8) {
@@ -17,6 +17,7 @@ html, body {
 padding: 0;
 min-height: 100%;
 font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
 background: #f7f7f7;
 color: #333;
 }
@@ -3,12 +3,12 @@
 <title>📝 {{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
+{{ html_head }}
 </head>
 <body>
 <div id="mw">
@@ -37,12 +37,12 @@ var md_opt = {
 };

 var lightswitch = (function () {
-var l = localStorage,
+var l = window.localStorage,
-drk = l.light != 1,
+drk = (l && l.light) != 1,
 f = function (e) {
 if (e) drk = !drk;
 document.documentElement.className = drk? "z":"y";
-l.light = drk? 0:1;
+try { l.light = drk? 0:1; } catch (ex) { }
 };
 f();
 return f;
@@ -54,3 +54,4 @@ l.light = drk? 0:1;
 <script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
 </body></html>
+
@@ -134,14 +134,14 @@ function save(mde) {

 function save_cb() {
 if (this.status !== 200)
-return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));

 var r;
 try {
 r = JSON.parse(this.responseText);
 }
 catch (ex) {
-return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
+return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
 }

 if (!r.ok) {
@@ -180,7 +180,7 @@ function save_cb() {

 function save_chk() {
 if (this.status !== 200)
-return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
+return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));

 var doc1 = this.txt.replace(/\r\n/g, "\n");
 var doc2 = this.responseText.replace(/\r\n/g, "\n");
@@ -1,3 +1,8 @@
+:root {
+--font-main: sans-serif;
+--font-serif: serif;
+--font-mono: 'scp';
+}
 html,body,tr,th,td,#files,a {
 color: inherit;
 background: none;
@@ -10,6 +15,7 @@ html {
 color: #ccc;
 background: #333;
 font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
 text-shadow: 1px 1px 0px #000;
 touch-action: manipulation;
 }
@@ -23,6 +29,7 @@ html, body {
 }
 pre {
 font-family: monospace, monospace;
+font-family: var(--font-mono), monospace, monospace;
 }
 a {
 color: #fc5;
@@ -6,9 +6,9 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
+{{ html_head }}
 </head>

 <body>
@@ -48,4 +48,5 @@
 {%- endif %}
 </body>

 </html>
+
@@ -2,6 +2,7 @@ html {
 color: #333;
 background: #f7f7f7;
 font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
 touch-action: manipulation;
 }
 #wrap {
@@ -127,6 +128,7 @@ pre, code {
 color: #480;
 background: #fff;
 font-family: 'scp', monospace, monospace;
+font-family: var(--font-mono), 'scp', monospace, monospace;
 border: 1px solid rgba(128,128,128,0.3);
 border-radius: .2em;
 padding: .15em .2em;
@@ -188,11 +190,21 @@ input {
 padding: .5em .7em;
 margin: 0 .5em 0 0;
 }
+input::placeholder {
+font-size: 1.2em;
+font-style: italic;
+letter-spacing: .04em;
+opacity: 0.64;
+color: #930;
+}
 html.z input {
 color: #fff;
 background: #626;
 border-color: #c2c;
 }
+html.z input::placeholder {
+color: #fff;
+}
 html.z .num {
 border-color: #777;
 }
@@ -6,10 +6,10 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+{{ html_head }}
 </head>

 <body>
@@ -78,13 +78,15 @@

 <h1 id="cc">client config:</h1>
 <ul>
+{% if k304 or k304vis %}
 {% if k304 %}
 <li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
 {%- else %}
 <li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
 {% endif %}
 <blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
+{% endif %}

 <li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
 </ul>

@@ -92,7 +94,8 @@
 <div>
 <form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
 <input type="hidden" name="act" value="login" />
-<input type="password" name="cppwd" />
+<input type="password" name="cppwd" placeholder=" password" />
+<input type="hidden" name="uhash" id="uhash" value="x" />
 <input type="submit" value="Login" />
 {% if ahttps %}
 <a id="w" href="{{ ahttps }}">switch to https</a>
@@ -110,10 +113,12 @@ var SR = {{ r|tojson }},
 lang="{{ lang }}",
 dfavico="{{ favico }}";

-document.documentElement.className=localStorage.cpp_thm||"{{ this.args.theme }}";
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";

 </script>
 <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
 </body>
 </html>
+
@@ -6,7 +6,7 @@ var Ls = {
 "d1": "tilstand",
 "d2": "vis tilstanden til alle tråder",
 "e1": "last innst.",
-"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer",
+"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
 "f1": "du kan betrakte:",
 "g1": "du kan laste opp til:",
 "cc1": "klient-konfigurasjon",
@@ -30,12 +30,18 @@ var Ls = {
 },
 "eng": {
 "d2": "shows the state of all active threads",
-"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes",
+"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
 "u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
 "v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
 }
-},
+};
-d = Ls[sread("cpp_lang", ["eng", "nor"]) || lang] || Ls.eng || Ls.nor;
+var LANGS = ["eng", "nor"];
+
+if (window.langmod)
+langmod();
+
+var d = Ls[sread("cpp_lang", LANGS) || lang] || Ls.eng || Ls.nor;

 for (var k in (d || {})) {
 var f = k.slice(-1),
@@ -49,6 +55,15 @@ for (var k in (d || {})) {
 o[a].setAttribute("tt", d[k]);
 }

+try {
+if (is_idp) {
+var z = ['#l+div', '#l', '#c'];
+for (var a = 0; a < z.length; a++)
+QS(z[a]).style.display = 'none';
+}
+}
+catch (ex) { }
+
 tt.init();
 var o = QS('input[name="cppwd"]');
 if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
@@ -57,3 +72,5 @@ if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
 o = ebi('u');
 if (o && /[0-9]+$/.exec(o.innerHTML))
 o.innerHTML = shumantime(o.innerHTML);
+
+ebi('uhash').value = '' + location.hash;
@@ -6,10 +6,11 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
-{{ html_head }}
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
+{{ html_head }}
 </head>

 <body>
@@ -48,9 +49,13 @@
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
 </pre>
-{% if s %}
-<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
-{% endif %}
+<ul>
+{% if s %}
+<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
+{% endif %}
+<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
+<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
+</ul>

 <p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
 <pre>
@@ -59,24 +64,28 @@
 </div>

 <div class="os lin">
-<pre>
-yum install davfs2
-{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
-</pre>
-<p>make it automount on boot:</p>
-<pre>
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
-</pre>
-<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
+<p>rclone (v1.63 or later) is recommended:</p>
 <pre>
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
 </pre>
-{% if s %}
-<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
-{% endif %}
+<ul>
+{% if s %}
+<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
+{% endif %}
+<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
+<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
+</ul>
+<p>alternatively use davfs2 (requires root, is slower, forgets lastmodified-timestamp on upload):</p>
+<pre>
+yum install davfs2
+{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
+</pre>
+<p>make davfs2 automount on boot:</p>
+<pre>
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
+</pre>
 <p>or the emergency alternative (gnome/gui-only):</p>
 <!-- gnome-bug: ignores vp -->
 <pre>
@@ -123,8 +132,14 @@
 rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
 </pre>
-<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
 {% endif %}
+<ul>
+{% if args.ftps %}
+<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
+{% endif %}
+<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
+<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
+</ul>
 <p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
 <pre>
 explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
@@ -145,8 +160,14 @@
 rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
 </pre>
-<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
 {% endif %}
+<ul>
+{% if args.ftps %}
+<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
+{% endif %}
+<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
+<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
+</ul>
 <p>emergency alternative (gnome/gui-only):</p>
 <!-- gnome-bug: ignores vp -->
 <pre>
@@ -178,7 +199,7 @@
 partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
 </pre>
 {% if s %}
-<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
+<ul><li>if you are on LAN (or just dont have valid certificates), add <code>-td</code></li></ul>
 {% endif %}
 <p>
 you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
@@ -218,10 +239,12 @@ var SR = {{ r|tojson }},
 lang="{{ lang }}",
 dfavico="{{ favico }}";

-document.documentElement.className=localStorage.cpp_thm||"{{ args.theme }}";
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";

 </script>
 <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
 </body>
 </html>
+
@@ -1,3 +1,22 @@
+:root {
+--font-main: sans-serif;
+--font-serif: serif;
+--font-mono: 'scp';
+
+--fg: #ccc;
+--fg-max: #fff;
+--bg-u2: #2b2b2b;
+--bg-u5: #444;
+}
+html.y {
+--fg: #222;
+--fg-max: #000;
+--bg-u2: #f7f7f7;
+--bg-u5: #ccc;
+}
+html.bz {
+--bg-u2: #202231;
+}
 @font-face {
 font-family: 'scp';
 font-display: swap;
@@ -14,7 +33,9 @@ html {
 max-width: min(34em, 90%);
 max-width: min(34em, calc(100% - 7em));
 color: #ddd;
+color: var(--fg);
 background: #333;
+background: var(--bg-u2);
 border: 0 solid #777;
 box-shadow: 0 .2em .5em #111;
 border-radius: .4em;
@@ -88,6 +109,9 @@ html {
 #toast pre {
 margin: 0;
 }
+#toast.hide {
+display: none;
+}
 #toast.vis {
 right: 1.3em;
 transform: inherit;
@@ -127,6 +151,10 @@ html {
 #toast.err #toastc {
 background: #d06;
 }
+#toast code {
+padding: 0 .2em;
+background: rgba(0,0,0,0.2);
+}
 #tth {
 color: #fff;
 background: #111;
@@ -156,12 +184,14 @@ html {
 padding: 1.5em 2em;
 border-width: .5em 0;
 }
+.logue code,
 #modalc code,
 #tt code {
 color: #eee;
+color: var(--fg-max);
 background: #444;
+background: var(--bg-u5);
 padding: .1em .3em;
-border-top: 1px solid #777;
 border-radius: .3em;
 line-height: 1.7em;
 }
@@ -169,22 +199,15 @@ html {
 color: #f6a;
 }
 html.y #tt {
-color: #333;
-background: #fff;
 border-color: #888 #000 #777 #000;
 }
 html.bz #tt {
-background: #202231;
 border-color: #3b3f58;
 }
 html.y #tt,
 html.y #toast {
 box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
 }
-html.y #tt code {
-background: #060;
-color: #fff;
-}
 #modalc code {
 color: #060;
 background: transparent;
@@ -322,6 +345,9 @@ html.y .btn:focus {
 box-shadow: 0 .1em .2em #037 inset;
 outline: #037 solid .1em;
 }
+input[type="submit"] {
+cursor: pointer;
+}
 input[type="text"]:focus,
 input:not([type]):focus,
 textarea:focus {
@@ -357,6 +383,7 @@ html.y textarea:focus {
 .mdo code,
 .mdo tt {
 font-family: 'scp', monospace, monospace;
+font-family: var(--font-mono), 'scp', monospace, monospace;
 white-space: pre-wrap;
 word-break: break-all;
 }
@@ -426,6 +453,7 @@ html.y textarea:focus {
 }
 .mdo blockquote {
 font-family: serif;
+font-family: var(--font-serif), serif;
 background: #f7f7f7;
 border: .07em dashed #ccc;
 padding: 0 2em;
@@ -559,3 +587,11 @@ hr {
 border: .07em dashed #444;
 }
 }
+
+@media (prefers-reduced-motion) {
+#toast,
+#toast a#toastc,
+#tt {
+transition: none;
+}
+}
@@ -17,7 +17,7 @@ function goto_up2k() {
|
|||||||
var up2k = null,
|
var up2k = null,
|
||||||
up2k_hooks = [],
|
up2k_hooks = [],
|
||||||
hws = [],
|
hws = [],
|
||||||
sha_js = window.WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
sha_js = WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||||
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -431,7 +431,7 @@ function U2pvis(act, btns, uc, st) {
|
|||||||
if (sread('potato') === null) {
|
if (sread('potato') === null) {
|
||||||
btn.click();
|
btn.click();
|
||||||
toast.inf(30, L.u_gotpot);
|
toast.inf(30, L.u_gotpot);
|
||||||
localStorage.removeItem('potato');
|
sdrop('potato');
|
||||||
}
|
}
|
||||||
|
|
||||||
u2f.appendChild(ode);
|
u2f.appendChild(ode);
|
||||||
@@ -658,7 +658,9 @@ function Donut(uc, st) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function pos() {
|
function pos() {
|
||||||
return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
|
return uc.fsearch ?
|
||||||
|
Math.max(st.bytes.hashed, st.bytes.finished) :
|
||||||
|
st.bytes.inflight + st.bytes.finished;
|
||||||
}
|
}
|
||||||
|
|
||||||
r.on = function (ya) {
|
r.on = function (ya) {
|
||||||
@@ -717,7 +719,7 @@ function Donut(uc, st) {
|
|||||||
sfx();
|
sfx();
|
||||||
|
|
||||||
// firefox may forget that filedrops are user-gestures so it can skip this:
|
// firefox may forget that filedrops are user-gestures so it can skip this:
|
||||||
if (uc.upnag && window.Notification && Notification.permission == 'granted')
|
if (uc.upnag && Notification && Notification.permission == 'granted')
|
||||||
new Notification(uc.nagtxt);
|
new Notification(uc.nagtxt);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -779,8 +781,8 @@ function up2k_init(subtle) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
if (window.WebAssembly && !hws.length)
|
if (WebAssembly && !hws.length)
|
||||||
fetch(SR + '/.cpr/w.hash.js' + CB);
|
fetch(SR + '/.cpr/w.hash.js?_=' + TS);
|
||||||
}, 1000);
|
}, 1000);
|
||||||
|
|
||||||
function showmodal(msg) {
|
function showmodal(msg) {
|
||||||
@@ -852,7 +854,8 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
setmsg(suggest_up2k, 'msg');
|
setmsg(suggest_up2k, 'msg');
|
||||||
|
|
||||||
var parallel_uploads = icfg_get('nthread'),
|
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||||
|
stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz.split(',')[1]),
|
||||||
uc = {},
|
uc = {},
|
||||||
fdom_ctr = 0,
|
fdom_ctr = 0,
|
||||||
biggest_file = 0;
|
biggest_file = 0;
|
||||||
@@ -861,13 +864,15 @@ function up2k_init(subtle) {
|
|||||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||||
|
bcfg_bind(uc, 'umod', 'umod', false, null, false);
|
||||||
|
bcfg_bind(uc, 'u2ts', 'u2ts', !u2ts.endsWith('u'), set_u2ts, false);
|
||||||
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||||
|
|
||||||
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||||
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
|
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
|
||||||
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
|
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
|
||||||
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
||||||
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||||
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
||||||
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
||||||
|
|
||||||
@@ -893,6 +898,7 @@ function up2k_init(subtle) {
|
|||||||
"bytes": {
|
"bytes": {
|
||||||
"total": 0,
|
"total": 0,
|
||||||
"hashed": 0,
|
"hashed": 0,
|
||||||
|
"inflight": 0,
|
||||||
"uploaded": 0,
|
"uploaded": 0,
|
||||||
"finished": 0
|
"finished": 0
|
||||||
},
|
},
|
||||||
@@ -1042,7 +1048,7 @@ function up2k_init(subtle) {
|
|||||||
clmod(ebi(v), 'hl', 1);
|
clmod(ebi(v), 'hl', 1);
|
||||||
}
|
}
|
||||||
function offdrag(e) {
|
function offdrag(e) {
|
||||||
ev(e);
|
noope(e);
|
||||||
|
|
||||||
var v = this.getAttribute('v');
|
var v = this.getAttribute('v');
|
||||||
if (v)
|
if (v)
|
||||||
@@ -1331,7 +1337,8 @@ function up2k_init(subtle) {
|
|||||||
return modal.confirm(msg.join('') + '</ul>', function () {
|
return modal.confirm(msg.join('') + '</ul>', function () {
|
||||||
start_actx();
|
start_actx();
|
||||||
up_them(good_files);
|
up_them(good_files);
|
||||||
toast.inf(15, L.u_unpt, L.u_unpt);
|
if (have_up2k_idx)
|
||||||
|
toast.inf(15, L.u_unpt, L.u_unpt);
|
||||||
}, null);
|
}, null);
|
||||||
|
|
||||||
up_them(good_files);
|
up_them(good_files);
|
||||||
@@ -1339,12 +1346,13 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
function up_them(good_files) {
|
function up_them(good_files) {
|
||||||
start_actx();
|
start_actx();
|
||||||
|
draw_turbo();
|
||||||
var evpath = get_evpath(),
|
var evpath = get_evpath(),
|
||||||
draw_each = good_files.length < 50;
|
draw_each = good_files.length < 50;
|
||||||
|
|
||||||
if (window.WebAssembly && !hws.length) {
|
if (WebAssembly && !hws.length) {
|
||||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||||
hws.push(new Worker(SR + '/.cpr/w.hash.js' + CB));
|
hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));
|
||||||
|
|
||||||
console.log(hws.length + " hashers");
|
console.log(hws.length + " hashers");
|
||||||
}
|
}
|
||||||
@@ -1361,7 +1369,7 @@ function up2k_init(subtle) {
|
|||||||
name = good_files[a][1],
|
name = good_files[a][1],
|
||||||
fdir = evpath,
|
fdir = evpath,
|
||||||
now = Date.now(),
|
now = Date.now(),
|
||||||
lmod = fobj.lastModified || now,
|
lmod = uc.u2ts ? (fobj.lastModified || now) : 0,
|
||||||
ofs = name.lastIndexOf('/') + 1;
|
ofs = name.lastIndexOf('/') + 1;
|
||||||
|
|
||||||
if (ofs) {
|
if (ofs) {
|
||||||
@@ -1389,6 +1397,8 @@ function up2k_init(subtle) {
|
|||||||
entry.rand = true;
|
entry.rand = true;
|
||||||
entry.name = 'a\n' + entry.name;
|
entry.name = 'a\n' + entry.name;
|
||||||
}
|
}
|
||||||
|
else if (uc.umod)
|
||||||
|
entry.umod = true;
|
||||||
|
|
||||||
if (biggest_file < entry.size)
|
if (biggest_file < entry.size)
|
||||||
biggest_file = entry.size;
|
biggest_file = entry.size;
|
||||||
@@ -1405,7 +1415,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
pvis.addfile([
|
pvis.addfile([
|
||||||
uc.fsearch ? esc(entry.name) : linksplit(
|
uc.fsearch ? esc(entry.name) : linksplit(
|
||||||
entry.purl + uricom_enc(entry.name)).join(' '),
|
entry.purl + uricom_enc(entry.name)).join(' / '),
|
||||||
'📐 ' + L.u_hashing,
|
'📐 ' + L.u_hashing,
|
||||||
''
|
''
|
||||||
], entry.size, draw_each);
|
], entry.size, draw_each);
|
||||||
@@ -1537,17 +1547,21 @@ function up2k_init(subtle) {
|
|||||||
if (uc.fsearch)
|
if (uc.fsearch)
|
||||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var b_up = st.bytes.inflight + st.bytes.uploaded,
|
||||||
|
b_fin = st.bytes.inflight + st.bytes.finished;
|
||||||
|
|
||||||
if (nsend) {
|
if (nsend) {
|
||||||
st.time.uploading += td;
|
st.time.uploading += td;
|
||||||
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
t.push(['u2etau', b_up, b_fin, st.time.uploading]);
|
||||||
}
|
}
|
||||||
if ((nhash || nsend) && !uc.fsearch) {
|
if ((nhash || nsend) && !uc.fsearch) {
|
||||||
if (!st.bytes.finished) {
|
if (!b_fin) {
|
||||||
ebi('u2etat').innerHTML = L.u_etaprep;
|
ebi('u2etat').innerHTML = L.u_etaprep;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
st.time.busy += td;
|
st.time.busy += td;
|
||||||
t.push(['u2etat', st.bytes.finished, st.bytes.finished, st.time.busy]);
|
t.push(['u2etat', b_fin, b_fin, st.time.busy]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (var a = 0; a < t.length; a++) {
|
for (var a = 0; a < t.length; a++) {
|
||||||
@@ -1711,8 +1725,6 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2etas').style.textAlign = 'left';
|
ebi('u2etas').style.textAlign = 'left';
|
||||||
}
|
}
|
||||||
etafun();
|
etafun();
|
||||||
if (pvis.act == 'bz')
|
|
||||||
pvis.changecard('bz');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (flag) {
|
if (flag) {
|
||||||
@@ -1727,6 +1739,11 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (st.bytes.inflight && (st.bytes.inflight < 0 || !st.busy.upload.length)) {
|
||||||
|
console.log('insane inflight ' + st.bytes.inflight);
|
||||||
|
st.bytes.inflight = 0;
|
||||||
|
}
|
||||||
|
|
||||||
var mou_ikkai = false;
|
var mou_ikkai = false;
|
||||||
|
|
||||||
if (st.busy.handshake.length &&
|
if (st.busy.handshake.length &&
|
||||||
@@ -1848,6 +1865,9 @@ function up2k_init(subtle) {
|
|||||||
timer.rm(donut.do);
|
timer.rm(donut.do);
|
||||||
ebi('u2tabw').style.minHeight = '0px';
|
ebi('u2tabw').style.minHeight = '0px';
|
||||||
utw_minh = 0;
|
utw_minh = 0;
|
||||||
|
|
||||||
|
if (pvis.act == 'bz')
|
||||||
|
pvis.changecard('bz');
|
||||||
}
|
}
|
||||||
|
|
||||||
function chill(t) {
|
function chill(t) {
|
||||||
@@ -2245,6 +2265,7 @@ function up2k_init(subtle) {
|
|||||||
console.log('handshake onerror, retrying', t.name, t);
|
console.log('handshake onerror, retrying', t.name, t);
|
||||||
apop(st.busy.handshake, t);
|
apop(st.busy.handshake, t);
|
||||||
st.todo.handshake.unshift(t);
|
st.todo.handshake.unshift(t);
|
||||||
|
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||||
t.keepalive = keepalive;
|
t.keepalive = keepalive;
|
||||||
};
|
};
|
||||||
var orz = function (e) {
|
var orz = function (e) {
|
||||||
@@ -2252,16 +2273,26 @@ function up2k_init(subtle) {
|
|||||||
return console.log('zombie handshake onload', t.name, t);
|
return console.log('zombie handshake onload', t.name, t);
|
||||||
|
|
||||||
if (xhr.status == 200) {
|
if (xhr.status == 200) {
|
||||||
|
try {
|
||||||
|
var response = JSON.parse(xhr.responseText);
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
apop(st.busy.handshake, t);
|
||||||
|
st.todo.handshake.unshift(t);
|
||||||
|
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||||
|
return toast.err(0, 'Handshake error; will retry...\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
|
||||||
|
}
|
||||||
|
|
||||||
t.t_handshake = Date.now();
|
t.t_handshake = Date.now();
|
||||||
if (keepalive) {
|
if (keepalive) {
|
||||||
apop(st.busy.handshake, t);
|
apop(st.busy.handshake, t);
|
||||||
|
tasker();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (toast.tag === t)
|
if (toast.tag === t)
|
||||||
toast.ok(5, L.u_fixed);
|
toast.ok(5, L.u_fixed);
|
||||||
|
|
||||||
var response = JSON.parse(xhr.responseText);
|
|
||||||
if (!response.name) {
|
if (!response.name) {
|
||||||
var msg = '',
|
var msg = '',
|
||||||
smsg = '';
|
smsg = '';
|
||||||
@@ -2282,7 +2313,7 @@ function up2k_init(subtle) {
|
|||||||
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
||||||
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
||||||
|
|
||||||
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||||
}
|
}
|
||||||
msg = msg.join('<br />\n');
|
msg = msg.join('<br />\n');
|
||||||
}
|
}
|
||||||
@@ -2316,7 +2347,7 @@ function up2k_init(subtle) {
|
|||||||
url += '?k=' + fk;
|
url += '?k=' + fk;
|
||||||
}
|
}
|
||||||
|
|
||||||
pvis.seth(t.n, 0, linksplit(url).join(' '));
|
pvis.seth(t.n, 0, linksplit(url).join(' / '));
|
||||||
}
|
}
|
||||||
|
|
||||||
var chunksize = get_chunksize(t.size),
|
var chunksize = get_chunksize(t.size),
|
||||||
@@ -2351,11 +2382,22 @@ function up2k_init(subtle) {
|
|||||||
var arr = st.todo.upload,
|
var arr = st.todo.upload,
|
||||||
sort = arr.length && arr[arr.length - 1].nfile > t.n;
|
sort = arr.length && arr[arr.length - 1].nfile > t.n;
|
||||||
|
|
||||||
for (var a = 0; a < t.postlist.length; a++)
|
for (var a = 0; a < t.postlist.length; a++) {
|
||||||
|
var nparts = [], tbytes = 0, stitch = stitch_tgt;
|
||||||
|
if (t.nojoin && t.nojoin - t.postlist.length < 6)
|
||||||
|
stitch = 1;
|
||||||
|
|
||||||
|
--a;
|
||||||
|
for (var b = 0; b < stitch; b++) {
|
||||||
|
nparts.push(t.postlist[++a]);
|
||||||
|
if (tbytes + chunksize > 64 * 1024 * 1024 || t.postlist[a+1] - t.postlist[a] !== 1)
|
||||||
|
break;
|
||||||
|
}
|
||||||
arr.push({
|
arr.push({
|
||||||
'nfile': t.n,
|
'nfile': t.n,
|
||||||
'npart': t.postlist[a]
|
'nparts': nparts
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
|
||||||
msg = null;
|
msg = null;
|
||||||
done = false;
|
done = false;
|
||||||
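Side note, not part of the diff: a minimal standalone sketch of the grouping rule introduced above, using hypothetical chunk indices and a simplified byte counter. Consecutive entries of t.postlist are batched into one upload task until the stitch target, a gap in the index sequence, or the 64 MiB ceiling ends the batch; the nojoin backoff drops stitch to 1, i.e. one chunk per POST.

// illustration only -- mirrors the loop above, not the actual up2k.js code
function group_chunks(postlist, stitch, chunksize) {
    var out = [];
    for (var a = 0; a < postlist.length; a++) {
        var nparts = [], tbytes = 0;
        --a;
        for (var b = 0; b < stitch; b++) {
            nparts.push(postlist[++a]);
            tbytes += chunksize; // assumption; the size accounting is not visible in this hunk
            if (tbytes + chunksize > 64 * 1024 * 1024 || postlist[a + 1] - postlist[a] !== 1)
                break;
        }
        out.push(nparts); // queued as { nfile: t.n, nparts: nparts } in the code above
    }
    return out;
}
// group_chunks([0, 1, 2, 3, 7, 8], 4, 8 * 1024 * 1024)  =>  [[0, 1, 2, 3], [7, 8]]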
@@ -2364,7 +2406,7 @@ function up2k_init(subtle) {
|
|||||||
arr.sort(function (a, b) {
|
arr.sort(function (a, b) {
|
||||||
return a.nfile < b.nfile ? -1 :
|
return a.nfile < b.nfile ? -1 :
|
||||||
/* */ a.nfile > b.nfile ? 1 :
|
/* */ a.nfile > b.nfile ? 1 :
|
||||||
a.npart < b.npart ? -1 : 1;
|
/* */ a.nparts[0] < b.nparts[0] ? -1 : 1;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2400,15 +2442,12 @@ function up2k_init(subtle) {
|
|||||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||||
|
|
||||||
var err = "",
|
var err = "",
|
||||||
rsp = (xhr.responseText + ''),
|
rsp = unpre(xhr.responseText),
|
||||||
ofs = rsp.lastIndexOf('\nURL: ');
|
ofs = rsp.lastIndexOf('\nURL: ');
|
||||||
|
|
||||||
if (ofs !== -1)
|
if (ofs !== -1)
|
||||||
rsp = rsp.slice(0, ofs);
|
rsp = rsp.slice(0, ofs);
|
||||||
|
|
||||||
if (rsp.indexOf('<pre>') === 0)
|
|
||||||
rsp = rsp.slice(5);
|
|
||||||
|
|
||||||
if (rsp.indexOf('rate-limit ') !== -1) {
|
if (rsp.indexOf('rate-limit ') !== -1) {
|
||||||
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
||||||
console.log("rate-limit: " + penalty);
|
console.log("rate-limit: " + penalty);
|
||||||
@@ -2427,7 +2466,7 @@ function up2k_init(subtle) {
|
|||||||
err = rsp;
|
err = rsp;
|
||||||
ofs = err.indexOf('\n/');
|
ofs = err.indexOf('\n/');
|
||||||
if (ofs !== -1) {
|
if (ofs !== -1) {
|
||||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' / ');
|
||||||
}
|
}
|
||||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||||
t.rechecks = 0;
|
t.rechecks = 0;
|
||||||
@@ -2468,6 +2507,8 @@ function up2k_init(subtle) {
|
|||||||
req.srch = 1;
|
req.srch = 1;
|
||||||
else if (t.rand)
|
else if (t.rand)
|
||||||
req.rand = true;
|
req.rand = true;
|
||||||
|
else if (t.umod)
|
||||||
|
req.umod = true;
|
||||||
|
|
||||||
xhr.open('POST', t.purl, true);
|
xhr.open('POST', t.purl, true);
|
||||||
xhr.responseType = 'text';
|
xhr.responseType = 'text';
|
||||||
@@ -2512,7 +2553,10 @@ function up2k_init(subtle) {
|
|||||||
function exec_upload() {
|
function exec_upload() {
|
||||||
var upt = st.todo.upload.shift(),
|
var upt = st.todo.upload.shift(),
|
||||||
t = st.files[upt.nfile],
|
t = st.files[upt.nfile],
|
||||||
npart = upt.npart,
|
nparts = upt.nparts,
|
||||||
|
pcar = nparts[0],
|
||||||
|
pcdr = nparts[nparts.length - 1],
|
||||||
|
snpart = pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr),
|
||||||
tries = 0;
|
tries = 0;
|
||||||
|
|
||||||
if (t.done)
|
if (t.done)
|
||||||
@@ -2527,24 +2571,30 @@ function up2k_init(subtle) {
|
|||||||
pvis.seth(t.n, 1, "🚀 send");
|
pvis.seth(t.n, 1, "🚀 send");
|
||||||
|
|
||||||
var chunksize = get_chunksize(t.size),
|
var chunksize = get_chunksize(t.size),
|
||||||
car = npart * chunksize,
|
car = pcar * chunksize,
|
||||||
cdr = car + chunksize;
|
cdr = (pcdr + 1) * chunksize;
|
||||||
|
|
||||||
if (cdr >= t.size)
|
if (cdr >= t.size)
|
||||||
cdr = t.size;
|
cdr = t.size;
|
||||||
|
|
||||||
var orz = function (xhr) {
|
var orz = function (xhr) {
|
||||||
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
|
st.bytes.inflight -= xhr.bsent;
|
||||||
|
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||||
if (txt.indexOf('upload blocked by x') + 1) {
|
if (txt.indexOf('upload blocked by x') + 1) {
|
||||||
apop(st.busy.upload, upt);
|
apop(st.busy.upload, upt);
|
||||||
apop(t.postlist, npart);
|
for (var a = pcar; a <= pcdr; a++)
|
||||||
|
apop(t.postlist, a);
|
||||||
pvis.seth(t.n, 1, "ERROR");
|
pvis.seth(t.n, 1, "ERROR");
|
||||||
pvis.seth(t.n, 2, txt.split(/\n/)[0]);
|
pvis.seth(t.n, 2, txt.split(/\n/)[0]);
|
||||||
pvis.move(t.n, 'ng');
|
pvis.move(t.n, 'ng');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (xhr.status == 200) {
|
if (xhr.status == 200) {
|
||||||
pvis.prog(t, npart, cdr - car);
|
var bdone = cdr - car;
|
||||||
|
for (var a = pcar; a <= pcdr; a++) {
|
||||||
|
pvis.prog(t, a, Math.min(bdone, chunksize));
|
||||||
|
bdone -= chunksize;
|
||||||
|
}
|
||||||
st.bytes.finished += cdr - car;
|
st.bytes.finished += cdr - car;
|
||||||
st.bytes.uploaded += cdr - car;
|
st.bytes.uploaded += cdr - car;
|
||||||
t.bytes_uploaded += cdr - car;
|
t.bytes_uploaded += cdr - car;
|
||||||
@@ -2553,18 +2603,21 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
else if (txt.indexOf('already got that') + 1 ||
|
else if (txt.indexOf('already got that') + 1 ||
|
||||||
txt.indexOf('already being written') + 1) {
|
txt.indexOf('already being written') + 1) {
|
||||||
console.log("ignoring dupe-segment error", t.name, t);
|
t.nojoin = t.postlist.length;
|
||||||
|
console.log("ignoring dupe-segment with backoff", t.nojoin, t.name, t);
|
||||||
|
if (!toast.visible && st.todo.upload.length < 4)
|
||||||
|
toast.msg(10, L.u_cbusy);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
||||||
|
|
||||||
chill(t);
|
chill(t);
|
||||||
}
|
}
|
||||||
orz2(xhr);
|
orz2(xhr);
|
||||||
}
|
}
|
||||||
var orz2 = function (xhr) {
|
var orz2 = function (xhr) {
|
||||||
apop(st.busy.upload, upt);
|
apop(st.busy.upload, upt);
|
||||||
apop(t.postlist, npart);
|
for (var a = pcar; a <= pcdr; a++)
|
||||||
|
apop(t.postlist, a);
|
||||||
if (!t.postlist.length) {
|
if (!t.postlist.length) {
|
||||||
t.t_uploaded = Date.now();
|
t.t_uploaded = Date.now();
|
||||||
pvis.seth(t.n, 1, 'verifying');
|
pvis.seth(t.n, 1, 'verifying');
|
||||||
@@ -2578,7 +2631,10 @@ function up2k_init(subtle) {
|
|||||||
btot = Math.floor(st.bytes.total / 1024 / 1024);
|
btot = Math.floor(st.bytes.total / 1024 / 1024);
|
||||||
|
|
||||||
xhr.upload.onprogress = function (xev) {
|
xhr.upload.onprogress = function (xev) {
|
||||||
pvis.prog(t, npart, xev.loaded);
|
var nb = xev.loaded;
|
||||||
|
st.bytes.inflight += nb - xhr.bsent;
|
||||||
|
xhr.bsent = nb;
|
||||||
|
pvis.prog(t, pcar, nb);
|
||||||
};
|
};
|
||||||
xhr.onload = function (xev) {
|
xhr.onload = function (xev) {
|
||||||
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||||
@@ -2587,14 +2643,20 @@ function up2k_init(subtle) {
|
|||||||
if (crashed)
|
if (crashed)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
st.bytes.inflight -= (xhr.bsent || 0);
|
||||||
|
|
||||||
if (!toast.visible)
|
if (!toast.visible)
|
||||||
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
|
toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);
|
||||||
|
|
||||||
console.log('chunkpit onerror,', ++tries, t.name, t);
|
console.log('chunkpit onerror,', ++tries, t.name, t);
|
||||||
orz2(xhr);
|
orz2(xhr);
|
||||||
};
|
};
|
||||||
|
var chashes = [];
|
||||||
|
for (var a = pcar; a <= pcdr; a++)
|
||||||
|
chashes.push(t.hash[a]);
|
||||||
|
|
||||||
xhr.open('POST', t.purl, true);
|
xhr.open('POST', t.purl, true);
|
||||||
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
|
xhr.setRequestHeader("X-Up2k-Hash", chashes.join(","));
|
||||||
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
|
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
|
||||||
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
|
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
|
||||||
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
|
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
|
||||||
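Side note, not part of the diff: with the comma-joined header above, one POST now carries the whole pcar..pcdr range; roughly, and with placeholder indices:

// assuming pcar = 4 and pcdr = 6:
//   POST t.purl
//   X-Up2k-Hash: t.hash[4] + ',' + t.hash[5] + ',' + t.hash[6]
//   X-Up2k-Wark: t.wark
//   body: t.fobj.slice(car, cdr)   // car = 4 * chunksize, cdr = 7 * chunksize, capped at t.size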
@@ -2603,6 +2665,7 @@ function up2k_init(subtle) {
|
|||||||
if (xhr.overrideMimeType)
|
if (xhr.overrideMimeType)
|
||||||
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
|
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
|
||||||
|
|
||||||
|
xhr.bsent = 0;
|
||||||
xhr.responseType = 'text';
|
xhr.responseType = 'text';
|
||||||
xhr.send(t.fobj.slice(car, cdr));
|
xhr.send(t.fobj.slice(car, cdr));
|
||||||
}
|
}
|
||||||
@@ -2620,7 +2683,7 @@ function up2k_init(subtle) {
|
|||||||
wpx = window.innerWidth,
|
wpx = window.innerWidth,
|
||||||
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
||||||
wem = wpx * 1.0 / fpx,
|
wem = wpx * 1.0 / fpx,
|
||||||
wide = wem > 54 ? 'w' : '',
|
wide = wem > 57 ? 'w' : '',
|
||||||
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
||||||
btn = ebi('u2btn');
|
btn = ebi('u2btn');
|
||||||
|
|
||||||
@@ -2629,7 +2692,7 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
||||||
}
|
}
|
||||||
|
|
||||||
wide = wem > 82 ? 'ww' : wide;
|
wide = wem > 86 ? 'ww' : wide;
|
||||||
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
||||||
var its = [ebi('u2etaw'), ebi('u2cards')];
|
var its = [ebi('u2etaw'), ebi('u2cards')];
|
||||||
if (its[0].parentNode !== parent) {
|
if (its[0].parentNode !== parent) {
|
||||||
@@ -2686,7 +2749,11 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
parallel_uploads = v;
|
parallel_uploads = v;
|
||||||
swrite('nthread', v);
|
if (v == u2j)
|
||||||
|
sdrop('nthread');
|
||||||
|
else
|
||||||
|
swrite('nthread', v);
|
||||||
|
|
||||||
clmod(obj, 'err');
|
clmod(obj, 'err');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -2699,8 +2766,30 @@ function up2k_init(subtle) {
|
|||||||
if (parallel_uploads > 16)
|
if (parallel_uploads > 16)
|
||||||
parallel_uploads = 16;
|
parallel_uploads = 16;
|
||||||
|
|
||||||
|
if (parallel_uploads > 7)
|
||||||
|
toast.warn(10, L.u_maxconn);
|
||||||
|
|
||||||
obj.value = parallel_uploads;
|
obj.value = parallel_uploads;
|
||||||
bumpthread({ "target": 1 })
|
bumpthread({ "target": 1 });
|
||||||
|
}
|
||||||
|
|
||||||
|
var read_u2sz = function () {
|
||||||
|
var el = ebi('u2szg'), n = parseInt(el.value), dv = u2sz.split(',');
|
||||||
|
stitch_tgt = n = (
|
||||||
|
isNaN(n) ? dv[1] :
|
||||||
|
n < dv[0] ? dv[0] :
|
||||||
|
n > dv[2] ? dv[2] : n
|
||||||
|
);
|
||||||
|
if (n == dv[1]) sdrop('u2sz'); else swrite('u2sz', n);
|
||||||
|
if (el.value != n) el.value = n;
|
||||||
|
};
|
||||||
|
ebi('u2szg').addEventListener('blur', read_u2sz);
|
||||||
|
ebi('u2szg').onkeydown = function (e) {
|
||||||
|
if (anymod(e)) return;
|
||||||
|
var n = e.code == 'ArrowUp' ? 1 : e.code == 'ArrowDown' ? -1 : 0;
|
||||||
|
if (!n) return;
|
||||||
|
this.value = parseInt(this.value) + n;
|
||||||
|
read_u2sz();
|
||||||
}
|
}
|
||||||
|
|
||||||
function tgl_fsearch() {
|
function tgl_fsearch() {
|
||||||
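Side note, not part of the diff: read_u2sz above treats u2sz as a "min,default,max" triple, clamps the input field to that range, and only persists the value when it differs from the default. A short sketch, assuming a hypothetical u2sz of '1,64,96':

// illustration of the clamping rule in read_u2sz (the u2sz value is an assumption, not from the diff)
var u2sz = '1,64,96', dv = u2sz.split(',');
var clamp = function (n) {
    return isNaN(n) ? dv[1] : n < dv[0] ? dv[0] : n > dv[2] ? dv[2] : n;
};
// clamp(32) => 32,  clamp(500) => '96',  clamp(parseInt('')) => '64' (the default)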
@@ -2710,7 +2799,12 @@ function up2k_init(subtle) {
|
|||||||
function draw_turbo() {
|
function draw_turbo() {
|
||||||
if (turbolvl < 0 && uc.turbo) {
|
if (turbolvl < 0 && uc.turbo) {
|
||||||
bcfg_set('u2turbo', uc.turbo = false);
|
bcfg_set('u2turbo', uc.turbo = false);
|
||||||
toast.err(10, "turbo is disabled in server config");
|
toast.err(10, L.u_turbo_c);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (uc.turbo && !has(perms, 'read')) {
|
||||||
|
bcfg_set('u2turbo', uc.turbo = false);
|
||||||
|
toast.warn(30, L.u_turbo_g);
|
||||||
}
|
}
|
||||||
|
|
||||||
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
||||||
@@ -2811,6 +2905,8 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
function set_fsearch(new_state) {
|
function set_fsearch(new_state) {
|
||||||
var fixed = false,
|
var fixed = false,
|
||||||
|
persist = new_state !== undefined,
|
||||||
|
preferred = bcfg_get('fsearch', undefined),
|
||||||
can_write = false;
|
can_write = false;
|
||||||
|
|
||||||
if (!ebi('fsearch')) {
|
if (!ebi('fsearch')) {
|
||||||
@@ -2825,10 +2921,18 @@ function up2k_init(subtle) {
|
|||||||
new_state = false;
|
new_state = false;
|
||||||
fixed = true;
|
fixed = true;
|
||||||
}
|
}
|
||||||
|
if (new_state === undefined)
|
||||||
|
new_state = can_write ? false : have_up2k_idx ? true : undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (new_state === undefined)
|
||||||
|
new_state = preferred;
|
||||||
|
|
||||||
if (new_state !== undefined)
|
if (new_state !== undefined)
|
||||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
if (persist)
|
||||||
|
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||||
|
else
|
||||||
|
bcfg_upd_ui('fsearch', uc.fsearch = new_state);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
clmod(ebi('u2c3w'), 's', !can_write);
|
clmod(ebi('u2c3w'), 's', !can_write);
|
||||||
@@ -2851,6 +2955,9 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
||||||
ebi('u2mu').style.display = potato ? '' : 'none';
|
ebi('u2mu').style.display = potato ? '' : 'none';
|
||||||
|
|
||||||
|
if (u2ts.startsWith('f') || !sread('u2ts'))
|
||||||
|
uc.u2ts = bcfg_upd_ui('u2ts', !u2ts.endsWith('u'));
|
||||||
|
|
||||||
draw_turbo();
|
draw_turbo();
|
||||||
draw_life();
|
draw_life();
|
||||||
onresize();
|
onresize();
|
||||||
@@ -2875,16 +2982,28 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function set_u2sort() {
|
function set_u2sort(en) {
|
||||||
if (u2sort.indexOf('f') < 0)
|
if (u2sort.indexOf('f') < 0)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
bcfg_set('u2sort', uc.az = u2sort.indexOf('n') + 1);
|
var fen = uc.az = u2sort.indexOf('n') + 1;
|
||||||
localStorage.removeItem('u2sort');
|
bcfg_upd_ui('u2sort', fen);
|
||||||
|
if (en != fen)
|
||||||
|
toast.warn(10, L.ul_btnlk);
|
||||||
|
}
|
||||||
|
|
||||||
|
function set_u2ts(en) {
|
||||||
|
if (u2ts.indexOf('f') < 0)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var fen = !u2ts.endsWith('u');
|
||||||
|
bcfg_upd_ui('u2ts', fen);
|
||||||
|
if (en != fen)
|
||||||
|
toast.warn(10, L.ul_btnlk);
|
||||||
}
|
}
|
||||||
|
|
||||||
function set_hashw() {
|
function set_hashw() {
|
||||||
if (!window.WebAssembly) {
|
if (!WebAssembly) {
|
||||||
bcfg_set('hashw', uc.hashw = false);
|
bcfg_set('hashw', uc.hashw = false);
|
||||||
toast.err(10, L.u_nowork);
|
toast.err(10, L.u_nowork);
|
||||||
}
|
}
|
||||||
@@ -2901,7 +3020,7 @@ function up2k_init(subtle) {
|
|||||||
nopenag();
|
nopenag();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!window.Notification || !HTTPS)
|
if (!Notification || !HTTPS)
|
||||||
return nopenag();
|
return nopenag();
|
||||||
|
|
||||||
if (en && Notification.permission == 'default')
|
if (en && Notification.permission == 'default')
|
||||||
@@ -2923,7 +3042,7 @@ function up2k_init(subtle) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
|
if (uc.upnag && (!Notification || Notification.permission != 'granted'))
|
||||||
bcfg_set('upnag', uc.upnag = false);
|
bcfg_set('upnag', uc.upnag = false);
|
||||||
|
|
||||||
ebi('nthread_add').onclick = function (e) {
|
ebi('nthread_add').onclick = function (e) {
|
||||||
@@ -2978,7 +3097,7 @@ ebi('ico1').onclick = function () {
|
|||||||
if (QS('#op_up2k.act'))
|
if (QS('#op_up2k.act'))
|
||||||
goto_up2k();
|
goto_up2k();
|
||||||
|
|
||||||
apply_perms({ "perms": perms, "frand": frand });
|
apply_perms({ "perms": perms, "frand": frand, "u2ts": u2ts });
|
||||||
|
|
||||||
|
|
||||||
(function () {
|
(function () {
|
||||||
|
|||||||
@@ -6,10 +6,16 @@ if (!window.console || !console.log)
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
if (window.CGV)
|
||||||
|
for (var k in CGV)
|
||||||
|
window[k] = CGV[k];
|
||||||
|
|
||||||
|
|
||||||
var wah = '',
|
var wah = '',
|
||||||
|
STG = null,
|
||||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||||
CB = '?_=' + Date.now(),
|
T0 = Date.now(),
|
||||||
R = SR.slice(1),
|
R = SR.slice(1),
|
||||||
RS = R ? "/" + R : "",
|
RS = R ? "/" + R : "",
|
||||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||||
@@ -35,8 +41,16 @@ if (!window.FormData)
|
|||||||
window.FormData = false;
|
window.FormData = false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
CB = '?' + document.currentScript.src.split('?').pop();
|
STG = window.localStorage;
|
||||||
|
STG.STG;
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
STG = null;
|
||||||
|
if ((ex + '').indexOf('sandbox') < 0)
|
||||||
|
console.log('no localStorage: ' + ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
if (navigator.userAgentData.mobile)
|
if (navigator.userAgentData.mobile)
|
||||||
MOBILE = true;
|
MOBILE = true;
|
||||||
|
|
||||||
@@ -113,7 +127,7 @@ if ((document.location + '').indexOf(',rej,') + 1)
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
console.hist = [];
|
console.hist = [];
|
||||||
var CMAXHIST = 100;
|
var CMAXHIST = 1000;
|
||||||
var hook = function (t) {
|
var hook = function (t) {
|
||||||
var orig = console[t].bind(console),
|
var orig = console[t].bind(console),
|
||||||
cfun = function () {
|
cfun = function () {
|
||||||
@@ -140,6 +154,10 @@ catch (ex) {
|
|||||||
}
|
}
|
||||||
var crashed = false, ignexd = {}, evalex_fatal = false;
|
var crashed = false, ignexd = {}, evalex_fatal = false;
|
||||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||||
|
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||||
|
if (ignexd[ekey] || crashed)
|
||||||
|
return;
|
||||||
|
|
||||||
msg = String(msg);
|
msg = String(msg);
|
||||||
url = String(url);
|
url = String(url);
|
||||||
|
|
||||||
@@ -155,11 +173,13 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
if (url.indexOf(' > eval') + 1 && !evalex_fatal)
|
if (url.indexOf(' > eval') + 1 && !evalex_fatal)
|
||||||
return; // md timer
|
return; // md timer
|
||||||
|
|
||||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
if (IE && url.indexOf('prism.js') + 1)
|
||||||
if (ignexd[ekey] || crashed)
|
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (url.indexOf('deps/marked.js') + 1 && !window.WebAssembly)
|
if (url.indexOf('easymde.js') + 1)
|
||||||
|
return; // clicking the preview pane
|
||||||
|
|
||||||
|
if (url.indexOf('deps/marked.js') + 1 && !WebAssembly)
|
||||||
return; // ff<52
|
return; // ff<52
|
||||||
|
|
||||||
crashed = true;
|
crashed = true;
|
||||||
@@ -197,19 +217,24 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
}
|
}
|
||||||
ignexd[ekey] = true;
|
ignexd[ekey] = true;
|
||||||
|
|
||||||
var ls = jcp(localStorage);
|
var ls = {},
|
||||||
if (ls.fman_clip)
|
lsk = Object.keys(localStorage),
|
||||||
ls.fman_clip = ls.fman_clip.length + ' items';
|
nka = lsk.length,
|
||||||
|
nk = Math.min(200, nka);
|
||||||
|
|
||||||
var lsk = Object.keys(ls);
|
for (var a = 0; a < nk; a++) {
|
||||||
lsk.sort();
|
var k = lsk[a],
|
||||||
html.push('<p class="b">');
|
v = localStorage.getItem(k);
|
||||||
for (var a = 0; a < lsk.length; a++) {
|
|
||||||
if (ls[lsk[a]].length > 9000)
|
|
||||||
continue;
|
|
||||||
|
|
||||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
ls[k] = v.length > 256 ? v.slice(0, 32) + '[...' + v.length + 'b]' : v;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lsk = Object.keys(ls);
|
||||||
|
lsk.sort();
|
||||||
|
html.push('<p class="b"><b>' + nka + ': </b>');
|
||||||
|
for (var a = 0; a < nk; a++)
|
||||||
|
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||||
|
|
||||||
html.push('</p>');
|
html.push('</p>');
|
||||||
}
|
}
|
||||||
catch (e) { }
|
catch (e) { }
|
||||||
@@ -271,8 +296,15 @@ function anymod(e, shift_ok) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
var dev_fbw = sread('dev_fbw');
|
||||||
function ev(e) {
|
function ev(e) {
|
||||||
e = e || window.event;
|
if (!e && window.event) {
|
||||||
|
e = window.event;
|
||||||
|
if (dev_fbw == 1) {
|
||||||
|
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||||
|
console.error('using window.event');
|
||||||
|
}
|
||||||
|
}
|
||||||
if (!e)
|
if (!e)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@@ -291,7 +323,7 @@ function ev(e) {
|
|||||||
|
|
||||||
|
|
||||||
function noope(e) {
|
function noope(e) {
|
||||||
ev(e);
|
try { ev(e); } catch (ex) { }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -359,12 +391,28 @@ catch (ex) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!window.Set)
|
||||||
|
window.Set = function () {
|
||||||
|
var r = this;
|
||||||
|
r.size = 0;
|
||||||
|
r.d = {};
|
||||||
|
r.add = function (k) {
|
||||||
|
if (!r.d[k]) {
|
||||||
|
r.d[k] = 1;
|
||||||
|
r.size++;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
r.has = function (k) {
|
||||||
|
return r.d[k];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
// https://stackoverflow.com/a/950146
|
// https://stackoverflow.com/a/950146
|
||||||
function import_js(url, cb, ecb) {
|
function import_js(url, cb, ecb) {
|
||||||
var head = document.head || document.getElementsByTagName('head')[0];
|
var head = document.head || document.getElementsByTagName('head')[0];
|
||||||
var script = mknod('script');
|
var script = mknod('script');
|
||||||
script.type = 'text/javascript';
|
script.type = 'text/javascript';
|
||||||
script.src = url;
|
script.src = url + '?_=' + (window.TS || 'a');
|
||||||
script.onload = cb;
|
script.onload = cb;
|
||||||
script.onerror = ecb || function () {
|
script.onerror = ecb || function () {
|
||||||
var m = 'Failed to load module:\n' + url;
|
var m = 'Failed to load module:\n' + url;
|
||||||
@@ -384,6 +432,25 @@ function unsmart(txt) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function namesan(txt, win, fslash) {
|
||||||
|
if (win)
|
||||||
|
txt = (txt.
|
||||||
|
replace(/</g, "＜").
|
||||||
|
replace(/>/g, "＞").
|
||||||
|
replace(/:/g, "：").
|
||||||
|
replace(/"/g, "＂").
|
||||||
|
replace(/\\/g, "＼").
|
||||||
|
replace(/\|/g, "｜").
|
||||||
|
replace(/\?/g, "？").
|
||||||
|
replace(/\*/g, "＊"));
|
||||||
|
|
||||||
|
if (fslash)
|
||||||
|
txt = txt.replace(/\//g, "／");
|
||||||
|
|
||||||
|
return txt;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
var crctab = (function () {
|
var crctab = (function () {
|
||||||
var c, tab = [];
|
var c, tab = [];
|
||||||
for (var n = 0; n < 256; n++) {
|
for (var n = 0; n < 256; n++) {
|
||||||
@@ -478,7 +545,7 @@ function yscroll() {
|
|||||||
|
|
||||||
function showsort(tab) {
|
function showsort(tab) {
|
||||||
var v, vn, v1, v2, th = tab.tHead,
|
var v, vn, v1, v2, th = tab.tHead,
|
||||||
sopts = jread('fsort', [["href", 1, ""]]);
|
sopts = jread('fsort', jcp(dsort));
|
||||||
|
|
||||||
th && (th = th.rows[0]) && (th = th.cells);
|
th && (th = th.rows[0]) && (th = th.cells);
|
||||||
|
|
||||||
@@ -617,9 +684,8 @@ function linksplit(rp, id) {
|
|||||||
}
|
}
|
||||||
var vlink = esc(uricom_dec(link));
|
var vlink = esc(uricom_dec(link));
|
||||||
|
|
||||||
if (link.indexOf('/') !== -1) {
|
if (link.indexOf('/') !== -1)
|
||||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
vlink = vlink.slice(0, -1);
|
||||||
}
|
|
||||||
|
|
||||||
if (!rp) {
|
if (!rp) {
|
||||||
if (q)
|
if (q)
|
||||||
@@ -671,6 +737,15 @@ function vjoin(p1, p2) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function addq(url, q) {
|
||||||
|
var uh = url.split('#', 1),
|
||||||
|
u = uh[0],
|
||||||
|
h = uh.length == 1 ? '' : '#' + uh[1];
|
||||||
|
|
||||||
|
return u + (u.indexOf('?') < 0 ? '?' : '&') + (q === undefined ? '' : q) + h;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function uricom_enc(txt, do_fb_enc) {
|
function uricom_enc(txt, do_fb_enc) {
|
||||||
try {
|
try {
|
||||||
return encodeURIComponent(txt);
|
return encodeURIComponent(txt);
|
||||||
@@ -751,17 +826,6 @@ function noq_href(el) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
function get_pwd() {
|
|
||||||
var k = HTTPS ? 's=' : 'd=',
|
|
||||||
pwd = ('; ' + document.cookie).split('; cppw' + k);
|
|
||||||
|
|
||||||
if (pwd.length < 2)
|
|
||||||
return null;
|
|
||||||
|
|
||||||
return decodeURIComponent(pwd[1].split(';')[0]);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function unix2iso(ts) {
|
function unix2iso(ts) {
|
||||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||||
}
|
}
|
||||||
@@ -882,9 +946,16 @@ function jcp(obj) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function sdrop(key) {
|
||||||
|
try {
|
||||||
|
STG.removeItem(key);
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
}
|
||||||
|
|
||||||
function sread(key, al) {
|
function sread(key, al) {
|
||||||
try {
|
try {
|
||||||
var ret = localStorage.getItem(key);
|
var ret = STG.getItem(key);
|
||||||
return (!al || has(al, ret)) ? ret : null;
|
return (!al || has(al, ret)) ? ret : null;
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
@@ -895,9 +966,9 @@ function sread(key, al) {
|
|||||||
function swrite(key, val) {
|
function swrite(key, val) {
|
||||||
try {
|
try {
|
||||||
if (val === undefined || val === null)
|
if (val === undefined || val === null)
|
||||||
localStorage.removeItem(key);
|
STG.removeItem(key);
|
||||||
else
|
else
|
||||||
localStorage.setItem(key, val);
|
STG.setItem(key, val);
|
||||||
}
|
}
|
||||||
catch (e) { }
|
catch (e) { }
|
||||||
}
|
}
|
||||||
@@ -932,7 +1003,7 @@ function fcfg_get(name, defval) {
|
|||||||
val = parseFloat(sread(name));
|
val = parseFloat(sread(name));
|
||||||
|
|
||||||
if (!isNum(val))
|
if (!isNum(val))
|
||||||
return parseFloat(o ? o.value : defval);
|
return parseFloat(o && o.value !== '' ? o.value : defval);
|
||||||
|
|
||||||
if (o)
|
if (o)
|
||||||
o.value = val;
|
o.value = val;
|
||||||
@@ -977,13 +1048,14 @@ function bcfg_set(name, val) {
|
|||||||
function bcfg_upd_ui(name, val) {
|
function bcfg_upd_ui(name, val) {
|
||||||
var o = ebi(name);
|
var o = ebi(name);
|
||||||
if (!o)
|
if (!o)
|
||||||
return;
|
return val;
|
||||||
|
|
||||||
if (o.getAttribute('type') == 'checkbox')
|
if (o.getAttribute('type') == 'checkbox')
|
||||||
o.checked = val;
|
o.checked = val;
|
||||||
else if (o) {
|
else if (o) {
|
||||||
clmod(o, 'on', val);
|
clmod(o, 'on', val);
|
||||||
}
|
}
|
||||||
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||||
@@ -1057,7 +1129,7 @@ function dl_file(url) {
|
|||||||
|
|
||||||
function cliptxt(txt, ok) {
|
function cliptxt(txt, ok) {
|
||||||
var fb = function () {
|
var fb = function () {
|
||||||
console.log('fb');
|
console.log('clip-fb');
|
||||||
var o = mknod('input');
|
var o = mknod('input');
|
||||||
o.value = txt;
|
o.value = txt;
|
||||||
document.body.appendChild(o);
|
document.body.appendChild(o);
|
||||||
@@ -1324,10 +1396,10 @@ var tt = (function () {
|
|||||||
o = ctr.querySelectorAll('*[tt]');
|
o = ctr.querySelectorAll('*[tt]');
|
||||||
|
|
||||||
for (var a = o.length - 1; a >= 0; a--) {
|
for (var a = o.length - 1; a >= 0; a--) {
|
||||||
o[a].onfocus = _cshow;
|
o[a].addEventListener('focus', _cshow);
|
||||||
o[a].onblur = _hide;
|
o[a].addEventListener('blur', _hide);
|
||||||
o[a].onmouseenter = _dshow;
|
o[a].addEventListener('mouseenter', _dshow);
|
||||||
o[a].onmouseleave = _hide;
|
o[a].addEventListener('mouseleave', _hide);
|
||||||
}
|
}
|
||||||
r.hide();
|
r.hide();
|
||||||
}
|
}
|
||||||
@@ -1351,6 +1423,14 @@ function lf2br(txt) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function hunpre(txt) {
|
||||||
|
return ('' + txt).replace(/^<pre>/, '');
|
||||||
|
}
|
||||||
|
function unpre(txt) {
|
||||||
|
return esc(hunpre(txt));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
var toast = (function () {
|
var toast = (function () {
|
||||||
var r = {},
|
var r = {},
|
||||||
te = null,
|
te = null,
|
||||||
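Side note, not part of the diff: the hunpre/unpre helpers added above are what the up2k error paths now call; hunpre only strips a leading <pre> from a server reply, while unpre additionally runs the result through esc() before it is shown in a toast.

// illustration only
hunpre('<pre>404 not found');  // => '404 not found'
unpre('<pre>404 not found');   // same text, but html-escaped via esc() so it is safe to render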
@@ -1387,15 +1467,23 @@ var toast = (function () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
r.hide = function (e) {
|
r.hide = function (e) {
|
||||||
ev(e);
|
if (this === ebi('toastc'))
|
||||||
|
ev(e);
|
||||||
|
|
||||||
unscroll();
|
unscroll();
|
||||||
clearTimeout(te);
|
clearTimeout(te);
|
||||||
clmod(obj, 'vis');
|
clmod(obj, 'vis');
|
||||||
r.visible = false;
|
r.visible = false;
|
||||||
r.tag = obj;
|
r.tag = obj;
|
||||||
|
if (!WebAssembly)
|
||||||
|
te = setTimeout(function () {
|
||||||
|
obj.className = 'hide';
|
||||||
|
}, 500);
|
||||||
};
|
};
|
||||||
|
|
||||||
r.show = function (cl, sec, txt, tag) {
|
r.show = function (cl, sec, txt, tag) {
|
||||||
|
txt = (txt + '').slice(0, 16384);
|
||||||
|
|
||||||
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
||||||
delta = Date.now() - r.p_t;
|
delta = Date.now() - r.p_t;
|
||||||
|
|
||||||
@@ -1451,6 +1539,8 @@ var modal = (function () {
|
|||||||
cb_up = null,
|
cb_up = null,
|
||||||
cb_ok = null,
|
cb_ok = null,
|
||||||
cb_ng = null,
|
cb_ng = null,
|
||||||
|
sel_0 = 0,
|
||||||
|
sel_1 = 0,
|
||||||
tok, tng, prim, sec, ok_cancel;
|
tok, tng, prim, sec, ok_cancel;
|
||||||
|
|
||||||
r.load = function () {
|
r.load = function () {
|
||||||
@@ -1463,6 +1553,7 @@ var modal = (function () {
|
|||||||
r.load();
|
r.load();
|
||||||
|
|
||||||
r.busy = false;
|
r.busy = false;
|
||||||
|
r.nofocus = 0;
|
||||||
|
|
||||||
r.show = function (html) {
|
r.show = function (html) {
|
||||||
o = mknod('div', 'modal');
|
o = mknod('div', 'modal');
|
||||||
@@ -1476,16 +1567,18 @@ var modal = (function () {
|
|||||||
a.onclick = ng;
|
a.onclick = ng;
|
||||||
|
|
||||||
a = ebi('modal-ok');
|
a = ebi('modal-ok');
|
||||||
|
a.addEventListener('blur', onblur);
|
||||||
a.onclick = ok;
|
a.onclick = ok;
|
||||||
|
|
||||||
var inp = ebi('modali');
|
var inp = ebi('modali');
|
||||||
(inp || a).focus();
|
(inp || a).focus();
|
||||||
if (inp)
|
if (inp)
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
inp.setSelectionRange(0, inp.value.length, "forward");
|
inp.setSelectionRange(sel_0, sel_1, "forward");
|
||||||
}, 0);
|
}, 0);
|
||||||
|
|
||||||
document.addEventListener('focus', onfocus);
|
document.addEventListener('focus', onfocus);
|
||||||
|
document.addEventListener('selectionchange', onselch);
|
||||||
timer.add(onfocus);
|
timer.add(onfocus);
|
||||||
if (cb_up)
|
if (cb_up)
|
||||||
setTimeout(cb_up, 1);
|
setTimeout(cb_up, 1);
|
||||||
@@ -1493,6 +1586,11 @@ var modal = (function () {
|
|||||||
|
|
||||||
r.hide = function () {
|
r.hide = function () {
|
||||||
timer.rm(onfocus);
|
timer.rm(onfocus);
|
||||||
|
try {
|
||||||
|
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
document.removeEventListener('selectionchange', onselch);
|
||||||
document.removeEventListener('focus', onfocus);
|
document.removeEventListener('focus', onfocus);
|
||||||
document.removeEventListener('keydown', onkey);
|
document.removeEventListener('keydown', onkey);
|
||||||
o.parentNode.removeChild(o);
|
o.parentNode.removeChild(o);
|
||||||
@@ -1514,20 +1612,38 @@ var modal = (function () {
|
|||||||
cb_ng(null);
|
cb_ng(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var onselch = function () {
|
||||||
|
try {
|
||||||
|
if (window.getSelection() + '')
|
||||||
|
r.nofocus = 15;
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
};
|
||||||
|
|
||||||
|
var onblur = function () {
|
||||||
|
r.nofocus = 3;
|
||||||
|
};
|
||||||
|
|
||||||
var onfocus = function (e) {
|
var onfocus = function (e) {
|
||||||
|
if (MOBILE)
|
||||||
|
return;
|
||||||
|
|
||||||
var ctr = ebi('modalc');
|
var ctr = ebi('modalc');
|
||||||
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
||||||
return;
|
return;
|
||||||
|
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
|
if (--r.nofocus >= 0)
|
||||||
|
return;
|
||||||
|
|
||||||
if (ctr = ebi('modal-ok'))
|
if (ctr = ebi('modal-ok'))
|
||||||
ctr.focus();
|
ctr.focus();
|
||||||
}, 20);
|
}, 20);
|
||||||
ev(e);
|
ev(e);
|
||||||
}
|
};
|
||||||
|
|
||||||
var onkey = function (e) {
|
var onkey = function (e) {
|
||||||
var k = e.code,
|
var k = (e.code || e.key) + '',
|
||||||
eok = ebi('modal-ok'),
|
eok = ebi('modal-ok'),
|
||||||
eng = ebi('modal-ng'),
|
eng = ebi('modal-ng'),
|
||||||
ae = document.activeElement;
|
ae = document.activeElement;
|
||||||
@@ -1535,18 +1651,18 @@ var modal = (function () {
|
|||||||
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
||||||
k = 'Enter';
|
k = 'Enter';
|
||||||
|
|
||||||
if (k == 'Enter') {
|
if (k.endsWith('Enter')) {
|
||||||
if (ae && ae == eng)
|
if (ae && ae == eng)
|
||||||
return ng();
|
return ng(e);
|
||||||
|
|
||||||
return ok();
|
return ok(e);
|
||||||
}
|
}
|
||||||
|
|
||||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
if ((k == 'ArrowLeft' || k == 'ArrowRight' || k == 'Left' || k == 'Right') && eng && (ae == eok || ae == eng))
|
||||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||||
|
|
||||||
if (k == 'Escape')
|
if (k == 'Escape' || k == 'Esc')
|
||||||
return ng();
|
return ng(e);
|
||||||
}
|
}
|
||||||
|
|
||||||
var next = function () {
|
var next = function () {
|
||||||
@@ -1581,16 +1697,18 @@ var modal = (function () {
|
|||||||
r.show(html);
|
r.show(html);
|
||||||
}
|
}
|
||||||
|
|
||||||
r.prompt = function (html, v, cok, cng, fun) {
|
r.prompt = function (html, v, cok, cng, fun, so0, so1) {
|
||||||
q.push(function () {
|
q.push(function () {
|
||||||
_prompt(lf2br(html), v, cok, cng, fun);
|
_prompt(lf2br(html), v, cok, cng, fun, so0, so1);
|
||||||
});
|
});
|
||||||
next();
|
next();
|
||||||
}
|
}
|
||||||
var _prompt = function (html, v, cok, cng, fun) {
|
var _prompt = function (html, v, cok, cng, fun, so0, so1) {
|
||||||
cb_ok = cok;
|
cb_ok = cok;
|
||||||
cb_ng = cng === undefined ? cok : null;
|
cb_ng = cng === undefined ? cok : null;
|
||||||
cb_up = fun;
|
cb_up = fun;
|
||||||
|
sel_0 = so0 || 0;
|
||||||
|
sel_1 = so1 === undefined ? v.length : so1;
|
||||||
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
|
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
|
||||||
r.show(html);
|
r.show(html);
|
||||||
|
|
||||||
@@ -1777,7 +1895,7 @@ function md_thumbs(md) {
|
|||||||
float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';
|
float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';
|
||||||
|
|
||||||
if (!/[?&]cache/.exec(url))
|
if (!/[?&]cache/.exec(url))
|
||||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';
|
url = addq(url, 'cache=i');
|
||||||
|
|
||||||
md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
|
md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
|
||||||
}
|
}
|
||||||
@@ -1823,21 +1941,17 @@ var favico = (function () {
|
|||||||
var b64;
|
var b64;
|
||||||
try {
|
try {
|
||||||
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
|
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
|
||||||
//console.log('f1');
|
|
||||||
}
|
}
|
||||||
catch (e1) {
|
catch (e1) {
|
||||||
try {
|
try {
|
||||||
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||||
function x(m, v) { return String.fromCharCode('0x' + v); })));
|
function x(m, v) { return String.fromCharCode('0x' + v); })));
|
||||||
//console.log('f2');
|
|
||||||
}
|
}
|
||||||
catch (e2) {
|
catch (e2) {
|
||||||
try {
|
try {
|
||||||
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||||
//console.log('f3');
|
|
||||||
}
|
}
|
||||||
catch (e3) {
|
catch (e3) {
|
||||||
//console.log('fe');
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1891,15 +2005,27 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
|
|||||||
if (xhr.status < 400 && xhr.status >= 200)
|
if (xhr.status < 400 && xhr.status >= 200)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
if (tag === undefined)
|
||||||
|
tag = prefix;
|
||||||
|
|
||||||
|
var errtxt = ((xhr.response && xhr.response.err) || xhr.responseText) || '',
|
||||||
|
suf = '',
|
||||||
fun = toast[lvl || 'err'],
|
fun = toast[lvl || 'err'],
|
||||||
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
|
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
|
||||||
|
|
||||||
|
if (errtxt.startsWith('<pre>'))
|
||||||
|
suf = '\n\nerror-details: «' + unpre(errtxt).split('\n')[0].trim() + '»';
|
||||||
|
else
|
||||||
|
errtxt = esc(errtxt).slice(0, 32768);
|
||||||
|
|
||||||
if (xhr.status == 403 && !is_cf)
|
if (xhr.status == 403 && !is_cf)
|
||||||
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
|
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out") + suf, tag);
|
||||||
|
|
||||||
if (xhr.status == 404)
|
if (xhr.status == 404)
|
||||||
return toast.err(0, prefix + e404, tag);
|
return toast.err(0, prefix + e404 + suf, tag);
|
||||||
|
|
||||||
|
if (!xhr.status && !errtxt)
|
||||||
|
return toast.err(0, prefix + L.xhr0);
|
||||||
|
|
||||||
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
|
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
|
||||||
var now = Date.now(), td = now - cf_cha_t;
|
var now = Date.now(), td = now - cf_cha_t;
|
||||||
|
|||||||
@@ -13,6 +13,9 @@
|
|||||||
|
|
||||||
# other stuff
|
# other stuff
|
||||||
|
|
||||||
|
## [`TODO.md`](TODO.md)
|
||||||
|
* planned features / fixes / changes
|
||||||
|
|
||||||
## [`example.conf`](example.conf)
|
## [`example.conf`](example.conf)
|
||||||
* example config file for `-c`
|
* example config file for `-c`
|
||||||
|
|
||||||
|
|||||||
docs/TODO.md (new file, 15 lines)
@@ -0,0 +1,15 @@
|
|||||||
|
a living list of upcoming features / fixes / changes, very roughly in order of priority
|
||||||
|
|
||||||
|
* download accelerator
|
||||||
|
* definitely download chunks in parallel
|
||||||
|
* maybe resumable downloads (chrome-only, jank api)
|
||||||
|
* maybe checksum validation (return sha512 of requested range in responses, and probably also warks)
|
||||||
|
|
||||||
|
* [github issue #37](https://github.com/9001/copyparty/issues/37) - upload PWA
|
||||||
|
* or [maybe not](https://arstechnica.com/tech-policy/2024/02/apple-under-fire-for-disabling-iphone-web-apps-eu-asks-developers-to-weigh-in/), or [maybe](https://arstechnica.com/gadgets/2024/03/apple-changes-course-will-keep-iphone-eu-web-apps-how-they-are-in-ios-17-4/)
|
||||||
|
|
||||||
|
* [github issue #57](https://github.com/9001/copyparty/issues/57) - config GUI
|
||||||
|
* configs given to -c can be ordered with numerical prefix
|
||||||
|
* autorevert settings if it fails to apply
|
||||||
|
* countdown until session invalidates in settings gui, with refresh-button
|
||||||
|
|
||||||
docs/bufsize.txt (new file, 96 lines)
@@ -0,0 +1,96 @@
|
|||||||
|
notes from testing various buffer sizes of files and sockets
|
||||||
|
|
||||||
|
summary:
|
||||||
|
|
||||||
|
download-folder-as-tar: would be 7% faster with --iobuf 65536 (but got 20% faster in v1.11.2)
|
||||||
|
|
||||||
|
download-folder-as-zip: optimal with default --iobuf 262144
|
||||||
|
|
||||||
|
download-file-over-https: optimal with default --iobuf 262144
|
||||||
|
|
||||||
|
put-large-file: optimal with default --iobuf 262144, --s-rd-sz 262144 (and got 14% faster in v1.11.2)
|
||||||
|
|
||||||
|
post-large-file: optimal with default --iobuf 262144, --s-rd-sz 262144 (and got 18% faster in v1.11.2)
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/?tar
|
||||||
|
3.3 req/s 1.11.1
|
||||||
|
4.3 4.0 3.3 req/s 1.12.2
|
||||||
|
64 256 512 --iobuf 256 (prefer smaller)
|
||||||
|
32 32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/?zip
|
||||||
|
2.9 req/s 1.11.1
|
||||||
|
2.5 2.9 2.9 req/s 1.12.2
|
||||||
|
64 256 512 --iobuf 256 (prefer bigger)
|
||||||
|
32 32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/?tar
|
||||||
|
8.3 req/s 1.11.1
|
||||||
|
8.4 8.4 8.5 req/s 1.12.2
|
||||||
|
64 256 512 --iobuf 256 (prefer bigger)
|
||||||
|
32 32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/?zip
|
||||||
|
13.9 req/s 1.11.1
|
||||||
|
14.1 14.0 13.8 req/s 1.12.2
|
||||||
|
64 256 512 --iobuf 256 (prefer smaller)
|
||||||
|
32 32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/987a
|
||||||
|
5260 req/s 1.11.1
|
||||||
|
5246 5246 5280 5268 req/s 1.12.2
|
||||||
|
64 256 512 256 --iobuf dontcare
|
||||||
|
32 32 32 512 --s-rd-sz dontcare
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure https://127.0.0.1:3923/pairdupes/987a
|
||||||
|
4445 req/s 1.11.1
|
||||||
|
4462 4494 4444 req/s 1.12.2
|
||||||
|
64 256 512 --iobuf dontcare
|
||||||
|
32 32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/gssc-02-cannonball-skydrift/track10.cdda.flac
|
||||||
|
95 req/s 1.11.1
|
||||||
|
95 97 req/s 1.12.2
|
||||||
|
64 512 --iobuf dontcare
|
||||||
|
32 32 --s-rd-sz
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure https://127.0.0.1:3923/bigs/gssc-02-cannonball-skydrift/track10.cdda.flac
|
||||||
|
15.4 req/s 1.11.1
|
||||||
|
15.4 15.3 14.9 15.4 req/s 1.12.2
|
||||||
|
64 256 512 512 --iobuf 256 (prefer smaller, and smaller than s-wr-sz)
|
||||||
|
32 32 32 32 --s-rd-sz
|
||||||
|
256 256 256 512 --s-wr-sz
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
python3 ~/dev/old/copyparty\ v1.11.1\ dont\ ban\ the\ pipes.py -q -i 127.0.0.1 -v .::A --daw
|
||||||
|
python3 ~/dev/copyparty/dist/copyparty-sfx.py -q -i 127.0.0.1 -v .::A --daw --iobuf $((1024*512))
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure -mPUT -r0 -D ~/Music/gssc-02-cannonball-skydrift/track10.cdda.flac http://127.0.0.1:3923/a.bin
|
||||||
|
10.8 req/s 1.11.1
|
||||||
|
10.8 11.5 11.8 12.1 12.2 12.3 req/s new
|
||||||
|
512 512 512 512 512 256 --iobuf 256
|
||||||
|
32 64 128 256 512 256 --s-rd-sz 256 (prefer bigger)
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
buildpost() {
|
||||||
|
b=--jeg-er-grensestaven;
|
||||||
|
printf -- "$b\r\nContent-Disposition: form-data; name=\"act\"\r\n\r\nbput\r\n$b\r\nContent-Disposition: form-data; name=\"f\"; filename=\"a.bin\"\r\nContent-Type: audio/mpeg\r\n\r\n"
|
||||||
|
cat "$1"
|
||||||
|
printf -- "\r\n${b}--\r\n"
|
||||||
|
}
|
||||||
|
buildpost ~/Music/gssc-02-cannonball-skydrift/track10.cdda.flac >big.post
|
||||||
|
buildpost ~/Music/bottomtext.txt >smol.post
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure -mPOST -r0 -T 'multipart/form-data; boundary=jeg-er-grensestaven' -D big.post http://127.0.0.1:3923/?replace
|
||||||
|
9.6 11.2 11.3 11.1 10.9 req/s v1.11.2
|
||||||
|
512 512 256 128 256 --iobuf 256
|
||||||
|
32 512 256 128 128 --s-rd-sz 256
|
||||||
|
|
||||||
|
oha -z10s -c1 --ipv4 --insecure -mPOST -r0 -T 'multipart/form-data; boundary=jeg-er-grensestaven' -D smol.post http://127.0.0.1:3923/?replace
|
||||||
|
2445 2414 2401 2437
|
||||||
|
256 128 256 256 --iobuf 256
|
||||||
|
128 128 256 64 --s-rd-sz 128 (but use 256 since big posts are more important)
|
||||||
docs/changelog.md (1723 lines changed; diff suppressed because it is too large)
@@ -20,7 +20,8 @@
|
|||||||
* [just the sfx](#just-the-sfx)
|
* [just the sfx](#just-the-sfx)
|
||||||
* [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
|
* [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
|
||||||
* [complete release](#complete-release)
|
* [complete release](#complete-release)
|
||||||
* [todo](#todo) - roughly sorted by priority
|
* [debugging](#debugging)
|
||||||
|
* [music playback halting on phones](#music-playback-halting-on-phones) - mostly fine on android
|
||||||
* [discarded ideas](#discarded-ideas)
|
* [discarded ideas](#discarded-ideas)
|
||||||
|
|
||||||
|
|
||||||
@@ -54,8 +55,8 @@ quick outline of the up2k protocol, see [uploading](https://github.com/9001/cop
 * server creates the `wark`, an identifier for this upload
   * `sha512( salt + filesize + chunk_hashes )`
   * and a sparse file is created for the chunks to drop into
-* client uploads each chunk
-  * header entries for the chunk-hash and wark
+* client sends a series of POSTs, with one or more consecutive chunks in each
+  * header entries for the chunk-hashes (comma-separated) and wark
 * server writes chunks into place based on the hash
 * client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
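(illustration, not part of the diff: a hedged sketch of what one such POST could look like from the client side, assuming purl, wark, the chunk hashes and the chunksize were obtained from the preceding handshake; exact URLs and response handling are not shown here)

// send chunks `first`..`last` of `file` in a single POST
function send_chunks(purl, wark, hashes, file, chunksize, first, last) {
    var body = file.slice(first * chunksize, Math.min((last + 1) * chunksize, file.size));
    return fetch(purl, {
        method: 'POST',
        headers: {
            'X-Up2k-Hash': hashes.slice(first, last + 1).join(','),
            'X-Up2k-Wark': wark
        },
        body: body
    });
}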
|
|
||||||
@@ -133,10 +134,14 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
 | GET | `?zip=utf-8` | ...as a zip file |
 | GET | `?zip` | ...as a WinXP-compatible zip file |
 | GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
+| GET | `?tar&w` | pregenerate webp thumbnails |
+| GET | `?tar&j` | pregenerate jpg thumbnails |
+| GET | `?tar&p` | pregenerate audio waveforms |
 | GET | `?ups` | show recent uploads from your IP |
 | GET | `?ups&filter=f` | ...where URL contains `f` |
 | GET | `?mime=foo` | specify return mimetype `foo` |
 | GET | `?v` | render markdown file at URL |
+| GET | `?v` | open image/video/audio in mediaplayer |
 | GET | `?txt` | get file at URL as plaintext |
 | GET | `?txt=iso-8859-1` | ...with specific charset |
 | GET | `?th` | get image/video at URL as thumbnail |
@@ -162,8 +167,10 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
 | PUT | | (binary data) | upload into file at URL |
 | PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
 | PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
-| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
-| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
+| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
+| mPOST | `?j` | `f=FILE` | ...and reply with json |
+| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
+| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
 | mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
 | POST | `?delete` | | delete URL recursively |
 | jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
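(illustration, not part of the diff: a minimal browser-side sketch of the mPOST upload row above; host, port and folder are placeholders)

// upload a File object into /inc/ and get a json reply (?j); use ?replace to overwrite existing files
var fd = new FormData();
fd.append('f', some_file);  // some_file: e.g. the File picked in an <input type="file">
fetch('http://127.0.0.1:3923/inc/?j', { method: 'POST', body: fd })
    .then(function (r) { return r.json(); })
    .then(function (j) { console.log(j); });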
@@ -218,7 +225,7 @@ if you don't need all the features, you can repack the sfx and save a bunch of s
|
|||||||
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
|
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
|
||||||
|
|
||||||
the features you can opt to drop are
|
the features you can opt to drop are
|
||||||
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
|
* `cm`/easymde, the "fancy" markdown editor, saves ~89k
|
||||||
* `hl`, prism, the syntax hilighter, saves ~41k
|
* `hl`, prism, the syntax hilighter, saves ~41k
|
||||||
* `fnt`, source-code-pro, the monospace font, saves ~9k
|
* `fnt`, source-code-pro, the monospace font, saves ~9k
|
||||||
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
|
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
|
||||||
@@ -242,6 +249,7 @@ python3 -m venv .venv
|
|||||||
pip install jinja2 strip_hints # MANDATORY
|
pip install jinja2 strip_hints # MANDATORY
|
||||||
pip install mutagen # audio metadata
|
pip install mutagen # audio metadata
|
||||||
pip install pyftpdlib # ftp server
|
pip install pyftpdlib # ftp server
|
||||||
|
pip install partftpy # tftp server
|
||||||
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
|
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
|
||||||
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
|
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
|
||||||
pip install pyvips # faster thumbnails
|
pip install pyvips # faster thumbnails
|
||||||
@@ -299,19 +307,26 @@ in the `scripts` folder:
|
|||||||
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx
|
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx
|
||||||
|
|
||||||
|
|
||||||
# todo
|
# debugging
|
||||||
|
|
||||||
roughly sorted by priority
|
## music playback halting on phones
|
||||||
|
|
||||||
* nothing! currently
|
mostly fine on android, but still haven't find a way to massage iphones into behaving well
|
||||||
|
|
||||||
|
* conditionally starting/stopping mp.fau according to mp.au.readyState <3 or <4 doesn't help
|
||||||
|
* loop=true doesn't work, and manually looping mp.fau from an onended also doesn't work (it does nothing)
|
||||||
|
* assigning fau.currentTime in a timer doesn't work, as safari merely pretends to assign it
|
||||||
|
* on ios 16.7.7, mp.fau can sometimes make everything visibly work correctly, but no audio is actually hitting the speakers
|
||||||
|
|
||||||
|
can be reproduced with `--no-sendfile --s-wr-sz 8192 --s-wr-slp 0.3 --rsp-slp 6` and then play a collection of small audio files with the screen off, `ffmpeg -i track01.cdda.flac -c:a libopus -b:a 128k -segment_time 12 -f segment smol-%02d.opus`
|
||||||
|
|
||||||
|
|
||||||
## discarded ideas
|
## discarded ideas
|
||||||
|
|
||||||
* reduce up2k roundtrips
|
* optimization attempts which didn't improve performance
|
||||||
* start from a chunk index and just go
|
* remove brokers / multiprocessing stuff; https://github.com/9001/copyparty/tree/no-broker
|
||||||
* terminate client on bad data
|
* reduce the nesting / indirections in `HttpCli` / `httpcli.py`
|
||||||
* not worth the effort, just throw enough conncetions at it
|
* nearly zero benefit from stuff like replacing all the `self.conn.hsrv` with a local `hsrv` variable
|
||||||
* single sha512 across all up2k chunks?
|
* single sha512 across all up2k chunks?
|
||||||
* crypto.subtle cannot into streaming, would have to use hashwasm, expensive
|
* crypto.subtle cannot into streaming, would have to use hashwasm, expensive
|
||||||
* separate sqlite table per tag
|
* separate sqlite table per tag
|
||||||
|
|||||||
docs/examples/docker/basic-docker-compose/copyparty.conf (new file, 32 lines)
@@ -0,0 +1,32 @@
|
|||||||
|
# not actually YAML but lets pretend:
|
||||||
|
# -*- mode: yaml -*-
|
||||||
|
# vim: ft=yaml:
|
||||||
|
|
||||||
|
|
||||||
|
[global]
|
||||||
|
e2dsa # enable file indexing and filesystem scanning
|
||||||
|
e2ts # enable multimedia indexing
|
||||||
|
ansi # enable colors in log messages
|
||||||
|
|
||||||
|
# q, lo: /cfg/log/%Y-%m%d.log # log to file instead of docker
|
||||||
|
|
||||||
|
# p: 3939 # listen on another port
|
||||||
|
# ipa: 10.89. # only allow connections from 10.89.*
|
||||||
|
# df: 16 # stop accepting uploads if less than 16 GB free disk space
|
||||||
|
# ver # show copyparty version in the controlpanel
|
||||||
|
# grid # show thumbnails/grid-view by default
|
||||||
|
# theme: 2 # monokai
|
||||||
|
# name: datasaver # change the server-name that's displayed in the browser
|
||||||
|
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
|
||||||
|
# no-robots, force-js # make it harder for search engines to read your server
|
||||||
|
|
||||||
|
|
||||||
|
[accounts]
|
||||||
|
ed: wark # username: password
|
||||||
|
|
||||||
|
|
||||||
|
[/] # create a volume at "/" (the webroot), which will
|
||||||
|
/w # share /w (the docker data volume)
|
||||||
|
accs:
|
||||||
|
rw: * # everyone gets read-write access, but
|
||||||
|
rwmda: ed # the user "ed" gets read-write-move-delete-admin
|
||||||
Some files were not shown because too many files have changed in this diff.