Compare commits
445 Commits
.gitignore (vendored): 2 changed lines

@@ -12,6 +12,7 @@ copyparty.egg-info/
 /dist/
 /py2/
 /sfx*
+/pyz/
 /unt/
 /log/

@@ -29,6 +30,7 @@ copyparty/res/COPYING.txt
 copyparty/web/deps/
 srv/
 scripts/docker/i/
+scripts/deps-docker/uncomment.py
 contrib/package/arch/pkg/
 contrib/package/arch/src/
.vscode/settings.json (vendored): 24 changed lines

@@ -22,6 +22,9 @@
     "terminal.ansiBrightCyan": "#9cf0ed",
     "terminal.ansiBrightWhite": "#ffffff",
   },
+  "python.terminal.activateEnvironment": false,
+  "python.analysis.enablePytestSupport": false,
+  "python.analysis.typeCheckingMode": "standard",
   "python.testing.pytestEnabled": false,
   "python.testing.unittestEnabled": true,
   "python.testing.unittestArgs": [

@@ -31,23 +34,8 @@
     "-p",
     "test_*.py"
   ],
-  "python.linting.pylintEnabled": true,
-  "python.linting.flake8Enabled": true,
-  "python.linting.banditEnabled": true,
-  "python.linting.mypyEnabled": true,
-  "python.linting.flake8Args": [
-    "--max-line-length=120",
-    "--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
-  ],
-  "python.linting.banditArgs": [
-    "--ignore=B104,B110,B112"
-  ],
-  // python3 -m isort --py=27 --profile=black copyparty/
-  "python.formatting.provider": "none",
-  "[python]": {
-    "editor.defaultFormatter": "ms-python.black-formatter"
-  },
-  "editor.formatOnSave": true,
+  // python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
+  "editor.formatOnSave": false,
   "[html]": {
     "editor.formatOnSave": false,
     "editor.autoIndent": "keep",

@@ -58,6 +46,4 @@
   "files.associations": {
     "*.makefile": "makefile"
   },
-  "python.linting.enabled": true,
-  "python.pythonPath": "/usr/bin/python3"
 }
@@ -15,22 +15,18 @@ produces a chronological list of all uploads by collecting info from up2k databa
 # [`partyfuse.py`](partyfuse.py)
 * mount a copyparty server as a local filesystem (read-only)
 * **supports Windows!** -- expect `194 MiB/s` sequential read
-* **supports Linux** -- expect `117 MiB/s` sequential read
+* **supports Linux** -- expect `600 MiB/s` sequential read
 * **supports macos** -- expect `85 MiB/s` sequential read

-filecache is default-on for windows and macos;
-* macos readsize is 64kB, so speed ~32 MiB/s without the cache
-* windows readsize varies by software; explorer=1M, pv=32k
-
 note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)

-also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
+and consider using [../docs/rclone.md](../docs/rclone.md) instead; usually a bit faster, especially on windows


 ## to run this on windows:
 * install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
 * [x] add python 3.x to PATH (it asks during install)
-* `python -m pip install --user fusepy`
+* `python -m pip install --user fusepy` (or grab a copy of `fuse.py` from the `connect` page on your copyparty, and keep it in the same folder)
 * `python ./partyfuse.py n: http://192.168.1.69:3923/`

 10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
@@ -2,7 +2,7 @@ standalone programs which are executed by copyparty when an event happens (uploa

 these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info

-run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
+run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbc/xac/xbr/xar/xbd/xad/xban)

 > **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead

@@ -13,6 +13,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
 * [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
 * [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
 * [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
+* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded


 # upload batches

@@ -23,7 +24,10 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin

 # before upload
 * [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
+* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension


 # on message
 * [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
+* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
+* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
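All of the hooks touched by this changeset share one calling convention: when registered with the `j` flag, copyparty runs the script with a single JSON argument describing the event. A minimal sketch of such a hook (not part of the diff; the key names shown are the ones the bundled hooks below actually read):

#!/usr/bin/env python3
# minimal event-hook sketch; assumes registration with the "j" flag so that
# argv[1] is a json blob, as in the hooks added by this changeset
import json
import sys


def main():
    inf = json.loads(sys.argv[1])
    # fields used by the bundled hooks include "ap" (filesystem path),
    # "vp" (volume path), "sz" (size), "txt" (message text) and "user"
    print("hook got:", inf.get("vp"), inf.get("sz"))


if __name__ == "__main__":
    main()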
@@ -12,19 +12,28 @@ announces a new upload on discord
 example usage as global config:
     --xau f,t5,j,bin/hooks/discord-announce.py

+parameters explained,
+    xau = execute after upload
+    f = fork; don't delay other hooks while this is running
+    t5 = timeout if it's still running after 5 sec
+    j = this hook needs upload information as json (not just the filename)
+
 example usage as a volflag (per-volume config):
     -v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

     (share filesystem-path srv/inc as volume /inc,
      readable by everyone, read-write for user 'ed',
-     running this plugin on all uploads with the params listed below)
+     running this plugin on all uploads with the params explained above)

-parameters explained,
-    xbu = execute after upload
-    f = fork; don't wait for it to finish
-    t5 = timeout if it's still running after 5 sec
-    j = provide upload information as json; not just the filename
+example usage as a volflag in a copyparty config file:
+    [/inc]
+      srv/inc
+      accs:
+        r: *
+        rw: ed
+      flags:
+        xau: f,t5,j,bin/hooks/discord-announce.py

 replace "xau" with "xbu" to announce Before upload starts instead of After completion
bin/hooks/into-the-cache-it-goes.py (new file, 140 lines)

#!/usr/bin/env python3

import sys
import json
import shutil
import platform
import subprocess as sp
from urllib.parse import quote


_ = r"""
try to avoid race conditions in caching proxies
(primarily cloudflare, but probably others too)
by means of the most obvious solution possible:

just as each file has finished uploading, use
the server's external URL to download the file
so that it ends up in the cache, warm and snug

this intentionally delays the upload response
as it waits for the file to finish downloading
before copyparty is allowed to return the URL

NOTE: you must edit this script before use,
replacing https://example.com with your URL

NOTE: if the files are only accessible with a
password and/or filekey, you must also add
a cromulent password in the PASSWORD field

NOTE: needs either wget, curl, or "requests":
python3 -m pip install --user -U requests


example usage as global config:
    --xau j,t10,bin/hooks/into-the-cache-it-goes.py

parameters explained,
    xau = execute after upload
    j = this hook needs upload information as json (not just the filename)
    t10 = abort download and continue if it takes longer than 10sec

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with params explained above)

example usage as a volflag in a copyparty config file:
    [/inc]
      srv/inc
      accs:
        r: *
        rw: ed
      flags:
        xau: j,t10,bin/hooks/into-the-cache-it-goes.py
"""


# replace this with your site's external URL
# (including the :portnumber if necessary)
SITE_URL = "https://example.com"

# if downloading is protected by passwords or filekeys,
# specify a valid password between the quotes below:
PASSWORD = ""

# if file is larger than this, skip download
MAX_MEGABYTES = 8

# =============== END OF CONFIG ===============


WINDOWS = platform.system() == "Windows"


def main():
    fun = download_with_python
    if shutil.which("curl"):
        fun = download_with_curl
    elif shutil.which("wget"):
        fun = download_with_wget

    inf = json.loads(sys.argv[1])

    if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
        print("[into-the-cache] file is too large; will not download")
        return

    file_url = "/"
    if inf["vp"]:
        file_url += inf["vp"] + "/"
    file_url += inf["ap"].replace("\\", "/").split("/")[-1]
    file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")

    print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
    fun(file_url, PASSWORD.strip())

    print("[into-the-cache] Download OK")


def download_with_curl(url, pw):
    cmd = ["curl"]

    if pw:
        cmd += ["-HPW:%s" % (pw,)]

    nah = sp.DEVNULL
    sp.check_call(cmd + [url], stdout=nah, stderr=nah)


def download_with_wget(url, pw):
    cmd = ["wget", "-O"]

    cmd += ["nul" if WINDOWS else "/dev/null"]

    if pw:
        cmd += ["--header=PW:%s" % (pw,)]

    nah = sp.DEVNULL
    sp.check_call(cmd + [url], stdout=nah, stderr=nah)


def download_with_python(url, pw):
    import requests

    headers = {}
    if pw:
        headers["PW"] = pw

    with requests.get(url, headers=headers, stream=True) as r:
        r.raise_for_status()
        for _ in r.iter_content(chunk_size=1024 * 256):
            pass


if __name__ == "__main__":
    main()
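To see which URL the hook above ends up requesting, here is a standalone sketch of its URL-building step; SITE_URL and the upload-info values are made-up examples, not taken from the diff:

from urllib.parse import quote

SITE_URL = "https://example.com"  # placeholder, just like in the hook
inf = {"vp": "inc/pics", "ap": "/srv/inc/pics/råkost.jpg"}  # hypothetical upload info

file_url = "/"
if inf["vp"]:
    file_url += inf["vp"] + "/"
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
print(SITE_URL.rstrip("/") + quote(file_url, safe=b"/"))
# prints https://example.com/inc/pics/r%C3%A5kost.jpg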
@@ -14,19 +14,32 @@ except:
 from datetime import datetime


-"""
+_ = r"""
 use copyparty as a dumb messaging server / guestbook thing;
+accepts guestbook entries from 📟 (message-to-server-log) in the web-ui
 initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)

-Sample usage:
+example usage as global config:
     python copyparty-sfx.py --xm j,bin/hooks/msg-log.py

-Where:
+parameters explained,
+    xm = execute on message (📟)
+    j = this hook needs message information as json (not just the message-text)

-xm = execute on message-to-server-log
-j = provide message information as json; not just the text - this script REQUIRES json
-t10 = timeout and kill download after 10 secs
+example usage as a volflag (per-volume config):
+    python copyparty-sfx.py -v srv/log:log:r:c,xm=j,bin/hooks/msg-log.py
+                                                ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+    (share filesystem-path srv/log as volume /log, readable by everyone,
+     running this plugin on all messages with the params explained above)
+
+example usage as a volflag in a copyparty config file:
+    [/log]
+      srv/log
+      accs:
+        r: *
+      flags:
+        xm: j,bin/hooks/msg-log.py
 """

bin/hooks/qbittorrent-magnet.py (new executable file, 128 lines)

#!/usr/bin/env python3
# coding: utf-8

import os
import sys
import json
import shutil
import subprocess as sp


_ = r"""
start downloading a torrent by POSTing a magnet URL to copyparty,
for example using 📟 (message-to-server-log) in the web-ui

by default it will download the torrent to the folder you were in
when you pasted the magnet into the message-to-server-log field

you can optionally specify another location by adding a whitespace
after the magnet URL followed by the name of the subfolder to DL into,
or for example "anime/airing" would download to /srv/media/anime/airing
because the keyword "anime" is in the DESTS config below

needs python3

example usage as global config (not a good idea):
    python copyparty-sfx.py --xm aw,f,j,t60,bin/hooks/qbittorrent-magnet.py

parameters explained,
    xm = execute on message (📟)
    aw = only users with write-access can use this
    f = fork; don't delay other hooks while this is running
    j = provide message information as json (not just the text)
    t60 = abort if qbittorrent has to think about it for more than 1 min

example usage as a volflag (per-volume config, much better):
    -v srv/qb:qb:A,ed:c,xm=aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
     running this plugin on all messages with the params explained above)

example usage as a volflag in a copyparty config file:
    [/qb]
      srv/qb
      accs:
        A: ed
      flags:
        xm: aw,f,j,t60,bin/hooks/qbittorrent-magnet.py

the volflag examples only kicks in if you send the torrent magnet
while you're in the /qb folder (or any folder below there)
"""


# list of usernames to allow
ALLOWLIST = [ "ed", "morpheus" ]


# list of destination aliases to translate into full filesystem
# paths; takes effect if the first folder component in the
# custom download location matches anything in this dict
DESTS = {
    "iso": "/srv/pub/linux-isos",
    "anime": "/srv/media/anime",
}


def main():
    inf = json.loads(sys.argv[1])
    url = inf["txt"]
    if not url.lower().startswith("magnet:?"):
        # not a magnet, abort
        return

    if inf["user"] not in ALLOWLIST:
        print("🧲 denied for user", inf["user"])
        return

    # might as well run the command inside the filesystem folder
    # which matches the URL that the magnet message was sent to
    os.chdir(inf["ap"])

    # is there is a custom download location in the url?
    dst = ""
    if " " in url:
        url, dst = url.split(" ", 1)

    # is the location in the predefined list of locations?
    parts = dst.replace("\\", "/").split("/")
    if parts[0] in DESTS:
        dst = os.path.join(DESTS[parts[0]], *(parts[1:]))

    else:
        # nope, so download to the current folder instead;
        # comment the dst line below to instead use the default
        # download location from your qbittorrent settings
        dst = inf["ap"]
        pass

    # archlinux has a -nox suffix for qbittorrent if headless
    # so check if we should be using that
    if shutil.which("qbittorrent-nox"):
        torrent_bin = "qbittorrent-nox"
    else:
        torrent_bin = "qbittorrent"

    # the command to add a new torrent, adjust if necessary
    cmd = [torrent_bin, url]
    if dst:
        cmd += ["--save-path=%s" % (dst,)]

    # if copyparty and qbittorrent are running as different users
    # you may have to do something like the following
    # (assuming qbittorrent* is nopasswd-allowed in sudoers):
    #
    # cmd = ["sudo", "-u", "qbitter"] + cmd

    print("🧲", cmd)

    try:
        sp.check_call(cmd)
    except:
        print("🧲 FAILED TO ADD", url)


if __name__ == "__main__":
    main()
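As a quick illustration of the destination handling in the hook above, the snippet below replays its DESTS lookup with a hypothetical message; the magnet link and folder name are invented:

import os

DESTS = {"iso": "/srv/pub/linux-isos", "anime": "/srv/media/anime"}

msg = "magnet:?xt=urn:btih:aaaa anime/airing"  # hypothetical 📟 message text
url, dst = msg.split(" ", 1)
parts = dst.replace("\\", "/").split("/")
if parts[0] in DESTS:
    dst = os.path.join(DESTS[parts[0]], *parts[1:])
print(url, "->", dst)  # magnet:?xt=urn:btih:aaaa -> /srv/media/anime/airing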
bin/hooks/reloc-by-ext.py (new file, 127 lines)

#!/usr/bin/env python3

import json
import os
import re
import sys


_ = r"""
relocate/redirect incoming uploads according to file extension or name

example usage as global config:
    --xbu j,c1,bin/hooks/reloc-by-ext.py

parameters explained,
    xbu = execute before upload
    j = this hook needs upload information as json (not just the filename)
    c1 = this hook returns json on stdout, so tell copyparty to read that

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params explained above)

example usage as a volflag in a copyparty config file:
    [/inc]
      srv/inc
      accs:
        r: *
        rw: ed
      flags:
        xbu: j,c1,bin/hooks/reloc-by-ext.py

note: this could also work as an xau hook (after-upload), but
because it doesn't need to read the file contents its better
as xbu (before-upload) since that's safer / less buggy,
and only xbu works with up2k (dragdrop into browser)
"""


PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"


def main():
    inf = json.loads(sys.argv[1])
    vdir, fn = os.path.split(inf["vp"])

    try:
        fn, ext = fn.rsplit(".", 1)
    except:
        # no file extension; pretend it's "bin"
        ext = "bin"

    ext = ext.lower()

    # this function must end by printing the action to perform;
    # that's handled by the print(json.dumps(... at the bottom
    #
    # the action can contain the following keys:
    # "vp" is the folder URL to move the upload to,
    # "ap" is the filesystem-path to move it to (but "vp" is safer),
    # "fn" overrides the final filename to use

    ##
    ## some example actions to take; pick one by
    ## selecting it inside the print at the end:
    ##

    # create a subfolder named after the filetype and move it into there
    into_subfolder = {"vp": ext}

    # move it into a toplevel folder named after the filetype
    into_toplevel = {"vp": "/" + ext}

    # move it into a filetype-named folder next to the target folder
    into_sibling = {"vp": "../" + ext}

    # move images into "/just/pics", vids into "/just/vids",
    # music into "/just/tunes", and anything else as-is
    if ext in PICS.split():
        by_category = {"vp": "/just/pics"}
    elif ext in VIDS.split():
        by_category = {"vp": "/just/vids"}
    elif ext in MUSIC.split():
        by_category = {"vp": "/just/tunes"}
    else:
        by_category = {}  # no action

    # now choose the default effect to apply; can be any of these:
    # into_subfolder into_toplevel into_sibling by_category
    effect = {"vp": "/junk"}

    ##
    ## but we can keep going, adding more speicifc rules
    ## which can take precedence, replacing the fallback
    ## effect we just specified:
    ##

    fn = fn.lower()  # lowercase filename to make this easier

    if "screenshot" in fn:
        effect = {"vp": "/ss"}
        if "mpv_" in fn:
            effect = {"vp": "/anishots"}
    elif "debian" in fn or "biebian" in fn:
        effect = {"vp": "/linux-ISOs"}
    elif re.search(r"ep(isode |\.)?[0-9]", fn):
        effect = {"vp": "/podcasts"}

    # regex lets you grab a part of the matching
    # text and use that in the upload path:
    m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
    if m:
        # the regex matched; use "anime-op" or "anime-ed"
        effect = {"vp": "/anime-" + m[1]}

    # aaand DO IT
    print(json.dumps({"reloc": effect}))


if __name__ == "__main__":
    main()
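To try the hook outside of copyparty, it can be invoked the same way copyparty would, with the upload info as the only argument; the paths in this sketch are made-up and it assumes the script is run from the repository root:

import json
import subprocess as sp

# hypothetical upload info for a file named like a screenshot
inf = {"vp": "inc/screenshot-2024.png", "ap": "/srv/inc/screenshot-2024.png"}
out = sp.check_output(["python3", "bin/hooks/reloc-by-ext.py", json.dumps(inf)])
print(out.decode().strip())  # {"reloc": {"vp": "/ss"}} because the name contains "screenshot"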
@@ -9,25 +9,38 @@ import subprocess as sp
 _ = r"""
 use copyparty as a file downloader by POSTing URLs as
 application/x-www-form-urlencoded (for example using the
-message/pager function on the website)
+📟 message-to-server-log in the web-ui)

 example usage as global config:
-    --xm f,j,t3600,bin/hooks/wget.py
+    --xm aw,f,j,t3600,bin/hooks/wget.py

-example usage as a volflag (per-volume config):
-    -v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-    (share filesystem-path srv/inc as volume /inc,
-     readable by everyone, read-write for user 'ed',
-     running this plugin on all messages with the params listed below)
-
 parameters explained,
     xm = execute on message-to-server-log
-    f = fork so it doesn't block uploads
-    j = provide message information as json; not just the text
+    aw = only users with write-access can use this
+    f = fork; don't delay other hooks while this is running
+    j = provide message information as json (not just the text)
     c3 = mute all output
-    t3600 = timeout and kill download after 1 hour
+    t3600 = timeout and abort download after 1 hour
+
+example usage as a volflag (per-volume config):
+    -v srv/inc:inc:r:rw,ed:c,xm=aw,f,j,t3600,bin/hooks/wget.py
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+    (share filesystem-path srv/inc as volume /inc,
+     readable by everyone, read-write for user 'ed',
+     running this plugin on all messages with the params explained above)
+
+example usage as a volflag in a copyparty config file:
+    [/inc]
+      srv/inc
+      accs:
+        r: *
+        rw: ed
+      flags:
+        xm: aw,f,j,t3600,bin/hooks/wget.py
+
+the volflag examples only kicks in if you send the message
+while you're in the /inc folder (or any folder below there)
 """

@@ -231,7 +231,7 @@ install_vamp() {
     cd "$td"
     echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
         printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
-        (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
+        (dl_files yolo https://ocv.me/mirror/vamp-plugin-sdk-2.10.0.tar.gz)
         sha512sum -c <(
             echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
         ) <vamp-plugin-sdk-2.10.0.tar.gz

@@ -247,7 +247,7 @@ install_vamp() {
     cd "$td"
     have_beatroot || {
         printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
-        (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
+        (dl_files yolo https://ocv.me/mirror/beatroot-vamp-v1.0.tar.gz)
         sha512sum -c <(
             echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
         ) <beatroot-vamp-v1.0.tar.gz
bin/partyfuse.py: 676 changed lines (diff suppressed because it is too large)

bin/u2c.py: 889 changed lines (diff suppressed because it is too large)
@@ -19,6 +19,9 @@
 * the `act:bput` thing is optional since copyparty v1.9.29
 * using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)

+### [`flameshot.sh`](flameshot.sh)
+* takes a screenshot with [flameshot](https://flameshot.org/) on Linux, uploads it, and writes the URL to clipboard
+
 ### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
 * browser integration, kind of? custom rightclick actions and stuff
 * rightclick a pic and send it to copyparty straight from your browser
contrib/flameshot.sh (new executable file, 14 lines)

#!/bin/bash
set -e

# take a screenshot with flameshot and send it to copyparty;
# the image url will be placed on your clipboard

password=wark
url=https://a.ocv.me/up/
filename=$(date +%Y-%m%d-%H%M%S).png

flameshot gui -s -r |
curl -T- $url$filename?pw=$password |
tail -n 1 |
xsel -ib
@@ -1,14 +1,10 @@
-# when running copyparty behind a reverse proxy,
-# the following arguments are recommended:
-#
-# -i 127.0.0.1    only accept connections from nginx
-#
-# -nc must match or exceed the webserver's max number of concurrent clients;
-# copyparty default is 1024 if OS permits it (see "max clients:" on startup),
+# look for "max clients:" when starting copyparty, as nginx should
+# not accept more consecutive clients than what copyparty is able to;
 # nginx default is 512 (worker_processes 1, worker_connections 512)
 #
-# you may also consider adding -j0 for CPU-intensive configurations
-# (5'000 requests per second, or 20gbps upload/download in parallel)
+# rarely, in some extreme usecases, it can be good to add -j0
+# (40'000 requests per second, or 20gbps upload/download in parallel)
+# but this is usually counterproductive and slightly buggy
 #
 # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
 #

@@ -20,10 +16,33 @@
 #
 # and then enable it below by uncomenting the cloudflare-only.conf line

-upstream cpp {
+
+upstream cpp_tcp {
+    # alternative 1: connect to copyparty using tcp;
+    # cpp_uds is slightly faster and more secure, but
+    # cpp_tcp is easier to setup and "just works"
+    # ...you should however restrict copyparty to only
+    # accept connections from nginx by adding these args:
+    #  -i 127.0.0.1
+
     server 127.0.0.1:3923 fail_timeout=1s;
     keepalive 1;
 }
+
+
+upstream cpp_uds {
+    # alternative 2: unix-socket, aka. "unix domain socket";
+    # 5-10% faster, and better isolation from other software,
+    # but there must be at least one unix-group which both
+    # nginx and copyparty is a member of; if that group is
+    # "www" then run copyparty with the following args:
+    #  -i unix:770:www:/tmp/party.sock
+
+    server unix:/tmp/party.sock fail_timeout=1s;
+    keepalive 1;
+}
+
+
 server {
     listen 443 ssl;
     listen [::]:443 ssl;

@@ -34,7 +53,8 @@ server {
     #include /etc/nginx/cloudflare-only.conf;

     location / {
-        proxy_pass http://cpp;
+        # recommendation: replace cpp_tcp with cpp_uds below
+        proxy_pass http://cpp_tcp;
         proxy_redirect off;
         # disable buffering (next 4 lines)
         proxy_http_version 1.1;

@@ -52,6 +72,7 @@ server {
     }
 }

+
 # default client_max_body_size (1M) blocks uploads larger than 256 MiB
 client_max_body_size 1024M;
 client_header_timeout 610m;
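Before switching the location block from cpp_tcp to cpp_uds, it can be worth confirming that copyparty is actually listening on the unix socket; a small sketch, assuming the same /tmp/party.sock path as the example config above (the exact HTTP reply will depend on your copyparty setup):

import socket

s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.settimeout(2)
s.connect("/tmp/party.sock")          # raises if nothing is listening on the socket
s.sendall(b"GET / HTTP/1.0\r\n\r\n")
print(s.recv(64))                     # expect an HTTP status line back
s.close()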
@@ -1,6 +1,6 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.11.2"
+pkgver="1.16.6"
 pkgrel=1
 pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
 arch=("any")

@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
 )
 source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("0b37641746d698681691ea9e7070096404afc64a42d3d4e96cc4e036074fded9")
+sha256sums=("29a119f7e238c44b0697e5858da8154d883a97ae20ecbb10393904406fa4fe06")

 build() {
     cd "${srcdir}/${pkgname}-${pkgver}"
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.11.2/copyparty-sfx.py",
-  "version": "1.11.2",
-  "hash": "sha256-3nIHLM4xJ9RQH3ExSGvBckHuS40IdzyREAtMfpJmfug="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.16.6/copyparty-sfx.py",
+  "version": "1.16.6",
+  "hash": "sha256-gs2jSaXa0XbVbvpW1H4i/Vzovg68Usry0iHWfbddBCc="
 }
@@ -20,6 +20,13 @@ point `--js-browser` to one of these by URL:



+## example any-js
+point `--js-browser` and/or `--js-other` to one of these by URL:
+
+* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)
+
+
+
 ## example browser-css
 point `--css-browser` to one of these by URL:

contrib/plugins/banner.js (new file, 93 lines)

(function() {

    // usage: copy this to '.banner.js' in your webroot,
    // and run copyparty with the following arguments:
    // --js-browser /.banner.js --js-other /.banner.js


    // had to pick the most chuuni one as the default
    var bannertext = '' +
        '<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
        '<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
        '<ul>' +
        '<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
        '<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
        '<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
        '<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
        '<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
        '</ul>';


    // fancy div to insert into pages
    function bannerdiv(border) {
        var ret = mknod('div', null, bannertext);
        if (border)
            ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
        return ret;
    }


    // keep all of these false and then selectively enable them in the if-blocks below
    var show_msgbox = false,
        login_top = false,
        top = false,
        bottom = false,
        top_bordered = false,
        bottom_bordered = false;

    if (QS("h1#cc") && QS("a#k")) {
        // this is the controlpanel
        // (you probably want to keep just one of these enabled)
        show_msgbox = true;
        login_top = true;
        bottom = true;
    }
    else if (ebi("swin") && ebi("smac")) {
        // this is the connect-page, same deal here
        show_msgbox = true;
        top_bordered = true;
        bottom_bordered = true;
    }
    else if (ebi("op_cfg") || ebi("div#mw") ) {
        // we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
        // fragile pages which break if you do something too fancy
        show_msgbox = true;
    }


    // shows a fullscreen messagebox; works on all pages
    if (show_msgbox) {
        var now = Math.floor(Date.now() / 1000),
            last_shown = sread("bannerts") || 0;

        // 60 * 60 * 17 = 17 hour cooldown
        if (now - last_shown > 60 * 60 * 17) {
            swrite("bannerts", now);
            modal.confirm(bannertext, null, function () {
                location = 'https://this-page-intentionally-left-blank.org/';
            });
        }
    }

    // show a message on the page footer; only works on the connect-page
    if (top || top_bordered) {
        var dst = ebi('wrap');
        dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
    }

    // show a message on the page footer; only works on the controlpanel and connect-page
    if (bottom || bottom_bordered) {
        ebi('wrap').appendChild(bannerdiv(bottom_bordered));
    }

    // show a message on the top of the page; only works on the controlpanel
    if (login_top) {
        var dst = QS('h1');
        dst.parentNode.insertBefore(bannerdiv(false), dst);
    }

})();
@@ -4,7 +4,7 @@
 #
 # installation:
 #   wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
-#   useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
+#   useradd -r -s /sbin/nologin -m -d /var/lib/copyparty copyparty
 #   firewall-cmd --permanent --add-port=3923/tcp  # --zone=libvirt
 #   firewall-cmd --reload
 #   cp -pv copyparty.service /etc/systemd/system/

@@ -12,11 +12,18 @@
 #   restorecon -vr /etc/systemd/system/copyparty.service  # on fedora/rhel
 #   systemctl daemon-reload && systemctl enable --now copyparty
 #
+# every time you edit this file, you must "systemctl daemon-reload"
+# for the changes to take effect and then "systemctl restart copyparty"
+#
 # if it fails to start, first check this: systemctl status copyparty
 # then try starting it while viewing logs:
 #   journalctl -fan 100
 #   tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
 #
+# if you run into any issues, for example thumbnails not working,
+# try removing the "some quick hardening" section and then please
+# let me know if that actually helped so we can look into it
+#
 # you may want to:
 #   - change "User=copyparty" and "/var/lib/copyparty/" to another user
 #   - edit /etc/copyparty.conf to configure copyparty
contrib/themes/bsod.css (new file, 116 lines)

/* copy bsod.* into a folder named ".themes" in your webroot and then
   --themes=10 --theme=9 --css-browser=/.themes/bsod.css
*/

html.ey {
    --w2: #3d7bbc;
    --w3: #5fcbec;

    --fg: #fff;
    --fg-max: #fff;
    --fg-weak: var(--w3);

    --bg: #2067b2;
    --bg-d3: var(--bg);
    --bg-d2: var(--w2);
    --bg-d1: var(--fg-weak);
    --bg-u2: var(--bg);
    --bg-u3: var(--bg);
    --bg-u5: var(--w2);

    --tab-alt: var(--fg-weak);
    --row-alt: var(--w2);

    --scroll: var(--w3);

    --a: #fff;
    --a-b: #fff;
    --a-hil: #fff;
    --a-h-bg: var(--fg-weak);
    --a-dark: var(--a);
    --a-gray: var(--fg-weak);

    --btn-fg: var(--a);
    --btn-bg: var(--w2);
    --btn-h-fg: var(--w2);
    --btn-1-fg: var(--bg);
    --btn-1-bg: var(--a);
    --txt-sh: a;
    --txt-bg: var(--w2);

    --u2-b1-bg: var(--w2);
    --u2-b2-bg: var(--w2);
    --u2-txt-bg: var(--w2);
    --u2-tab-bg: a;
    --u2-tab-1-bg: var(--w2);

    --sort-1: var(--a);
    --sort-1: var(--fg-weak);

    --tree-bg: var(--bg);

    --g-b1: a;
    --g-b2: a;
    --g-f-bg: var(--w2);

    --f-sh1: 0.1;
    --f-sh2: 0.02;
    --f-sh3: 0.1;
    --f-h-b1: a;

    --srv-1: var(--a);
    --srv-3: var(--a);

    --mp-sh: a;
}

html.ey {
    background: url('bsod.png') top 5em right 4.5em no-repeat fixed var(--bg);
}
html.ey body#b {
    background: var(--bg); /*sandbox*/
}
html.ey #ops {
    margin: 1.7em 1.5em 0 1.5em;
    border-radius: .3em;
    border-width: 1px 0;
}
html.ey #ops a {
    text-shadow: 1px 1px 0 rgba(0,0,0,0.5);
}
html.ey .opbox {
    margin: 1.5em 0 0 0;
}
html.ey #tree {
    box-shadow: none;
}
html.ey #tt {
    border-color: var(--w2);
    background: var(--w2);
}
html.ey .mdo a {
    background: none;
    text-decoration: underline;
}
html.ey .mdo pre,
html.ey .mdo code {
    color: #fff;
    background: var(--w2);
    border: none;
}
html.ey .mdo h1,
html.ey .mdo h2 {
    background: none;
    border-color: var(--w2);
}
html.ey .mdo ul ul,
html.ey .mdo ul ol,
html.ey .mdo ol ul,
html.ey .mdo ol ol {
    border-color: var(--w2);
}
html.ey .mdo p>em,
html.ey .mdo li>em,
html.ey .mdo td>em {
    color: #fd0;
}
BIN
contrib/themes/bsod.png
Normal file
BIN
contrib/themes/bsod.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.2 KiB |
@@ -16,9 +16,10 @@ except:
     TYPE_CHECKING = False

 if True:
-    from typing import Any, Callable
+    from typing import Any, Callable, Optional

 PY2 = sys.version_info < (3,)
+PY36 = sys.version_info > (3, 6)
 if not PY2:
     unicode: Callable[[Any], str] = str
 else:
@@ -50,13 +51,70 @@ try:
 except:
     CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2

+# all embedded resources to be retrievable over http
+zs = """
+web/a/partyfuse.py
+web/a/u2c.py
+web/a/webdav-cfg.bat
+web/baguettebox.js
+web/browser.css
+web/browser.html
+web/browser.js
+web/browser2.html
+web/cf.html
+web/copyparty.gif
+web/dd/2.png
+web/dd/3.png
+web/dd/4.png
+web/dd/5.png
+web/deps/busy.mp3
+web/deps/easymde.css
+web/deps/easymde.js
+web/deps/marked.js
+web/deps/fuse.py
+web/deps/mini-fa.css
+web/deps/mini-fa.woff
+web/deps/prism.css
+web/deps/prism.js
+web/deps/prismd.css
+web/deps/scp.woff2
+web/deps/sha512.ac.js
+web/deps/sha512.hw.js
+web/iiam.gif
+web/md.css
+web/md.html
+web/md.js
+web/md2.css
+web/md2.js
+web/mde.css
+web/mde.html
+web/mde.js
+web/msg.css
+web/msg.html
+web/rups.css
+web/rups.html
+web/rups.js
+web/shares.css
+web/shares.html
+web/shares.js
+web/splash.css
+web/splash.html
+web/splash.js
+web/svcs.html
+web/svcs.js
+web/ui.css
+web/up2k.js
+web/util.js
+web/w.hash.js
+"""
+RES = set(zs.strip().split("\n"))
+
+
 class EnvParams(object):
     def __init__(self) -> None:
         self.t0 = time.time()
         self.mod = ""
         self.cfg = ""
-        self.ox = getattr(sys, "oxidized", None)


 E = EnvParams()
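The RES set above is the whitelist of embedded assets that may be served over HTTP. As a rough illustration of how such a whitelist can be enforced, here is a minimal sketch; the serve_embedded() function and its file layout are invented for the demo and are not copyparty's actual handler:

    # minimal sketch: refuse to serve anything that is not on the whitelist
    import os

    RES = {"web/browser.css", "web/browser.js", "web/splash.html"}  # subset, for the demo

    def serve_embedded(webroot: str, name: str) -> bytes:
        # normalize the requested name before checking it against the whitelist
        name = name.replace("\\", "/").lstrip("/")
        if name not in RES:
            raise FileNotFoundError(name)
        with open(os.path.join(webroot, name), "rb") as f:
            return f.read()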
copyparty/__main__.py  (471 changed lines; executable file → normal file)
File diff suppressed because it is too large

@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 11, 2)
+VERSION = (1, 16, 7)
-CODENAME = "You Can (Not) Proceed"
+CODENAME = "COPYparty"
-BUILD_DT = (2024, 3, 23)
+BUILD_DT = (2024, 12, 23)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

File diff suppressed because it is too large
@@ -9,14 +9,14 @@ import queue

 from .__init__ import CORES, TYPE_CHECKING
 from .broker_mpw import MpWorker
-from .broker_util import ExceptionalQueue, try_exec
+from .broker_util import ExceptionalQueue, NotExQueue, try_exec
 from .util import Daemon, mp

 if TYPE_CHECKING:
     from .svchub import SvcHub

 if True:  # pylint: disable=using-constant-test
-    from typing import Any
+    from typing import Any, Union


 class MProcess(mp.Process):
@@ -43,6 +43,9 @@ class BrokerMp(object):
         self.procs = []
         self.mutex = threading.Lock()

+        self.retpend: dict[int, Any] = {}
+        self.retpend_mutex = threading.Lock()
+
         self.num_workers = self.args.j or CORES
         self.log("broker", "booting {} subprocesses".format(self.num_workers))
         for n in range(1, self.num_workers + 1):
@@ -54,14 +57,13 @@ class BrokerMp(object):
             self.procs.append(proc)
             proc.start()

+        Daemon(self.periodic, "mp-periodic")
+
     def shutdown(self) -> None:
         self.log("broker", "shutting down")
         for n, proc in enumerate(self.procs):
-            thr = threading.Thread(
-                target=proc.q_pend.put((0, "shutdown", [])),
-                name="mp-shutdown-{}-{}".format(n, len(self.procs)),
-            )
-            thr.start()
+            name = "mp-shut-%d-%d" % (n, len(self.procs))
+            Daemon(proc.q_pend.put, name, ((0, "shutdown", []),))

         with self.mutex:
             procs = self.procs
@@ -79,6 +81,10 @@ class BrokerMp(object):
         for _, proc in enumerate(self.procs):
             proc.q_pend.put((0, "reload", []))

+    def reload_sessions(self) -> None:
+        for _, proc in enumerate(self.procs):
+            proc.q_pend.put((0, "reload_sessions", []))
+
     def collector(self, proc: MProcess) -> None:
         """receive message from hub in other process"""
         while True:
@@ -89,8 +95,10 @@ class BrokerMp(object):
                 self.log(*args)

             elif dest == "retq":
-                # response from previous ipc call
-                raise Exception("invalid broker_mp usage")
+                with self.retpend_mutex:
+                    retq = self.retpend.pop(retq_id)

+                retq.put(args[0])

             else:
                 # new ipc invoking managed service in hub
@@ -107,8 +115,7 @@ class BrokerMp(object):
             if retq_id:
                 proc.q_pend.put((retq_id, "retq", rv))

-    def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
+    def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:

         # new non-ipc invoking managed service in hub
         obj = self.hub
         for node in dest.split("."):
@@ -120,17 +127,30 @@ class BrokerMp(object):
         retq.put(rv)
         return retq

+    def wask(self, dest: str, *args: Any) -> list[Union[ExceptionalQueue, NotExQueue]]:
+        # call from hub to workers
+        ret = []
+        for p in self.procs:
+            retq = ExceptionalQueue(1)
+            retq_id = id(retq)
+            with self.retpend_mutex:
+                self.retpend[retq_id] = retq
+
+            p.q_pend.put((retq_id, dest, list(args)))
+            ret.append(retq)
+        return ret
+
     def say(self, dest: str, *args: Any) -> None:
         """
         send message to non-hub component in other process,
         returns a Queue object which eventually contains the response if want_retval
         (not-impl here since nothing uses it yet)
         """
-        if dest == "listen":
+        if dest == "httpsrv.listen":
             for p in self.procs:
                 p.q_pend.put((0, dest, [args[0], len(self.procs)]))

-        elif dest == "set_netdevs":
+        elif dest == "httpsrv.set_netdevs":
             for p in self.procs:
                 p.q_pend.put((0, dest, list(args)))

@@ -139,3 +159,19 @@ class BrokerMp(object):

         else:
             raise Exception("what is " + str(dest))
+
+    def periodic(self) -> None:
+        while True:
+            time.sleep(1)
+
+            tdli = {}
+            tdls = {}
+            qs = self.wask("httpsrv.read_dls")
+            for q in qs:
+                qr = q.get()
+                dli, dls = qr
+                tdli.update(dli)
+                tdls.update(dls)
+
+            tdl = (tdli, tdls)
+            for p in self.procs:
+                p.q_pend.put((0, "httpsrv.write_dls", tdl))
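The new periodic() loop is a fan-out/fan-in: ask every worker process for its download state through wask(), merge the replies, then broadcast the merged view back to all workers. Below is a condensed, standalone sketch of that pattern; the queue names and the one-second cadence mirror the diff, while the shared retpend dict and the proc objects are simplified stand-ins for copyparty's own broker plumbing:

    import queue
    import time

    def periodic(procs, retpend, interval=1.0):
        """procs expose q_pend like MProcess; retpend is shared with a collector
        thread that routes each "retq" reply into the matching reply-queue"""
        while True:
            time.sleep(interval)
            replies = []
            for p in procs:                      # fan-out: one request per worker
                rq = queue.Queue(1)
                retpend[id(rq)] = rq
                p.q_pend.put((id(rq), "httpsrv.read_dls", []))
                replies.append(rq)

            tdli, tdls = {}, {}
            for rq in replies:                   # fan-in: wait for every worker's answer
                dli, dls = rq.get()
                tdli.update(dli)
                tdls.update(dls)

            for p in procs:                      # broadcast the merged download state
                p.q_pend.put((0, "httpsrv.write_dls", (tdli, tdls)))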
@@ -11,7 +11,7 @@ import queue

 from .__init__ import ANYWIN
 from .authsrv import AuthSrv
-from .broker_util import BrokerCli, ExceptionalQueue
+from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
 from .httpsrv import HttpSrv
 from .util import FAKE_MP, Daemon, HMaccas

@@ -82,35 +82,40 @@ class MpWorker(BrokerCli):
         while True:
             retq_id, dest, args = self.q_pend.get()

-            # self.logw("work: [{}]".format(d[0]))
+            if dest == "retq":
+                # response from previous ipc call
+                with self.retpend_mutex:
+                    retq = self.retpend.pop(retq_id)
+
+                retq.put(args)
+                continue
+
             if dest == "shutdown":
                 self.httpsrv.shutdown()
                 self.logw("ok bye")
                 sys.exit(0)
                 return

-            elif dest == "reload":
+            if dest == "reload":
                 self.logw("mpw.asrv reloading")
                 self.asrv.reload()
                 self.logw("mpw.asrv reloaded")
+                continue

-            elif dest == "listen":
-                self.httpsrv.listen(args[0], args[1])
+            if dest == "reload_sessions":
+                with self.asrv.mutex:
+                    self.asrv.load_sessions()
+                continue

-            elif dest == "set_netdevs":
-                self.httpsrv.set_netdevs(args[0])
-
-            elif dest == "retq":
-                # response from previous ipc call
-                with self.retpend_mutex:
-                    retq = self.retpend.pop(retq_id)
-
-                retq.put(args)
-
-            else:
-                raise Exception("what is " + str(dest))
-
-    def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
+            obj = self
+            for node in dest.split("."):
+                obj = getattr(obj, node)
+
+            rv = obj(*args)  # type: ignore
+            if retq_id:
+                self.say("retq", rv, retq_id=retq_id)
+
+    def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
         retq = ExceptionalQueue(1)
         retq_id = id(retq)
         with self.retpend_mutex:
@@ -119,5 +124,5 @@ class MpWorker(BrokerCli):
         self.q_yield.put((retq_id, dest, list(args)))
         return retq

-    def say(self, dest: str, *args: Any) -> None:
-        self.q_yield.put((0, dest, list(args)))
+    def say(self, dest: str, *args: Any, retq_id=0) -> None:
+        self.q_yield.put((retq_id, dest, list(args)))
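The worker's message loop above replaces its if/elif chain with a generic dotted-name dispatch: a destination such as "httpsrv.set_netdevs" is resolved by walking attributes with getattr and then calling the result. A tiny self-contained demo of that idea (the Demo class is made up for illustration):

    # dotted-name dispatch, as used by the reworked worker loop
    def dispatch(root, dest, *args):
        obj = root
        for node in dest.split("."):
            obj = getattr(obj, node)   # "httpsrv.listen" -> root.httpsrv.listen
        return obj(*args)

    class Demo:
        class httpsrv:                 # nested namespace purely for the demo
            @staticmethod
            def listen(sck, nprocs):
                return ("listening", sck, nprocs)

    print(dispatch(Demo, "httpsrv.listen", 42, 1))   # ('listening', 42, 1)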
@@ -5,7 +5,7 @@ import os
 import threading

 from .__init__ import TYPE_CHECKING
-from .broker_util import BrokerCli, ExceptionalQueue, try_exec
+from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
 from .httpsrv import HttpSrv
 from .util import HMaccas

@@ -13,7 +13,7 @@ if TYPE_CHECKING:
     from .svchub import SvcHub

 if True:  # pylint: disable=using-constant-test
-    from typing import Any
+    from typing import Any, Union


 class BrokerThr(BrokerCli):
@@ -34,6 +34,7 @@ class BrokerThr(BrokerCli):
         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
         self.httpsrv = HttpSrv(self, None)
         self.reload = self.noop
+        self.reload_sessions = self.noop

     def shutdown(self) -> None:
         # self.log("broker", "shutting down")
@@ -42,26 +43,21 @@ class BrokerThr(BrokerCli):
     def noop(self) -> None:
         pass

-    def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
+    def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:

         # new ipc invoking managed service in hub
         obj = self.hub
         for node in dest.split("."):
             obj = getattr(obj, node)

-        rv = try_exec(True, obj, *args)
-
-        # pretend we're broker_mp
-        retq = ExceptionalQueue(1)
-        retq.put(rv)
-        return retq
+        return NotExQueue(obj(*args))  # type: ignore

     def say(self, dest: str, *args: Any) -> None:
-        if dest == "listen":
+        if dest == "httpsrv.listen":
             self.httpsrv.listen(args[0], 1)
             return

-        if dest == "set_netdevs":
+        if dest == "httpsrv.set_netdevs":
             self.httpsrv.set_netdevs(args[0])
             return

@@ -70,4 +66,4 @@ class BrokerThr(BrokerCli):
         for node in dest.split("."):
             obj = getattr(obj, node)

-        try_exec(False, obj, *args)
+        obj(*args)  # type: ignore
@@ -28,11 +28,23 @@ class ExceptionalQueue(Queue, object):
             if rv[1] == "pebkac":
                 raise Pebkac(*rv[2:])
             else:
-                raise Exception(rv[2])
+                raise rv[2]

         return rv


+class NotExQueue(object):
+    """
+    BrokerThr uses this instead of ExceptionalQueue; 7x faster
+    """
+
+    def __init__(self, rv: Any) -> None:
+        self.rv = rv
+
+    def get(self) -> Any:
+        return self.rv
+
+
 class BrokerCli(object):
     """
     helps mypy understand httpsrv.broker but still fails a few levels deeper,
@@ -48,7 +60,7 @@ class BrokerCli(object):
     def __init__(self) -> None:
         pass

-    def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
+    def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
         return ExceptionalQueue(1)

     def say(self, dest: str, *args: Any) -> None:
@@ -65,8 +77,8 @@ def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:

         return ["exception", "pebkac", ex.code, str(ex)]

-    except:
+    except Exception as ex:
         if not want_retval:
             raise

-        return ["exception", "stack", traceback.format_exc()]
+        return ["exception", "stack", ex]
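NotExQueue works because, in the single-process broker, the answer already exists by the time ask() returns, so there is nothing to synchronize on; the "7x faster" figure is the diff's own claim and is not re-measured here. A side-by-side sketch of the two paths:

    # the value is computed synchronously, so .get() can just hand it back;
    # a real Queue pays for a mutex and condition variable on every put/get
    from queue import Queue

    class NotExQueue(object):
        def __init__(self, rv):
            self.rv = rv

        def get(self):
            return self.rv

    def ask_threaded(func, *args):
        return NotExQueue(func(*args))     # BrokerThr-style: no locking at all

    def ask_mp_style(func, *args):
        q = Queue(1)                       # stand-in for the multiprocess broker
        q.put(func(*args))
        return q

    print(ask_threaded(len, "hello").get())    # 5, same API either way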
@@ -6,12 +6,19 @@ import os
 import shutil
 import time

-from .util import Netdev, runcmd
+from .__init__ import ANYWIN
+from .util import Netdev, load_resource, runcmd, wrename, wunlink

-HAVE_CFSSL = True
+HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")

 if True:  # pylint: disable=using-constant-test
-    from .util import RootLogger
+    from .util import NamedLogger, RootLogger


+if ANYWIN:
+    VF = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}
+else:
+    VF = {"mv_re_t": 0, "rm_re_t": 0}
+
+
 def ensure_cert(log: "RootLogger", args) -> None:
@@ -22,13 +29,15 @@ def ensure_cert(log: "RootLogger", args) -> None:

     i feel awful about this and so should they
     """
-    cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
+    with load_resource(args.E, "res/insecure.pem") as f:
+        cert_insec = f.read()
     cert_appdata = os.path.join(args.E.cfg, "cert.pem")
     if not os.path.isfile(args.cert):
         if cert_appdata != args.cert:
             raise Exception("certificate file does not exist: " + args.cert)

-        shutil.copy(cert_insec, args.cert)
+        with open(args.cert, "wb") as f:
+            f.write(cert_insec)

     with open(args.cert, "rb") as f:
         buf = f.read()
@@ -43,7 +52,9 @@ def ensure_cert(log: "RootLogger", args) -> None:
         raise Exception(m + "private key must appear before server certificate")

     try:
-        if filecmp.cmp(args.cert, cert_insec):
+        with open(args.cert, "rb") as f:
+            active_cert = f.read()
+
+        if active_cert == cert_insec:
             t = "using default TLS certificate; https will be insecure:\033[36m {}"
             log("cert", t.format(args.cert), 3)
     except:
@@ -76,6 +87,8 @@ def _read_crt(args, fn):


 def _gen_ca(log: "RootLogger", args):
+    nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-ca", msg, c)
+
     expiry = _read_crt(args, "ca.pem")[0]
     if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
         return
@@ -105,13 +118,19 @@ def _gen_ca(log: "RootLogger", args):
         raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)

     bname = os.path.join(args.crt_dir, "ca")
-    os.rename(bname + "-key.pem", bname + ".key")
-    os.unlink(bname + ".csr")
+    try:
+        wunlink(nlog, bname + ".key", VF)
+    except:
+        pass
+    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    wunlink(nlog, bname + ".csr", VF)

     log("cert", "new ca OK", 2)


 def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
+    nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-srv", msg, c)
+
     names = args.crt_ns.split(",") if args.crt_ns else []
     if not args.crt_exact:
         for n in names[:]:
@@ -136,14 +155,22 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
             raise Exception("no useable cert found")

         expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
-        cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
+        if expired:
+            raise Exception("old server-cert has expired")
+
         for n in names:
             if n not in inf["sans"]:
                 raise Exception("does not have {}".format(n))
-        if expired:
-            raise Exception("old server-cert has expired")
-        if not filecmp.cmp(args.cert, cert_insec):
+
+        with load_resource(args.E, "res/insecure.pem") as f:
+            cert_insec = f.read()
+
+        with open(args.cert, "rb") as f:
+            active_cert = f.read()
+
+        if active_cert and active_cert != cert_insec:
             return
     except Exception as ex:
         log("cert", "will create new server-cert; {}".format(ex))
@@ -185,11 +212,11 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):

     bname = os.path.join(args.crt_dir, "srv")
     try:
-        os.unlink(bname + ".key")
+        wunlink(nlog, bname + ".key", VF)
     except:
         pass
-    os.rename(bname + "-key.pem", bname + ".key")
-    os.unlink(bname + ".csr")
+    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    wunlink(nlog, bname + ".csr", VF)

     with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
         ca = f.read()
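The certificate code now reads the bundled insecure.pem through load_resource() instead of assuming a plain file next to the module, which is what lets it keep working when copyparty runs from a zip or sfx bundle. The helper below is only a guess at the general shape of such a function (importlib.resources with a filesystem fallback); copyparty's real implementation lives in util.py and may differ:

    import io
    import os
    from importlib import resources   # resources.files() needs Python 3.9+

    def load_resource(pkg: str, name: str, mode: str = "rb"):
        """bundled package data first, plain file on disk second (sketch)"""
        try:
            data = resources.files(pkg).joinpath(name).read_bytes()
            return io.BytesIO(data) if "b" in mode else io.StringIO(data.decode("utf-8"))
        except Exception:
            return open(os.path.join(os.path.dirname(__file__), name), mode)

    # usage mirrors the diff:
    #   with load_resource("copyparty", "res/insecure.pem") as f: cert = f.read()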
@@ -2,7 +2,7 @@
 from __future__ import print_function, unicode_literals

 # awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
-zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
+zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nth nw p q s ss sss v z zv"
 onedash = set(zs.split())
@@ -12,16 +12,19 @@ def vf_bmap() -> dict[str, str]:
         "dav_auth": "davauth",
         "dav_rt": "davrt",
         "ed": "dots",
-        "never_symlink": "neversymlink",
-        "no_dedup": "copydupes",
+        "hardlink_only": "hardlinkonly",
+        "no_clone": "noclone",
+        "no_dirsz": "nodirsz",
         "no_dupe": "nodupe",
         "no_forget": "noforget",
+        "no_pipe": "nopipe",
         "no_robots": "norobots",
         "no_thumb": "dthumb",
         "no_vthumb": "dvthumb",
         "no_athumb": "dathumb",
     }
     for k in (
+        "dedup",
         "dotsrch",
         "e2d",
         "e2ds",
@@ -34,11 +37,17 @@ def vf_bmap() -> dict[str, str]:
         "e2vp",
         "exp",
         "grid",
+        "gsel",
         "hardlink",
         "magic",
         "no_sb_md",
         "no_sb_lg",
+        "nsort",
+        "og",
+        "og_no_head",
+        "og_s_title",
         "rand",
+        "rss",
         "xdev",
         "xlink",
         "xvol",
@@ -53,6 +62,7 @@ def vf_vmap() -> dict[str, str]:
         "no_hash": "nohash",
         "no_idx": "noidx",
         "re_maxage": "scan",
+        "safe_dedup": "safededup",
         "th_convt": "convt",
         "th_size": "thsize",
         "th_crop": "crop",
@@ -60,11 +70,24 @@ def vf_vmap() -> dict[str, str]:
     }
     for k in (
         "dbd",
+        "hsortn",
+        "html_head",
         "lg_sbf",
         "md_sbf",
         "nrand",
+        "og_desc",
+        "og_site",
+        "og_th",
+        "og_title",
+        "og_title_a",
+        "og_title_v",
+        "og_title_i",
+        "og_tpl",
+        "og_ua",
+        "mv_retry",
         "rm_retry",
         "sort",
+        "tcolor",
         "unlist",
         "u2abort",
         "u2ts",
@@ -79,14 +102,15 @@ def vf_cmap() -> dict[str, str]:
     for k in (
         "exp_lg",
         "exp_md",
-        "html_head",
         "mte",
         "mth",
         "mtp",
+        "xac",
         "xad",
         "xar",
         "xau",
         "xban",
+        "xbc",
         "xbd",
         "xbr",
         "xbu",
@@ -113,10 +137,12 @@ permdescs = {

 flagcats = {
     "uploads, general": {
-        "nodupe": "rejects existing files (instead of symlinking them)",
-        "hardlink": "does dedup with hardlinks instead of symlinks",
-        "neversymlink": "disables symlink fallback; full copy instead",
-        "copydupes": "disables dedup, always saves full copies of dupes",
+        "dedup": "enable symlink-based file deduplication",
+        "hardlink": "enable hardlink-based file deduplication,\nwith fallback on symlinks when that is impossible",
+        "hardlinkonly": "dedup with hardlink only, never symlink;\nmake a full copy if hardlink is impossible",
+        "safededup": "verify on-disk data before using it for dedup",
+        "noclone": "take dupe data from clients, even if available on HDD",
+        "nodupe": "rejects existing files (instead of linking/cloning them)",
         "sparse": "force use of sparse files, mainly for s3-backed storage",
         "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
         "nosub": "forces all uploads into the top folder of the vfs",
@@ -129,6 +155,7 @@ flagcats = {
         "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
         "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
         "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
+        "medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
         "rand": "force randomized filenames, 9 chars long by default",
         "nrand=N": "randomized filenames are N chars long",
         "u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
@@ -142,7 +169,7 @@ flagcats = {
         "lifetime=3600": "uploads are deleted after 1 hour",
     },
     "database, general": {
-        "e2d": "enable database; makes files searchable + enables upload dedup",
+        "e2d": "enable database; makes files searchable + enables upload-undo",
         "e2ds": "scan writable folders for new files on startup; also sets -e2d",
         "e2dsa": "scans all folders for new files on startup; also sets -e2d",
         "e2t": "enable multimedia indexing; makes it possible to search for tags",
@@ -160,11 +187,12 @@ flagcats = {
         "noforget": "don't forget files when deleted from disk",
         "fat32": "avoid excessive reindexing on android sdcardfs",
         "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
-        "xlink": "cross-volume dupe detection / linking",
+        "xlink": "cross-volume dupe detection / linking (dangerous)",
         "xdev": "do not descend into other filesystems",
         "xvol": "do not follow symlinks leaving the volume root",
         "dotsrch": "show dotfiles in search results",
         "nodotsrch": "hide dotfiles in search results (default)",
+        "srch_excl": "exclude search results with URL matching this regex",
     },
     'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
         "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
@@ -175,6 +203,7 @@ flagcats = {
         "dvthumb": "disables video thumbnails",
         "dathumb": "disables audio thumbnails (spectrograms)",
         "dithumb": "disables image thumbnails",
+        "pngquant": "compress audio waveforms 33% better",
         "thsize": "thumbnail res; WxH",
         "crop": "center-cropping (y/n/fy/fn)",
         "th3x": "3x resolution (y/n/fy/fn)",
@@ -188,6 +217,8 @@ flagcats = {
         "xbu=CMD": "execute CMD before a file upload starts",
         "xau=CMD": "execute CMD after a file upload finishes",
         "xiu=CMD": "execute CMD after all uploads finish and volume is idle",
+        "xbc=CMD": "execute CMD before a file copy",
+        "xac=CMD": "execute CMD after a file copy",
         "xbr=CMD": "execute CMD before a file rename/move",
         "xar=CMD": "execute CMD after a file rename/move",
         "xbd=CMD": "execute CMD before a file delete",
@@ -197,9 +228,10 @@ flagcats = {
     },
     "client and ux": {
         "grid": "show grid/thumbnails by default",
+        "gsel": "select files in grid by ctrl-click",
         "sort": "default sort order",
         "unlist": "dont list files matching REGEX",
-        "html_head=TXT": "includes TXT in the <head>",
+        "html_head=TXT": "includes TXT in the <head>, or @PATH for file at PATH",
         "robots": "allows indexing by search engines (default)",
         "norobots": "kindly asks search engines to leave",
         "no_sb_md": "disable js sandbox for markdown files",
@@ -214,6 +246,7 @@ flagcats = {
         "dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
         "fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
         "fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
+        "mv_retry": "ms-windows: timeout for renaming busy files",
         "rm_retry": "ms-windows: timeout for deleting busy files",
         "davauth": "ask webdav clients to login for all folders",
         "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
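The vf_bmap/vf_vmap/vf_cmap tables map global command-line options onto per-volume flag names, and flagcats holds the help text for those volflags. A toy sketch of how such a mapping table can be applied to a parsed argument namespace follows; volflags_from_args() is invented for the demo, and copyparty's real expansion has more layers:

    import argparse

    def vf_bmap_demo():
        ret = {"no_clone": "noclone", "hardlink_only": "hardlinkonly"}  # renamed options
        for k in ("dedup", "hardlink", "grid", "gsel"):
            ret[k] = k                                                  # most keep their name
        return ret

    def volflags_from_args(ns, bmap):
        # every truthy global option becomes the corresponding volflag
        return {vflag: True for opt, vflag in bmap.items() if getattr(ns, opt, False)}

    ns = argparse.Namespace(dedup=True, hardlink=True, no_clone=False)
    print(volflags_from_args(ns, vf_bmap_demo()))   # {'dedup': True, 'hardlink': True}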
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

+import argparse
 import os
 import re
 import time
@@ -8,44 +9,50 @@ import time
 from .__init__ import ANYWIN, MACOS
 from .authsrv import AXS, VFS
 from .bos import bos
-from .util import chkcmd, min_ex
+from .util import chkcmd, min_ex, undot

 if True:  # pylint: disable=using-constant-test
     from typing import Optional, Union

-    from .util import RootLogger
+    from .util import RootLogger, undot


 class Fstab(object):
-    def __init__(self, log: "RootLogger"):
+    def __init__(self, log: "RootLogger", args: argparse.Namespace):
         self.log_func = log

+        self.warned = False
         self.trusted = False
         self.tab: Optional[VFS] = None
+        self.oldtab: Optional[VFS] = None
+        self.srctab = "a"
         self.cache: dict[str, str] = {}
         self.age = 0.0
+        self.maxage = args.mtab_age

     def log(self, msg: str, c: Union[int, str] = 0) -> None:
         self.log_func("fstab", msg, c)

     def get(self, path: str) -> str:
-        if len(self.cache) > 9000:
-            self.age = time.time()
+        now = time.time()
+        if now - self.age > self.maxage or len(self.cache) > 9000:
+            self.age = now
+            self.oldtab = self.tab or self.oldtab
             self.tab = None
             self.cache = {}

         fs = "ext4"
-        msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
+        msg = "failed to determine filesystem at %r; assuming %s\n%s"

         if ANYWIN:
             fs = "vfat"
             try:
                 path = self._winpath(path)
             except:
-                self.log(msg.format(path, fs, min_ex()), 3)
+                self.log(msg % (path, fs, min_ex()), 3)
                 return fs

-        path = path.lstrip("/")
+        path = undot(path)
         try:
             return self.cache[path]
         except:
@@ -54,11 +61,11 @@ class Fstab(object):
         try:
             fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
         except:
-            self.log(msg.format(path, fs, min_ex()), 3)
+            self.log(msg % (path, fs, min_ex()), 3)

         fs = fs.lower()
         self.cache[path] = fs
-        self.log("found {} at {}".format(fs, path))
+        self.log("found %s at %r" % (fs, path))
         return fs

     def _winpath(self, path: str) -> str:
@@ -75,7 +82,7 @@ class Fstab(object):
         self.trusted = False

     def build_tab(self) -> None:
-        self.log("building tab")
+        self.log("inspecting mtab for changes")

         sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
         if MACOS:
@@ -84,6 +91,7 @@ class Fstab(object):
         ptn = re.compile(sptn)
         so, _ = chkcmd(["mount"])
         tab1: list[tuple[str, str]] = []
+        atab = []
         for ln in so.split("\n"):
             m = ptn.match(ln)
             if not m:
@@ -91,6 +99,15 @@ class Fstab(object):

             zs1, zs2 = m.groups()
             tab1.append((str(zs1), str(zs2)))
+            atab.append(ln)
+
+        # keep empirically-correct values if mounttab unchanged
+        srctab = "\n".join(sorted(atab))
+        if srctab == self.srctab:
+            self.tab = self.oldtab
+            return
+
+        self.log("mtab has changed; reevaluating support for sparse files")

         tab1.sort(key=lambda x: (len(x[0]), x[0]))
         path1, fs1 = tab1[0]
@@ -99,14 +116,15 @@ class Fstab(object):
             tab.add(fs, path.lstrip("/"))

         self.tab = tab
+        self.srctab = srctab

     def relabel(self, path: str, nval: str) -> None:
-        assert self.tab
+        assert self.tab  # !rm
         self.cache = {}
         if ANYWIN:
             path = self._winpath(path)

-        path = path.lstrip("/")
+        path = undot(path)
         ptn = re.compile(r"^[^\\/]*")
         vn, rem = self.tab._find(path)
         if not self.trusted:
@@ -133,10 +151,12 @@ class Fstab(object):
             self.trusted = True
         except:
             # prisonparty or other restrictive environment
-            self.log("failed to build tab:\n{}".format(min_ex()), 3)
+            if not self.warned:
+                self.warned = True
+                self.log("failed to build tab:\n{}".format(min_ex()), 3)
             self.build_fallback()

-        assert self.tab
+        assert self.tab  # !rm
         ret = self.tab._find(path)[0]
         if self.trusted or path == ret.vpath:
             return ret.realpath.split("/")[0]
@@ -147,6 +167,6 @@ class Fstab(object):
         if not self.tab:
             self.build_fallback()

-        assert self.tab
+        assert self.tab  # !rm
         ret = self.tab._find(path)[0]
         return ret.realpath
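The Fstab changes do two things: the filesystem cache now expires after args.mtab_age seconds, and a rebuild is skipped whenever the sorted output of mount is byte-identical to what was seen last time, so the empirically-corrected table survives. A standalone, unix-only sketch of that cache policy (the class and method names here are made up):

    import subprocess
    import time

    class MountCache(object):
        def __init__(self, maxage=60.0):
            self.maxage = maxage
            self.age = 0.0
            self.srctab = ""     # last-seen, sorted `mount` output
            self.tab = None      # the expensive structure built from it

        def get_tab(self):
            now = time.time()
            if self.tab is not None and now - self.age <= self.maxage:
                return self.tab                  # still fresh

            self.age = now
            so = subprocess.check_output(["mount"]).decode("utf-8", "replace")
            srctab = "\n".join(sorted(so.split("\n")))
            if srctab == self.srctab and self.tab is not None:
                return self.tab                  # mounts unchanged; keep old values

            # expensive path only when the mount table actually changed
            self.tab = [ln.split(" on ")[0] for ln in so.split("\n") if " on " in ln]
            self.srctab = srctab
            return self.tab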
@@ -19,6 +19,7 @@ from .__init__ import PY2, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
 from .util import (
+    VF_CAREFUL,
     Daemon,
     ODict,
     Pebkac,
@@ -30,6 +31,7 @@ from .util import (
     runhook,
     sanitize_fn,
     vjoin,
+    wunlink,
 )

 if TYPE_CHECKING:
@@ -37,7 +39,10 @@ if TYPE_CHECKING:

 if True:  # pylint: disable=using-constant-test
     import typing
-    from typing import Any, Optional
+    from typing import Any, Optional, Union
+
+if PY2:
+    range = xrange  # type: ignore


 class FSE(FilesystemError):
@@ -71,6 +76,7 @@ class FtpAuth(DummyAuthorizer):
         else:
             raise AuthenticationFailed("banned")

+        args = self.hub.args
         asrv = self.hub.asrv
         uname = "*"
         if username != "anonymous":
@@ -81,6 +87,9 @@ class FtpAuth(DummyAuthorizer):
                 uname = zs
                 break

+        if args.ipu and uname == "*":
+            uname = args.ipu_iu[args.ipu_nm.map(ip)]
+
         if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
             g = self.hub.gpwd
             if g.lim:
@@ -139,6 +148,9 @@ class FtpFs(AbstractedFS):
         self.listdirinfo = self.listdir
         self.chdir(".")

+    def log(self, msg: str, c: Union[int, str] = 0) -> None:
+        self.hub.log("ftpd", msg, c)
+
     def v2a(
         self,
         vpath: str,
@@ -155,7 +167,7 @@ class FtpFs(AbstractedFS):
             t = "Unsupported characters in [{}]"
             raise FSE(t.format(vpath), 1)

-        fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
+        fn = sanitize_fn(fn or "", "")
         vpath = vjoin(rd, fn)
         vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
         if not vfs.realpath:
@@ -207,17 +219,37 @@ class FtpFs(AbstractedFS):
         w = "w" in mode or "a" in mode or "+" in mode

         ap = self.rv2a(filename, r, w)[0]
+        self.validpath(ap)
         if w:
             try:
                 st = bos.stat(ap)
                 td = time.time() - st.st_mtime
+                need_unlink = True
             except:
+                need_unlink = False
                 td = 0

-            if td < -1 or td > self.args.ftp_wt:
-                raise FSE("Cannot open existing file for writing")
+        if w and need_unlink:
+            if td >= -1 and td <= self.args.ftp_wt:
+                # within permitted timeframe; unlink and accept
+                do_it = True
+            elif self.args.no_del or self.args.ftp_no_ow:
+                # file too old, or overwrite not allowed; reject
+                do_it = False
+            else:
+                # allow overwrite if user has delete permission
+                # (avoids win2000 freaking out and deleting the server copy without uploading its own)
+                try:
+                    self.rv2a(filename, False, True, False, True)
+                    do_it = True
+                except:
+                    do_it = False
+
+            if not do_it:
+                raise FSE("File already exists")
+
+            wunlink(self.log, ap, VF_CAREFUL)

-        self.validpath(ap)
         return open(fsenc(ap), mode, self.args.iobuf)

     def chdir(self, path: str) -> None:
@@ -264,6 +296,7 @@ class FtpFs(AbstractedFS):
             self.uname,
             not self.args.no_scandir,
             [[True, False], [False, True]],
+            throw=True,
         )
         vfs_ls = [x[0] for x in vfs_ls1]
         vfs_ls.extend(vfs_virt.keys())
@@ -282,9 +315,20 @@ class FtpFs(AbstractedFS):
             # display write-only folders as empty
             return []

-        # return list of volumes
-        r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
-        return list(sorted(list(r.keys())))
+        # return list of accessible volumes
+        ret = []
+        for vn in self.hub.asrv.vfs.all_vols.values():
+            if "/" in vn.vpath or not vn.vpath:
+                continue  # only include toplevel-mounted vols
+
+            try:
+                self.hub.asrv.vfs.get(vn.vpath, self.uname, True, False)
+                ret.append(vn.vpath)
+            except:
+                pass
+
+        ret.sort()
+        return ret

     def rmdir(self, path: str) -> None:
         ap = self.rv2a(path, d=True)[0]
@@ -314,7 +358,7 @@ class FtpFs(AbstractedFS):
         svp = join(self.cwd, src).lstrip("/")
         dvp = join(self.cwd, dst).lstrip("/")
         try:
-            self.hub.up2k.handle_mv(self.uname, svp, dvp)
+            self.hub.up2k.handle_mv(self.uname, self.h.cli_ip, svp, dvp)
         except Exception as ex:
             raise FSE(str(ex))

@@ -432,15 +476,19 @@ class FtpHandler(FTPHandler):
         xbu = vfs.flags.get("xbu")
         if xbu and not runhook(
             None,
+            None,
+            self.hub.up2k,
+            "xbu.ftpd",
             xbu,
             ap,
-            vfs.canonical(rem),
+            vp,
             "",
             self.uname,
+            self.hub.asrv.vfs.get_perms(vp, self.uname),
             0,
             0,
             self.cli_ip,
-            0,
+            time.time(),
             "",
         ):
             raise FSE("Upload blocked by xbu server config")
@@ -543,9 +591,15 @@ class Ftpd(object):
         if "::" in ips:
             ips.append("0.0.0.0")

+        ips = [x for x in ips if "unix:" not in x]
+
         if self.args.ftp4:
             ips = [x for x in ips if ":" not in x]

+        if not ips:
+            lgr.fatal("cannot start ftp-server; no compatible IPs in -i")
+            return
+
         ips = list(ODict.fromkeys(ips))  # dedup

         ioloop = IOLoop()
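The reworked FtpFs.open() decides whether an existing file may be replaced: files modified within the last ftp_wt seconds are treated as an in-progress upload and unlinked, otherwise overwrite is refused when no_del or ftp_no_ow is set, and otherwise it is allowed only if the user holds delete permission on the target. That decision, condensed into one function; has_delete_permission stands in for the rv2a() permission probe in the diff:

    def may_overwrite(age_sec, ftp_wt, no_del, ftp_no_ow, has_delete_permission):
        if -1 <= age_sec <= ftp_wt:
            return True                   # recently written; treat as a resumed upload
        if no_del or ftp_no_ow:
            return False                  # deletes/overwrites are disabled server-side
        return has_delete_permission      # otherwise require delete rights on the target

    assert may_overwrite(3, 600, False, False, False)
    assert may_overwrite(9999, 600, False, False, True)
    assert not may_overwrite(9999, 600, True, False, True)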
copyparty/httpcli.py  (3001 changed lines)
File diff suppressed because it is too large
@@ -9,6 +9,9 @@ import threading  # typechk
 import time

 try:
+    if os.environ.get("PRTY_NO_TLS"):
+        raise Exception()
+
     HAVE_SSL = True
     import ssl
 except:
@@ -55,6 +58,9 @@ class HttpConn(object):
         self.E: EnvParams = self.args.E
         self.asrv: AuthSrv = hsrv.asrv  # mypy404
         self.u2fh: Util.FHC = hsrv.u2fh  # mypy404
+        self.pipes: Util.CachedDict = hsrv.pipes  # mypy404
+        self.ipu_iu: Optional[dict[str, str]] = hsrv.ipu_iu
+        self.ipu_nm: Optional[NetMap] = hsrv.ipu_nm
         self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
         self.xff_nm: Optional[NetMap] = hsrv.xff_nm
         self.xff_lan: NetMap = hsrv.xff_lan  # type: ignore
@@ -99,9 +105,6 @@ class HttpConn(object):
             self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
         return self.log_src

-    def respath(self, res_name: str) -> str:
-        return os.path.join(self.E.mod, "web", res_name)
-
     def log(self, msg: str, c: Union[int, str] = 0) -> None:
         self.log_func(self.log_src, msg, c)

@@ -161,6 +164,7 @@ class HttpConn(object):

         self.log_src = self.log_src.replace("[36m", "[35m")
         try:
+            assert ssl  # type: ignore # !rm
             ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
             ctx.load_cert_chain(self.args.cert)
             if self.args.ssl_ver:
@@ -186,7 +190,7 @@ class HttpConn(object):

         if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
             ciphers = self.s.shared_ciphers()
-            assert ciphers
+            assert ciphers  # !rm
             overlap = [str(y[::-1]) for y in ciphers]
             self.log("TLS cipher overlap:" + "\n".join(overlap))
             for k, v in [
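The PRTY_NO_TLS check simply makes the optional ssl import blow up on purpose, so the rest of the code takes its existing "ssl unavailable" branch; the same trick backs PRTY_NO_CFSSL in the cert module. The pattern in isolation:

    import os

    try:
        if os.environ.get("PRTY_NO_TLS"):
            raise Exception("TLS disabled via environment")
        import ssl
        HAVE_SSL = True
    except Exception:
        ssl = None
        HAVE_SSL = False

    print("tls:", "available" if HAVE_SSL else "disabled")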
@@ -1,7 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import base64
+import hashlib
 import math
 import os
 import re
@@ -12,7 +12,7 @@ import time

 import queue

-from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
+from .__init__ import ANYWIN, CORES, EXE, MACOS, PY2, TYPE_CHECKING, EnvParams, unicode

 try:
     MNFE = ModuleNotFoundError
@@ -61,28 +61,44 @@ from .u2idx import U2idx
 from .util import (
     E_SCK,
     FHC,
+    CachedDict,
     Daemon,
     Garda,
     Magician,
     Netdev,
     NetMap,
-    absreal,
     build_netmap,
+    has_resource,
     ipnorm,
+    load_ipu,
+    load_resource,
     min_ex,
     shut_socket,
     spack,
     start_log_thrs,
     start_stackmon,
+    ub64enc,
 )

 if TYPE_CHECKING:
+    from .authsrv import VFS
     from .broker_util import BrokerCli
     from .ssdp import SSDPr

 if True:  # pylint: disable=using-constant-test
     from typing import Any, Optional

+if PY2:
+    range = xrange  # type: ignore
+
+if not hasattr(socket, "AF_UNIX"):
+    setattr(socket, "AF_UNIX", -9001)
+
+
+def load_jinja2_resource(E: EnvParams, name: str):
+    with load_resource(E, "web/" + name, "r") as f:
+        return f.read()
+
+
 class HttpSrv(object):
     """
@@ -115,6 +131,12 @@ class HttpSrv(object):
         self.bans: dict[str, int] = {}
         self.aclose: dict[str, int] = {}

+        dli: dict[str, tuple[float, int, "VFS", str, str]] = {}  # info
+        dls: dict[str, tuple[float, int]] = {}  # state
+        self.dli = self.tdli = dli
+        self.dls = self.tdls = dls
+        self.iiam = '<img src="%s.cpr/iiam.gif?cache=i" />' % (self.args.SRS,)
+
         self.bound: set[tuple[str, int]] = set()
         self.name = "hsrv" + nsuf
         self.mutex = threading.Lock()
@@ -130,6 +152,8 @@ class HttpSrv(object):
         self.t_periodic: Optional[threading.Thread] = None

         self.u2fh = FHC()
+        self.u2sc: dict[str, tuple[int, "hashlib._Hash"]] = {}
+        self.pipes = CachedDict(0.2)
         self.metrics = Metrics(self)
         self.nreq = 0
         self.nsus = 0
@@ -144,23 +168,33 @@ class HttpSrv(object):
         self.u2idx_free: dict[str, U2idx] = {}
         self.u2idx_n = 0

+        assert jinja2  # type: ignore # !rm
         env = jinja2.Environment()
-        env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
-        jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
+        env.loader = jinja2.FunctionLoader(lambda f: load_jinja2_resource(self.E, f))
+        jn = [
+            "browser",
+            "browser2",
+            "cf",
+            "md",
+            "mde",
+            "msg",
+            "rups",
+            "shares",
+            "splash",
+            "svcs",
+        ]
         self.j2 = {x: env.get_template(x + ".html") for x in jn}
-        zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
-        self.prism = os.path.exists(zs)
+        self.prism = has_resource(self.E, "web/deps/prism.js.gz")

+        if self.args.ipu:
+            self.ipu_iu, self.ipu_nm = load_ipu(self.log, self.args.ipu)
+        else:
+            self.ipu_iu = self.ipu_nm = None
+
         self.ipa_nm = build_netmap(self.args.ipa)
         self.xff_nm = build_netmap(self.args.xff_src)
         self.xff_lan = build_netmap("lan")

-        self.statics: set[str] = set()
-        self._build_statics()
-
-        self.ptn_cc = re.compile(r"[\x00-\x1f]")
-        self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
-
         self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
         if not self.args.no_dav:
             zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
@@ -175,6 +209,9 @@ class HttpSrv(object):
|
|||||||
self.start_threads(4)
|
self.start_threads(4)
|
||||||
|
|
||||||
if nid:
|
if nid:
|
||||||
|
self.tdli = {}
|
||||||
|
self.tdls = {}
|
||||||
|
|
||||||
if self.args.stackmon:
|
if self.args.stackmon:
|
||||||
start_stackmon(self.args.stackmon, nid)
|
start_stackmon(self.args.stackmon, nid)
|
||||||
|
|
||||||
@@ -191,14 +228,6 @@ class HttpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def _build_statics(self) -> None:
|
|
||||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
|
||||||
for fn in df:
|
|
||||||
ap = absreal(os.path.join(dp, fn))
|
|
||||||
self.statics.add(ap)
|
|
||||||
if ap.endswith(".gz"):
|
|
||||||
self.statics.add(ap[:-3])
|
|
||||||
|
|
||||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||||
ips = set()
|
ips = set()
|
||||||
for ip, _ in self.bound:
|
for ip, _ in self.bound:
|
||||||
@@ -219,7 +248,7 @@ class HttpSrv(object):
|
|||||||
if self.args.log_htp:
|
if self.args.log_htp:
|
||||||
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
||||||
|
|
||||||
assert self.tp_q
|
assert self.tp_q # !rm
|
||||||
for _ in range(n):
|
for _ in range(n):
|
||||||
self.tp_q.put(None)
|
self.tp_q.put(None)
|
||||||
|
|
||||||
@@ -238,15 +267,24 @@ class HttpSrv(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||||
|
tcp = sck.family != socket.AF_UNIX
|
||||||
|
|
||||||
if self.args.j != 1:
|
if self.args.j != 1:
|
||||||
# lost in the pickle; redefine
|
# lost in the pickle; redefine
|
||||||
if not ANYWIN or self.args.reuseaddr:
|
if not ANYWIN or self.args.reuseaddr:
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
|
||||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
if tcp:
|
||||||
|
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
|
||||||
sck.settimeout(None) # < does not inherit, ^ opts above do
|
sck.settimeout(None) # < does not inherit, ^ opts above do
|
||||||
|
|
||||||
ip, port = sck.getsockname()[:2]
|
if tcp:
|
||||||
|
ip, port = sck.getsockname()[:2]
|
||||||
|
else:
|
||||||
|
ip = re.sub(r"\.[0-9]+$", "", sck.getsockname().split("/")[-1])
|
||||||
|
port = 0
|
||||||
|
|
||||||
self.srvs.append(sck)
|
self.srvs.append(sck)
|
||||||
self.bound.add((ip, port))
|
self.bound.add((ip, port))
|
||||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||||
@@ -258,16 +296,22 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||||
"""listens on a shared tcp server"""
|
"""listens on a shared tcp server"""
|
||||||
ip, port = srv_sck.getsockname()[:2]
|
|
||||||
fno = srv_sck.fileno()
|
fno = srv_sck.fileno()
|
||||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
if srv_sck.family == socket.AF_UNIX:
|
||||||
msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
ip = re.sub(r"\.[0-9]+$", "", srv_sck.getsockname())
|
||||||
|
msg = "subscribed @ %s f%d p%d" % (ip, fno, os.getpid())
|
||||||
|
ip = ip.split("/")[-1]
|
||||||
|
port = 0
|
||||||
|
tcp = False
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ip, port = srv_sck.getsockname()[:2]
|
||||||
|
hip = "[%s]" % (ip,) if ":" in ip else ip
|
||||||
|
msg = "subscribed @ %s:%d f%d p%d" % (hip, port, fno, os.getpid())
|
||||||
|
|
||||||
self.log(self.name, msg)
|
self.log(self.name, msg)
|
||||||
|
|
||||||
def fun() -> None:
|
Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
|
||||||
self.broker.say("cb_httpsrv_up")
|
|
||||||
|
|
||||||
threading.Thread(target=fun, name="sig-hsrv-up1").start()
|
|
||||||
|
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
if self.args.log_conn:
|
if self.args.log_conn:
|
||||||
@@ -336,11 +380,13 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
sck, saddr = srv_sck.accept()
|
sck, saddr = srv_sck.accept()
|
||||||
cip, cport = saddr[:2]
|
if tcp:
|
||||||
if cip.startswith("::ffff:"):
|
cip = unicode(saddr[0])
|
||||||
cip = cip[7:]
|
if cip.startswith("::ffff:"):
|
||||||
|
cip = cip[7:]
|
||||||
addr = (cip, cport)
|
addr = (cip, saddr[1])
|
||||||
|
else:
|
||||||
|
addr = ("127.8.3.7", sck.fileno())
|
||||||
except (OSError, socket.error) as ex:
|
except (OSError, socket.error) as ex:
|
||||||
if self.stopping:
|
if self.stopping:
|
||||||
break
|
break
|
||||||
@@ -396,7 +442,7 @@ class HttpSrv(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def thr_poolw(self) -> None:
|
def thr_poolw(self) -> None:
|
||||||
assert self.tp_q
|
assert self.tp_q # !rm
|
||||||
while True:
|
while True:
|
||||||
task = self.tp_q.get()
|
task = self.tp_q.get()
|
||||||
if not task:
|
if not task:
|
||||||
@@ -508,8 +554,8 @@ class HttpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
|
# spack gives 4 lsb, take 3 lsb, get 4 ch
|
||||||
self.cb_v = v.decode("ascii")[-4:]
|
self.cb_v = ub64enc(spack(b">L", int(v))[1:]).decode("ascii")
|
||||||
self.cb_ts = time.time()
|
self.cb_ts = time.time()
|
||||||
return self.cb_v
|
return self.cb_v
|
||||||
|
|
||||||
@@ -540,3 +586,32 @@ class HttpSrv(object):
|
|||||||
ident += "a"
|
ident += "a"
|
||||||
|
|
||||||
self.u2idx_free[ident] = u2idx
|
self.u2idx_free[ident] = u2idx
|
||||||
|
|
||||||
|
def read_dls(
|
||||||
|
self,
|
||||||
|
) -> tuple[
|
||||||
|
dict[str, tuple[float, int, str, str, str]], dict[str, tuple[float, int]]
|
||||||
|
]:
|
||||||
|
"""
|
||||||
|
mp-broker asking for local dl-info + dl-state;
|
||||||
|
reduce overhead by sending just the vfs vpath
|
||||||
|
"""
|
||||||
|
dli = {k: (a, b, c.vpath, d, e) for k, (a, b, c, d, e) in self.dli.items()}
|
||||||
|
return (dli, self.dls)
|
||||||
|
|
||||||
|
def write_dls(
|
||||||
|
self,
|
||||||
|
sdli: dict[str, tuple[float, int, str, str, str]],
|
||||||
|
dls: dict[str, tuple[float, int]],
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
mp-broker pushing total dl-info + dl-state;
|
||||||
|
swap out the vfs vpath with the vfs node
|
||||||
|
"""
|
||||||
|
dli: dict[str, tuple[float, int, "VFS", str, str]] = {}
|
||||||
|
for k, (a, b, c, d, e) in sdli.items():
|
||||||
|
vn = self.asrv.vfs.all_nodes[c]
|
||||||
|
dli[k] = (a, b, vn, d, e)
|
||||||
|
|
||||||
|
self.tdli = dli
|
||||||
|
self.tdls = dls
|
||||||
|
|||||||
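A side-note on the cache-buster tweak in the @@ -508 hunk above: packing the version as a 32-bit big-endian integer, dropping the top byte, and base64url-encoding the remaining 3 bytes always yields exactly 4 characters. A minimal stdlib sketch; spack and ub64enc are assumed to be copyparty's thin wrappers around struct.pack and base64.urlsafe_b64encode:

    import base64
    import struct

    def cachebuster(ver: int) -> str:
        # ">L" packs ver as a 32-bit big-endian int; dropping the first
        # (most significant) byte leaves 3 bytes, and base64 maps every
        # 3 bytes to exactly 4 characters, so the result is 4 chars long
        return base64.urlsafe_b64encode(struct.pack(">L", ver)[1:]).decode("ascii")

    print(cachebuster(1713029))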
@@ -74,7 +74,7 @@ class Ico(object):
 try:
 _, _, tw, th = pb.textbbox((0, 0), ext)
 except:
-tw, th = pb.textsize(ext)
+tw, th = pb.textsize(ext) # type: ignore

 tw += len(ext)
 cw = tw // len(ext)
@@ -25,6 +25,7 @@ from .stolen.dnslib import (
 DNSHeader,
 DNSQuestion,
 DNSRecord,
+set_avahi_379,
 )
 from .util import CachedSet, Daemon, Netdev, list_ips, min_ex

@@ -72,6 +73,9 @@ class MDNS(MCast):
 self.ngen = ngen
 self.ttl = 300

+if not self.args.zm_nwa_1:
+set_avahi_379()

 zs = self.args.name + ".local."
 zs = zs.encode("ascii", "replace").decode("ascii", "replace")
 self.hn = "-".join(x for x in zs.split("?") if x) or (
@@ -292,6 +296,22 @@ class MDNS(MCast):
 def run2(self) -> None:
 last_hop = time.time()
 ihop = self.args.mc_hop

+try:
+if self.args.no_poll:
+raise Exception()
+fd2sck = {}
+srvpoll = select.poll()
+for sck in self.srv:
+fd = sck.fileno()
+fd2sck[fd] = sck
+srvpoll.register(fd, select.POLLIN)
+except Exception as ex:
+srvpoll = None
+if not self.args.no_poll:
+t = "WARNING: failed to poll(), will use select() instead: %r"
+self.log(t % (ex,), 3)

 while self.running:
 timeout = (
 0.02 + random.random() * 0.07
@@ -300,8 +320,13 @@ class MDNS(MCast):
 if self.unsolicited
 else (last_hop + ihop if ihop else 180)
 )
-rdy = select.select(self.srv, [], [], timeout)
-rx: list[socket.socket] = rdy[0] # type: ignore
+if srvpoll:
+pr = srvpoll.poll(timeout * 1000)
+rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
+else:
+rdy = select.select(self.srv, [], [], timeout)
+rx: list[socket.socket] = rdy[0] # type: ignore

 self.rx4.cln()
 self.rx6.cln()
 buf = b""
@@ -315,6 +340,9 @@ class MDNS(MCast):
 self.log("stopped", 2)
 return

+if self.args.zm_no_pe:
+continue

 t = "{} {} \033[33m|{}| {}\n{}".format(
 self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
 )
@@ -340,7 +368,7 @@ class MDNS(MCast):
 except:
 pass

-self.srv = {}
+self.srv.clear()

 def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
 cip = addr[0]
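The mdns hunks above introduce a poll()-with-select()-fallback loop; a self-contained sketch of that pattern, where srv and no_poll are stand-ins for the real socket list and the --no-poll flag:

    import select

    def wait_readable(srv, timeout, no_poll=False):
        # prefer poll() when available; fall back to select() on platforms
        # (or configs) where poll() is missing or explicitly disabled
        try:
            if no_poll:
                raise Exception()
            fd2sck = {s.fileno(): s for s in srv}
            p = select.poll()  # not available on plain win32
            for fd in fd2sck:
                p.register(fd, select.POLLIN)
            pr = p.poll(timeout * 1000)  # poll() takes milliseconds
            return [fd2sck[fd] for fd, ev in pr if ev & select.POLLIN]
        except Exception:
            rdy = select.select(srv, [], [], timeout)  # seconds
            return rdy[0]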
@@ -72,6 +72,9 @@ class Metrics(object):
 v = "{:.3f}".format(self.hsrv.t0)
 addug("cpp_boot_unixtime", "seconds", v, t)

+t = "number of active downloads"
+addg("cpp_active_dl", str(len(self.hsrv.tdls)), t)

 t = "number of open http(s) client connections"
 addg("cpp_http_conns", str(self.hsrv.ncli), t)

@@ -88,7 +91,7 @@ class Metrics(object):
 addg("cpp_total_bans", str(self.hsrv.nban), t)

 if not args.nos_vst:
-x = self.hsrv.broker.ask("up2k.get_state")
+x = self.hsrv.broker.ask("up2k.get_state", True, "")
 vs = json.loads(x.get())

 nvidle = 0
@@ -128,7 +131,7 @@ class Metrics(object):
 addbh("cpp_disk_size_bytes", "total HDD size of volume")
 addbh("cpp_disk_free_bytes", "free HDD space in volume")
 for vpath, vol in allvols:
-free, total = get_df(vol.realpath)
+free, total, _ = get_df(vol.realpath, False)
 if free is None or total is None:
 continue

@@ -179,7 +182,7 @@ class Metrics(object):
 tnbytes = 0
 tnfiles = 0
 for vpath, vol in allvols:
-cur = idx.get_cur(vol.realpath)
+cur = idx.get_cur(vol)
 if not cur:
 continue

@@ -4,15 +4,19 @@ from __future__ import print_function, unicode_literals
 import argparse
 import json
 import os
+import re
 import shutil
 import subprocess as sp
 import sys
+import tempfile

 from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
+from .authsrv import VFS
 from .bos import bos
 from .util import (
 FFMPEG_URL,
 REKOBO_LKEY,
+VF_CAREFUL,
 fsenc,
 min_ex,
 pybin,
@@ -20,12 +24,24 @@ from .util import (
 runcmd,
 sfsenc,
 uncyg,
+wunlink,
 )

 if True: # pylint: disable=using-constant-test
-from typing import Any, Union
+from typing import Any, Optional, Union

-from .util import RootLogger
+from .util import NamedLogger, RootLogger


+try:
+if os.environ.get("PRTY_NO_MUTAGEN"):
+raise Exception()

+from mutagen import version # noqa: F401

+HAVE_MUTAGEN = True
+except:
+HAVE_MUTAGEN = False


 def have_ff(scmd: str) -> bool:
@@ -44,8 +60,11 @@ def have_ff(scmd: str) -> bool:
 return bool(shutil.which(scmd))


-HAVE_FFMPEG = have_ff("ffmpeg")
+HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
-HAVE_FFPROBE = have_ff("ffprobe")
+HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")

+CBZ_PICS = set("png jpg jpeg gif bmp tga tif tiff webp avif".split())
+CBZ_01 = re.compile(r"(^|[^0-9v])0+[01]\b")


 class MParser(object):
@@ -107,6 +126,81 @@ class MParser(object):
 raise Exception()


+def au_unpk(
+log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
+) -> str:
+ret = ""
+maxsz = 1024 * 1024 * 64
+try:
+ext = abspath.split(".")[-1].lower()
+au, pk = fmt_map[ext].split(".")

+fd, ret = tempfile.mkstemp("." + au)

+if pk == "gz":
+import gzip

+fi = gzip.GzipFile(abspath, mode="rb")

+elif pk == "xz":
+import lzma

+fi = lzma.open(abspath, "rb")

+elif pk == "zip":
+import zipfile

+zf = zipfile.ZipFile(abspath, "r")
+zil = zf.infolist()
+zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
+if not zil:
+raise Exception("no audio inside zip")
+fi = zf.open(zil[0])

+elif pk == "cbz":
+import zipfile

+zf = zipfile.ZipFile(abspath, "r")
+znil = [(x.filename.lower(), x) for x in zf.infolist()]
+nf = len(znil)
+znil = [x for x in znil if x[0].split(".")[-1] in CBZ_PICS]
+znil = [x for x in znil if "cover" in x[0]] or znil
+znil = [x for x in znil if CBZ_01.search(x[0])] or znil
+t = "cbz: %d files, %d hits" % (nf, len(znil))
+if znil:
+t += ", using " + znil[0][1].filename
+log(t)
+if not znil:
+raise Exception("no images inside cbz")
+fi = zf.open(znil[0][1])

+else:
+raise Exception("unknown compression %s" % (pk,))

+fsz = 0
+with os.fdopen(fd, "wb") as fo:
+while True:
+buf = fi.read(32768)
+if not buf:
+break

+fsz += len(buf)
+if fsz > maxsz:
+raise Exception("zipbomb defused")

+fo.write(buf)

+return ret

+except Exception as ex:
+if ret:
+t = "failed to decompress audio file %r: %r"
+log(t % (abspath, ex))
+wunlink(log, ret, vn.flags if vn else VF_CAREFUL)

+return abspath


 def ffprobe(
 abspath: str, timeout: int = 60
 ) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
@@ -281,16 +375,14 @@ class MTag(object):
 or_ffprobe = " or FFprobe"

 if self.backend == "mutagen":
-self.get = self.get_mutagen
+self._get = self.get_mutagen
-try:
+if not HAVE_MUTAGEN:
-from mutagen import version # noqa: F401
-except:
 self.log("could not load Mutagen, trying FFprobe instead", c=3)
 self.backend = "ffprobe"

 if self.backend == "ffprobe":
 self.usable = self.can_ffprobe
-self.get = self.get_ffprobe
+self._get = self.get_ffprobe
 self.prefer_mt = True

 if not HAVE_FFPROBE:
@@ -410,7 +502,7 @@ class MTag(object):
 sv = str(zv).split("/")[0].strip().lstrip("0")
 ret[sk] = sv or 0

-# normalize key notation to rkeobo
+# normalize key notation to rekobo
 okey = ret.get("key")
 if okey:
 key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m")
@@ -460,6 +552,17 @@ class MTag(object):

 return r1

+def get(self, abspath: str) -> dict[str, Union[str, float]]:
+ext = abspath.split(".")[-1].lower()
+if ext not in self.args.au_unpk:
+return self._get(abspath)

+ap = au_unpk(self.log, self.args.au_unpk, abspath)
+ret = self._get(ap)
+if ap != abspath:
+wunlink(self.log, ap, VF_CAREFUL)
+return ret

 def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
 ret: dict[str, tuple[int, Any]] = {}

@@ -479,7 +582,7 @@ class MTag(object):
 raise Exception()
 except Exception as ex:
 if self.args.mtag_v:
-self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
+self.log("mutagen-err [%s] @ %r" % (ex, abspath), "90")

 return self.get_ffprobe(abspath) if self.can_ffprobe else {}

@@ -513,7 +616,7 @@ class MTag(object):
 continue

 if k == ".aq":
-v /= 1000
+v /= 1000 # type: ignore

 if k == "ac" and v.startswith("mp4a.40."):
 v = "aac"
@@ -551,13 +654,18 @@ class MTag(object):
 pypath = str(os.pathsep.join(zsl))
 env["PYTHONPATH"] = pypath
 except:
-if not E.ox and not EXE:
+raise # might be expected outside cpython
-raise
+ext = abspath.split(".")[-1].lower()
+if ext in self.args.au_unpk:
+ap = au_unpk(self.log, self.args.au_unpk, abspath)
+else:
+ap = abspath

 ret: dict[str, Any] = {}
 for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
 try:
-cmd = [parser.bin, abspath]
+cmd = [parser.bin, ap]
 if parser.bin.endswith(".py"):
 cmd = [pybin] + cmd

@@ -591,7 +699,10 @@ class MTag(object):
 ret[tag] = zj[tag]
 except:
 if self.args.mtag_v:
-t = "mtag error: tagname {}, parser {}, file {} => {}"
+t = "mtag error: tagname %r, parser %r, file %r => %r"
-self.log(t.format(tagname, parser.bin, abspath, min_ex()))
+self.log(t % (tagname, parser.bin, abspath, min_ex()), 6)

+if ap != abspath:
+wunlink(self.log, ap, VF_CAREFUL)

 return ret
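The new au_unpk helper above decompresses supported audio containers to a temp file before tagging and refuses to write more than 64 MiB ("zipbomb defused"). A minimal sketch of that capped streaming copy for the gzip case, independent of copyparty's helpers:

    import gzip
    import os
    import tempfile

    def capped_unpack_gz(abspath: str, maxsz: int = 64 * 1024 * 1024) -> str:
        # stream-decompress into a temp file, refusing to write more than
        # maxsz bytes; the caller removes the temp file again once the
        # tag-parsing is done, as MTag.get() does above
        fd, tmp = tempfile.mkstemp(".bin")
        fsz = 0
        with gzip.GzipFile(abspath, mode="rb") as fi, os.fdopen(fd, "wb") as fo:
            while True:
                buf = fi.read(32768)
                if not buf:
                    break
                fsz += len(buf)
                if fsz > maxsz:
                    raise Exception("zipbomb defused")
                fo.write(buf)
        return tmp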
@@ -206,6 +206,7 @@ class MCast(object):
 except:
 t = "announce failed on {} [{}]:\n{}"
 self.log(t.format(netdev, ip, min_ex()), 3)
+sck.close()

 if self.args.zm_msub:
 for s1 in self.srv.values():
@@ -4,27 +4,33 @@ from __future__ import print_function, unicode_literals
 import argparse
 import base64
 import hashlib
+import os
 import sys
 import threading

 from .__init__ import unicode

+try:
+if os.environ.get("PRTY_NO_ARGON2"):
+raise Exception()

+HAVE_ARGON2 = True
+from argon2 import __version__ as argon2ver
+except:
+HAVE_ARGON2 = False


 class PWHash(object):
 def __init__(self, args: argparse.Namespace):
 self.args = args

-try:
+zsl = args.ah_alg.split(",")
-alg, ac = args.ah_alg.split(",")
+alg = zsl[0]
-except:
-alg = args.ah_alg
-ac = {}

 if alg == "none":
 alg = ""

 self.alg = alg
-self.ac = ac
+self.ac = zsl[1:]
 if not alg:
 self.on = False
 self.hash = unicode
@@ -80,17 +86,23 @@ class PWHash(object):
 its = 2
 blksz = 8
 para = 4
+ramcap = 0 # openssl 1.1 = 32 MiB
 try:
 cost = 2 << int(self.ac[0])
 its = int(self.ac[1])
 blksz = int(self.ac[2])
 para = int(self.ac[3])
+ramcap = int(self.ac[4]) * 1024 * 1024
 except:
 pass

+cfg = {"salt": self.salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
+if ramcap:
+cfg["maxmem"] = ramcap

 ret = plain.encode("utf-8")
 for _ in range(its):
-ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
+ret = hashlib.scrypt(ret, **cfg)

 return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

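The scrypt change above only passes maxmem when a RAM cap was configured, since OpenSSL's default ceiling (roughly 32 MiB in 1.1) is too low for large cost factors. A standalone sketch with hypothetical tuning values:

    import hashlib
    import os

    salt = os.urandom(16)
    cost, blksz, para, its = 2 << 16, 8, 4, 2  # made-up tuning values
    ramcap = 256 * 1024 * 1024  # this cost/blksz needs ~128 MiB of RAM

    cfg = {"salt": salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
    if ramcap:
        cfg["maxmem"] = ramcap  # raise openssl's default memory ceiling

    ret = b"hunter2"
    for _ in range(its):
        ret = hashlib.scrypt(ret, **cfg)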
@@ -12,7 +12,7 @@ from types import SimpleNamespace
 from .__init__ import ANYWIN, EXE, TYPE_CHECKING
 from .authsrv import LEELOO_DALLAS, VFS
 from .bos import bos
-from .util import Daemon, min_ex, pybin, runhook
+from .util import Daemon, absreal, min_ex, pybin, runhook, vjoin

 if True: # pylint: disable=using-constant-test
 from typing import Any, Union
@@ -127,7 +127,7 @@ class SMB(object):
 self.log("smb", msg, c)

 def start(self) -> None:
-Daemon(self.srv.start)
+Daemon(self.srv.start, "smbd")

 def _auth_cb(self, *a, **ka):
 debug("auth-result: %s %s", a, ka)
@@ -151,6 +151,8 @@ class SMB(object):
 def _uname(self) -> str:
 if self.noacc:
 return LEELOO_DALLAS
+if not self.asrv.acct:
+return "*"

 try:
 # you found it! my single worst bit of code so far
@@ -187,7 +189,9 @@ class SMB(object):

 debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
 vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
-return vfs, vfs.canonical(rem)
+if not vfs.realpath:
+raise Exception("unmapped vfs")
+return vfs, vjoin(vfs.realpath, rem)

 def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
 vpath = vpath.replace("\\", "/").lstrip("/")
@@ -195,6 +199,8 @@ class SMB(object):
 uname = self._uname()
 # debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
 vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
+if not vfs.realpath:
+raise Exception("unmapped vfs")
 _, vfs_ls, vfs_virt = vfs.ls(
 rem, uname, not self.args.no_scandir, [[False, False]]
 )
@@ -209,7 +215,7 @@ class SMB(object):
 sz = 112 * 2 # ['.', '..']
 for n, fn in enumerate(ls):
 if sz >= 64000:
-t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
+t = "listing only %d of %d files (%d byte) in /%s for performance; see --smb-nwa-1"
 warning(t, n, len(ls), sz, vpath)
 break

@@ -238,11 +244,26 @@ class SMB(object):
 t = "blocked write (no-write-acc %s): /%s @%s"
 yeet(t % (vfs.axs.uwrite, vpath, uname))

+ap = absreal(ap)
 xbu = vfs.flags.get("xbu")
 if xbu and not runhook(
-self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
+self.nlog,
+None,
+self.hub.up2k,
+"xbu.smb",
+xbu,
+ap,
+vpath,
+"",
+"",
+"",
+0,
+0,
+"1.7.6.2",
+time.time(),
+"",
 ):
-yeet("blocked by xbu server config: " + vpath)
+yeet("blocked by xbu server config: %r" % (vpath,))

 ret = bos.open(ap, flags, *a, mode=chmod, **ka)
 if wr:
@@ -297,7 +318,7 @@ class SMB(object):
 t = "blocked rename (no-move-acc %s): /%s @%s"
 yeet(t % (vfs1.axs.umove, vp1, uname))

-self.hub.up2k.handle_mv(uname, vp1, vp2)
+self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
 try:
 bos.makedirs(ap2)
 except:
@@ -5,11 +5,11 @@ import errno
 import re
 import select
 import socket
-from email.utils import formatdate
+import time

 from .__init__ import TYPE_CHECKING
 from .multicast import MC_Sck, MCast
-from .util import CachedSet, html_escape, min_ex
+from .util import CachedSet, formatdate, html_escape, min_ex

 if TYPE_CHECKING:
 from .broker_util import BrokerCli
@@ -84,7 +84,7 @@ class SSDPr(object):
 name = self.args.doctitle
 zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
 hc.reply(zs.encode("utf-8", "replace"))
-return False # close connectino
+return False # close connection


 class SSDPd(MCast):
@@ -141,9 +141,29 @@ class SSDPd(MCast):
 self.log("stopped", 2)

 def run2(self) -> None:
+try:
+if self.args.no_poll:
+raise Exception()
+fd2sck = {}
+srvpoll = select.poll()
+for sck in self.srv:
+fd = sck.fileno()
+fd2sck[fd] = sck
+srvpoll.register(fd, select.POLLIN)
+except Exception as ex:
+srvpoll = None
+if not self.args.no_poll:
+t = "WARNING: failed to poll(), will use select() instead: %r"
+self.log(t % (ex,), 3)

 while self.running:
-rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
-rx: list[socket.socket] = rdy[0] # type: ignore
+if srvpoll:
+pr = srvpoll.poll((self.args.z_chk or 180) * 1000)
+rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
+else:
+rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
+rx: list[socket.socket] = rdy[0] # type: ignore

 self.rxc.cln()
 buf = b""
 addr = ("0", 0)
@@ -168,7 +188,7 @@ class SSDPd(MCast):
 except:
 pass

-self.srv = {}
+self.srv.clear()

 def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
 cip = addr[0]
@@ -209,7 +229,7 @@ CONFIGID.UPNP.ORG: 1

 """
 v4 = srv.ip.replace("::ffff:", "")
-zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
+zs = zs.format(formatdate(), v4, srv.hport, self.args.zsid)
 zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
 srv.sck.sendto(zb, addr[:2])

@@ -1,13 +1,13 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import argparse
 import re
 import stat
 import tarfile

 from queue import Queue

+from .authsrv import AuthSrv
 from .bos import bos
 from .sutil import StreamArc, errdesc
 from .util import Daemon, fsenc, min_ex
@@ -45,12 +45,12 @@ class StreamTar(StreamArc):
 def __init__(
 self,
 log: "NamedLogger",
-args: argparse.Namespace,
+asrv: AuthSrv,
 fgen: Generator[dict[str, Any], None, None],
 cmp: str = "",
 **kwargs: Any
 ):
-super(StreamTar, self).__init__(log, args, fgen)
+super(StreamTar, self).__init__(log, asrv, fgen)

 self.ci = 0
 self.co = 0
@@ -148,7 +148,7 @@ class StreamTar(StreamArc):
 errors.append((f["vp"], ex))

 if errors:
-self.errf, txt = errdesc(errors)
+self.errf, txt = errdesc(self.asrv.vfs, errors)
 self.log("\n".join(([repr(self.errf)] + txt[1:])))
 self.ser(self.errf)

@@ -8,10 +8,16 @@ from itertools import chain
 from .bimap import Bimap, BimapError
 from .bit import get_bits, set_bits
 from .buffer import BufferError
-from .label import DNSBuffer, DNSLabel
+from .label import DNSBuffer, DNSLabel, set_avahi_379
 from .ranges import IP4, IP6, H, I, check_bytes


+try:
+range = xrange
+except:
+pass


 class DNSError(Exception):
 pass

@@ -420,7 +426,7 @@ class RR(object):
 if rdlength:
 rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
 else:
-rdata = ""
+rdata = RD(b"a")
 return cls(rname, rtype, rclass, ttl, rdata)
 except (BufferError, BimapError) as e:
 raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
@@ -11,6 +11,23 @@ LDH = set(range(33, 127))
 ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")


+avahi_379 = 0


+def set_avahi_379():
+global avahi_379
+avahi_379 = 1


+def log_avahi_379(args):
+global avahi_379
+if avahi_379 == 2:
+return
+avahi_379 = 2
+t = "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d];\n\033[35m NOTE: this is probably avahi-bug #379, packet corruption in Avahi's mDNS-reflection feature. Copyparty has a workaround and is OK, but other devices need either --zm4 or --zm6"
+raise BufferError(t % args)


 class DNSLabelError(Exception):
 pass

@@ -96,8 +113,11 @@ class DNSBuffer(Buffer):
 )
 if pointer < self.offset:
 self.offset = pointer
+elif avahi_379:
+log_avahi_379((self.offset, pointer, len(self.data)))
+label.extend(b"a")
+break
 else:

 raise BufferError(
 "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
 % (self.offset, pointer, len(self.data))
@@ -11,7 +11,21 @@ import os

 from ._shared import IP, Adapter

-if os.name == "nt":
+def nope(include_unconfigured=False):
+return []


+try:
+S390X = os.uname().machine == "s390x"
+except:
+S390X = False


+if os.environ.get("PRTY_NO_IFADDR") or S390X:
+# s390x deadlocks at libc.getifaddrs
+get_adapters = nope
+elif os.name == "nt":
 from ._win32 import get_adapters
 elif os.name == "posix":
 from ._posix import get_adapters
@@ -17,6 +17,7 @@ if not PY2:
 U: Callable[[str], str] = str
 else:
 U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
+range = xrange # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable


 class Adapter(object):
@@ -16,6 +16,11 @@ if True: # pylint: disable=using-constant-test

 from typing import Callable, List, Optional, Tuple, Union

+try:
+range = xrange
+except:
+pass


 def num_char_count_bits(ver: int) -> int:
 return 16 if (ver + 7) // 17 else 8
@@ -589,3 +594,20 @@ def _get_bit(x: int, i: int) -> bool:

 class DataTooLongError(ValueError):
 pass


+def qr2svg(qr: QrCode, border: int) -> str:
+parts: list[str] = []
+for y in range(qr.size):
+sy = border + y
+for x in range(qr.size):
+if qr.modules[y][x]:
+parts.append("M%d,%dh1v1h-1z" % (border + x, sy))
+t = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 {0} {0}" stroke="none">
+<rect width="100%" height="100%" fill="#F7F7F7"/>
+<path d="{1}" fill="#111111"/>
+</svg>
+"""
+return t.format(qr.size + border * 2, " ".join(parts))
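A hedged usage sketch for the new qr2svg helper, assuming the vendored qrcodegen API exposes QrCode.encode_text and QrCode.Ecc like the upstream project (names may differ slightly in copyparty's copy):

    # build a QR code for a URL and write it out as an SVG file
    qr = QrCode.encode_text("https://example.com", QrCode.Ecc.MEDIUM)
    svg = qr2svg(qr, border=2)
    with open("qr.svg", "w", encoding="utf-8") as f:
        f.write(svg)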
@@ -1,15 +1,15 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import argparse
 import os
 import tempfile
 from datetime import datetime

 from .__init__ import CORES
+from .authsrv import VFS, AuthSrv
 from .bos import bos
 from .th_cli import ThumbCli
-from .util import UTC, vjoin
+from .util import UTC, vjoin, vol_san

 if True: # pylint: disable=using-constant-test
 from typing import Any, Generator, Optional
@@ -21,12 +21,13 @@ class StreamArc(object):
 def __init__(
 self,
 log: "NamedLogger",
-args: argparse.Namespace,
+asrv: AuthSrv,
 fgen: Generator[dict[str, Any], None, None],
 **kwargs: Any
 ):
 self.log = log
-self.args = args
+self.asrv = asrv
+self.args = asrv.args
 self.fgen = fgen
 self.stopped = False

@@ -81,7 +82,9 @@ def enthumb(
 ) -> dict[str, Any]:
 rem = f["vp"]
 ext = rem.rsplit(".", 1)[-1].lower()
-if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
+if (fmt == "mp3" and ext == "mp3") or (
+fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|")
+):
 raise Exception()

 vp = vjoin(vtop, rem.split("/", 1)[1])
@@ -101,15 +104,20 @@ def enthumb(
 return f


-def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
+def errdesc(
+vfs: VFS, errors: list[tuple[str, str]]
+) -> tuple[dict[str, Any], list[str]]:
 report = ["copyparty failed to add the following files to the archive:", ""]

 for fn, err in errors:
-report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
+report.extend([" file: %r" % (fn,), "error: %s" % (err,), ""])

+btxt = "\r\n".join(report).encode("utf-8", "replace")
+btxt = vol_san(list(vfs.all_vols.values()), btxt)

 with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
 tf_path = tf.name
-tf.write("\r\n".join(report).encode("utf-8", "replace"))
+tf.write(btxt)

 dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")

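errdesc now runs the report through vol_san before writing it, which presumably swaps each volume's absolute on-disk path for its virtual path so that archive error reports do not leak server paths. A rough approximation of such a scrubber (the real helper lives in copyparty's util.py and may differ):

    # hypothetical stand-in for vol_san: vols is an iterable of VFS nodes,
    # each assumed to expose .realpath (filesystem) and .vpath (virtual)
    def scrub_paths(vols, btxt: bytes) -> bytes:
        for vol in vols:
            ap = vol.realpath.encode("utf-8", "replace")
            vp = ("/" + vol.vpath).encode("utf-8", "replace")
            btxt = btxt.replace(ap, vp)
        return btxt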
@@ -2,8 +2,6 @@
|
|||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import base64
|
|
||||||
import calendar
|
|
||||||
import errno
|
import errno
|
||||||
import gzip
|
import gzip
|
||||||
import logging
|
import logging
|
||||||
@@ -16,7 +14,7 @@ import string
|
|||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime
|
||||||
|
|
||||||
# from inspect import currentframe
|
# from inspect import currentframe
|
||||||
# print(currentframe().f_lineno)
|
# print(currentframe().f_lineno)
|
||||||
@@ -28,18 +26,30 @@ if True: # pylint: disable=using-constant-test
|
|||||||
import typing
|
import typing
|
||||||
from typing import Any, Optional, Union
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, E, EnvParams, unicode
|
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
||||||
from .authsrv import BAD_CFG, AuthSrv
|
from .authsrv import BAD_CFG, AuthSrv
|
||||||
from .cert import ensure_cert
|
from .cert import ensure_cert
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
||||||
|
from .pwhash import HAVE_ARGON2
|
||||||
from .tcpsrv import TcpSrv
|
from .tcpsrv import TcpSrv
|
||||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
from .th_srv import (
|
||||||
|
HAVE_AVIF,
|
||||||
|
HAVE_FFMPEG,
|
||||||
|
HAVE_FFPROBE,
|
||||||
|
HAVE_HEIF,
|
||||||
|
HAVE_PIL,
|
||||||
|
HAVE_VIPS,
|
||||||
|
HAVE_WEBP,
|
||||||
|
ThumbSrv,
|
||||||
|
)
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
from .util import (
|
from .util import (
|
||||||
DEF_EXP,
|
DEF_EXP,
|
||||||
DEF_MTE,
|
DEF_MTE,
|
||||||
DEF_MTH,
|
DEF_MTH,
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
|
HAVE_PSUTIL,
|
||||||
|
HAVE_SQLITE3,
|
||||||
UTC,
|
UTC,
|
||||||
VERSIONS,
|
VERSIONS,
|
||||||
Daemon,
|
Daemon,
|
||||||
@@ -50,12 +60,14 @@ from .util import (
|
|||||||
alltrace,
|
alltrace,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
build_netmap,
|
build_netmap,
|
||||||
|
load_ipu,
|
||||||
min_ex,
|
min_ex,
|
||||||
mp,
|
mp,
|
||||||
odfusion,
|
odfusion,
|
||||||
pybin,
|
pybin,
|
||||||
start_log_thrs,
|
start_log_thrs,
|
||||||
start_stackmon,
|
start_stackmon,
|
||||||
|
ub64enc,
|
||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -65,6 +77,9 @@ if TYPE_CHECKING:
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class SvcHub(object):
|
class SvcHub(object):
|
||||||
"""
|
"""
|
||||||
@@ -89,20 +104,23 @@ class SvcHub(object):
|
|||||||
self.argv = argv
|
self.argv = argv
|
||||||
self.E: EnvParams = args.E
|
self.E: EnvParams = args.E
|
||||||
self.no_ansi = args.no_ansi
|
self.no_ansi = args.no_ansi
|
||||||
|
self.tz = UTC if args.log_utc else None
|
||||||
self.logf: Optional[typing.TextIO] = None
|
self.logf: Optional[typing.TextIO] = None
|
||||||
self.logf_base_fn = ""
|
self.logf_base_fn = ""
|
||||||
|
self.is_dut = False # running in unittest; always False
|
||||||
self.stop_req = False
|
self.stop_req = False
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.stopped = False
|
self.stopped = False
|
||||||
self.reload_req = False
|
self.reload_req = False
|
||||||
self.reloading = 0
|
self.reload_mutex = threading.Lock()
|
||||||
self.stop_cond = threading.Condition()
|
self.stop_cond = threading.Condition()
|
||||||
self.nsigs = 3
|
self.nsigs = 3
|
||||||
self.retcode = 0
|
self.retcode = 0
|
||||||
self.httpsrv_up = 0
|
self.httpsrv_up = 0
|
||||||
|
|
||||||
self.log_mutex = threading.Lock()
|
self.log_mutex = threading.Lock()
|
||||||
self.next_day = 0
|
self.cday = 0
|
||||||
|
self.cmon = 0
|
||||||
self.tstack = 0.0
|
self.tstack = 0.0
|
||||||
|
|
||||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||||
@@ -193,6 +211,38 @@ class SvcHub(object):
|
|||||||
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
||||||
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
||||||
|
|
||||||
|
zs = ""
|
||||||
|
if args.th_ram_max < 0.22:
|
||||||
|
zs = "generate thumbnails"
|
||||||
|
elif args.th_ram_max < 1:
|
||||||
|
zs = "generate audio waveforms or spectrograms"
|
||||||
|
if zs:
|
||||||
|
t = "WARNING: --th-ram-max is very small (%.2f GiB); will not be able to %s"
|
||||||
|
self.log("root", t % (args.th_ram_max, zs), 3)
|
||||||
|
|
||||||
|
if args.chpw and args.idp_h_usr:
|
||||||
|
t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
|
||||||
|
self.log("root", t, 1)
|
||||||
|
raise Exception(t)
|
||||||
|
|
||||||
|
noch = set()
|
||||||
|
for zs in args.chpw_no or []:
|
||||||
|
zsl = [x.strip() for x in zs.split(",")]
|
||||||
|
noch.update([x for x in zsl if x])
|
||||||
|
args.chpw_no = noch
|
||||||
|
|
||||||
|
if args.ipu:
|
||||||
|
iu, nm = load_ipu(self.log, args.ipu, True)
|
||||||
|
setattr(args, "ipu_iu", iu)
|
||||||
|
setattr(args, "ipu_nm", nm)
|
||||||
|
|
||||||
|
if not self.args.no_ses:
|
||||||
|
self.setup_session_db()
|
||||||
|
|
||||||
|
args.shr1 = ""
|
||||||
|
if args.shr:
|
||||||
|
self.setup_share_db()
|
||||||
|
|
||||||
bri = "zy"[args.theme % 2 :][:1]
|
bri = "zy"[args.theme % 2 :][:1]
|
||||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||||
@@ -232,6 +282,8 @@ class SvcHub(object):
|
|||||||
|
|
||||||
self.up2k = Up2k(self)
|
self.up2k = Up2k(self)
|
||||||
|
|
||||||
|
self._feature_test()
|
||||||
|
|
||||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||||
if not HAVE_VIPS:
|
if not HAVE_VIPS:
|
||||||
decs.pop("vips", None)
|
decs.pop("vips", None)
|
||||||
@@ -240,6 +292,10 @@ class SvcHub(object):
|
|||||||
if not HAVE_FFMPEG or not HAVE_FFPROBE:
|
if not HAVE_FFMPEG or not HAVE_FFPROBE:
|
||||||
decs.pop("ff", None)
|
decs.pop("ff", None)
|
||||||
|
|
||||||
|
# compressed formats; "s3z=s3m.zip, s3gz=s3m.gz, ..."
|
||||||
|
zlss = [x.strip().lower().split("=", 1) for x in args.au_unpk.split(",")]
|
||||||
|
args.au_unpk = {x[0]: x[1] for x in zlss}
|
||||||
|
|
||||||
self.args.th_dec = list(decs.keys())
|
self.args.th_dec = list(decs.keys())
|
||||||
self.thumbsrv = None
|
self.thumbsrv = None
|
||||||
want_ff = False
|
want_ff = False
|
||||||
@@ -276,6 +332,13 @@ class SvcHub(object):
|
|||||||
if want_ff and ANYWIN:
|
if want_ff and ANYWIN:
|
||||||
self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
|
self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
|
||||||
|
|
||||||
|
if not args.no_acode:
|
||||||
|
if not re.match("^(0|[qv][0-9]|[0-9]{2,3}k)$", args.q_mp3.lower()):
|
||||||
|
t = "invalid mp3 transcoding quality [%s] specified; only supports [0] to disable, a CBR value such as [192k], or a CQ/CRF value such as [v2]"
|
||||||
|
raise Exception(t % (args.q_mp3,))
|
||||||
|
else:
|
||||||
|
args.au_unpk = {}
|
||||||
|
|
||||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||||
|
|
||||||
zms = ""
|
zms = ""
|
||||||
@@ -288,13 +351,14 @@ class SvcHub(object):
|
|||||||
from .ftpd import Ftpd
|
from .ftpd import Ftpd
|
||||||
|
|
||||||
self.ftpd: Optional[Ftpd] = None
|
self.ftpd: Optional[Ftpd] = None
|
||||||
Daemon(self.start_ftpd, "start_ftpd")
|
|
||||||
zms += "f" if args.ftp else "F"
|
zms += "f" if args.ftp else "F"
|
||||||
|
|
||||||
if args.tftp:
|
if args.tftp:
|
||||||
from .tftpd import Tftpd
|
from .tftpd import Tftpd
|
||||||
|
|
||||||
self.tftpd: Optional[Tftpd] = None
|
self.tftpd: Optional[Tftpd] = None
|
||||||
|
|
||||||
|
if args.ftp or args.ftps or args.tftp:
|
||||||
Daemon(self.start_ftpd, "start_tftpd")
|
Daemon(self.start_ftpd, "start_tftpd")
|
||||||
|
|
||||||
if args.smb:
|
if args.smb:
|
||||||
@@ -324,6 +388,160 @@ class SvcHub(object):

         self.broker = Broker(self)

+        # create netmaps early to avoid firewall gaps,
+        # but the mutex blocks multiprocessing startup
+        for zs in "ipu_iu ftp_ipa_nm tftp_ipa_nm".split():
+            try:
+                getattr(args, zs).mutex = threading.Lock()
+            except:
+                pass
+
+    def setup_session_db(self) -> None:
+        if not HAVE_SQLITE3:
+            self.args.no_ses = True
+            t = "WARNING: sqlite3 not available; disabling sessions, will use plaintext passwords in cookies"
+            self.log("root", t, 3)
+            return
+
+        import sqlite3
+
+        create = True
+        db_path = self.args.ses_db
+        self.log("root", "opening sessions-db %s" % (db_path,))
+        for n in range(2):
+            try:
+                db = sqlite3.connect(db_path)
+                cur = db.cursor()
+                try:
+                    cur.execute("select count(*) from us").fetchone()
+                    create = False
+                    break
+                except:
+                    pass
+            except Exception as ex:
+                if n:
+                    raise
+                t = "sessions-db corrupt; deleting and recreating: %r"
+                self.log("root", t % (ex,), 3)
+                try:
+                    cur.close()  # type: ignore
+                except:
+                    pass
+                try:
+                    db.close()  # type: ignore
+                except:
+                    pass
+                os.unlink(db_path)
+
+        sch = [
+            r"create table kv (k text, v int)",
+            r"create table us (un text, si text, t0 int)",
+            # username, session-id, creation-time
+            r"create index us_un on us(un)",
+            r"create index us_si on us(si)",
+            r"create index us_t0 on us(t0)",
+            r"insert into kv values ('sver', 1)",
+        ]
+
+        assert db  # type: ignore  # !rm
+        assert cur  # type: ignore  # !rm
+        if create:
+            for cmd in sch:
+                cur.execute(cmd)
+            self.log("root", "created new sessions-db")
+            db.commit()
+
+        cur.close()
+        db.close()
+
+    def setup_share_db(self) -> None:
+        al = self.args
+        if not HAVE_SQLITE3:
+            self.log("root", "sqlite3 not available; disabling --shr", 1)
+            al.shr = ""
+            return
+
+        import sqlite3
+
+        al.shr = al.shr.strip("/")
+        if "/" in al.shr or not al.shr:
+            t = "config error: --shr must be the name of a virtual toplevel directory to put shares inside"
+            self.log("root", t, 1)
+            raise Exception(t)
+
+        al.shr = "/%s/" % (al.shr,)
+        al.shr1 = al.shr[1:]
+
+        create = True
+        modified = False
+        db_path = self.args.shr_db
+        self.log("root", "opening shares-db %s" % (db_path,))
+        for n in range(2):
+            try:
+                db = sqlite3.connect(db_path)
+                cur = db.cursor()
+                try:
+                    cur.execute("select count(*) from sh").fetchone()
+                    create = False
+                    break
+                except:
+                    pass
+            except Exception as ex:
+                if n:
+                    raise
+                t = "shares-db corrupt; deleting and recreating: %r"
+                self.log("root", t % (ex,), 3)
+                try:
+                    cur.close()  # type: ignore
+                except:
+                    pass
+                try:
+                    db.close()  # type: ignore
+                except:
+                    pass
+                os.unlink(db_path)
+
+        sch1 = [
+            r"create table kv (k text, v int)",
+            r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)",
+            # sharekey, password, src, perms, numFiles, owner, created, expires
+        ]
+        sch2 = [
+            r"create table sf (k text, vp text)",
+            r"create index sf_k on sf(k)",
+            r"create index sh_k on sh(k)",
+            r"create index sh_t1 on sh(t1)",
+        ]
+
+        assert db  # type: ignore  # !rm
+        assert cur  # type: ignore  # !rm
+        if create:
+            dver = 2
+            modified = True
+            for cmd in sch1 + sch2:
+                cur.execute(cmd)
+            self.log("root", "created new shares-db")
+        else:
+            (dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]
+
+        if dver == 1:
+            modified = True
+            for cmd in sch2:
+                cur.execute(cmd)
+            cur.execute("update sh set st = 0")
+            self.log("root", "shares-db schema upgrade ok")
+
+        if modified:
+            for cmd in [
+                r"delete from kv where k = 'sver'",
+                r"insert into kv values ('sver', %d)" % (2,),
+            ]:
+                cur.execute(cmd)
+            db.commit()
+
+        cur.close()
+        db.close()
+
     def start_ftpd(self) -> None:
         time.sleep(30)

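For orientation, a minimal standalone sketch (not part of the diff) that exercises the sessions-db schema created above, using only the standard sqlite3 module; table and column names are taken from the `sch` list, the sample row values are made up:

```python
import sqlite3

db = sqlite3.connect(":memory:")  # the real code opens args.ses_db on disk
cur = db.cursor()
for cmd in (
    "create table kv (k text, v int)",
    "create table us (un text, si text, t0 int)",  # username, session-id, creation-time
    "create index us_un on us(un)",
    "create index us_si on us(si)",
    "create index us_t0 on us(t0)",
    "insert into kv values ('sver', 1)",
):
    cur.execute(cmd)

# store one session and resolve a presented session-id back to a username
cur.execute("insert into us values (?, ?, ?)", ("alice", "deadbeef", 1720000000))
db.commit()
print(cur.execute("select un from us where si = ?", ("deadbeef",)).fetchone())
db.close()
```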
@@ -383,7 +601,7 @@ class SvcHub(object):
             self.sigterm()

     def sigterm(self) -> None:
-        os.kill(os.getpid(), signal.SIGTERM)
+        self.signal_handler(signal.SIGTERM, None)

     def cb_httpsrv_up(self) -> None:
         self.httpsrv_up += 1
@@ -408,6 +626,58 @@ class SvcHub(object):

         Daemon(self.sd_notify, "sd-notify")

+    def _feature_test(self) -> None:
+        fok = []
+        fng = []
+        t_ff = "transcode audio, create spectrograms, video thumbnails"
+        to_check = [
+            (HAVE_SQLITE3, "sqlite", "sessions and file/media indexing"),
+            (HAVE_PIL, "pillow", "image thumbnails (plenty fast)"),
+            (HAVE_VIPS, "vips", "image thumbnails (faster, eats more ram)"),
+            (HAVE_WEBP, "pillow-webp", "create thumbnails as webp files"),
+            (HAVE_FFMPEG, "ffmpeg", t_ff + ", good-but-slow image thumbnails"),
+            (HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
+            (HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
+            (HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
+            (HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
+            (HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
+        ]
+        if ANYWIN:
+            to_check += [
+                (HAVE_PSUTIL, "psutil", "improved plugin cleanup (rarely useful)")
+            ]
+
+        verbose = self.args.deps
+        if verbose:
+            self.log("dependencies", "")
+
+        for have, feat, what in to_check:
+            lst = fok if have else fng
+            lst.append((feat, what))
+            if verbose:
+                zi = 2 if have else 5
+                sgot = "found" if have else "missing"
+                t = "%7s: %s \033[36m(%s)"
+                self.log("dependencies", t % (sgot, feat, what), zi)
+
+        if verbose:
+            self.log("dependencies", "")
+            return
+
+        sok = ", ".join(x[0] for x in fok)
+        sng = ", ".join(x[0] for x in fng)
+
+        t = ""
+        if sok:
+            t += "OK: \033[32m" + sok
+        if sng:
+            if t:
+                t += ", "
+            t += "\033[0mNG: \033[35m" + sng
+
+        t += "\033[0m, see --deps"
+        self.log("dependencies", t, 6)

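A simplified sketch (illustrative only, the `checks` values are stand-ins for the `HAVE_*` flags used above) of the dependency-probe pattern: each optional feature is sorted into a found/missing bucket and summarized on one line.

```python
# hypothetical probe results standing in for HAVE_SQLITE3, HAVE_FFMPEG, ...
checks = [
    (True, "sqlite", "sessions and file/media indexing"),
    (False, "ffmpeg", "transcode audio, create spectrograms, video thumbnails"),
]

fok = [(name, what) for have, name, what in checks if have]
fng = [(name, what) for have, name, what in checks if not have]

print("OK:", ", ".join(n for n, _ in fok))
print("NG:", ", ".join(n for n, _ in fng), "- see --deps")
```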
     def _check_env(self) -> None:
         try:
             files = os.listdir(E.cfg)
@@ -419,6 +689,12 @@ class SvcHub(object):
             t = "WARNING: found config files in [%s]: %s\n config files are not expected here, and will NOT be loaded (unless your setup is intentionally hella funky)"
             self.log("root", t % (E.cfg, ", ".join(hits)), 3)

+        if self.args.no_bauth:
+            t = "WARNING: --no-bauth disables support for the Android app; you may want to use --bauth-last instead"
+            self.log("root", t, 3)
+            if self.args.bauth_last:
+                self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
+
     def _process_config(self) -> bool:
         al = self.args

@@ -461,8 +737,10 @@ class SvcHub(object):
         zsl = al.th_covers.split(",")
         zsl = [x.strip() for x in zsl]
         zsl = [x for x in zsl if x]
-        al.th_covers = set(zsl)
+        al.th_covers = zsl
-        al.th_coversd = set(zsl + ["." + x for x in zsl])
+        al.th_coversd = zsl + ["." + x for x in zsl]
+        al.th_covers_set = set(al.th_covers)
+        al.th_coversd_set = set(al.th_coversd)

         for k in "c".split(" "):
             vl = getattr(al, k)
@@ -502,8 +780,8 @@ class SvcHub(object):
         al.idp_h_grp = al.idp_h_grp.lower()
         al.idp_h_key = al.idp_h_key.lower()

-        al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
+        al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa, True)
-        al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
+        al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa, True)

         mte = ODict.fromkeys(DEF_MTE.split(","), True)
         al.mte = odfusion(mte, al.mte)
@@ -515,7 +793,7 @@ class SvcHub(object):
         al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
         al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))

-        for k in ["no_hash", "no_idx"]:
+        for k in ["no_hash", "no_idx", "og_ua", "srch_excl"]:
             ptn = getattr(self.args, k)
             if ptn:
                 setattr(self.args, k, re.compile(ptn))
@@ -539,6 +817,35 @@ class SvcHub(object):
         except:
             raise Exception("invalid --rm-retry [%s]" % (self.args.rm_retry,))

+        try:
+            zf1, zf2 = self.args.mv_retry.split("/")
+            self.args.mv_re_t = float(zf1)
+            self.args.mv_re_r = float(zf2)
+        except:
+            raise Exception("invalid --mv-retry [%s]" % (self.args.mv_retry,))
+
+        al.tcolor = al.tcolor.lstrip("#")
+        if len(al.tcolor) == 3:  # fc5 => ffcc55
+            al.tcolor = "".join([x * 2 for x in al.tcolor])
+
+        zs = al.u2sz
+        zsl = zs.split(",")
+        if len(zsl) not in (1, 3):
+            t = "invalid --u2sz; must be either one number, or a comma-separated list of three numbers (min,default,max)"
+            raise Exception(t)
+        if len(zsl) < 3:
+            zsl = ["1", zs, zs]
+        zi2 = 1
+        for zs in zsl:
+            zi = int(zs)
+            # arbitrary constraint (anything above 2 GiB is probably unintended)
+            if zi < 1 or zi > 2047:
+                raise Exception("invalid --u2sz; minimum is 1, max is 2047")
+            if zi < zi2:
+                raise Exception("invalid --u2sz; values must be equal or ascending")
+            zi2 = zi
+        al.u2sz = ",".join(zsl)
+
         return True

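The `--u2sz` handling above accepts either a single number or a min,default,max triple; a standalone sketch of the same validation (the function name is mine, not from the codebase):

```python
def parse_u2sz(spec: str) -> str:
    vals = spec.split(",")
    if len(vals) not in (1, 3):
        raise ValueError("need one number or min,default,max")
    if len(vals) < 3:
        vals = ["1", spec, spec]  # a single value becomes the default and the max
    prev = 1
    for v in vals:
        n = int(v)
        if n < 1 or n > 2047:  # same arbitrary ~2 GiB ceiling as the diff
            raise ValueError("each value must be 1..2047")
        if n < prev:
            raise ValueError("values must be equal or ascending")
        prev = n
    return ",".join(vals)

print(parse_u2sz("96"))        # "1,96,96"
print(parse_u2sz("8,64,256"))  # "8,64,256"
```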
     def _ipa2re(self, txt) -> Optional[re.Pattern]:
@@ -589,7 +896,7 @@ class SvcHub(object):
             self.args.nc = min(self.args.nc, soft // 2)

     def _logname(self) -> str:
-        dt = datetime.now(UTC)
+        dt = datetime.now(self.tz)
         fn = str(self.args.lo)
         for fs in "YmdHMS":
             fs = "%" + fs
@@ -706,38 +1013,23 @@ class SvcHub(object):
         except:
             self.log("root", "ssdp startup failed;\n" + min_ex(), 3)

-    def reload(self) -> str:
+    def reload(self, rescan_all_vols: bool, up2k: bool) -> str:
-        with self.up2k.mutex:
+        t = "config has been reloaded"
-            if self.reloading:
+        with self.reload_mutex:
-                return "cannot reload; already in progress"
-            self.reloading = 1
-
-        Daemon(self._reload, "reloading")
-        return "reload initiated"
-
-    def _reload(self, rescan_all_vols: bool = True) -> None:
-        with self.up2k.mutex:
-            if self.reloading != 1:
-                return
-            self.reloading = 2
             self.log("root", "reloading config")
-            self.asrv.reload()
+            self.asrv.reload(9 if up2k else 4)
-            self.up2k.reload(rescan_all_vols)
+            if up2k:
+                self.up2k.reload(rescan_all_vols)
+                t += "; volumes are now reinitializing"
+            else:
+                self.log("root", "reload done")
             self.broker.reload()
-            self.reloading = 0
+        return t

-    def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
+    def _reload_sessions(self) -> None:
-        while True:
+        with self.asrv.mutex:
-            with self.up2k.mutex:
+            self.asrv.load_sessions(True)
-                if self.reloading < 2:
+            self.broker.reload_sessions()
-                    self.reloading = 1
-                    break
-            time.sleep(0.05)
-
-        # try to handle multiple pending IdP reloads at once:
-        time.sleep(0.2)
-
-        self._reload(rescan_all_vols=rescan_all_vols)

     def stop_thr(self) -> None:
         while not self.stop_req:
@@ -746,7 +1038,7 @@ class SvcHub(object):

             if self.reload_req:
                 self.reload_req = False
-                self.reload()
+                self.reload(True, True)

         self.shutdown()

@@ -859,12 +1151,12 @@ class SvcHub(object):
             return

         with self.log_mutex:
-            zd = datetime.now(UTC)
+            dt = datetime.now(self.tz)
             ts = self.log_dfmt % (
-                zd.year,
+                dt.year,
-                zd.month * 100 + zd.day,
+                dt.month * 100 + dt.day,
-                (zd.hour * 100 + zd.minute) * 100 + zd.second,
+                (dt.hour * 100 + dt.minute) * 100 + dt.second,
-                zd.microsecond // self.log_div,
+                dt.microsecond // self.log_div,
             )

             if c and not self.args.no_ansi:
@@ -885,41 +1177,26 @@ class SvcHub(object):
             if not self.args.no_logflush:
                 self.logf.flush()

-            now = time.time()
+            if dt.day != self.cday or dt.month != self.cmon:
-            if int(now) >= self.next_day:
+                self._set_next_day(dt)
-                self._set_next_day()

-    def _set_next_day(self) -> None:
+    def _set_next_day(self, dt: datetime) -> None:
-        if self.next_day and self.logf and self.logf_base_fn != self._logname():
+        if self.cday and self.logf and self.logf_base_fn != self._logname():
             self.logf.close()
             self._setup_logfile("")

-        dt = datetime.now(UTC)
+        self.cday = dt.day
+        self.cmon = dt.month
-        # unix timestamp of next 00:00:00 (leap-seconds safe)
-        day_now = dt.day
-        while dt.day == day_now:
-            dt += timedelta(hours=12)
-
-        dt = dt.replace(hour=0, minute=0, second=0)
-        try:
-            tt = dt.utctimetuple()
-        except:
-            # still makes me hella uncomfortable
-            tt = dt.timetuple()
-
-        self.next_day = calendar.timegm(tt)

     def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
         """handles logging from all components"""
         with self.log_mutex:
-            now = time.time()
+            dt = datetime.now(self.tz)
-            if int(now) >= self.next_day:
+            if dt.day != self.cday or dt.month != self.cmon:
-                dt = datetime.fromtimestamp(now, UTC)
                 zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
                 zs = zs.format(dt.strftime("%Y-%m-%d"))
                 print(zs, end="")
-                self._set_next_day()
+                self._set_next_day(dt)
                 if self.logf:
                     self.logf.write(zs)

@@ -938,12 +1215,11 @@ class SvcHub(object):
             else:
                 msg = "%s%s\033[0m" % (c, msg)

-            zd = datetime.fromtimestamp(now, UTC)
             ts = self.log_efmt % (
-                zd.hour,
+                dt.hour,
-                zd.minute,
+                dt.minute,
-                zd.second,
+                dt.second,
-                zd.microsecond // self.log_div,
+                dt.microsecond // self.log_div,
             )
             msg = fmt % (ts, src, msg)
             try:
@@ -1041,5 +1317,5 @@ class SvcHub(object):
         zs = "{}\n{}".format(VERSIONS, alltrace())
         zb = zs.encode("utf-8", "replace")
         zb = gzip.compress(zb)
-        zs = base64.b64encode(zb).decode("ascii")
+        zs = ub64enc(zb).decode("ascii")
         self.log("stacks", zs)
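The logging rework above drops the precomputed next-midnight timestamp and instead compares the current day/month against cached values taken from a timezone-aware datetime. A minimal sketch of that rollover check (attribute names follow the diff, the wrapper class is illustrative):

```python
from datetime import datetime, timezone

class DayTracker:
    def __init__(self, tz=timezone.utc):
        self.tz = tz
        self.cday = 0   # cached day-of-month; 0 forces a rollover on first use
        self.cmon = 0   # cached month

    def rolled_over(self) -> bool:
        dt = datetime.now(self.tz)
        if dt.day != self.cday or dt.month != self.cmon:
            self.cday = dt.day
            self.cmon = dt.month
            return True
        return False

t = DayTracker()
print(t.rolled_over())  # True on the first call
print(t.rolled_over())  # False until the local date changes
```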
@@ -1,12 +1,12 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import argparse
 import calendar
 import stat
 import time
 import zlib

+from .authsrv import AuthSrv
 from .bos import bos
 from .sutil import StreamArc, errdesc
 from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
@@ -37,9 +37,7 @@ def dostime2unix(buf: bytes) -> int:


 def unixtime2dos(ts: int) -> bytes:
-    tt = time.gmtime(ts + 1)
+    dy, dm, dd, th, tm, ts, _, _, _ = time.gmtime(ts + 1)
-    dy, dm, dd, th, tm, ts = list(tt)[:6]

     bd = ((dy - 1980) << 9) + (dm << 5) + dd
     bt = (th << 11) + (tm << 5) + ts // 2
     try:
@@ -102,12 +100,12 @@ def gen_hdr(

     # spec says to put zeros when !crc if bit3 (streaming)
     # however infozip does actual sz and it even works on winxp
-    # (same reasning for z64 extradata later)
+    # (same reasoning for z64 extradata later)
     vsz = 0xFFFFFFFF if z64 else sz
     ret += spack(b"<LL", vsz, vsz)

     # windows support (the "?" replace below too)
-    fn = sanitize_fn(fn, "/", [])
+    fn = sanitize_fn(fn, "/")
     bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")

     # add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
@@ -219,13 +217,13 @@ class StreamZip(StreamArc):
     def __init__(
         self,
         log: "NamedLogger",
-        args: argparse.Namespace,
+        asrv: AuthSrv,
         fgen: Generator[dict[str, Any], None, None],
         utf8: bool = False,
         pre_crc: bool = False,
         **kwargs: Any
     ) -> None:
-        super(StreamZip, self).__init__(log, args, fgen)
+        super(StreamZip, self).__init__(log, asrv, fgen)

         self.utf8 = utf8
         self.pre_crc = pre_crc
@@ -302,7 +300,7 @@ class StreamZip(StreamArc):
             mbuf = b""

         if errors:
-            errf, txt = errdesc(errors)
+            errf, txt = errdesc(self.asrv.vfs, errors)
             self.log("\n".join(([repr(errf)] + txt[1:])))
             for x in self.ser(errf):
                 yield x
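The `unixtime2dos` change above only unpacks `time.gmtime` directly; the DOS date/time bit layout itself is unchanged. A worked sketch of that packing (the sample timestamp is mine, not from the diff):

```python
import time

ts = 1700000000  # 2023-11-14 22:13:20 UTC
dy, dm, dd, th, tm, tsec = time.gmtime(ts + 1)[:6]

bd = ((dy - 1980) << 9) + (dm << 5) + dd  # 7-bit year offset, 4-bit month, 5-bit day
bt = (th << 11) + (tm << 5) + tsec // 2   # 5-bit hour, 6-bit minute, 2-second units

print(hex(bd), hex(bt))
```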
|
|||||||
@@ -15,19 +15,25 @@ from .util import (
|
|||||||
E_ADDR_IN_USE,
|
E_ADDR_IN_USE,
|
||||||
E_ADDR_NOT_AVAIL,
|
E_ADDR_NOT_AVAIL,
|
||||||
E_UNREACH,
|
E_UNREACH,
|
||||||
|
HAVE_IPV6,
|
||||||
IP6ALL,
|
IP6ALL,
|
||||||
|
VF_CAREFUL,
|
||||||
Netdev,
|
Netdev,
|
||||||
|
atomic_move,
|
||||||
min_ex,
|
min_ex,
|
||||||
sunpack,
|
sunpack,
|
||||||
termsize,
|
termsize,
|
||||||
)
|
)
|
||||||
|
|
||||||
if True:
|
if True:
|
||||||
from typing import Generator
|
from typing import Generator, Union
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if not hasattr(socket, "AF_UNIX"):
|
||||||
|
setattr(socket, "AF_UNIX", -9001)
|
||||||
|
|
||||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||||
setattr(socket, "IPPROTO_IPV6", 41)
|
setattr(socket, "IPPROTO_IPV6", 41)
|
||||||
|
|
||||||
@@ -89,7 +95,7 @@ class TcpSrv(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
# binding 0.0.0.0 after :: fails on dualstack
|
# binding 0.0.0.0 after :: fails on dualstack
|
||||||
# but is necessary on non-dualstakc
|
# but is necessary on non-dualstack
|
||||||
if successful_binds:
|
if successful_binds:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -111,8 +117,10 @@ class TcpSrv(object):
|
|||||||
|
|
||||||
eps = {
|
eps = {
|
||||||
"127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
|
"127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
|
||||||
"::1": Netdev("::1", 0, "", "local only"),
|
|
||||||
}
|
}
|
||||||
|
if HAVE_IPV6:
|
||||||
|
eps["::1"] = Netdev("::1", 0, "", "local only")
|
||||||
|
|
||||||
nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
|
nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
|
||||||
if nonlocals:
|
if nonlocals:
|
||||||
try:
|
try:
|
||||||
@@ -214,14 +222,41 @@ class TcpSrv(object):
|
|||||||
if self.args.qr or self.args.qrs:
|
if self.args.qr or self.args.qrs:
|
||||||
self.qr = self._qr(qr1, qr2)
|
self.qr = self._qr(qr1, qr2)
|
||||||
|
|
||||||
|
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
|
self.log("tcpsrv", msg, c)
|
||||||
|
|
||||||
def _listen(self, ip: str, port: int) -> None:
|
def _listen(self, ip: str, port: int) -> None:
|
||||||
ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
uds_perm = uds_gid = -1
|
||||||
|
if "unix:" in ip:
|
||||||
|
tcp = False
|
||||||
|
ipv = socket.AF_UNIX
|
||||||
|
uds = ip.split(":")
|
||||||
|
ip = uds[-1]
|
||||||
|
if len(uds) > 2:
|
||||||
|
uds_perm = int(uds[1], 8)
|
||||||
|
if len(uds) > 3:
|
||||||
|
try:
|
||||||
|
uds_gid = int(uds[2])
|
||||||
|
except:
|
||||||
|
import grp
|
||||||
|
|
||||||
|
uds_gid = grp.getgrnam(uds[2]).gr_gid
|
||||||
|
|
||||||
|
elif ":" in ip:
|
||||||
|
tcp = True
|
||||||
|
ipv = socket.AF_INET6
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ipv = socket.AF_INET
|
||||||
|
|
||||||
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
||||||
|
|
||||||
if not ANYWIN or self.args.reuseaddr:
|
if not ANYWIN or self.args.reuseaddr:
|
||||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
|
||||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
if tcp:
|
||||||
|
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
|
||||||
srv.settimeout(None) # < does not inherit, ^ opts above do
|
srv.settimeout(None) # < does not inherit, ^ opts above do
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -233,8 +268,25 @@ class TcpSrv(object):
|
|||||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
srv.bind((ip, port))
|
if tcp:
|
||||||
sport = srv.getsockname()[1]
|
srv.bind((ip, port))
|
||||||
|
else:
|
||||||
|
if ANYWIN or self.args.rm_sck:
|
||||||
|
if os.path.exists(ip):
|
||||||
|
os.unlink(ip)
|
||||||
|
srv.bind(ip)
|
||||||
|
else:
|
||||||
|
tf = "%s.%d" % (ip, os.getpid())
|
||||||
|
if os.path.exists(tf):
|
||||||
|
os.unlink(tf)
|
||||||
|
srv.bind(tf)
|
||||||
|
if uds_gid != -1:
|
||||||
|
os.chown(tf, -1, uds_gid)
|
||||||
|
if uds_perm != -1:
|
||||||
|
os.chmod(tf, uds_perm)
|
||||||
|
atomic_move(self.nlog, tf, ip, VF_CAREFUL)
|
||||||
|
|
||||||
|
sport = srv.getsockname()[1] if tcp else port
|
||||||
if port != sport:
|
if port != sport:
|
||||||
# linux 6.0.16 lets you bind a port which is in use
|
# linux 6.0.16 lets you bind a port which is in use
|
||||||
# except it just gives you a random port instead
|
# except it just gives you a random port instead
|
||||||
@@ -246,12 +298,23 @@ class TcpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
e = ""
|
||||||
if ex.errno in E_ADDR_IN_USE:
|
if ex.errno in E_ADDR_IN_USE:
|
||||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||||
|
if not tcp:
|
||||||
|
e = "\033[1;31munix-socket {} is busy\033[0m".format(ip)
|
||||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||||
else:
|
|
||||||
|
if not e:
|
||||||
|
if not tcp:
|
||||||
|
t = "\n\n\n NOTE: this crash may be due to a unix-socket bug; try --rm-sck\n"
|
||||||
|
self.log("tcpsrv", t, 2)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
if not tcp and not self.args.rm_sck:
|
||||||
|
e += "; maybe this is a bug? try --rm-sck"
|
||||||
|
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
@@ -259,7 +322,14 @@ class TcpSrv(object):
|
|||||||
bound: list[tuple[str, int]] = []
|
bound: list[tuple[str, int]] = []
|
||||||
srvs: list[socket.socket] = []
|
srvs: list[socket.socket] = []
|
||||||
for srv in self.srv:
|
for srv in self.srv:
|
||||||
ip, port = srv.getsockname()[:2]
|
if srv.family == socket.AF_UNIX:
|
||||||
|
tcp = False
|
||||||
|
ip = re.sub(r"\.[0-9]+$", "", srv.getsockname())
|
||||||
|
port = 0
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ip, port = srv.getsockname()[:2]
|
||||||
|
|
||||||
if ip == IP6ALL:
|
if ip == IP6ALL:
|
||||||
ip = "::" # jython
|
ip = "::" # jython
|
||||||
|
|
||||||
@@ -291,13 +361,17 @@ class TcpSrv(object):
|
|||||||
bound.append((ip, port))
|
bound.append((ip, port))
|
||||||
srvs.append(srv)
|
srvs.append(srv)
|
||||||
fno = srv.fileno()
|
fno = srv.fileno()
|
||||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
if tcp:
|
||||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||||
|
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||||
|
else:
|
||||||
|
msg = "listening @ {} f{} p{}".format(ip, fno, os.getpid())
|
||||||
|
|
||||||
self.log("tcpsrv", msg)
|
self.log("tcpsrv", msg)
|
||||||
if self.args.q:
|
if self.args.q:
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
self.hub.broker.say("listen", srv)
|
self.hub.broker.say("httpsrv.listen", srv)
|
||||||
|
|
||||||
self.srv = srvs
|
self.srv = srvs
|
||||||
self.bound = bound
|
self.bound = bound
|
||||||
@@ -305,7 +379,7 @@ class TcpSrv(object):
|
|||||||
self._distribute_netdevs()
|
self._distribute_netdevs()
|
||||||
|
|
||||||
def _distribute_netdevs(self):
|
def _distribute_netdevs(self):
|
||||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
self.hub.broker.say("httpsrv.set_netdevs", self.netdevs)
|
||||||
self.hub.start_zeroconf()
|
self.hub.start_zeroconf()
|
||||||
gencert(self.log, self.args, self.netdevs)
|
gencert(self.log, self.args, self.netdevs)
|
||||||
self.hub.restart_ftpd()
|
self.hub.restart_ftpd()
|
||||||
@@ -328,23 +402,25 @@ class TcpSrv(object):
|
|||||||
if not netdevs:
|
if not netdevs:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
added = "nothing"
|
add = []
|
||||||
removed = "nothing"
|
rem = []
|
||||||
for k, v in netdevs.items():
|
for k, v in netdevs.items():
|
||||||
if k not in self.netdevs:
|
if k not in self.netdevs:
|
||||||
added = "{} = {}".format(k, v)
|
add.append("\n\033[32m added %s = %s" % (k, v))
|
||||||
for k, v in self.netdevs.items():
|
for k, v in self.netdevs.items():
|
||||||
if k not in netdevs:
|
if k not in netdevs:
|
||||||
removed = "{} = {}".format(k, v)
|
rem.append("\n\033[33mremoved %s = %s" % (k, v))
|
||||||
|
|
||||||
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
|
t = "network change detected:%s%s"
|
||||||
self.log("tcpsrv", t.format(added, removed), 3)
|
self.log("tcpsrv", t % ("".join(add), "".join(rem)), 3)
|
||||||
self.netdevs = netdevs
|
self.netdevs = netdevs
|
||||||
self._distribute_netdevs()
|
self._distribute_netdevs()
|
||||||
|
|
||||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||||
from .stolen.ifaddr import get_adapters
|
from .stolen.ifaddr import get_adapters
|
||||||
|
|
||||||
|
listen_ips = [x for x in listen_ips if "unix:" not in x]
|
||||||
|
|
||||||
nics = get_adapters(True)
|
nics = get_adapters(True)
|
||||||
eps: dict[str, Netdev] = {}
|
eps: dict[str, Netdev] = {}
|
||||||
for nic in nics:
|
for nic in nics:
|
||||||
@@ -463,6 +539,12 @@ class TcpSrv(object):
|
|||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
|
||||||
def _qr(self, t1: dict[str, list[int]], t2: dict[str, list[int]]) -> str:
|
def _qr(self, t1: dict[str, list[int]], t2: dict[str, list[int]]) -> str:
|
||||||
|
t2c = {zs: zli for zs, zli in t2.items() if zs in ("127.0.0.1", "::1")}
|
||||||
|
t2b = {zs: zli for zs, zli in t2.items() if ":" in zs and zs not in t2c}
|
||||||
|
t2 = {zs: zli for zs, zli in t2.items() if zs not in t2b and zs not in t2c}
|
||||||
|
t2.update(t2b) # first ipv4, then ipv6...
|
||||||
|
t2.update(t2c) # ...and finally localhost
|
||||||
|
|
||||||
ip = None
|
ip = None
|
||||||
ips = list(t1) + list(t2)
|
ips = list(t1) + list(t2)
|
||||||
qri = self.args.qri
|
qri = self.args.qri
|
||||||
|
|||||||
@@ -33,10 +33,10 @@ from partftpy import (
|
|||||||
)
|
)
|
||||||
from partftpy.TftpShared import TftpException
|
from partftpy.TftpShared import TftpException
|
||||||
|
|
||||||
from .__init__ import EXE, TYPE_CHECKING
|
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
@@ -44,6 +44,9 @@ if True: # pylint: disable=using-constant-test
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
lg = logging.getLogger("tftp")
|
lg = logging.getLogger("tftp")
|
||||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||||
@@ -95,7 +98,7 @@ class Tftpd(object):
|
|||||||
TftpServer,
|
TftpServer,
|
||||||
]
|
]
|
||||||
cbak = []
|
cbak = []
|
||||||
if not self.args.tftp_no_fast and not EXE:
|
if not self.args.tftp_no_fast and not EXE and not PY2:
|
||||||
try:
|
try:
|
||||||
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
||||||
for C in Cs:
|
for C in Cs:
|
||||||
@@ -105,7 +108,7 @@ class Tftpd(object):
|
|||||||
cfn = C.__spec__.origin
|
cfn = C.__spec__.origin
|
||||||
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
||||||
except Exception:
|
except Exception:
|
||||||
t = "failed to optimize tftp code; run with --tftp-noopt if there are issues:\n"
|
t = "failed to optimize tftp code; run with --tftp-no-fast if there are issues:\n"
|
||||||
self.log("tftp", t + min_ex(), 3)
|
self.log("tftp", t + min_ex(), 3)
|
||||||
for n, zd in enumerate(cbak):
|
for n, zd in enumerate(cbak):
|
||||||
Cs[n].__dict__ = zd
|
Cs[n].__dict__ = zd
|
||||||
@@ -150,11 +153,6 @@ class Tftpd(object):
|
|||||||
|
|
||||||
self._disarm(fos)
|
self._disarm(fos)
|
||||||
|
|
||||||
ip = next((x for x in self.args.i if ":" not in x), None)
|
|
||||||
if not ip:
|
|
||||||
self.log("tftp", "IPv6 not supported for tftp; listening on 0.0.0.0", 3)
|
|
||||||
ip = "0.0.0.0"
|
|
||||||
|
|
||||||
self.port = int(self.args.tftp)
|
self.port = int(self.args.tftp)
|
||||||
self.srv = []
|
self.srv = []
|
||||||
self.ips = []
|
self.ips = []
|
||||||
@@ -168,9 +166,16 @@ class Tftpd(object):
|
|||||||
if "::" in ips:
|
if "::" in ips:
|
||||||
ips.append("0.0.0.0")
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
if self.args.ftp4:
|
ips = [x for x in ips if "unix:" not in x]
|
||||||
|
|
||||||
|
if self.args.tftp4:
|
||||||
ips = [x for x in ips if ":" not in x]
|
ips = [x for x in ips if ":" not in x]
|
||||||
|
|
||||||
|
if not ips:
|
||||||
|
t = "cannot start tftp-server; no compatible IPs in -i"
|
||||||
|
self.nlog(t, 1)
|
||||||
|
return
|
||||||
|
|
||||||
ips = list(ODict.fromkeys(ips)) # dedup
|
ips = list(ODict.fromkeys(ips)) # dedup
|
||||||
|
|
||||||
for ip in ips:
|
for ip in ips:
|
||||||
@@ -246,6 +251,8 @@ class Tftpd(object):
|
|||||||
|
|
||||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||||
|
if not vfs.realpath:
|
||||||
|
raise Exception("unmapped vfs")
|
||||||
return vfs, vfs.canonical(rem)
|
return vfs, vfs.canonical(rem)
|
||||||
|
|
||||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||||
@@ -262,12 +269,13 @@ class Tftpd(object):
|
|||||||
"*",
|
"*",
|
||||||
not self.args.no_scandir,
|
not self.args.no_scandir,
|
||||||
[[True, False]],
|
[[True, False]],
|
||||||
|
throw=True,
|
||||||
)
|
)
|
||||||
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||||
real1 = dirs1 + fils1
|
real1 = dirs1 + fils1
|
||||||
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
|
||||||
reals = [
|
reals = [
|
||||||
(
|
(
|
||||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||||
@@ -333,15 +341,29 @@ class Tftpd(object):
|
|||||||
|
|
||||||
xbu = vfs.flags.get("xbu")
|
xbu = vfs.flags.get("xbu")
|
||||||
if xbu and not runhook(
|
if xbu and not runhook(
|
||||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "8.3.8.7", 0, ""
|
self.nlog,
|
||||||
|
None,
|
||||||
|
self.hub.up2k,
|
||||||
|
"xbu.tftpd",
|
||||||
|
xbu,
|
||||||
|
ap,
|
||||||
|
vpath,
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
"8.3.8.7",
|
||||||
|
time.time(),
|
||||||
|
"",
|
||||||
):
|
):
|
||||||
yeet("blocked by xbu server config: " + vpath)
|
yeet("blocked by xbu server config: %r" % (vpath,))
|
||||||
|
|
||||||
if not self.args.tftp_nols and bos.path.isdir(ap):
|
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||||
return self._ls(vpath, "", 0, True)
|
return self._ls(vpath, "", 0, True)
|
||||||
|
|
||||||
if not a:
|
if not a:
|
||||||
a = [self.args.iobuf]
|
a = (self.args.iobuf,)
|
||||||
|
|
||||||
return open(ap, mode, *a, **ka)
|
return open(ap, mode, *a, **ka)
|
||||||
|
|
||||||
@@ -382,7 +404,7 @@ class Tftpd(object):
|
|||||||
bos.stat(ap)
|
bos.stat(ap)
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
return False
|
return vpath == "/"
|
||||||
|
|
||||||
def _p_isdir(self, vpath: str) -> bool:
|
def _p_isdir(self, vpath: str) -> bool:
|
||||||
try:
|
try:
|
||||||
@@ -390,7 +412,7 @@ class Tftpd(object):
|
|||||||
ret = stat.S_ISDIR(st.st_mode)
|
ret = stat.S_ISDIR(st.st_mode)
|
||||||
return ret
|
return ret
|
||||||
except:
|
except:
|
||||||
return False
|
return vpath == "/"
|
||||||
|
|
||||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||||
src = inspect.currentframe().f_back.f_code.co_name
|
src = inspect.currentframe().f_back.f_code.co_name
|
||||||
|
|||||||
@@ -57,9 +57,10 @@ class ThumbCli(object):
         if is_vid and "dvthumb" in dbv.flags:
             return None

-        want_opus = fmt in ("opus", "caf")
+        want_opus = fmt in ("opus", "caf", "mp3")
         is_au = ext in self.fmt_ffa
-        if is_au:
+        is_vau = want_opus and ext in self.fmt_ffv
+        if is_au or is_vau:
             if want_opus:
                 if self.args.no_acode:
                     return None
@@ -107,9 +108,14 @@ class ThumbCli(object):

             fmt = sfmt

+        elif fmt[:1] == "p" and not is_au and not is_vid:
+            t = "cannot thumbnail %r: png only allowed for waveforms"
+            self.log(t % (rem,), 6)
+            return None
+
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None

         tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
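The ThumbCli change above adds "mp3" as an audio output format and rejects png requests for anything that is not an audio waveform. A compact, simplified sketch of that gating (names and structure are illustrative, not the project's API):

```python
def pick_thumb_kind(fmt: str, is_audio: bool, is_video: bool):
    want_audio_out = fmt in ("opus", "caf", "mp3")
    if want_audio_out:
        # audio output can also be extracted from video containers
        return "audio" if is_audio or is_video else None
    if fmt.startswith("p") and not is_audio and not is_video:
        return None  # png is only allowed for audio waveforms
    return "image"

print(pick_thumb_kind("mp3", True, False))  # audio
print(pick_thumb_kind("p", False, False))   # None
```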
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import base64
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
@@ -12,22 +11,23 @@ import time
|
|||||||
|
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
||||||
from .util import BytesIO # type: ignore
|
from .util import BytesIO # type: ignore
|
||||||
from .util import (
|
from .util import (
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
Cooldown,
|
Cooldown,
|
||||||
Daemon,
|
Daemon,
|
||||||
Pebkac,
|
|
||||||
afsenc,
|
afsenc,
|
||||||
fsenc,
|
fsenc,
|
||||||
min_ex,
|
min_ex,
|
||||||
runcmd,
|
runcmd,
|
||||||
statdir,
|
statdir,
|
||||||
|
ub64enc,
|
||||||
vsplit,
|
vsplit,
|
||||||
|
wrename,
|
||||||
wunlink,
|
wunlink,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -37,6 +37,9 @@ if True: # pylint: disable=using-constant-test
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
HAVE_PIL = False
|
HAVE_PIL = False
|
||||||
HAVE_PILF = False
|
HAVE_PILF = False
|
||||||
HAVE_HEIF = False
|
HAVE_HEIF = False
|
||||||
@@ -44,22 +47,34 @@ HAVE_AVIF = False
|
|||||||
HAVE_WEBP = False
|
HAVE_WEBP = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||||
|
|
||||||
HAVE_PIL = True
|
HAVE_PIL = True
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PILF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
ImageFont.load_default(size=16)
|
ImageFont.load_default(size=16)
|
||||||
HAVE_PILF = True
|
HAVE_PILF = True
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_WEBP"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||||
HAVE_WEBP = True
|
HAVE_WEBP = True
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_HEIF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
from pyheif_pillow_opener import register_heif_opener
|
from pyheif_pillow_opener import register_heif_opener
|
||||||
|
|
||||||
register_heif_opener()
|
register_heif_opener()
|
||||||
@@ -68,6 +83,9 @@ try:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_AVIF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||||
|
|
||||||
HAVE_AVIF = True
|
HAVE_AVIF = True
|
||||||
@@ -79,6 +97,9 @@ except:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_VIPS"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
HAVE_VIPS = True
|
HAVE_VIPS = True
|
||||||
import pyvips
|
import pyvips
|
||||||
|
|
||||||
@@ -87,6 +108,9 @@ except:
|
|||||||
HAVE_VIPS = False
|
HAVE_VIPS = False
|
||||||
|
|
||||||
|
|
||||||
|
th_dir_cache = {}
|
||||||
|
|
||||||
|
|
||||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||||
# base16 = 16 = 256
|
# base16 = 16 = 256
|
||||||
# b64-lc = 38 = 1444
|
# b64-lc = 38 = 1444
|
||||||
@@ -100,16 +124,22 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -
|
|||||||
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
||||||
fmt = fmt.replace("f", "")
|
fmt = fmt.replace("f", "")
|
||||||
|
|
||||||
rd += "\n" + fmt
|
dcache = th_dir_cache
|
||||||
h = hashlib.sha512(afsenc(rd)).digest()
|
rd_key = rd + "\n" + fmt
|
||||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
rd = dcache.get(rd_key)
|
||||||
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
if not rd:
|
||||||
|
h = hashlib.sha512(afsenc(rd_key)).digest()
|
||||||
|
b64 = ub64enc(h).decode("ascii")[:24]
|
||||||
|
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
||||||
|
if len(dcache) > 9001:
|
||||||
|
dcache.clear()
|
||||||
|
dcache[rd_key] = rd
|
||||||
|
|
||||||
# could keep original filenames but this is safer re pathlen
|
# could keep original filenames but this is safer re pathlen
|
||||||
h = hashlib.sha512(afsenc(fn)).digest()
|
h = hashlib.sha512(afsenc(fn)).digest()
|
||||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
fn = ub64enc(h).decode("ascii")[:24]
|
||||||
|
|
||||||
if fmt in ("opus", "caf"):
|
if fmt in ("opus", "caf", "mp3"):
|
||||||
cat = "ac"
|
cat = "ac"
|
||||||
else:
|
else:
|
||||||
fc = fmt[:1]
|
fc = fmt[:1]
|
||||||
@@ -133,6 +163,7 @@ class ThumbSrv(object):
|
|||||||
self.ram: dict[str, float] = {}
|
self.ram: dict[str, float] = {}
|
||||||
self.memcond = threading.Condition(self.mutex)
|
self.memcond = threading.Condition(self.mutex)
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
self.rm_nullthumbs = True # forget failed conversions on startup
|
||||||
self.nthr = max(1, self.args.th_mt)
|
self.nthr = max(1, self.args.th_mt)
|
||||||
|
|
||||||
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||||
@@ -208,7 +239,7 @@ class ThumbSrv(object):
|
|||||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
if not histpath:
|
if not histpath:
|
||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for %r" % (ptop,))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
@@ -218,7 +249,7 @@ class ThumbSrv(object):
|
|||||||
with self.mutex:
|
with self.mutex:
|
||||||
try:
|
try:
|
||||||
self.busy[tpath].append(cond)
|
self.busy[tpath].append(cond)
|
||||||
self.log("joined waiting room for %s" % (tpath,))
|
self.log("joined waiting room for %r" % (tpath,))
|
||||||
except:
|
except:
|
||||||
thdir = os.path.dirname(tpath)
|
thdir = os.path.dirname(tpath)
|
||||||
bos.makedirs(os.path.join(thdir, "w"))
|
bos.makedirs(os.path.join(thdir, "w"))
|
||||||
@@ -235,11 +266,11 @@ class ThumbSrv(object):
|
|||||||
allvols = list(self.asrv.vfs.all_vols.values())
|
allvols = list(self.asrv.vfs.all_vols.values())
|
||||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||||
if not vn:
|
if not vn:
|
||||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
self.log("ptop %r not in %s" % (ptop, allvols), 3)
|
||||||
vn = self.asrv.vfs.all_aps[0][1]
|
vn = self.asrv.vfs.all_aps[0][1]
|
||||||
|
|
||||||
self.q.put((abspath, tpath, fmt, vn))
|
self.q.put((abspath, tpath, fmt, vn))
|
||||||
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)
|
||||||
|
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -296,25 +327,38 @@ class ThumbSrv(object):
|
|||||||
ext = abspath.split(".")[-1].lower()
|
ext = abspath.split(".")[-1].lower()
|
||||||
png_ok = False
|
png_ok = False
|
||||||
funs = []
|
funs = []
|
||||||
|
|
||||||
|
if ext in self.args.au_unpk:
|
||||||
|
ap_unpk = au_unpk(self.log, self.args.au_unpk, abspath, vn)
|
||||||
|
else:
|
||||||
|
ap_unpk = abspath
|
||||||
|
|
||||||
if not bos.path.exists(tpath):
|
if not bos.path.exists(tpath):
|
||||||
|
want_mp3 = tpath.endswith(".mp3")
|
||||||
|
want_opus = tpath.endswith(".opus") or tpath.endswith(".caf")
|
||||||
|
want_png = tpath.endswith(".png")
|
||||||
|
want_au = want_mp3 or want_opus
|
||||||
for lib in self.args.th_dec:
|
for lib in self.args.th_dec:
|
||||||
|
can_au = lib == "ff" and (
|
||||||
|
ext in self.fmt_ffa or ext in self.fmt_ffv
|
||||||
|
)
|
||||||
|
|
||||||
if lib == "pil" and ext in self.fmt_pil:
|
if lib == "pil" and ext in self.fmt_pil:
|
||||||
funs.append(self.conv_pil)
|
funs.append(self.conv_pil)
|
||||||
elif lib == "vips" and ext in self.fmt_vips:
|
elif lib == "vips" and ext in self.fmt_vips:
|
||||||
funs.append(self.conv_vips)
|
funs.append(self.conv_vips)
|
||||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
elif can_au and (want_png or want_au):
|
||||||
funs.append(self.conv_ffmpeg)
|
if want_opus:
|
||||||
elif lib == "ff" and ext in self.fmt_ffa:
|
|
||||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
|
||||||
funs.append(self.conv_opus)
|
funs.append(self.conv_opus)
|
||||||
elif tpath.endswith(".png"):
|
elif want_mp3:
|
||||||
|
funs.append(self.conv_mp3)
|
||||||
|
elif want_png:
|
||||||
funs.append(self.conv_waves)
|
funs.append(self.conv_waves)
|
||||||
png_ok = True
|
png_ok = True
|
||||||
else:
|
elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
|
||||||
funs.append(self.conv_spec)
|
funs.append(self.conv_ffmpeg)
|
||||||
|
elif lib == "ff" and ext in self.fmt_ffa and not want_au:
|
||||||
if not png_ok and tpath.endswith(".png"):
|
funs.append(self.conv_spec)
|
||||||
raise Pebkac(400, "png only allowed for waveforms")
|
|
||||||
|
|
||||||
tdir, tfn = os.path.split(tpath)
|
tdir, tfn = os.path.split(tpath)
|
||||||
ttpath = os.path.join(tdir, "w", tfn)
|
ttpath = os.path.join(tdir, "w", tfn)
|
||||||
@@ -325,11 +369,14 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
for fun in funs:
|
for fun in funs:
|
||||||
try:
|
try:
|
||||||
fun(abspath, ttpath, fmt, vn)
|
if not png_ok and tpath.endswith(".png"):
|
||||||
|
raise Exception("png only allowed for waveforms")
|
||||||
|
|
||||||
|
fun(ap_unpk, ttpath, fmt, vn)
|
||||||
break
|
break
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
msg = "{} could not create thumbnail of {}\n{}"
|
msg = "%s could not create thumbnail of %r\n%s"
|
||||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
msg = msg % (fun.__name__, abspath, min_ex())
|
||||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
||||||
self.log(msg, c)
|
self.log(msg, c)
|
||||||
if getattr(ex, "returncode", 0) != 321:
|
if getattr(ex, "returncode", 0) != 321:
|
||||||
@@ -343,8 +390,11 @@ class ThumbSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
if abspath != ap_unpk:
|
||||||
|
wunlink(self.log, ap_unpk, vn.flags)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
bos.rename(ttpath, tpath)
|
wrename(self.log, ttpath, tpath, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -438,7 +488,7 @@ class ThumbSrv(object):
|
|||||||
if c == crops[-1]:
|
if c == crops[-1]:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
assert img # type: ignore
|
assert img # type: ignore # !rm
|
||||||
img.write_to_file(tpath, Q=40)
|
img.write_to_file(tpath, Q=40)
|
||||||
|
|
||||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
@@ -581,6 +631,25 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
|
if "pngquant" in vn.flags:
|
||||||
|
wtpath = tpath + ".png"
|
||||||
|
cmd = [
|
||||||
|
b"pngquant",
|
||||||
|
b"--strip",
|
||||||
|
b"--nofs",
|
||||||
|
b"--output",
|
||||||
|
fsenc(wtpath),
|
||||||
|
fsenc(tpath),
|
||||||
|
]
|
||||||
|
ret = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=400)[0]
|
||||||
|
if ret:
|
||||||
|
try:
|
||||||
|
wunlink(self.log, wtpath, vn.flags)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
wrename(self.log, wtpath, tpath, vn.flags)
|
||||||
|
|
||||||
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in ret:
|
||||||
@@ -637,21 +706,60 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
def conv_mp3(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
if self.args.no_acode:
|
quality = self.args.q_mp3.lower()
|
||||||
|
if self.args.no_acode or not quality:
|
||||||
raise Exception("disabled in server config")
|
raise Exception("disabled in server config")
|
 
 self.wait4ram(0.2, tpath)
-ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
+tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
-if "ac" not in ret:
+if "ac" not in tags:
+raise Exception("not audio")
+
+if quality.endswith("k"):
+qk = b"-b:a"
+qv = quality.encode("ascii")
+else:
+qk = b"-q:a"
+qv = quality[1:].encode("ascii")
+
+# extremely conservative choices for output format
+# (always 2ch 44k1) because if a device is old enough
+# to not support opus then it's probably also super picky
+
+# fmt: off
+cmd = [
+b"ffmpeg",
+b"-nostdin",
+b"-v", b"error",
+b"-hide_banner",
+b"-i", fsenc(abspath),
+] + self.big_tags(rawtags) + [
+b"-map", b"0:a:0",
+b"-ar", b"44100",
+b"-ac", b"2",
+b"-c:a", b"libmp3lame",
+qk, qv,
+fsenc(tpath)
+]
+# fmt: on
+self._run_ff(cmd, vn, oom=300)
+
+def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
+if self.args.no_acode or not self.args.q_opus:
+raise Exception("disabled in server config")
+
+self.wait4ram(0.2, tpath)
+tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
+if "ac" not in tags:
 raise Exception("not audio")
 
 try:
-dur = ret[".dur"][1]
+dur = tags[".dur"][1]
 except:
 dur = 0
 
-src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
+src_opus = abspath.lower().endswith(".opus") or tags["ac"][1] == "opus"
 want_caf = tpath.endswith(".caf")
 tmp_opus = tpath
 if want_caf:
@@ -662,6 +770,7 @@ class ThumbSrv(object):
 pass
 
 caf_src = abspath if src_opus else tmp_opus
+bq = ("%dk" % (self.args.q_opus,)).encode("ascii")
 
 if not want_caf or not src_opus:
 # fmt: off
@@ -671,10 +780,10 @@ class ThumbSrv(object):
 b"-v", b"error",
 b"-hide_banner",
 b"-i", fsenc(abspath),
-b"-map_metadata", b"-1",
+] + self.big_tags(rawtags) + [
 b"-map", b"0:a:0",
 b"-c:a", b"libopus",
-b"-b:a", b"128k",
+b"-b:a", bq,
 fsenc(tmp_opus)
 ]
 # fmt: on
@@ -697,7 +806,7 @@ class ThumbSrv(object):
 b"-map_metadata", b"-1",
 b"-ac", b"2",
 b"-c:a", b"libopus",
-b"-b:a", b"128k",
+b"-b:a", bq,
 b"-f", b"caf",
 fsenc(tpath)
 ]
@@ -728,6 +837,16 @@ class ThumbSrv(object):
 except:
 pass
 
+def big_tags(self, raw_tags: dict[str, list[str]]) -> list[bytes]:
+ret = []
+for k, vs in raw_tags.items():
+for v in vs:
+if len(str(v)) >= 1024:
+bv = k.encode("utf-8", "replace")
+ret += [b"-metadata", bv + b"="]
+break
+return ret
+
 def poke(self, tdir: str) -> None:
 if not self.poke_cd.poke(tdir):
 return
@@ -743,7 +862,6 @@ class ThumbSrv(object):
 def cleaner(self) -> None:
 interval = self.args.th_clean
 while True:
-time.sleep(interval)
 ndirs = 0
 for vol, histpath in self.asrv.vfs.histtab.items():
 if histpath.startswith(vol):
@@ -757,6 +875,8 @@ class ThumbSrv(object):
 self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
 
 self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
+self.rm_nullthumbs = False
+time.sleep(interval)
 
 def clean(self, histpath: str) -> int:
 ret = 0
@@ -771,13 +891,15 @@ class ThumbSrv(object):
 
 def _clean(self, cat: str, thumbpath: str) -> int:
 # self.log("cln {}".format(thumbpath))
-exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf"]
+exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf", "mp3"]
 maxage = getattr(self.args, cat + "_maxage")
 now = time.time()
 prev_b64 = None
 prev_fp = ""
 try:
-t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
+t1 = statdir(
+self.log_func, not self.args.no_scandir, False, thumbpath, False
+)
 ents = sorted(list(t1))
 except:
 return 0
@@ -818,6 +940,10 @@ class ThumbSrv(object):
 
 continue
 
+if self.rm_nullthumbs and not inf.st_size:
+bos.unlink(fp)
+continue
+
 if b64 == prev_b64:
 self.log("rm replaced [{}]".format(fp))
 bos.unlink(prev_fp)
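The mp3-transcode path and the new big_tags() helper above share one idea: before invoking ffmpeg, any tag whose value is 1024 characters or longer gets overridden with an empty "-metadata KEY=" so oversized embedded lyrics or cover blobs are not copied into the output. A rough standalone sketch of that idea follows; the subprocess plumbing is only illustrative, since copyparty drives ffmpeg through its own _run_ff() wrapper.

# sketch: blank any oversized tag before transcoding (mirrors big_tags in the diff)
def big_tags(raw_tags: dict[str, list[str]]) -> list[bytes]:
    ret: list[bytes] = []
    for k, vs in raw_tags.items():
        for v in vs:
            if len(str(v)) >= 1024:
                ret += [b"-metadata", k.encode("utf-8", "replace") + b"="]
                break
    return ret

cmd = [b"ffmpeg", b"-nostdin", b"-i", b"song.flac"]
cmd += big_tags({"lyrics": ["x" * 4096], "artist": ["ok"]})
cmd += [b"-c:a", b"libmp3lame", b"-q:a", b"2", b"out.mp3"]
# cmd now contains [b"-metadata", b"lyrics="] but leaves "artist" untouched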
@@ -8,7 +8,7 @@ import threading
 import time
 from operator import itemgetter
 
-from .__init__ import ANYWIN, TYPE_CHECKING, unicode
+from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
 from .authsrv import LEELOO_DALLAS, VFS
 from .bos import bos
 from .up2k import up2k_wark_from_hashlist
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
 if TYPE_CHECKING:
 from .httpsrv import HttpSrv
 
+if PY2:
+range = xrange # type: ignore
+
+
 class U2idx(object):
 def __init__(self, hsrv: "HttpSrv") -> None:
@@ -50,18 +53,42 @@ class U2idx(object):
 self.log("your python does not have sqlite3; searching will be disabled")
 return
 
+assert sqlite3 # type: ignore # !rm
+
 self.active_id = ""
 self.active_cur: Optional["sqlite3.Cursor"] = None
 self.cur: dict[str, "sqlite3.Cursor"] = {}
 self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
 self.mem_cur.execute(r"create table a (b text)")
+
+self.sh_cur: Optional["sqlite3.Cursor"] = None
 
 self.p_end = 0.0
 self.p_dur = 0.0
 
 def log(self, msg: str, c: Union[int, str] = 0) -> None:
 self.log_func("u2idx", msg, c)
 
+def shutdown(self) -> None:
+if not HAVE_SQLITE3:
+return
+
+for cur in self.cur.values():
+db = cur.connection
+try:
+db.interrupt()
+except:
+pass
+
+cur.close()
+db.close()
+
+for cur in (self.mem_cur, self.sh_cur):
+if cur:
+db = cur.connection
+cur.close()
+db.close()
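The new U2idx.shutdown() above interrupts whatever query may still be running on each connection before closing its cursor and the connection itself, then does the same for the in-memory and share cursors. A minimal sketch of that interrupt-then-close pattern on a bare sqlite3 connection (not copyparty's code, just the stdlib calls it relies on):

import sqlite3

db = sqlite3.connect(":memory:", check_same_thread=False)
cur = db.cursor()
cur.execute("create table a (b text)")

try:
    db.interrupt()  # aborts a query running on another thread; harmless if idle
except Exception:
    pass

cur.close()
db.close()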
 
 def fsearch(
 self, uname: str, vols: list[VFS], body: dict[str, Any]
 ) -> list[dict[str, Any]]:
@@ -77,21 +104,39 @@ class U2idx(object):
 uv: list[Union[str, int]] = [wark[:16], wark]
 
 try:
-return self.run_query(uname, vols, uq, uv, False, 99999)[0]
+return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
 except:
 raise Pebkac(500, min_ex())
 
-def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
+def get_shr(self) -> Optional["sqlite3.Cursor"]:
-if not HAVE_SQLITE3:
+if self.sh_cur:
+return self.sh_cur
+
+if not HAVE_SQLITE3 or not self.args.shr:
 return None
 
-cur = self.cur.get(ptop)
+assert sqlite3 # type: ignore # !rm
+
+db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
+cur = db.cursor()
+cur.execute('pragma table_info("sh")').fetchall()
+self.sh_cur = cur
+return cur
+
+def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
+cur = self.cur.get(vn.realpath)
 if cur:
 return cur
 
+if not HAVE_SQLITE3 or "e2d" not in vn.flags:
+return None
+
+assert sqlite3 # type: ignore # !rm
+
+ptop = vn.realpath
 histpath = self.asrv.vfs.histtab.get(ptop)
 if not histpath:
-self.log("no histpath for [{}]".format(ptop))
+self.log("no histpath for %r" % (ptop,))
 return None
 
 db_path = os.path.join(histpath, "up2k.db")
@@ -106,7 +151,7 @@ class U2idx(object):
 db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
 cur = db.cursor()
 cur.execute('pragma table_info("up")').fetchone()
-self.log("ro: {}".format(db_path))
+self.log("ro: %r" % (db_path,))
 except:
 self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
 # may not fail until the pragma so unset it
@@ -116,7 +161,7 @@ class U2idx(object):
 # on windows, this steals the write-lock from up2k.deferred_init --
 # seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
 cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
-self.log("opened {}".format(db_path))
+self.log("opened %r" % (db_path,))
 
 self.cur[ptop] = cur
 return cur
@@ -265,7 +310,7 @@ class U2idx(object):
 q += " lower({}) {} ? ) ".format(field, oper)
 
 try:
-return self.run_query(uname, vols, q, va, have_mt, lim)
+return self.run_query(uname, vols, q, va, have_mt, True, lim)
 except Exception as ex:
 raise Pebkac(500, repr(ex))
 
@@ -276,9 +321,11 @@ class U2idx(object):
 uq: str,
 uv: list[Union[str, int]],
 have_mt: bool,
+sort: bool,
 lim: int,
 ) -> tuple[list[dict[str, Any]], list[str], bool]:
-if self.args.srch_dbg:
+dbg = self.args.srch_dbg
+if dbg:
 t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
 zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
 self.log(t % (len(vols), zs), 5)
@@ -317,18 +364,18 @@ class U2idx(object):
 ptop = vol.realpath
 flags = vol.flags
 
-cur = self.get_cur(ptop)
+cur = self.get_cur(vol)
 if not cur:
 continue
 
-excl = []
+dots = flags.get("dotsrch") and uname in vol.axs.udot
-for vp2 in self.asrv.vfs.all_vols.keys():
+zs = "srch_re_dots" if dots else "srch_re_nodot"
-if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
+rex: re.Pattern = flags.get(zs) # type: ignore
-excl.append(vp2[len(vtop) :].lstrip("/"))
 
-if self.args.srch_dbg:
+if dbg:
-t = "searching in volume /%s (%s), excludelist %s"
+t = "searching in volume /%s (%s), excluding %s"
-self.log(t % (vtop, ptop, excl), 5)
+self.log(t % (vtop, ptop, rex.pattern), 5)
+rex_cfg: Optional[re.Pattern] = flags.get("srch_excl")
+
 self.active_cur = cur
 
@@ -341,7 +388,6 @@ class U2idx(object):
 
 sret = []
 fk = flags.get("fk")
-dots = flags.get("dotsrch") and uname in vol.axs.udot
 fk_alg = 2 if "fka" in flags else 1
 c = cur.execute(uq, tuple(vuv))
 for hit in c:
@@ -350,20 +396,23 @@ class U2idx(object):
 if rd.startswith("//") or fn.startswith("//"):
 rd, fn = s3dec(rd, fn)
 
-if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
+vp = vjoin(vjoin(vtop, rd), fn)
-if self.args.srch_dbg:
-zs = vjoin(vjoin(vtop, rd), fn)
+if vp in seen_rps:
-t = "database inconsistency in volume '/%s'; ignoring: %s"
-self.log(t % (vtop, zs), 1)
 continue
 
-rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
+if rex.search(vp):
-if not dots and "/." in ("/" + rp):
+if dbg:
-continue
+if rex_cfg and rex_cfg.search(vp): # type: ignore
+self.log("filtered by srch_excl: %s" % (vp,), 6)
-if rp in seen_rps:
+elif not dots and "/." in ("/" + vp):
+pass
+else:
+t = "database inconsistency in volume '/%s'; ignoring: %s"
+self.log(t % (vtop, vp), 1)
 continue
 
+rp = quotep(vp)
 if not fk:
 suf = ""
 else:
@@ -385,7 +434,7 @@ class U2idx(object):
 if lim < 0:
 break
 
-if self.args.srch_dbg:
+if dbg:
 t = "in volume '/%s': hit: %s"
 self.log(t % (vtop, rp), 5)
 
@@ -415,14 +464,15 @@ class U2idx(object):
 ret.extend(sret)
 # print("[{}] {}".format(ptop, sret))
 
-if self.args.srch_dbg:
+if dbg:
 t = "in volume '/%s': got %d hits, %d total so far"
 self.log(t % (vtop, len(sret), len(ret)), 5)
 
 done_flag.append(True)
 self.active_id = ""
 
-ret.sort(key=itemgetter("rp"))
+if sort:
+ret.sort(key=itemgetter("rp"))
+
 return ret, list(taglist.keys()), lim < 0 and not clamped
 
@@ -433,5 +483,5 @@ class U2idx(object):
 return
 
 if identifier == self.active_id:
-assert self.active_cur
+assert self.active_cur # !rm
 self.active_cur.connection.interrupt()
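The search loop above swaps the old excl prefix-list for a per-volume precompiled regex (srch_re_dots / srch_re_nodot, plus an optional srch_excl pattern from config): a single rex.search(vp) now hides nested volumes and, when dotfile-search is off, dot-paths as well. The patterns themselves are built outside this diff, so the regex below is only a made-up stand-in to illustrate the filtering step for a volume with a nested volume mounted at "sub/vol":

import re

rex_nodot = re.compile(r"(^|/)(\.[^/]+|sub/vol)(/|$)")  # hypothetical pattern

hits = ["music/a.flac", "sub/vol/b.flac", "music/.hidden/c.flac"]
kept = [vp for vp in hits if not rex_nodot.search(vp)]
print(kept)  # ['music/a.flac'] -- nested volumes and dot-paths dropped in one pass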
copyparty/up2k.py — 1985 changed lines (file diff suppressed because it is too large)
copyparty/util.py — 1030 changed lines (file diff suppressed because it is too large)
@@ -29,8 +29,10 @@ window.baguetteBox = (function () {
 isOverlayVisible = false,
 touch = {}, // start-pos
 touchFlag = false, // busy
+scrollCSS = ['', ''],
+scrollTimer = 0,
 re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
-re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
+re_v = /^[^?]+\.(webm|mkv|mp4|m4v|mov)(\?|$)/i,
 anims = ['slideIn', 'fadeIn', 'none'],
 data = {}, // all galleries
 imagesElements = [],
@@ -91,6 +93,30 @@ window.baguetteBox = (function () {
 touchendHandler();
 };
+
+var overlayWheelHandler = function (e) {
+if (!options.noScrollbars || anymod(e))
+return;
+
+ev(e);
+
+var x = e.deltaX,
+y = e.deltaY,
+d = Math.abs(x) > Math.abs(y) ? x : y;
+
+if (e.deltaMode)
+d *= 10;
+
+if (Date.now() - scrollTimer < (Math.abs(d) > 20 ? 100 : 300))
+return;
+
+scrollTimer = Date.now();
+
+if (d > 0)
+showNextImage();
+else
+showPreviousImage();
+};
+
 var trapFocusInsideOverlay = function (e) {
 if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
 e.stopPropagation();
@@ -394,8 +420,7 @@ window.baguetteBox = (function () {
 }
 
 function dlpic() {
-var url = findfile()[3].href;
+var url = addq(findfile()[3].href, 'cache');
-url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
 dl_file(url);
 }
 
@@ -452,6 +477,7 @@ window.baguetteBox = (function () {
 bind(document, 'keyup', keyUpHandler);
 bind(document, 'fullscreenchange', onFSC);
 bind(overlay, 'click', overlayClickHandler);
+bind(overlay, 'wheel', overlayWheelHandler);
 bind(btnPrev, 'click', showPreviousImage);
 bind(btnNext, 'click', showNextImage);
 bind(btnClose, 'click', hideOverlay);
@@ -474,6 +500,7 @@ window.baguetteBox = (function () {
 unbind(document, 'keyup', keyUpHandler);
 unbind(document, 'fullscreenchange', onFSC);
 unbind(overlay, 'click', overlayClickHandler);
+unbind(overlay, 'wheel', overlayWheelHandler);
 unbind(btnPrev, 'click', showPreviousImage);
 unbind(btnNext, 'click', showNextImage);
 unbind(btnClose, 'click', hideOverlay);
@@ -541,6 +568,12 @@ window.baguetteBox = (function () {
 
 function showOverlay(chosenImageIndex) {
 if (options.noScrollbars) {
+var a = document.documentElement.style.overflowY,
+b = document.body.style.overflowY;
+
+if (a != 'hidden' || b != 'scroll')
+scrollCSS = [a, b];
+
 document.documentElement.style.overflowY = 'hidden';
 document.body.style.overflowY = 'scroll';
 }
@@ -584,15 +617,23 @@ window.baguetteBox = (function () {
 isOverlayVisible = true;
 }
 
-function hideOverlay(e) {
+function hideOverlay(e, dtor) {
 ev(e);
 playvid(false);
 removeFromCache('#files');
 if (options.noScrollbars) {
-document.documentElement.style.overflowY = 'auto';
+document.documentElement.style.overflowY = scrollCSS[0];
-document.body.style.overflowY = 'auto';
+document.body.style.overflowY = scrollCSS[1];
 }
-if (overlay.style.display === 'none')
+try {
+if (document.fullscreenElement)
+document.exitFullscreen();
+}
+catch (ex) { }
+isFullscreen = false;
+
+if (dtor || overlay.style.display === 'none')
 return;
 
 if (options.duringHide)
@@ -600,11 +641,6 @@ window.baguetteBox = (function () {
 
 sethash('');
 unbindEvents();
-try {
-document.exitFullscreen();
-isFullscreen = false;
-}
-catch (ex) { }
 
 // Fade out and hide the overlay
 overlay.className = '';
@@ -682,7 +718,7 @@ window.baguetteBox = (function () {
 options.captions.call(currentGallery, imageElement) :
 imageElement.getAttribute('data-caption') || imageElement.title;
 
-imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
+imageSrc = addq(imageSrc, 'cache');
 
 if (is_vid && index != currentIndex)
 return; // no preload
@@ -711,8 +747,11 @@ window.baguetteBox = (function () {
 });
 image.setAttribute('src', imageSrc);
 if (is_vid) {
+image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
 image.setAttribute('controls', 'controls');
 image.onended = vidEnd;
+image.onplay = function () { show_buttons(1); };
+image.onpause = function () { show_buttons(); };
 }
 image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
 if (options.titleTag && imageCaption)
@@ -720,6 +759,9 @@ window.baguetteBox = (function () {
 
 figure.appendChild(image);
+
+if (is_vid && window.afilt)
+afilt.apply(undefined, image);
+
 if (options.async && callback)
 callback();
 }
@@ -955,6 +997,12 @@ window.baguetteBox = (function () {
 }
 }
+
+function show_buttons(v) {
+clmod(ebi('bbox-btns'), 'off', v);
+clmod(btnPrev, 'off', v);
+clmod(btnNext, 'off', v);
+}
+
 function bounceAnimation(direction) {
 slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
 setTimeout(function () {
@@ -1018,9 +1066,7 @@ window.baguetteBox = (function () {
 if (fx > 0.7)
 return showNextImage();
 
-clmod(ebi('bbox-btns'), 'off', 't');
+show_buttons('t');
-clmod(btnPrev, 'off', 't');
-clmod(btnNext, 'off', 't');
 
 if (Date.now() - ctime <= 500 && !IPHONE)
 tglfull();
@@ -1062,6 +1108,7 @@ window.baguetteBox = (function () {
 }
 
 function destroyPlugin() {
+hideOverlay(undefined, true);
 unbindEvents();
 clearCachedData();
 document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
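The overlayWheelHandler added above pages the gallery with the mouse wheel, rate-limited so a fast scroll does not blow through every image: line-mode deltas (e.deltaMode) are scaled by 10, and another step is accepted only after 100 ms for large deltas or 300 ms for small ones. The same throttle rule transcribed to Python for clarity (the function name is made up for this sketch):

import time

_last = 0.0

def wheel_step(delta: float, delta_mode: int = 0) -> int:
    """returns +1 / -1 for next / previous image, or 0 if throttled"""
    global _last
    if delta_mode:
        delta *= 10
    window = 0.1 if abs(delta) > 20 else 0.3
    now = time.time()
    if now - _last < window:
        return 0
    _last = now
    return 1 if delta > 0 else -1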
@@ -10,7 +10,6 @@
 --fg2-max: #fff;
 --fg-weak: #bbb;
 
---bg-u7: #555;
 --bg-u6: #4c4c4c;
 --bg-u5: #444;
 --bg-u4: #383838;
@@ -28,6 +27,8 @@
 --row-alt: #282828;
 
 --scroll: #eb0;
+--sel-fg: var(--bg-d1);
+--sel-bg: var(--fg);
+
 --a: #fc5;
 --a-b: #c90;
@@ -41,8 +42,14 @@
 --btn-h-bg: #805;
 --btn-1-fg: #400;
 --btn-1-bg: var(--a);
+--btn-h-bs: var(--btn-bs);
+--btn-h-bb: var(--btn-bb);
+--btn-1-bs: var(--btn-bs);
+--btn-1-bb: var(--btn-bb);
 --btn-1h-fg: var(--btn-1-fg);
 --btn-1h-bg: #fe8;
+--btn-1h-bs: var(--btn-1-bs);
+--btn-1h-bb: var(--btn-1-bb);
 --chk-fg: var(--tab-alt);
 --txt-sh: var(--bg-d2);
 --txt-bg: var(--btn-bg);
@@ -57,7 +64,7 @@
 --u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
 --u2-tab-b1: rgba(128,128,128,0.8);
 --u2-tab-1-fg: #fd7;
---u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
+--u2-tab-1-bg: linear-gradient(to bottom, #353, var(--bg) 80%);
 --u2-tab-1-b1: #7c5;
 --u2-tab-1-b2: #583;
 --u2-tab-1-sh: #280;
@@ -181,7 +188,6 @@ html.y {
 --srv-1: #555;
 --srv-2: #c83;
 --srv-3: #c0a;
---srv-3b: rgba(255,68,204,0.6);
 
 --tree-bg: #fff;
 
@@ -210,22 +216,19 @@ html.y {
 html.a {
 --op-aa-sh: 0 0 .2em var(--bg-d3) inset;
 
---u2-o-bg: #603;
+--btn-bs: 0 0 .2em var(--bg-d3);
---u2-o-b1: #a16;
+}
---u2-o-sh: #a00;
+html.az {
---u2-o-h-bg: var(--u2-o-bg);
+--btn-1-bs: 0 0 .1em var(--fg) inset;
---u2-o-h-b1: #fb0;
---u2-o-h-sh: #fb0;
---u2-o-1-bg: #6a1;
---u2-o-1-b1: #efa;
---u2-o-1-sh: #0c0;
---u2-o-1h-bg: var(--u2-o-1-bg);
 }
 html.ay {
 --op-aa-sh: 0 .1em .2em #ccc;
 --op-aa-bg: var(--bg-max);
 }
 html.b {
+--btn-bs: 0 .05em 0 var(--bg-d3) inset;
+--btn-1-bs: 0 .05em 0 var(--btn-1h-bg) inset;
+
 --tree-bg: var(--bg);
 
 --g-bg: var(--bg);
@@ -242,17 +245,13 @@ html.b {
 --u2-b1-bg: rgba(128,128,128,0.15);
 --u2-b2-bg: var(--u2-b1-bg);
 
---u2-o-bg: var(--btn-bg);
---u2-o-h-bg: var(--btn-h-bg);
---u2-o-1-bg: var(--a);
---u2-o-1h-bg: var(--a-hil);
 
 --f-sh1: 0.1;
 --mp-b-bg: transparent;
 }
 html.bz {
 --fg: #cce;
 --fg-weak: #bbd;
 
 --bg-u5: #3b3f58;
 --bg-u4: #1e2130;
 --bg-u3: #1e2130;
@@ -264,11 +263,14 @@ html.bz {
 
 --row-alt: #181a27;
 
+--a-b: #fb4;
+
 --btn-bg: #202231;
 --btn-h-bg: #2d2f45;
---btn-1-bg: #ba2959;
+--btn-1-bg: #eb6;
---btn-1-fg: #fff;
+--btn-1-fg: #000;
 --btn-1h-fg: #000;
+--btn-1h-bg: #ff9;
 --txt-sh: a;
 
 --u2-tab-b1: var(--bg-u5);
@@ -283,6 +285,7 @@ html.bz {
 --f-h-b1: #34384e;
 --mp-sh: #11121d;
 /*--mp-b-bg: #2c3044;*/
+--f-play-bg: var(--btn-1-bg);
 }
 html.by {
 --bg: #f2f2f2;
@@ -303,6 +306,7 @@ html.by {
 }
 html.c {
 font-weight: bold;
+
 --fg: #fff;
 --fg-weak: #cef;
 --bg-u5: #409;
@@ -323,15 +327,25 @@ html.c {
 --chk-fg: #d90;
 
 --op-aa-bg: #f9dd22;
---u2-o-1-bg: #4cf;
 
 --srv-1: #ea0;
 --mp-b-bg: transparent;
 }
 html.cz {
 --bgg: var(--bg-u2);
 
+--sel-bg: var(--bg-u5);
+--sel-fg: var(--fg);
+
+--btn-bb: .2em solid #709;
+--btn-bs: 0 .1em .6em rgba(255,0,185,0.5);
+--btn-1-bb: .2em solid #e90;
+--btn-1-bs: 0 .1em .8em rgba(255,205,0,0.9);
+
 --srv-3: #fff;
 
 --u2-tab-b1: var(--bg-d3);
+--u2-tab-1-bg: a;
 }
 html.cy {
 --fg: #fff;
@@ -343,6 +357,8 @@ html.cy {
 --bg-d3: #f77;
 --bg-d2: #ff0;
 
+--sel-bg: #f77;
+
 --a: #fff;
 --a-hil: #fff;
 --a-h-bg: #000;
@@ -356,24 +372,25 @@ html.cy {
 --btn-h-fg: #fff;
 --btn-1-bg: #ff0;
 --btn-1-fg: #000;
+--btn-bs: 0 .25em 0 #f00;
 --chk-fg: #fd0;
 
+--txt-bg: #000;
 --srv-1: #f00;
 --srv-3: #fff;
 --op-aa-bg: #fff;
 
 --u2-b1-bg: #f00;
 --u2-b2-bg: #f00;
---u2-o-bg: #ff0;
---u2-o-1-bg: #f00;
+--g-sel-fg: #fff;
+--g-sel-bg: #aaa;
+--g-fsel-bg: #aaa;
 }
 html.dz {
 --fg: #4d4;
---fg-max: #fff;
---fg2-max: #fff;
 --fg-weak: #2a2;
 
---bg-u7: #020;
 --bg-u6: #020;
 --bg-u5: #050;
 --bg-u4: #020;
@@ -381,11 +398,9 @@ html.dz {
 --bg-u2: #020;
 --bg-u1: #020;
 --bg: #010;
---bgg: var(--bg);
 --bg-d1: #000;
 --bg-d2: #020;
 --bg-d3: #000;
---bg-max: #000;
 
 --tab-alt: #6f6;
 --row-alt: #030;
@@ -398,48 +413,21 @@ html.dz {
 --a-dark: #afa;
 --a-gray: #2a2;
 
---btn-fg: var(--a);
 --btn-bg: rgba(64,128,64,0.15);
---btn-h-fg: var(--a-hil);
 --btn-h-bg: #050;
 --btn-1-fg: #000;
 --btn-1-bg: #4f4;
---btn-1h-fg: var(--btn-1-fg);
 --btn-1h-bg: #3f3;
---chk-fg: var(--tab-alt);
+--btn-bs: 0 0 0 .1em #080 inset;
---txt-sh: var(--bg-d2);
+--btn-1-bs: a;
---txt-bg: var(--btn-bg);
 
---op-aa-fg: var(--a);
---op-aa-bg: var(--bg-d2);
---op-a-sh: rgba(0,0,0,0.5);
 
 --u2-btn-b1: var(--fg-weak);
 --u2-sbtn-b1: var(--fg-weak);
---u2-txt-bg: var(--bg-u5);
---u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
 --u2-tab-b1: var(--fg-weak);
 --u2-tab-1-fg: #fff;
---u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
+--u2-tab-1-bg: linear-gradient(to bottom, #151, var(--bg) 80%);
---u2-tab-1-b1: #7c5;
---u2-tab-1-b2: #583;
---u2-tab-1-sh: #280;
---u2-b-fg: #fff;
 --u2-b1-bg: #3a3;
 --u2-b2-bg: #3a3;
---u2-o-bg: var(--btn-bg);
---u2-o-b1: var(--bg-u5);
---u2-o-h-bg: var(--fg-weak);
---u2-o-1-bg: var(--fg-weak);
---u2-o-1-b1: var(--a);
---u2-o-1h-bg: var(--a);
---u2-inf-bg: #07a;
---u2-inf-b1: #0be;
---u2-ok-bg: #380;
---u2-ok-b1: #8e4;
---u2-err-bg: #900;
---u2-err-b1: #d06;
---ud-b1: #888;
 
 --sort-1: #fff;
 --sort-2: #3f3;
@@ -451,47 +439,12 @@ html.dz {
 
 --tree-bg: #010;
 
---g-play-bg: #750;
---g-play-b1: #c90;
---g-play-b2: #da4;
---g-play-sh: #b83;
 
---g-sel-fg: #fff;
---g-sel-bg: #925;
 --g-sel-b1: #c37;
 --g-sel-sh: #b36;
---g-fsel-bg: #d39;
 --g-fsel-b1: #d48;
---g-fsel-ts: #804;
---g-fg: var(--a-hil);
---g-bg: var(--bg-u2);
---g-b1: var(--bg-u4);
---g-b2: var(--bg-u5);
---g-g1: var(--bg-u2);
---g-g2: var(--bg-u5);
---g-f-bg: var(--bg-u4);
---g-f-b1: var(--bg-u5);
---g-f-fg: var(--a-hil);
---g-sh: rgba(0,0,0,0.3);
 
---f-sh1: 0.33;
---f-sh2: 0.02;
---f-sh3: 0.2;
 --f-h-b1: #3b3;
 
---f-play-bg: #fc5;
---f-play-fg: #000;
---f-sel-sh: #fc0;
---f-gray: #999;
 
---fm-off: #f6c;
---mp-sh: var(--bg-d3);
 
---err-fg: #fff;
---err-bg: #a20;
---err-b1: #f00;
---err-ts: #500;
 
 text-shadow: none;
 font-family: 'scp', monospace, monospace;
 font-family: var(--font-mono), 'scp', monospace, monospace;
@@ -544,10 +497,6 @@ html.dy {
 --u2-tab-1-bg: a;
 --u2-b1-bg: #000;
 --u2-b2-bg: #000;
---u2-o-h-bg: #999;
---u2-o-1h-bg: #999;
---u2-o-bg: #eee;
---u2-o-1-bg: #000;
 
 --ud-b1: a;
 
@@ -588,11 +537,11 @@ html.dy {
 line-height: 1.2em;
 }
 ::selection {
-color: var(--bg-d1);
+color: var(--sel-fg);
-background: var(--fg);
+background: var(--sel-bg);
 text-shadow: none;
 }
-html,body,tr,th,td,#files,a {
+html,body,tr,th,td,#files,a,#blogout {
 color: inherit;
 background: none;
 font-weight: inherit;
@@ -620,6 +569,7 @@ pre, code, tt, #doc, #doc>code {
 overflow: hidden;
 width: 0;
 height: 0;
+color: var(--bg);
 }
 html .ayjump:focus {
 z-index: 80386;
@@ -674,11 +624,15 @@ html.y #path {
 #files tbody div a {
 color: var(--tab-alt);
 }
-a, #files tbody div a:last-child {
+a, #blogout, #files tbody div a:last-child {
 color: var(--a);
 padding: .2em;
 text-decoration: none;
 }
+#blogout {
+margin: -.2em;
+}
+#blogout:hover,
 a:hover {
 color: var(--a-hil);
 background: var(--a-h-bg);
@@ -699,12 +653,12 @@ a:hover {
 .s0:after,
 .s1:after {
 content: '⌄';
-margin-left: -.1em;
+margin-left: -.15em;
 }
 .s0r:after,
 .s1r:after {
 content: '⌃';
-margin-left: -.1em;
+margin-left: -.15em;
 }
 .s0:after,
 .s0r:after {
@@ -715,7 +669,7 @@ a:hover {
 color: var(--sort-2);
 }
 #files thead th:after {
-margin-right: -.7em;
+margin-right: -.5em;
 }
 #files tbody tr:hover td,
 #files tbody tr:hover td+td {
@@ -744,6 +698,15 @@ html #files.hhpick thead th {
 word-wrap: break-word;
 overflow: hidden;
 }
+#files tr.fade a {
+color: #999;
+color: rgba(255, 255, 255, 0.4);
+font-style: italic;
+}
+html.y #files tr.fade a {
+color: #999;
+color: rgba(0, 0, 0, 0.4);
+}
 #files tr:nth-child(2n) td {
 background: var(--row-alt);
 }
@@ -870,7 +833,7 @@ html.y #path a:hover {
 max-width: 52em;
 }
 .mdo.sb,
-#epi.logue.mdo>iframe {
+.logue.mdo>iframe {
 max-width: 54em;
 }
 .mdo,
@@ -913,6 +876,9 @@ html.y #path a:hover {
 color: var(--srv-3);
 border-bottom: 1px solid var(--srv-3b);
 }
+#flogout {
+display: inline;
+}
 #goh+span {
 color: var(--bg-u5);
 padding-left: .5em;
@@ -947,6 +913,8 @@ html.y #path a:hover {
 #files tbody tr.play a:hover {
 color: var(--btn-1h-fg);
 background: var(--btn-1h-bg);
+box-shadow: var(--btn-1h-bs);
+border-bottom: var(--btn-1h-bb);
 }
 #ggrid {
 margin: -.2em -.5em;
@@ -955,6 +923,7 @@ html.y #path a:hover {
 overflow: hidden;
 display: block;
 display: -webkit-box;
+line-clamp: var(--grid-ln);
 -webkit-line-clamp: var(--grid-ln);
 -webkit-box-orient: vertical;
 padding-top: .3em;
@@ -1001,9 +970,6 @@ html.y #path a:hover {
 color: var(--g-dfg);
 }
 #ggrid>a.au:before {
-content: '💾';
-}
-html.np_open #ggrid>a.au:before {
 content: '▶';
 }
 #ggrid>a:before {
@@ -1132,6 +1098,7 @@ html.y #widget.open {
 width: 100%;
 height: 100%;
 }
+#fshr,
 #wtgrid,
 #wtico {
 position: relative;
@@ -1318,6 +1285,7 @@ html.y #widget.open {
 #widget.cmp #wtoggle {
 font-size: 1.2em;
 }
+#widget.cmp #fshr,
 #widget.cmp #wtgrid {
 display: none;
 }
@@ -1331,6 +1299,7 @@ html.y #widget.open {
 }
 #widget.cmp #barpos,
 #widget.cmp #barbuf {
+height: 1.6em;
 width: calc(100% - 11em);
 border-radius: 0;
 left: 5em;
@@ -1418,7 +1387,11 @@ input[type="checkbox"]+label {
 input[type="radio"]:checked+label,
 input[type="checkbox"]:checked+label {
 color: #0e0;
-color: var(--a);
+color: var(--btn-1-bg);
+}
+input[type="checkbox"]:checked+label {
+box-shadow: var(--btn-1-bs);
+border-bottom: var(--btn-1-bb);
 }
 html.dz input {
 font-family: 'scp', monospace, monospace;
@@ -1596,10 +1569,13 @@ html {
 color: var(--btn-fg);
 background: #eee;
 background: var(--btn-bg);
+box-shadow: var(--btn-bs);
+border-bottom: var(--btn-bb);
 border-radius: .3em;
 padding: .2em .4em;
 font-size: 1.2em;
 margin: .2em;
+display: inline-block;
 white-space: pre;
 position: relative;
 top: -.12em;
@@ -1608,20 +1584,14 @@ html.c .btn,
 html.a .btn {
 border-radius: .2em;
 }
-html.cz .btn {
-box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
-border-bottom: .2em solid #709;
-}
 html.dz .btn {
 font-size: 1em;
-box-shadow: 0 0 0 .1em #080 inset;
-}
-html.dz .tgl.btn.on {
-box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
 }
 .btn:hover {
 color: var(--btn-h-fg);
 background: var(--btn-h-bg);
+box-shadow: var(--btn-h-bs);
+border-bottom: var(--btn-h-bb);
 }
 .tgl.btn.on {
 background: #000;
@@ -1629,14 +1599,14 @@ html.dz .tgl.btn.on {
 color: #fff;
 color: var(--btn-1-fg);
 text-shadow: none;
-}
+box-shadow: var(--btn-1-bs);
-html.cz .tgl.btn.on {
+border-bottom: var(--btn-1-bb);
-box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
-border-bottom: .2em solid #e90;
 }
 .tgl.btn.on:hover {
-background: var(--btn-1h-bg);
 color: var(--btn-1h-fg);
+background: var(--btn-1h-bg);
+box-shadow: var(--btn-1h-bs);
+border-bottom: var(--btn-1h-bb);
 }
 #detree {
 padding: .3em .5em;
@@ -1677,6 +1647,18 @@ html.cz .tgl.btn.on {
 background: var(--btn-1-bg);
 text-shadow: none;
 }
+#tree ul a.ld::before {
+font-weight: bold;
+font-family: sans-serif;
+display: inline-block;
+text-align: center;
+width: 1em;
+margin: 0 .3em 0 -1.3em;
+color: var(--fg-max);
+opacity: 0;
+content: '◠';
+animation: .5s linear infinite forwards spin, ease .25s 1 forwards fadein;
+}
 #tree ul a.par {
 color: var(--fg-max);
 }
@@ -1736,6 +1718,7 @@ html.y #tree.nowrap .ntree a+a:hover {
 }
 #files th span {
 position: relative;
+white-space: nowrap;
 }
 #files>thead>tr>th.min,
 #files td.min {
@@ -1773,9 +1756,6 @@ html.y #tree.nowrap .ntree a+a:hover {
 margin: .7em 0 .7em .5em;
 padding-left: .5em;
 }
-.opwide>div>div>a {
-line-height: 2em;
-}
 .opwide>div>h3 {
 color: var(--fg-weak);
 margin: 0 .4em;
@@ -1843,6 +1823,7 @@ html.y #tree.nowrap .ntree a+a:hover {
 #unpost td:nth-child(4) {
 text-align: right;
 }
+#shui,
 #rui {
 background: #fff;
 background: var(--bg);
@@ -1858,13 +1839,25 @@ html.y #tree.nowrap .ntree a+a:hover {
 padding: 1em;
 z-index: 765;
 }
+#shui div+div,
 #rui div+div {
 margin-top: 1em;
 }
+#shui table,
 #rui table {
 width: 100%;
 border-collapse: collapse;
 }
+#shui button {
+margin: 0 1em 0 0;
+}
+#shui .btn {
+font-size: 1em;
+}
+#shui td {
+padding: .8em 0;
+}
+#shui td+td,
 #rui td+td {
 padding: .2em 0 .2em .5em;
 }
@@ -1872,21 +1865,25 @@ html.y #tree.nowrap .ntree a+a:hover {
 font-family: 'scp', monospace, monospace;
 font-family: var(--font-mono), 'scp', monospace, monospace;
 }
+#shui td+td,
 #rui td+td,
+#shui td input[type="text"],
 #rui td input[type="text"] {
 width: 100%;
 }
+#shui td.exs input[type="text"] {
+width: 3em;
+}
 #rn_f.m td:first-child {
 white-space: nowrap;
 }
 #rn_f.m td+td {
 width: 50%;
 }
-#rn_f .err td {
+#rn_f .err td,
-background: var(--err-bg);
+#rn_f .err input[readonly],
-color: var(--fg-max);
+#rui .ng input[readonly] {
-}
+color: var(--err-fg);
-#rn_f .err input[readonly] {
 background: var(--err-bg);
 }
 #rui input[readonly] {
@@ -2670,23 +2667,25 @@ html.b #u2conf a.b:hover {
 #u2conf input[type="checkbox"]:checked+label {
 position: relative;
 cursor: pointer;
-background: var(--u2-o-bg);
+background: var(--btn-bg);
-border-bottom: .2em solid var(--u2-o-b1);
+box-shadow: var(--btn-bs);
-box-shadow: 0 .1em .3em var(--u2-o-sh) inset;
+border-bottom: var(--btn-bb);
 text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
 }
 #u2conf input[type="checkbox"]:checked+label {
-background: var(--u2-o-1-bg);
+background: var(--btn-1-bg);
-border-bottom: .2em solid var(--u2-o-1-b1);
+box-shadow: var(--btn-1-bs);
-box-shadow: 0 .1em .5em var(--u2-o-1-sh);
+border-bottom: var(--btn-1-bb);
 }
 #u2conf input[type="checkbox"]+label:hover {
-box-shadow: 0 .1em .3em var(--u2-o-h-sh);
+background: var(--btn-h-bg);
-border-color: var(--u2-o-h-b1);
+box-shadow: var(--btn-h-bs);
-background: var(--u2-o-h-bg);
+border-bottom: var(--btn-h-bb);
 }
 #u2conf input[type="checkbox"]:checked+label:hover {
-background: var(--u2-o-1h-bg);
+background: var(--btn-1h-bg);
+box-shadow: var(--btn-1h-bs);
+border-bottom: var(--btn-1h-bb);
 }
 #op_up2k.srch #u2conf td:nth-child(2)>*,
 #op_up2k.srch #u2conf td:nth-child(3)>* {
@@ -2786,6 +2785,7 @@ html.b #u2conf a.b:hover {
 padding-left: .2em;
 }
 .fsearch_explain {
+color: var(--a-dark);
 padding-left: .7em;
 font-size: 1.1em;
 line-height: 0;
@@ -3075,18 +3075,30 @@ html.by #u2cards a.act {
 
 
 
-html.cy #wrap {
-color: #000;
-}
 html.cy .mdo a {
 background: #f00;
 }
+html.cy #wrap,
+html.cy #acc_info a,
 html.cy #op_up2k,
 html.cy #files,
 html.cy #files a,
 html.cy #files tbody div a:last-child {
 color: #000;
 }
+html.cy #u2tab a,
+html.cy #u2cards a {
+color: #f00;
+}
+html.cy #unpost a {
+color: #ff0;
+}
+html.cy #barbuf {
+filter: hue-rotate(267deg) brightness(0.8) contrast(4);
+}
+html.cy #pvol {
+filter: hue-rotate(4deg) contrast(2.2);
+}
 
 
 
@@ -6,7 +6,7 @@
 <title>{{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
 {{ html_head }}
@@ -67,14 +67,14 @@
 <div id="op_up2k" class="opview"></div>
 
 <div id="op_cfg" class="opview opbox opwide"></div>
 
 <h1 id="path">
 <a href="#" id="entree">🌲</a>
 {%- for n in vpnodes %}
 <a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
 {%- endfor %}
 </h1>
 
 <div id="tree"></div>
 
 <div id="wrap">
@@ -108,21 +108,18 @@
 
 {%- for f in files %}
 <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
 {%- if f.tags is defined %}
-{%- for k in taglist %}
+{%- for k in taglist %}<td>{{ f.tags[k] }}</td>{%- endfor %}
-<td>{{ f.tags[k] }}</td>
+{%- endif %}<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
-{%- endfor %}
-{%- endif %}
-<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
 {%- endfor %}
 
 </tbody>
 </table>
 
 <div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
 
 <h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
 
 <a href="#" id="repl">π</a>
 
 </div>
@@ -134,17 +131,16 @@
 <div id="widget"></div>
 
 <script>
-var SR = {{ r|tojson }},
+var SR = "{{ r }}",
+CGV1 = {{ cgv1 }},
 CGV = {{ cgv|tojson }},
 TS = "{{ ts }}",
 dtheme = "{{ dtheme }}",
 srvinf = "{{ srv_info }}",
-s_name = "{{ s_name }}",
 lang = "{{ lang }}",
 dfavico = "{{ favico }}",
-have_tags_idx = {{ have_tags_idx|tojson }},
+have_tags_idx = {{ have_tags_idx }},
 sb_lg = "{{ sb_lg }}",
-txt_ext = "{{ txt_ext }}",
 logues = {{ logues|tojson if sb_lg else "[]" }},
 ls0 = {{ ls0|tojson }};
 
File diff suppressed because it is too large
@@ -11,7 +11,6 @@
 td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
 a{display:block}
 </style>
-{{ html_head }}
 </head>
 
 <body>
@@ -52,11 +51,11 @@
 
 </tbody>
 </table>
 
 {%- if logues[1] %}
 <div>{{ logues[1] }}</div><br />
 {%- endif %}
 
 <h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
 
 </body>
BIN copyparty/web/iiam.gif — new binary file (230 B), not shown
@@ -3,7 +3,7 @@
 <title>📝 {{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
 {%- if edit %}
@@ -49,7 +49,7 @@
 <div id="mp" class="mdo"></div>
 </div>
 <a href="#" id="repl">π</a>
 
 {%- if edit %}
 <div id="helpbox">
 <textarea autocomplete="off">
@@ -125,12 +125,12 @@ write markdown (most html is 🙆 too)
 </textarea>
 </div>
 {%- endif %}
 
 <script>
 
-var SR = {{ r|tojson }},
+var SR = "{{ r }}",
 last_modified = {{ lastmod }},
-have_emp = {{ have_emp|tojson }},
+have_emp = {{ "true" if have_emp else "false" }},
 dfavico = "{{ favico }}";
 
 var md_opt = {
@@ -159,5 +159,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
 {%- if edit %}
 <script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
 {%- endif %}
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
 </body></html>
 
|||||||
@@ -17,8 +17,8 @@ var chromedbg = function () { console.log(arguments); }
 var dbg = function () { };

 // replace dbg with the real deal here or in the console:
-// dbg = chromedbg
+// dbg = chromedbg;
-// dbg = console.log
+// dbg = console.log;


 // dodge browser issues
@@ -607,10 +607,10 @@ function md_newline() {
 var s = linebounds(true),
 ln = s.md.substring(s.n1, s.n2),
 m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
-m2 = /^[ \t>+-]*(\* )?/.exec(ln),
+m2 = /^[ \t]*[>+*-]{0,2}[ \t]/.exec(ln),
 drop = dom_src.selectionEnd - dom_src.selectionStart;

-var pre = m2[0];
+var pre = m2 ? m2[0] : '';
 if (m1 !== null)
 pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];

@@ -3,7 +3,7 @@
 <title>📝 {{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
 <link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
@@ -26,9 +26,9 @@
 <a href="#" id="repl">π</a>
 <script>

-var SR = {{ r|tojson }},
+var SR = "{{ r }}",
 last_modified = {{ lastmod }},
-have_emp = {{ have_emp|tojson }},
+have_emp = {{ "true" if have_emp else "false" }},
 dfavico = "{{ favico }}";

 var md_opt = {
@@ -53,5 +53,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
 <script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
 </body></html>

@@ -6,7 +6,7 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
 {{ html_head }}
 </head>
@@ -46,6 +46,9 @@
 }, 1000);
 </script>
 {%- endif %}
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
 </body>

 </html>
copyparty/web/rups.css (new file, 107 lines)
@@ -0,0 +1,107 @@
+html {
+color: #333;
+background: #f7f7f7;
+font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
+touch-action: manipulation;
+}
+#wrap {
+margin: 2em auto;
+padding: 0 1em 3em 1em;
+line-height: 2.3em;
+}
+a {
+color: #047;
+background: #fff;
+text-decoration: none;
+border-bottom: 1px solid #8ab;
+border-radius: .2em;
+padding: .2em .6em;
+margin: 0 .3em;
+}
+#wrap td a {
+margin: 0;
+line-height: 1em;
+display: inline-block;
+white-space: initial;
+font-family: var(--font-main), sans-serif;
+}
+#repl {
+border: none;
+background: none;
+color: inherit;
+padding: 0;
+position: fixed;
+bottom: .25em;
+left: .2em;
+}
+#wrap table {
+border-collapse: collapse;
+position: relative;
+margin-top: 2em;
+}
+#wrap th {
+top: -1px;
+position: sticky;
+background: #f7f7f7;
+}
+#wrap td {
+font-family: var(--font-mono), monospace, monospace;
+white-space: pre; /*date*/
+overflow: hidden; /*ipv6*/
+}
+#wrap th:first-child,
+#wrap td:first-child {
+text-align: right;
+}
+#wrap td,
+#wrap th {
+text-align: left;
+padding: .3em .6em;
+max-width: 30vw;
+}
+#wrap tr:hover td {
+background: #ddd;
+box-shadow: 0 -1px 0 rgba(128, 128, 128, 0.5) inset;
+}
+#wrap th:first-child,
+#wrap td:first-child {
+border-radius: .5em 0 0 .5em;
+}
+#wrap th:last-child,
+#wrap td:last-child {
+border-radius: 0 .5em .5em 0;
+}
+
+
+
+html.z {
+background: #222;
+color: #ccc;
+}
+html.bz {
+background: #11121d;
+color: #bbd;
+}
+html.z a {
+color: #fff;
+background: #057;
+border-color: #37a;
+}
+html.z input[type=text] {
+color: #ddd;
+background: #223;
+border: none;
+border-bottom: 1px solid #fc5;
+border-radius: .2em;
+padding: .2em .3em;
+}
+html.z #wrap th {
+background: #222;
+}
+html.bz #wrap th {
+background: #223;
+}
+html.z #wrap tr:hover td {
+background: #000;
+}
copyparty/web/rups.html (new file, 50 lines)
@@ -0,0 +1,50 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+<meta charset="utf-8">
+<title>{{ s_doctitle }}</title>
+<meta http-equiv="X-UA-Compatible" content="IE=edge">
+<meta name="viewport" content="width=device-width, initial-scale=0.8">
+<meta name="robots" content="noindex, nofollow">
+<meta name="theme-color" content="#{{ tcolor }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/rups.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+{{ html_head }}
+</head>
+
+<body>
+<div id="wrap">
+<a href="#" id="re">refresh</a>
+<a href="{{ r }}/?h">control-panel</a>
+Filter: <input type="text" id="filter" size="20" placeholder="documents/passwords" />
+<span id="hits"></span>
+<table id="tab"><thead><tr>
+<th>size</th>
+<th>who</th>
+<th>when</th>
+<th>age</th>
+<th>dir</th>
+<th>file</th>
+</tr></thead><tbody id="tb"></tbody></table>
+</div>
+<a href="#" id="repl">π</a>
+<script>
+
+var SR="{{ r }}",
+lang="{{ lang }}",
+dfavico="{{ favico }}";
+
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
+
+</script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script>var V={{ v }};</script>
+<script src="{{ r }}/.cpr/rups.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
+</body>
+</html>
copyparty/web/rups.js (new file, 66 lines)
@@ -0,0 +1,66 @@
+function render() {
+var ups = V.ups, now = V.now, html = [];
+ebi('filter').value = V.filter;
+ebi('hits').innerHTML = 'showing ' + ups.length + ' files';
+
+for (var a = 0; a < ups.length; a++) {
+var f = ups[a],
+vsp = vsplit(f.vp.split('?')[0]),
+dn = esc(uricom_dec(vsp[0])),
+fn = esc(uricom_dec(vsp[1])),
+at = f.at,
+td = now - f.at,
+ts = !at ? '(?)' : unix2iso(at),
+sa = !at ? '(?)' : td > 60 ? shumantime(td) : (td + 's'),
+sz = ('' + f.sz).replace(/\B(?=(\d{3})+(?!\d))/g, " ");
+
+html.push('<tr><td>' + sz +
+'</td><td>' + f.ip +
+'</td><td>' + ts +
+'</td><td>' + sa +
+'</td><td><a href="' + vsp[0] + '">' + dn +
+'</a></td><td><a href="' + f.vp + '">' + fn +
+'</a></td></tr>');
+}
+if (!ups.length) {
+var t = V.filter ? ' matching the filter' : '';
+html = ['<tr><td colspan="6">there are no uploads' + t + '</td></tr>'];
+}
+ebi('tb').innerHTML = html.join('');
+}
+render();
+
+var ti;
+function ask(e) {
+ev(e);
+clearTimeout(ti);
+ebi('hits').innerHTML = 'Loading...';
+
+var xhr = new XHR(),
+filter = unsmart(ebi('filter').value);
+
+hist_replace(get_evpath().split('?')[0] + '?ru&filter=' + uricom_enc(filter));
+
+xhr.onload = xhr.onerror = function () {
+try {
+V = JSON.parse(this.responseText)
+}
+catch (ex) {
+ebi('tb').innerHTML = '<tr><td colspan="6">failed to decode server response as json: <pre>' + esc(this.responseText) + '</pre></td></tr>';
+return;
+}
+render();
+};
+xhr.open('GET', SR + '/?ru&j&filter=' + uricom_enc(filter), true);
+xhr.send();
+}
+ebi('re').onclick = ask;
+ebi('filter').oninput = function () {
+clearTimeout(ti);
+ti = setTimeout(ask, 500);
+ebi('hits').innerHTML = '...';
+};
+ebi('filter').onkeydown = function (e) {
+if (('' + e.key).endsWith('Enter'))
+ask();
+};
copyparty/web/shares.css (new file, 95 lines)
@@ -0,0 +1,95 @@
+html {
+color: #333;
+background: #f7f7f7;
+font-family: sans-serif;
+font-family: var(--font-main), sans-serif;
+touch-action: manipulation;
+}
+#wrap {
+margin: 2em auto;
+padding: 0 1em 3em 1em;
+line-height: 2.3em;
+}
+#wrap>span {
+margin: 0 0 0 1em;
+border-bottom: 1px solid #999;
+}
+li {
+margin: 1em 0;
+}
+a {
+color: #047;
+background: #fff;
+text-decoration: none;
+white-space: nowrap;
+border-bottom: 1px solid #8ab;
+border-radius: .2em;
+padding: .2em .6em;
+margin: 0 .3em;
+}
+#wrap td a {
+margin: 0;
+}
+#w {
+color: #fff;
+background: #940;
+border-color: #b70;
+}
+#repl {
+border: none;
+background: none;
+color: inherit;
+padding: 0;
+position: fixed;
+bottom: .25em;
+left: .2em;
+}
+#wrap table {
+border-collapse: collapse;
+position: relative;
+margin-top: 2em;
+}
+th {
+top: -1px;
+position: sticky;
+background: #f7f7f7;
+}
+#wrap td,
+#wrap th {
+padding: .3em .6em;
+text-align: left;
+white-space: nowrap;
+}
+#wrap td+td+td+td+td+td+td+td {
+font-family: var(--font-mono), monospace, monospace;
+}
+#wrap th:first-child,
+#wrap td:first-child {
+border-radius: .5em 0 0 .5em;
+}
+#wrap th:last-child,
+#wrap td:last-child {
+border-radius: 0 .5em .5em 0;
+}
+
+
+
+html.z {
+background: #222;
+color: #ccc;
+}
+html.z a {
+color: #fff;
+background: #057;
+border-color: #37a;
+}
+html.z th {
+background: #222;
+}
+html.bz {
+color: #bbd;
+background: #11121d;
+}
+html.bz th {
+background: #223;
+}
copyparty/web/shares.html (new file, 82 lines)
@@ -0,0 +1,82 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+<meta charset="utf-8">
+<title>{{ s_doctitle }}</title>
+<meta http-equiv="X-UA-Compatible" content="IE=edge">
+<meta name="viewport" content="width=device-width, initial-scale=0.8">
+<meta name="robots" content="noindex, nofollow">
+<meta name="theme-color" content="#{{ tcolor }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+{{ html_head }}
+</head>
+
+<body>
+<div id="wrap">
+<a href="{{ r }}/?shares">refresh</a>
+<a href="{{ r }}/?h">control-panel</a>
+
+<span>axs = perms (read,write,move,delet)</span>
+<span>nf = numFiles (0=dir)</span>
+<span>min/hrs = time left</span>
+
+<table id="tab"><thead><tr>
+<th>sharekey</th>
+<th>delete</th>
+<th>pw</th>
+<th>source</th>
+<th>axs</th>
+<th>nf</th>
+<th>user</th>
+<th>created</th>
+<th>expires</th>
+<th>min</th>
+<th>hrs</th>
+<th>add time</th>
+</tr></thead><tbody>
+{% for k, pw, vp, pr, st, un, t0, t1 in rows %}
+<tr>
+<td>
+<a href="{{ r }}{{ shr }}{{ k }}?qr">qr</a>
+<a href="{{ r }}{{ shr }}{{ k }}">{{ k }}</a>
+</td>
+<td><a href="#" k="{{ k }}">delete</a></td>
+<td>{{ "yes" if pw else "--" }}</td>
+<td><a href="{{ r }}/{{ vp|e }}">/{{ vp|e }}</a></td>
+<td>{{ pr }}</td>
+<td>{{ st }}</td>
+<td>{{ un|e }}</td>
+<td>{{ t0 }}</td>
+<td>{{ t1 }}</td>
+<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 60) | round(1) }}</td>
+<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 3600) | round(1) }}</td>
+<td></td>
+</tr>
+{% endfor %}
+</tbody></table>
+{% if not rows %}
+(you don't have any active shares btw)
+{% endif %}
+</div>
+<a href="#" id="repl">π</a>
+<script>
+
+var SR="{{ r }}",
+shr="{{ shr }}",
+lang="{{ lang }}",
+dfavico="{{ favico }}";
+
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
+
+</script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/shares.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
+</body>
+</html>
copyparty/web/shares.js (new file, 78 lines)
@@ -0,0 +1,78 @@
+var t = QSA('a[k]');
+for (var a = 0; a < t.length; a++)
+t[a].onclick = rm;
+
+function rm() {
+var u = SR + shr + uricom_enc(this.getAttribute('k')) + '?eshare=rm',
+xhr = new XHR();
+
+xhr.open('POST', u, true);
+xhr.onload = xhr.onerror = cb;
+xhr.send();
+}
+
+function bump() {
+var k = this.closest('tr').getElementsByTagName('a')[2].getAttribute('k'),
+u = SR + shr + uricom_enc(k) + '?eshare=' + this.value,
+xhr = new XHR();
+
+xhr.open('POST', u, true);
+xhr.onload = xhr.onerror = cb;
+xhr.send();
+}
+
+function cb() {
+if (this.status !== 200)
+return modal.alert('<h6>server error</h6>' + esc(unpre(this.responseText)));
+
+document.location = '?shares';
+}
+
+function qr(e) {
+ev(e);
+var href = this.href,
+pw = this.closest('tr').cells[2].textContent;
+
+if (pw.indexOf('yes') < 0)
+return showqr(href);
+
+modal.prompt("if you want to bypass the password protection by\nembedding the password into the qr-code, then\ntype the password now, otherwise leave this empty", "", function (v) {
+if (v)
+href += "&pw=" + v;
+showqr(href);
+});
+}
+
+function showqr(href) {
+var vhref = href.replace('?qr&', '?').replace('?qr', '');
+modal.alert(esc(vhref) + '<img class="b64" width="100" height="100" src="' + href + '" />');
+}
+
+(function() {
+var tab = ebi('tab').tBodies[0],
+tr = Array.prototype.slice.call(tab.rows, 0);
+
+var buf = [];
+for (var a = 0; a < tr.length; a++) {
+tr[a].cells[0].getElementsByTagName('a')[0].onclick = qr;
+for (var b = 7; b < 9; b++)
+buf.push(parseInt(tr[a].cells[b].innerHTML));
+}
+
+var ibuf = 0;
+for (var a = 0; a < tr.length; a++)
+for (var b = 7; b < 9; b++) {
+var v = buf[ibuf++];
+tr[a].cells[b].innerHTML =
+v ? unix2iso(v).replace(' ', ', ') : 'never';
+}
+
+for (var a = 0; a < tr.length; a++)
+tr[a].cells[11].innerHTML =
+'<button value="1">1min</button> ' +
+'<button value="60">1h</button>';
+
+var btns = QSA('td button'), aa = btns.length;
+for (var a = 0; a < aa; a++)
+btns[a].onclick = bump;
+})();
@@ -53,7 +53,7 @@ a.r {
 border-color: #c7a;
 }
 a.g {
-color: #2b0;
+color: #0a0;
 border-color: #3a0;
 box-shadow: 0 .3em 1em #4c0;
 }
@@ -90,6 +90,13 @@ table {
 text-align: left;
 white-space: nowrap;
 }
+.vols td:empty,
+.vols th:empty {
+padding: 0;
+}
+.vols img {
+margin: -4px 0;
+}
 .num {
 border-right: 1px solid #bbb;
 }
@@ -152,11 +159,13 @@ pre b,
 code b {
 color: #000;
 font-weight: normal;
-text-shadow: 0 0 .2em #0f0;
+text-shadow: 0 0 .2em #3f3;
+border-bottom: 1px solid #090;
 }
 html.z pre b,
 html.z code b {
 color: #fff;
+border-bottom: 1px solid #9f9;
 }


@@ -182,19 +191,35 @@ html.z a.g {
 border-color: #af4;
 box-shadow: 0 .3em 1em #7d0;
 }
+form {
+line-height: 2.5em;
+}
+#x,
 input {
 color: #a50;
 background: #fff;
 border: 1px solid #a50;
-border-radius: .5em;
+border-radius: .3em;
-padding: .5em .7em;
+padding: .25em .6em;
-margin: 0 .5em 0 0;
+margin: 0 .3em 0 0;
+font-size: 1em;
 }
+input::placeholder {
+font-size: 1.2em;
+font-style: italic;
+letter-spacing: .04em;
+opacity: 0.64;
+color: #930;
+}
+#x,
 html.z input {
 color: #fff;
 background: #626;
 border-color: #c2c;
 }
+html.z input::placeholder {
+color: #fff;
+}
 html.z .num {
 border-color: #777;
 }
@@ -204,3 +229,6 @@ html.bz {
 color: #bbd;
 background: #11121d;
 }
+html.bz .vols img {
+filter: sepia(0.8) hue-rotate(180deg);
+}
@@ -6,7 +6,7 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 {{ html_head }}
@@ -14,6 +14,7 @@

 <body>
 <div id="wrap">
+{%- if not in_shr %}
 <a id="a" href="{{ r }}/?h" class="af">refresh</a>
 <a id="v" href="{{ r }}/?hc" class="af">connect</a>

@@ -21,7 +22,8 @@
 <p id="b">howdy stranger <small>(you're not logged in)</small></p>
 {%- else %}
 <a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
-<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
+<p><span id="m">welcome back,</span> <strong>{{ this.uname|e }}</strong></p>
+{%- endif %}
 {%- endif %}

 {%- if msg %}
@@ -30,6 +32,30 @@
 </div>
 {%- endif %}

+{%- if ups %}
+<h1 id="aa">incoming files:</h1>
+<table class="vols">
+<thead><tr><th>%</th><th>speed</th><th>eta</th><th>idle</th><th>dir</th><th>file</th></tr></thead>
+<tbody>
+{% for u in ups %}
+<tr><td>{{ u[0] }}</td><td>{{ u[1] }}</td><td>{{ u[2] }}</td><td>{{ u[3] }}</td><td><a href="{{ u[4] }}">{{ u[5]|e }}</a></td><td>{{ u[6]|e }}</td></tr>
+{% endfor %}
+</tbody>
+</table>
+{%- endif %}
+
+{%- if dls %}
+<h1 id="ae">active downloads:</h1>
+<table class="vols">
+<thead><tr><th>%</th><th>sent</th><th>speed</th><th>eta</th><th>idle</th><th></th><th>dir</th><th>file</th></tr></thead>
+<tbody>
+{% for u in dls %}
+<tr><td>{{ u[0] }}</td><td>{{ u[1] }}</td><td>{{ u[2] }}</td><td>{{ u[3] }}</td><td>{{ u[4] }}</td><td>{{ u[5] }}</td><td><a href="{{ u[6] }}">{{ u[7]|e }}</a></td><td>{{ u[8] }}</td></tr>
+{% endfor %}
+</tbody>
+</table>
+{%- endif %}
+
 {%- if avol %}
 <h1>admin panel:</h1>
 <table><tr><td> <!-- hehehe -->
@@ -76,31 +102,65 @@
 </ul>
 {%- endif %}

-<h1 id="cc">client config:</h1>
-<ul>
-{% if k304 or k304vis %}
-{% if k304 %}
-<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
-{%- else %}
-<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
-{% endif %}
-<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
-{% endif %}
-
-<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
-</ul>
-
-<h1 id="l">login for more:</h1>
+{%- if in_shr %}
+<h1 id="z">unlock this share:</h1>
 <div>
-<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
+<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
-<input type="hidden" name="act" value="login" />
+<input type="hidden" id="la" name="act" value="login" />
-<input type="password" name="cppwd" />
+<input type="password" id="lp" name="cppwd" placeholder=" password" />
-<input type="submit" value="Login" />
+<input type="hidden" name="uhash" id="uhash" value="x" />
+<input type="submit" id="ls" value="Unlock" />
 {% if ahttps %}
 <a id="w" href="{{ ahttps }}">switch to https</a>
 {% endif %}
 </form>
 </div>
+{%- else %}
+<h1 id="l">login for more:</h1>
+<div>
+<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
+<input type="hidden" id="la" name="act" value="login" />
+<input type="password" id="lp" name="cppwd" placeholder=" password" />
+<input type="hidden" name="uhash" id="uhash" value="x" />
+<input type="submit" id="ls" value="Login" />
+{% if chpw %}
+<a id="x" href="#">change password</a>
+{% endif %}
+{% if ahttps %}
+<a id="w" href="{{ ahttps }}">switch to https</a>
+{% endif %}
+</form>
+</div>
+{%- endif %}
+
+<h1 id="cc">other stuff:</h1>
+<ul>
+{%- if this.uname != '*' and this.args.shr %}
+<li><a id="y" href="{{ r }}/?shares">edit shares</a></li>
+{% endif %}
+
+{% if k304 or k304vis %}
+{% if k304 %}
+<li><a id="h" href="{{ r }}/?cc&setck=k304=n">disable k304</a> (currently enabled)
+{%- else %}
+<li><a id="i" href="{{ r }}/?cc&setck=k304=y" class="r">enable k304</a> (currently disabled)
+{% endif %}
+<blockquote id="j">enabling k304 will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
+{% endif %}
+
+{% if no304 or no304vis %}
+{% if no304 %}
+<li><a id="ab" href="{{ r }}/?cc&setck=no304=n">disable no304</a> (currently enabled)
+{%- else %}
+<li><a id="ac" href="{{ r }}/?cc&setck=no304=y" class="r">enable no304</a> (currently disabled)
+{% endif %}
+<blockquote id="ad">enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!</blockquote></li>
+{% endif %}
+
+<li><a id="af" href="{{ r }}/?ru">show recent uploads</a></li>
+<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
+</ul>
+
 </div>
 <a href="#" id="repl">π</a>
 {%- if not this.args.nb %}
@@ -108,7 +168,7 @@
 {%- endif %}
 <script>

-var SR = {{ r|tojson }},
+var SR="{{ r }}",
 lang="{{ lang }}",
 dfavico="{{ favico }}";

@@ -118,6 +178,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme
 </script>
 <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
 </body>
 </html>

@@ -9,7 +9,7 @@ var Ls = {
 "e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
 "f1": "du kan betrakte:",
 "g1": "du kan laste opp til:",
-"cc1": "klient-konfigurasjon",
+"cc1": "brytere og sånt:",
 "h1": "skru av k304",
 "i1": "skru på k304",
 "j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper mot visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
@@ -17,25 +17,87 @@ var Ls = {
 "l1": "logg inn:",
 "m1": "velkommen tilbake,",
 "n1": "404: filen finnes ikke ┐( ´ -`)┌",
-"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
+"o1": 'eller kanskje du ikke har tilgang? prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
 "p1": "403: tilgang nektet ~┻━┻",
-"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
+"q1": 'prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
 "r1": "gå hjem",
 ".s1": "kartlegg",
 "t1": "handling",
 "u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
 "v1": "koble til",
-"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
+"v2": "bruk denne serveren som en lokal harddisk",
 "w1": "bytt til https",
+"x1": "bytt passord",
+"y1": "dine delinger",
+"z1": "lås opp område:",
+"ta1": "du må skrive et nytt passord først",
+"ta2": "gjenta for å bekrefte nytt passord:",
+"ta3": "fant en skrivefeil; vennligst prøv igjen",
+"aa1": "innkommende:",
+"ab1": "skru av no304",
+"ac1": "skru på no304",
+"ad1": "no304 stopper all bruk av cache. Hvis ikke k304 var nok, prøv denne. Vil mangedoble dataforbruk!",
+"ae1": "utgående:",
+"af1": "vis nylig opplastede filer",
 },
 "eng": {
 "d2": "shows the state of all active threads",
 "e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
 "u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
-"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
+"v2": "use this server as a local HDD",
+"ta1": "fill in your new password first",
+"ta2": "repeat to confirm new password:",
+"ta3": "found a typo; please try again",
+},
+
+"chi": {
+"a1": "更新",
+"b1": "你好 <small>(你尚未登录)</small>",
+"c1": "登出",
+"d1": "状态",
+"d2": "显示所有活动线程的状态",
+"e1": "重新加载配置",
+"e2": "重新加载配置文件(账户/卷/卷标),$N并重新扫描所有 e2ds 卷$N$N注意:任何全局设置的更改$N都需要完全重启才能生效",
+"f1": "你可以查看:",
+"g1": "你可以上传到:",
+"cc1": "开关等",
+"h1": "关闭 k304",
+"i1": "开启 k304",
+"j1": "k304 会在每个 HTTP 304 时断开连接。这有助于避免某些代理服务器卡住或突然停止加载页面,但也会显著降低性能。",
+"k1": "重置设置",
+"l1": "登录:",
+"m1": "欢迎回来,",
+"n1": "404: 文件不存在 ┐( ´ -`)┌",
+"o1": '或者你可能没有权限?尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
+"p1": "403: 访问被拒绝 ~┻━┻",
+"q1": '尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
+"r1": "回家",
+".s1": "映射",
+"t1": "操作",
+"u2": "自上次服务器写入的时间$N( 上传 / 重命名 / ... )$N$N17d = 17 天$N1h23 = 1 小时 23 分钟$N4m56 = 4 分钟 56 秒",
+"v1": "连接",
+"v2": "将此服务器用作本地硬盘",
+"w1": "切换到 https",
+"x1": "更改密码",
+"y1": "你的分享",
+"z1": "解锁区域",
+"ta1": "请先输入新密码",
+"ta2": "重复以确认新密码:",
+"ta3": "发现拼写错误;请重试",
+"aa1": "正在接收的文件:", //m
+"ab1": "关闭 k304",
+"ac1": "开启 k304",
+"ad1": "启用 no304 将禁用所有缓存;如果 k304 不够,可以尝试此选项。这将消耗大量的网络流量!", //m
+"ae1": "正在下载:", //m
+"af1": "显示最近上传的文件", //m
 }
-},
+};
-d = Ls[sread("cpp_lang", ["eng", "nor"]) || lang] || Ls.eng || Ls.nor;
+if (window.langmod)
+langmod();

+var d = Ls[sread("cpp_lang", Object.keys(Ls)) || lang] ||
+Ls.eng || Ls.nor || Ls.chi;

 for (var k in (d || {})) {
 var f = k.slice(-1),
@@ -66,3 +128,44 @@ if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
 o = ebi('u');
 if (o && /[0-9]+$/.exec(o.innerHTML))
 o.innerHTML = shumantime(o.innerHTML);
+
+ebi('uhash').value = '' + location.hash;
+
+(function() {
+if (!ebi('x'))
+return;
+
+var pwi = ebi('lp');
+
+function redo(msg) {
+modal.alert(msg, function() {
+pwi.value = '';
+pwi.focus();
+});
+}
+function mok(v) {
+if (v !== pwi.value)
+return redo(d.ta3);
+
+pwi.setAttribute('name', 'pw');
+ebi('la').value = 'chpw';
+ebi('lf').submit();
+}
+function stars() {
+var m = ebi('modali');
+function enstars(n) {
+setTimeout(function() { m.value = ''; }, n);
+}
+m.setAttribute('type', 'password');
+enstars(17);
+enstars(32);
+enstars(69);
+}
+ebi('x').onclick = function (e) {
+ev(e);
+if (!pwi.value)
+return redo(d.ta1);
+
+modal.prompt(d.ta2, "y", mok, null, stars);
+};
+})();
@@ -6,7 +6,7 @@
 <title>{{ s_doctitle }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
-<meta name="theme-color" content="#333">
+<meta name="theme-color" content="#{{ tcolor }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
 <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 <style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
@@ -53,10 +53,9 @@
 {% if s %}
 <li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
 {% endif %}
-<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
 <li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
 </ul>

 <p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
 <pre>
 net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
@@ -64,16 +63,7 @@
 </div>

 <div class="os lin">
-<pre>
+<p>rclone (v1.63 or later) is recommended:</p>
-yum install davfs2
-{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
-</pre>
-<p>make it automount on boot:</p>
-<pre>
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
-printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
-</pre>
-<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
 <pre>
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
 rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
@@ -85,6 +75,16 @@
 <li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
 <li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
 </ul>
+<p>alternatively use davfs2 (requires root, is slower, forgets lastmodified-timestamp on upload):</p>
+<pre>
+yum install davfs2
+{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
+</pre>
+<p>make davfs2 automount on boot:</p>
+<pre>
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
+</pre>
 <p>or the emergency alternative (gnome/gui-only):</p>
 <!-- gnome-bug: ignores vp -->
 <pre>
@@ -104,7 +104,7 @@
 <pre>
 http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
 </pre>

 {% if s %}
 <p><em>replace <code>https</code> with <code>http</code> if it doesn't work</em></p>
 {% endif %}
@@ -136,7 +136,6 @@
 {% if args.ftps %}
 <li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
 {% endif %}
-<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
 <li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
 </ul>
 <p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
@@ -191,6 +190,7 @@
 <h1>partyfuse</h1>
 <p>
 <a href="{{ r }}/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
+needs <a href="{{ r }}/.cpr/deps/fuse.py">fuse.py</a> in the same folder,
 <span class="os win">needs <a href="https://winfsp.dev/rel/">winfsp</a></span>
 <span class="os lin">doesn't need root</span>
 </p>
@@ -207,7 +207,6 @@

 {% if args.smb %}
 <h1>SMB / CIFS</h1>
-<em><a href="https://github.com/SecureAuthCorp/impacket/issues/1433">bug:</a> max ~300 files in each folder</em>

 <div class="os win">
 <pre>
@@ -234,7 +233,7 @@
 <a href="#" id="repl">π</a>
 <script>

-var SR = {{ r|tojson }},
+var SR="{{ r }}",
 lang="{{ lang }}",
 dfavico="{{ favico }}";

@@ -244,6 +243,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";
 </script>
 <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
 <script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
+{%- if js %}
+<script src="{{ js }}_={{ ts }}"></script>
+{%- endif %}
 </body>
 </html>

@@ -69,6 +69,18 @@ html {
 top: 2em;
 bottom: unset;
 }
+#toastt {
+position: absolute;
+height: 1px;
+top: 1px;
+right: 1px;
+left: 1px;
+animation: toastt var(--tmtime) 0.07s steps(var(--tmstep)) forwards;
+transform-origin: right;
+}
+@keyframes toastt {
+to {transform: scaleX(0)}
+}
 #toast a {
 color: inherit;
 text-shadow: inherit;
@@ -130,6 +142,9 @@ html {
 #toast.inf #toastc {
 background: #0be;
 }
+#toast.inf #toastt {
+background: #8ef;
+}
 #toast.ok {
 background: #380;
 border-color: #8e4;
@@ -137,6 +152,9 @@ html {
 #toast.ok #toastc {
 background: #8e4;
 }
+#toast.ok #toastt {
+background: #cf9;
+}
 #toast.warn {
 background: #960;
 border-color: #fc0;
@@ -144,6 +162,9 @@ html {
 #toast.warn #toastc {
 background: #fc0;
 }
+#toast.warn #toastt {
+background: #fe9;
+}
 #toast.err {
 background: #900;
 border-color: #d06;
@@ -151,6 +172,9 @@ html {
 #toast.err #toastc {
 background: #d06;
 }
+#toast.err #toastt {
+background: #f9c;
+}
 #toast code {
 padding: 0 .2em;
 background: rgba(0,0,0,0.2);
@@ -184,6 +208,7 @@ html {
 padding: 1.5em 2em;
 border-width: .5em 0;
 }
+.logue code,
 #modalc code,
 #tt code {
 color: #eee;
@@ -264,7 +289,11 @@ html.y #tth {
 box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
 max-width: 50em;
 max-height: 30em;
-overflow: auto;
+overflow-x: auto;
+overflow-y: scroll;
+}
+#modalc.yk {
+overflow-y: auto;
 }
 #modalc td {
 text-align: unset;
@@ -288,6 +317,14 @@ html.y #tth {
 #modalc a {
 color: #07b;
 }
+#modalc .b64 {
+display: block;
+margin: .1em auto;
+width: 60%;
+height: 60%;
+background: #999;
+background: rgba(128,128,128,0.2);
+}
 #modalb {
 position: sticky;
 text-align: right;
@@ -380,6 +417,7 @@ html.y textarea:focus {
 }
 .mdo pre,
 .mdo code,
+.mdo code[class*="language-"],
 .mdo tt {
 font-family: 'scp', monospace, monospace;
 font-family: var(--font-mono), 'scp', monospace, monospace;
@@ -17,10 +17,14 @@ function goto_up2k() {
|
|||||||
var up2k = null,
|
var up2k = null,
|
||||||
up2k_hooks = [],
|
up2k_hooks = [],
|
||||||
hws = [],
|
hws = [],
|
||||||
sha_js = window.WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
hws_ok = 0,
|
||||||
|
hws_ng = false,
|
||||||
|
sha_js = WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||||
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
if (sread('nosubtle') || window.nosubtle)
|
||||||
|
throw 'chickenbit';
|
||||||
var cf = crypto.subtle || crypto.webkitSubtle;
|
var cf = crypto.subtle || crypto.webkitSubtle;
|
||||||
cf.digest('SHA-512', new Uint8Array(1)).then(
|
cf.digest('SHA-512', new Uint8Array(1)).then(
|
||||||
function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },
|
function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },
|
||||||
@@ -152,12 +156,13 @@ function U2pvis(act, btns, uc, st) {
|
|||||||
r.mod0 = null;
|
r.mod0 = null;
|
||||||
|
|
||||||
var markup = {
|
var markup = {
|
||||||
'404': '<span class="err">404</span>',
|
'404': '<span class="err">' + L.utl_404 + '</span>',
|
||||||
'ERROR': '<span class="err">ERROR</span>',
|
'ERROR': '<span class="err">' + L.utl_err + '</span>',
|
||||||
'OS-error': '<span class="err">OS-error</span>',
|
'OS-error': '<span class="err">' + L.utl_oserr + '</span>',
|
||||||
'found': '<span class="inf">found</span>',
|
'found': '<span class="inf">' + L.utl_found + '</span>',
|
||||||
'YOLO': '<span class="inf">YOLO</span>',
|
'defer': '<span class="inf">' + L.utl_defer + '</span>',
|
||||||
'done': '<span class="ok">done</span>',
|
'YOLO': '<span class="inf">' + L.utl_yolo + '</span>',
|
||||||
|
'done': '<span class="ok">' + L.utl_done + '</span>',
|
||||||
};
|
};
|
||||||
|
|
||||||
r.addfile = function (entry, sz, draw) {
|
r.addfile = function (entry, sz, draw) {
|
||||||
@@ -241,7 +246,7 @@ function U2pvis(act, btns, uc, st) {
 p = bd * 100.0 / sz,
 nb = bd - bd0,
 spd = nb / (td / 1000),
-eta = (sz - bd) / spd;
+eta = spd ? (sz - bd) / spd : 3599;

 return [p, s2ms(eta), spd / (1024 * 1024)];
 };
@@ -445,9 +450,7 @@ function U2pvis(act, btns, uc, st) {
 return;

 r.npotato = 0;
-var html = [
-"<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>".format(
-r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
+var html = [L.u_pott.format(r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];

 while (r.head < r.tab.length && has(["ok", "ng"], r.tab[r.head].in))
 r.head++;
@@ -602,7 +605,7 @@ function U2pvis(act, btns, uc, st) {
 if (nf < 9000)
 return go();

-modal.confirm('about to show ' + nf + ' files\n\nthis may crash your browser, are you sure?', go, null);
+modal.confirm(L.u_bigtab.format(nf), go, null);
 };
 }

@@ -658,7 +661,9 @@ function Donut(uc, st) {
 }

 function pos() {
-return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
+return uc.fsearch ?
+Math.max(st.bytes.hashed, st.bytes.finished) :
+st.bytes.inflight + st.bytes.finished;
 }

 r.on = function (ya) {
@@ -690,8 +695,9 @@ function Donut(uc, st) {
 }

 if (++r.tc >= 10) {
+var s = r.eta === null ? 'paused' : r.eta > 60 ? shumantime(r.eta) : (r.eta + 's');
 wintitle("{0}%, {1}, #{2}, ".format(
-f2f(v * 100 / t, 1), shumantime(r.eta), st.files.length - st.nfile.upload), true);
+f2f(v * 100 / t, 1), s, st.files.length - st.nfile.upload), true);
 r.tc = 0;
 }

@@ -717,7 +723,7 @@ function Donut(uc, st) {
 sfx();

 // firefox may forget that filedrops are user-gestures so it can skip this:
-if (uc.upnag && window.Notification && Notification.permission == 'granted')
+if (uc.upnag && Notification && Notification.permission == 'granted')
 new Notification(uc.nagtxt);
 }

@@ -779,8 +785,8 @@ function up2k_init(subtle) {
 };

 setTimeout(function () {
-if (window.WebAssembly && !hws.length)
-fetch(SR + '/.cpr/w.hash.js' + CB);
+if (WebAssembly && !hws.length)
+fetch(SR + '/.cpr/w.hash.js?_=' + TS);
 }, 1000);

 function showmodal(msg) {
@@ -852,7 +858,13 @@ function up2k_init(subtle) {

 setmsg(suggest_up2k, 'msg');

+var u2szs = u2sz.split(','),
+u2sz_min = parseInt(u2szs[0]),
+u2sz_tgt = parseInt(u2szs[1]),
+u2sz_max = parseInt(u2szs[2]);
+
 var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
+stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz_tgt),
 uc = {},
 fdom_ctr = 0,
 biggest_file = 0;
@@ -869,7 +881,7 @@ function up2k_init(subtle) {
 bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
 bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
 bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
-bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
+bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
 bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
 bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);

@@ -1034,7 +1046,7 @@ function up2k_init(subtle) {
 }
 catch (ex) {
 document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
-return modal.alert('your browser does not support drag-and-drop uploading');
+return modal.alert(L.u_nodrop);
 }
 if (btn)
 return;
@@ -1101,7 +1113,7 @@ function up2k_init(subtle) {
 }

 if (!good_files.length && bad_files.length)
-return toast.err(30, "that's not a folder!\n\nyour browser is too old,\nplease try dragdrop instead");
+return toast.err(30, L.u_notdir);

 return read_dirs(null, [], [], good_files, nil_files, bad_files);
 }
@@ -1119,7 +1131,7 @@ function up2k_init(subtle) {
 if (err)
 return modal.alert('sorry, ' + err);

-toast.inf(0, 'Scanning files...');
+toast.inf(0, L.u_scan);

 if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
 tgl_fsearch();
@@ -1207,7 +1219,7 @@ function up2k_init(subtle) {
 match = false;

 if (match) {
-var msg = ['directory iterator got stuck on the following {0} items; good chance your browser is about to spinlock:<ul>'.format(missing.length)];
+var msg = [L.u_dirstuck.format(missing.length) + '<ul>'];
 for (var a = 0; a < Math.min(20, missing.length); a++)
 msg.push('<li>' + esc(missing[a]) + '</li>');

@@ -1278,7 +1290,7 @@ function up2k_init(subtle) {
 }

 function gotallfiles(good_files, nil_files, bad_files) {
-if (toast.txt == 'Scanning files...')
+if (toast.txt == L.u_scan)
 toast.hide();

 if (uc.fsearch && !uc.turbo)
@@ -1347,9 +1359,21 @@ function up2k_init(subtle) {
 var evpath = get_evpath(),
 draw_each = good_files.length < 50;

-if (window.WebAssembly && !hws.length) {
-for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
-hws.push(new Worker(SR + '/.cpr/w.hash.js' + CB));
+if (WebAssembly && !hws.length) {
+var nw = Math.min(navigator.hardwareConcurrency || 4, 16);
+
+if (CHROME) {
+// chrome-bug 383568268 // #124
+nw = Math.max(1, (nw > 4 ? 4 : (nw - 1)));
+nw = (subtle && !MOBILE && nw > 2) ? 2 : nw;
+}
+
+for (var a = 0; a < nw; a++)
+hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));
+
+if (!subtle)
+for (var a = 0; a < hws.length; a++)
+hws[a].postMessage('nosubtle');
+
 console.log(hws.length + " hashers");
 }
@@ -1434,7 +1458,7 @@ function up2k_init(subtle) {
 if (!actx || actx.state != 'suspended' || toast.visible)
 return;

-toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
+toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">" + L.u_actx + "</div>");
 }, 500);
 }

@@ -1476,7 +1500,7 @@ function up2k_init(subtle) {
 ev(e);
 var txt = linklist();
 cliptxt(txt + '\n', function () {
-toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
+toast.inf(5, un_clip.format(txt.split('\n').length));
 });
 };

@@ -1541,8 +1565,10 @@ function up2k_init(subtle) {
 if (nhash) {
 st.time.hashing += td;
 t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
-if (uc.fsearch)
+if (uc.fsearch) {
+st.time.busy += td;
 t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
+}
 }

 var b_up = st.bytes.inflight + st.bytes.uploaded,
@@ -1736,16 +1762,13 @@ function up2k_init(subtle) {
 }
 }

-var mou_ikkai = false;
-
-if (st.busy.handshake.length &&
-st.busy.handshake[0].t_busied < now - 30 * 1000
-) {
-console.log("retrying stuck handshake");
-var t = st.busy.handshake.shift();
-st.todo.handshake.unshift(t);
+if (st.bytes.inflight && (st.bytes.inflight < 0 || !st.busy.upload.length)) {
+console.log('insane inflight ' + st.bytes.inflight);
+st.bytes.inflight = 0;
 }

+var mou_ikkai = false;
+
 var nprev = -1;
 for (var a = 0; a < st.todo.upload.length; a++) {
 var nf = st.todo.upload[a].nfile;
@@ -1864,10 +1887,12 @@ function up2k_init(subtle) {

 function chill(t) {
 var now = Date.now();
-if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
+if ((t.coolmul || 0) < 5 || now - t.cooldown < t.coolmul * 700)
 t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);

-t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
+var cd = now + 1000 * (t.coolmul + Math.random() * 4 + 2);
+t.cooldown = Math.floor(Math.max(cd, t.cooldown || 1));
+return t;
 }

 /////
@@ -1952,7 +1977,7 @@ function up2k_init(subtle) {
 pvis.setab(t.n, nchunks);
 pvis.move(t.n, 'bz');

-if (hws.length && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
+if (hws.length && !hws_ng && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
 // resolving subtle.digest w/o worker takes 1sec on blur if the actx hack breaks
 return wexec_hash(t, chunksize, nchunks);

@@ -2061,16 +2086,27 @@ function up2k_init(subtle) {
 free = [],
 busy = {},
 nbusy = 0,
+init = 0,
 hashtab = {},
 mem = (MOBILE ? 128 : 256) * 1024 * 1024;

+if (!hws_ok)
+init = setTimeout(function() {
+hws_ng = true;
+toast.warn(30, 'webworkers failed to start\n\nwill be a bit slower due to\nhashing on main-thread');
+apop(st.busy.hash, t);
+st.todo.hash.unshift(t);
+exec_hash();
+}, 5000);
+
 for (var a = 0; a < hws.length; a++) {
 var w = hws[a];
-free.push(w);
 w.onmessage = onmsg;
+if (init)
+w.postMessage('ping');
+if (mem > 0)
+free.push(w);
 mem -= chunksize;
-if (mem <= 0)
-break;
 }

 function go_next() {
@@ -2100,6 +2136,12 @@ function up2k_init(subtle) {
 d = d.data;
 var k = d[0];

+if (k == "pong")
+if (++hws_ok == hws.length) {
+clearTimeout(init);
+go_next();
+}
+
 if (k == "panic")
 return vis_exh(d[1], 'up2k.js', '', '', d[1]);

@@ -2162,7 +2204,8 @@ function up2k_init(subtle) {
 tasker();
 }
 }
-go_next();
+if (!init)
+go_next();
 }

 /////
@@ -2178,7 +2221,7 @@ function up2k_init(subtle) {
 st.busy.head.push(t);

 var xhr = new XMLHttpRequest();
-xhr.onerror = function () {
+xhr.onerror = xhr.ontimeout = function () {
 console.log('head onerror, retrying', t.name, t);
 if (!toast.visible)
 toast.warn(9.98, L.u_enethd + "\n\nfile: " + t.name, t);
@@ -2222,6 +2265,7 @@ function up2k_init(subtle) {
 try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
 };

+xhr.timeout = 34000;
 xhr.open('HEAD', t.purl + uricom_enc(t.name), true);
 xhr.send();
 }
@@ -2246,8 +2290,11 @@ function up2k_init(subtle) {
 if (keepalive)
 console.log("sending keepalive handshake", t.name, t);

+if (!t.srch && !t.t_handshake)
+pvis.seth(t.n, 2, L.u_hs);
+
 var xhr = new XMLHttpRequest();
-xhr.onerror = function () {
+xhr.onerror = xhr.ontimeout = function () {
 if (t.t_busied != me) // t.done ok
 return console.log('zombie handshake onerror', t.name, t);

@@ -2256,8 +2303,7 @@ function up2k_init(subtle) {

 console.log('handshake onerror, retrying', t.name, t);
 apop(st.busy.handshake, t);
-st.todo.handshake.unshift(t);
-t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
+st.todo.handshake.unshift(chill(t));
 t.keepalive = keepalive;
 };
 var orz = function (e) {
@@ -2270,9 +2316,9 @@ function up2k_init(subtle) {
 }
 catch (ex) {
 apop(st.busy.handshake, t);
-st.todo.handshake.unshift(t);
-t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
-return toast.err(0, 'Handshake error; will retry...\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
+st.todo.handshake.unshift(chill(t));
+var txt = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
+return toast.err(0, txt + '\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
 }

 t.t_handshake = Date.now();
@@ -2371,14 +2417,45 @@ function up2k_init(subtle) {
 msg = 'done';

 if (t.postlist.length) {
+if (t.rechecks && QS('#opa_del.act'))
+toast.inf(30, L.u_started, L.u_unpt);
+
 var arr = st.todo.upload,
 sort = arr.length && arr[arr.length - 1].nfile > t.n;

-for (var a = 0; a < t.postlist.length; a++)
+if (!t.stitch_sz) {
+// keep all connections busy
+var bpc = (st.bytes.total - st.bytes.finished) / (parallel_uploads || 1),
+ocs = 1024 * 1024,
+stp = 1024 * 512,
+ccs = ocs;
+while (ccs < bpc) {
+ocs = ccs;
+ccs += stp; if (ccs < bpc) ocs = ccs;
+ccs += stp; stp *= 2;
+}
+ocs = Math.floor(ocs / 1024 / 1024);
+t.stitch_sz = Math.min(ocs, stitch_tgt);
+}
+
+for (var a = 0; a < t.postlist.length; a++) {
+var nparts = [], tbytes = 0, stitch = t.stitch_sz;
+if (t.nojoin && t.nojoin - t.postlist.length < 6)
+stitch = 1;
+
+--a;
+for (var b = 0; b < stitch; b++) {
+nparts.push(t.postlist[++a]);
+tbytes += chunksize;
+if (tbytes + chunksize > stitch * 1024 * 1024 || t.postlist[a + 1] - t.postlist[a] !== 1)
+break;
+}
 arr.push({
 'nfile': t.n,
-'npart': t.postlist[a]
+'nparts': nparts
 });
+}
+t.nojoin = 0;

 msg = null;
 done = false;
@@ -2387,7 +2464,7 @@ function up2k_init(subtle) {
 arr.sort(function (a, b) {
 return a.nfile < b.nfile ? -1 :
 /* */ a.nfile > b.nfile ? 1 :
-a.npart < b.npart ? -1 : 1;
+/* */ a.nparts[0] < b.nparts[0] ? -1 : 1;
 });
 }

@@ -2421,8 +2498,10 @@ function up2k_init(subtle) {
 else {
 pvis.seth(t.n, 1, "ERROR");
 pvis.seth(t.n, 2, L.u_ehstmp, t);
+apop(st.busy.handshake, t);

 var err = "",
+cls = "ERROR",
 rsp = unpre(xhr.responseText),
 ofs = rsp.lastIndexOf('\nURL: ');

@@ -2433,7 +2512,6 @@ function up2k_init(subtle) {
 var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
 console.log("rate-limit: " + penalty);
 t.cooldown = Date.now() + parseFloat(penalty) * 1000;
-apop(st.busy.handshake, t);
 st.todo.handshake.unshift(t);
 return;
 }
@@ -2452,23 +2530,33 @@ function up2k_init(subtle) {
 if (!t.rechecks && (err_pend || err_srcb)) {
 t.rechecks = 0;
 t.want_recheck = true;
+if (st.busy.upload.length || st.busy.handshake.length || st.bytes.uploaded) {
+err = L.u_dupdefer;
+cls = 'defer';
+}
+}
+if (err_pend) {
+err += ' <a href="#" onclick="toast.inf(60, L.ue_ab);" class="fsearch_explain">(' + L.u_expl + ')</a>';
 }
 }
-if (rsp.indexOf('server HDD is full') + 1)
-return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));

 if (err != "") {
 if (!t.t_uploading)
 st.bytes.finished += t.size;

-pvis.seth(t.n, 1, "ERROR");
+pvis.seth(t.n, 1, cls);
 pvis.seth(t.n, 2, err);
 pvis.move(t.n, 'ng');

-apop(st.busy.handshake, t);
 tasker();
 return;
 }

+st.todo.handshake.unshift(chill(t));
+
+if (rsp.indexOf('server HDD is full') + 1)
+return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
+
 err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
 xhrchk(xhr, err + "\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found", "warn", t);
 }
@@ -2493,6 +2581,8 @@ function up2k_init(subtle) {

 xhr.open('POST', t.purl, true);
 xhr.responseType = 'text';
+xhr.timeout = 42000 + (t.srch || t.t_uploaded ? 0 :
+(t.size / (1048 * 20))); // safededup 20M/s hdd
 xhr.send(JSON.stringify(req));
 }

@@ -2534,8 +2624,10 @@ function up2k_init(subtle) {
 function exec_upload() {
 var upt = st.todo.upload.shift(),
 t = st.files[upt.nfile],
-npart = upt.npart,
-tries = 0;
+nparts = upt.nparts,
+pcar = nparts[0],
+pcdr = nparts[nparts.length - 1],
+maxsz = (u2sz_max > 1 ? u2sz_max : 2040) * 1024 * 1024;

 if (t.done)
 return console.log('done; skip chunk', t.name, t);
@@ -2549,45 +2641,82 @@ function up2k_init(subtle) {
 pvis.seth(t.n, 1, "🚀 send");

 var chunksize = get_chunksize(t.size),
-car = npart * chunksize,
-cdr = car + chunksize;
+car = pcar * chunksize,
+cdr = (pcdr + 1) * chunksize;

 if (cdr >= t.size)
 cdr = t.size;

+if (cdr - car <= maxsz)
+return upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car, []);
+
+var car0 = car, subs = [];
+while (car < cdr) {
+subs.push([car, Math.min(cdr, car + maxsz)]);
+car += maxsz;
+}
+upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);
+}
+
+function upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car0, subs) {
+var nparts = upt.nparts,
+is_sub = subs.length;
+
+if (is_sub) {
+var x = subs.shift();
+car = x[0];
+cdr = x[1];
+}
+
+var snpart = is_sub ? ('' + pcar + '(' + (car-car0) +'+'+ (cdr-car)) :
+pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr);
+
 var orz = function (xhr) {
 st.bytes.inflight -= xhr.bsent;
 var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
 if (txt.indexOf('upload blocked by x') + 1) {
 apop(st.busy.upload, upt);
-apop(t.postlist, npart);
+for (var a = pcar; a <= pcdr; a++)
+apop(t.postlist, a);
 pvis.seth(t.n, 1, "ERROR");
 pvis.seth(t.n, 2, txt.split(/\n/)[0]);
 pvis.move(t.n, 'ng');
 return;
 }
 if (xhr.status == 200) {
-pvis.prog(t, npart, cdr - car);
+car = car0;
+if (subs.length)
+return upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);
+
+var bdone = cdr - car;
+for (var a = pcar; a <= pcdr; a++) {
+pvis.prog(t, a, Math.min(bdone, chunksize));
+bdone -= chunksize;
+}
 st.bytes.finished += cdr - car;
 st.bytes.uploaded += cdr - car;
 t.bytes_uploaded += cdr - car;
+t.cooldown = t.coolmul = 0;
 st.etac.u++;
 st.etac.t++;
 }
 else if (txt.indexOf('already got that') + 1 ||
 txt.indexOf('already being written') + 1) {
-console.log("ignoring dupe-segment error", t.name, t);
+t.nojoin = t.nojoin || t.postlist.length;
+console.log("ignoring dupe-segment with backoff", t.nojoin, t.name, t);
+if (!toast.visible && st.todo.upload.length < 4)
+toast.inf(10, L.u_cbusy);
 }
 else {
-xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
+xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);

 chill(t);
 }
 orz2(xhr);
 }
 var orz2 = function (xhr) {
 apop(st.busy.upload, upt);
-apop(t.postlist, npart);
+for (var a = pcar; a <= pcdr; a++)
+apop(t.postlist, a);
 if (!t.postlist.length) {
 t.t_uploaded = Date.now();
 pvis.seth(t.n, 1, 'verifying');
@@ -2601,37 +2730,63 @@ function up2k_init(subtle) {
 btot = Math.floor(st.bytes.total / 1024 / 1024);

 xhr.upload.onprogress = function (xev) {
-var nb = xev.loaded;
-st.bytes.inflight += nb - xhr.bsent;
+var nb = xev.loaded,
+db = nb - xhr.bsent;
+
+if (!db)
+return;
+
+st.bytes.inflight += db;
 xhr.bsent = nb;
-pvis.prog(t, npart, nb);
+xhr.timeout = 64000 + Date.now() - xhr.t0;
+pvis.prog(t, pcar, nb);
 };
 xhr.onload = function (xev) {
 try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
 };
-xhr.onerror = function (xev) {
+xhr.onerror = xhr.ontimeout = function (xev) {
 if (crashed)
 return;

 st.bytes.inflight -= (xhr.bsent || 0);
+xhr.bsent = 0;
+
 if (!toast.visible)
-toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
+toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);

-console.log('chunkpit onerror,', ++tries, t.name, t);
+t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
+console.log('chunkpit onerror,', t.name, t);
 orz2(xhr);
 };

+var chashes = [],
+ctxt = t.hash[pcar],
+plen = Math.floor(192 / nparts.length);
+
+plen = plen > 9 ? 9 : plen < 2 ? 2 : plen;
+for (var a = pcar + 1; a <= pcdr; a++)
+chashes.push(t.hash[a].slice(0, plen));
+
+if (chashes.length)
+ctxt += ',' + plen + ',' + chashes.join('');
+
 xhr.open('POST', t.purl, true);
-xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
+xhr.setRequestHeader("X-Up2k-Hash", ctxt);
 xhr.setRequestHeader("X-Up2k-Wark", t.wark);
+if (is_sub)
+xhr.setRequestHeader("X-Up2k-Subc", car - car0);
+
 xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
 pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
-st.eta.t.split(' ').pop()));
+st.eta.t.indexOf('/s, ')+1 ? st.eta.t.split(' ').pop() : 'x'));

 xhr.setRequestHeader('Content-Type', 'application/octet-stream');
 if (xhr.overrideMimeType)
 xhr.overrideMimeType('Content-Type', 'application/octet-stream');

 xhr.bsent = 0;
+xhr.t0 = Date.now();
+xhr.timeout = 42000;
 xhr.responseType = 'text';
 xhr.send(t.fobj.slice(car, cdr));
 }
@@ -2732,13 +2887,34 @@ function up2k_init(subtle) {
 if (parallel_uploads > 16)
 parallel_uploads = 16;

-if (parallel_uploads > 7)
+if (parallel_uploads > 6)
 toast.warn(10, L.u_maxconn);
+else if (toast.txt == L.u_maxconn)
+toast.hide();

 obj.value = parallel_uploads;
 bumpthread({ "target": 1 });
 }

+var read_u2sz = function () {
+var el = ebi('u2szg'), n = parseInt(el.value);
+stitch_tgt = n = (
+isNaN(n) ? u2sz_tgt :
+n < u2sz_min ? u2sz_min :
+n > u2sz_max ? u2sz_max : n
+);
+if (n == u2sz_tgt) sdrop('u2sz'); else swrite('u2sz', n);
+if (el.value != n) el.value = n;
+};
+ebi('u2szg').addEventListener('blur', read_u2sz);
+ebi('u2szg').onkeydown = function (e) {
+if (anymod(e)) return;
+var n = e.code == 'ArrowUp' ? 1 : e.code == 'ArrowDown' ? -1 : 0;
+if (!n) return;
+this.value = parseInt(this.value) + n;
+read_u2sz();
+}
+
 function tgl_fsearch() {
 set_fsearch(!uc.fsearch);
 }
@@ -2950,7 +3126,7 @@ function up2k_init(subtle) {
 }

 function set_hashw() {
-if (!window.WebAssembly) {
+if (!WebAssembly) {
 bcfg_set('hashw', uc.hashw = false);
 toast.err(10, L.u_nowork);
 }
@@ -2967,7 +3143,7 @@ function up2k_init(subtle) {
 nopenag();
 }

-if (!window.Notification || !HTTPS)
+if (!Notification || !HTTPS)
 return nopenag();

 if (en && Notification.permission == 'default')
@@ -2989,7 +3165,7 @@ function up2k_init(subtle) {
 };
 }

-if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
+if (uc.upnag && (!Notification || Notification.permission != 'granted'))
 bcfg_set('upnag', uc.upnag = false);

 ebi('nthread_add').onclick = function (e) {

@@ -5,10 +5,17 @@ if (!window.console || !console.log)
 "log": function (msg) { }
 };

+if (!Object.assign)
+Object.assign = function (a, b) {
+for (var k in b)
+a[k] = b[k];
+};
+
+if (window.CGV1)
+Object.assign(window, window.CGV1);
+
 if (window.CGV)
-for (var k in CGV)
-window[k] = CGV[k];
+Object.assign(window, window.CGV);


 var wah = '',
@@ -16,7 +23,6 @@ var wah = '',
 NOAC = 'autocorrect="off" autocapitalize="off"',
 L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
 T0 = Date.now(),
-CB = '?_=' + Math.floor(T0 / 1000).toString(36),
 R = SR.slice(1),
 RS = R ? "/" + R : "",
 HALFMAX = 8192 * 8192 * 8192 * 8192,
@@ -52,8 +58,6 @@ catch (ex) {
 }

 try {
-CB = '?' + document.currentScript.src.split('?').pop();
-
 if (navigator.userAgentData.mobile)
 MOBILE = true;

@@ -130,13 +134,13 @@ if ((document.location + '').indexOf(',rej,') + 1)

 try {
 console.hist = [];
-var CMAXHIST = 100;
+var CMAXHIST = MOBILE ? 9000 : 44000;
 var hook = function (t) {
 var orig = console[t].bind(console),
 cfun = function () {
 console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
 if (console.hist.length > CMAXHIST)
-console.hist = console.hist.slice(CMAXHIST / 2);
+console.hist = console.hist.slice(CMAXHIST / 4);

 orig.apply(console, arguments);
 };
@@ -182,7 +186,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
 if (url.indexOf('easymde.js') + 1)
 return; // clicking the preview pane

-if (url.indexOf('deps/marked.js') + 1 && !window.WebAssembly)
+if (url.indexOf('deps/marked.js') + 1 && !WebAssembly)
 return; // ff<52

 crashed = true;
@@ -476,6 +480,24 @@ function crc32(str) {
 }


+function randstr(len) {
+var ret = '';
+try {
+var ar = new Uint32Array(Math.floor((len + 3) / 4));
+crypto.getRandomValues(ar);
+for (var a = 0; a < ar.length; a++)
+ret += ('000' + ar[a].toString(36)).slice(-4);
+return ret.slice(0, len);
+}
+catch (ex) {
+console.log('using unsafe randstr because ' + ex);
+while (ret.length < len)
+ret += ('000' + Math.floor(Math.random() * 1679616).toString(36)).slice(-4);
+return ret.slice(0, len);
+}
+}
+
+
 function clmod(el, cls, add) {
 if (!el)
 return false;
@@ -520,6 +542,14 @@ function clgot(el, cls) {
 }


+function setcvar(k, v) {
+try {
+document.documentElement.style.setProperty(k, v);
+}
+catch (e) { }
+}
+
+
 var ANIM = true;
 try {
 var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
@@ -548,7 +578,9 @@ function yscroll() {

 function showsort(tab) {
 var v, vn, v1, v2, th = tab.tHead,
-sopts = jread('fsort', jcp(dsort));
+sopts = jread('fsort');
+
+sopts = sopts && sopts.length ? sopts : dsort;

 th && (th = th.rows[0]) && (th = th.cells);

@@ -585,10 +617,13 @@ function sortTable(table, col, cb) {
 tr = Array.prototype.slice.call(tb.rows, 0),
 i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;

-var stype = th[col].getAttribute('sort');
+var kname = th[col].getAttribute('name'),
+stype = th[col].getAttribute('sort');
 try {
-var nrules = [], rules = jread("fsort", []);
-rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
+var nrules = [],
+rules = kname == 'href' ? [] : jread("fsort", []);
+
+rules.unshift([kname, reverse, stype || '']);
 for (var a = 0; a < rules.length; a++) {
 var add = true;
 for (var b = 0; b < a; b++)
@@ -740,6 +775,15 @@ function vjoin(p1, p2) {
 }


+function addq(url, q) {
+var uh = url.split('#', 1),
+u = uh[0],
+h = uh.length == 1 ? '' : '#' + uh[1];
+
+return u + (u.indexOf('?') < 0 ? '?' : '&') + (q === undefined ? '' : q) + h;
+}
+
+
 function uricom_enc(txt, do_fb_enc) {
 try {
 return encodeURIComponent(txt);
@@ -842,6 +886,11 @@ if (window.Number && Number.isFinite)

 function f2f(val, nd) {
 // 10.toFixed(1) returns 10.00 for certain values of 10
+if (!isNum(val)) {
+val = parseFloat(val);
+if (!isNum(val))
+val = 999;
+}
 val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
 return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
 }
@@ -898,15 +947,18 @@ function shumantime(v, long) {


 function lhumantime(v) {
-var t = shumantime(v, 1),
-tp = t.replace(/([a-z])/g, " $1 ").split(/ /g).slice(0, -1);
+var t = shumantime(v, 1);
+if (/[0-9]$/.exec(t))
+t += 's';
+
+var tp = t.replace(/([a-z])/g, " $1 ").split(/ /g).slice(0, -1);

 if (!L || tp.length < 2 || tp[1].indexOf('$') + 1)
 return t;

 var ret = '';
 for (var a = 0; a < tp.length; a += 2)
-ret += tp[a] + ' ' + L['ht_' + tp[a + 1]].replace(tp[a] == 1 ? /!.*/ : /!/, '') + L.ht_and;
+ret += tp[a] + ' ' + L['ht_' + tp[a + 1] + (tp[a]==1?1:2)] + L.ht_and;

 return ret.slice(0, -L.ht_and.length);
 }
@@ -935,11 +987,33 @@ function apop(arr, v) {
 }


-function jcp(obj) {
+function jcp1(obj) {
 return JSON.parse(JSON.stringify(obj));
 }


+function jcp2(src) {
+if (Array.isArray(src)) {
+var ret = [];
+for (var a = 0; a < src.length; ++a) {
+var sub = src[a];
+ret.push((sub === null) ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub);
+}
+} else {
+var ret = {};
+for (var key in src) {
+var sub = src[key];
+ret[key] = sub === null ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub;
+}
+}
+return ret;
+};
+
+
+// jcp1 50% faster on android-chrome, jcp2 7x everywhere else
+var jcp = MOBILE && CHROME ? jcp1 : jcp2;
+
+
 function sdrop(key) {
 try {
 STG.removeItem(key);
@@ -1390,10 +1464,10 @@ var tt = (function () {
 o = ctr.querySelectorAll('*[tt]');

 for (var a = o.length - 1; a >= 0; a--) {
-o[a].onfocus = _cshow;
-o[a].onblur = _hide;
-o[a].onmouseenter = _dshow;
-o[a].onmouseleave = _hide;
+o[a].addEventListener('focus', _cshow);
+o[a].addEventListener('blur', _hide);
+o[a].addEventListener('mouseenter', _dshow);
+o[a].addEventListener('mouseleave', _hide);
 }
 r.hide();
 }
@@ -1469,7 +1543,7 @@ var toast = (function () {
 clmod(obj, 'vis');
 r.visible = false;
 r.tag = obj;
-if (!window.WebAssembly)
+if (!WebAssembly)
 te = setTimeout(function () {
 obj.className = 'hide';
 }, 500);
@@ -1492,13 +1566,26 @@ var toast = (function () {
 if (sec)
 te = setTimeout(r.hide, sec * 1000);

-if (same && delta < 1000)
+if (same && delta < 1000) {
+var tb = ebi('toastt');
+if (tb) {
+tb.style.animation = 'none';
+tb.offsetHeight;
+tb.style.animation = null;
+}
 return;
+}

 if (txt.indexOf('<body>') + 1)
 txt = txt.slice(0, txt.indexOf('<')) + ' [...]';

-obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
+var html = '';
+if (sec) {
+setcvar('--tmtime', (sec - 0.15) + 's');
+setcvar('--tmstep', Math.floor(sec * 20));
+html += '<div id="toastt"></div>';
+}
+obj.innerHTML = html + '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
 obj.className = cl;
 sec += obj.offsetWidth;
 obj.className += ' vis';
@@ -1530,9 +1617,12 @@ var modal = (function () {
 var r = {},
 q = [],
 o = null,
+scrolling = null,
 cb_up = null,
 cb_ok = null,
 cb_ng = null,
+sel_0 = 0,
+sel_1 = 0,
 tok, tng, prim, sec, ok_cancel;

 r.load = function () {
@@ -1548,6 +1638,7 @@ var modal = (function () {
 r.nofocus = 0;

 r.show = function (html) {
+tt.hide();
 o = mknod('div', 'modal');
 o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
 document.body.appendChild(o);
@@ -1566,11 +1657,12 @@ var modal = (function () {
 (inp || a).focus();
 if (inp)
 setTimeout(function () {
-inp.setSelectionRange(0, inp.value.length, "forward");
+inp.setSelectionRange(sel_0, sel_1, "forward");
 }, 0);

 document.addEventListener('focus', onfocus);
 document.addEventListener('selectionchange', onselch);
+timer.add(scrollchk, 1);
 timer.add(onfocus);
 if (cb_up)
 setTimeout(cb_up, 1);
@@ -1578,6 +1670,8 @@ var modal = (function () {

 r.hide = function () {
 timer.rm(onfocus);
+timer.rm(scrollchk);
+scrolling = null;
 try {
 ebi('modal-ok').removeEventListener('blur', onblur);
 }
@@ -1596,13 +1690,28 @@ var modal = (function () {
 r.hide();
 if (cb_ok)
 cb_ok(v);
-}
+};
 var ng = function (e) {
 ev(e);
 r.hide();
 if (cb_ng)
 cb_ng(null);
-}
+};
+
+var scrollchk = function () {
+if (scrolling === true)
+return;
+
+var o = ebi('modalc'),
+vis = o.offsetHeight,
+all = o.scrollHeight,
+nsc = 8 + vis < all;
+
+if (scrolling !== nsc)
+clmod(o, 'yk', !nsc);
+
+scrolling = nsc;
+};
+
 var onselch = function () {
 try {
@@ -1689,16 +1798,18 @@ var modal = (function () {
 r.show(html);
 }

-r.prompt = function (html, v, cok, cng, fun) {
+r.prompt = function (html, v, cok, cng, fun, so0, so1) {
 q.push(function () {
-_prompt(lf2br(html), v, cok, cng, fun);
+_prompt(lf2br(html), v, cok, cng, fun, so0, so1);
 });
 next();
 }
-var _prompt = function (html, v, cok, cng, fun) {
+var _prompt = function (html, v, cok, cng, fun, so0, so1) {
 cb_ok = cok;
 cb_ng = cng === undefined ? cok : null;
 cb_up = fun;
+sel_0 = so0 || 0;
+sel_1 = so1 === undefined ? v.length : so1;
 html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
 r.show(html);

@@ -1885,7 +1996,7 @@ function md_thumbs(md) {
 float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';

 if (!/[?&]cache/.exec(url))
-url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';
+url = addq(url, 'cache=i');

 md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
 }
@@ -2014,6 +2125,9 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
 if (xhr.status == 404)
 return toast.err(0, prefix + e404 + suf, tag);

+if (!xhr.status && !errtxt)
+return toast.err(0, prefix + L.xhr0);
+
 if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
 var now = Date.now(), td = now - cf_cha_t;
 if (td < 15000)

@@ -20,6 +20,7 @@ catch (ex) {
 function load_fb() {
 subtle = null;
 importScripts('deps/sha512.hw.js');
+console.log('using fallback hasher');
 }


@@ -29,6 +30,12 @@ var reader = null,


 onmessage = (d) => {
+if (d.data == 'nosubtle')
+return load_fb();
+
+if (d.data == 'ping')
+return postMessage(['pong']);
+
 if (busy)
 return postMessage(["panic", 'worker got another task while busy']);

@@ -5,9 +5,6 @@ a living list of upcoming features / fixes / changes, very roughly in order of p
 * maybe resumable downloads (chrome-only, jank api)
 * maybe checksum validation (return sha512 of requested range in responses, and probably also warks)

-* [github issue #64](https://github.com/9001/copyparty/issues/64) - dirkeys 2nd season
-* popular feature request, finally time to refactor browser.js i suppose...
-
 * [github issue #37](https://github.com/9001/copyparty/issues/37) - upload PWA
 * or [maybe not](https://arstechnica.com/tech-policy/2024/02/apple-under-fire-for-disabling-iphone-web-apps-eu-asks-developers-to-weigh-in/), or [maybe](https://arstechnica.com/gadgets/2024/03/apple-changes-course-will-keep-iphone-eu-web-apps-how-they-are-in-ios-17-4/)

docs/changelog.md (1095): file diff suppressed because it is too large
@@ -1,7 +1,7 @@
 ## devnotes toc

 * top
-* [future plans](#future-plans) - some improvement ideas
+* [future ideas](#future-ideas) - list of dreams which will probably never happen
 * [design](#design)
 * [up2k](#up2k) - quick outline of the up2k protocol
 * [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
@@ -12,6 +12,8 @@
 * [write](#write)
 * [admin](#admin)
 * [general](#general)
+* [event hooks](#event-hooks) - on writing your own [hooks](../README.md#event-hooks)
+* [hook effects](#hook-effects) - hooks can cause intentional side-effects
 * [assumptions](#assumptions)
 * [mdns](#mdns)
 * [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
@@ -20,13 +22,14 @@
 * [just the sfx](#just-the-sfx)
 * [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
 * [complete release](#complete-release)
-* [todo](#todo) - roughly sorted by priority
+* [debugging](#debugging)
+* [music playback halting on phones](#music-playback-halting-on-phones) - mostly fine on android
 * [discarded ideas](#discarded-ideas)


-# future plans
+# future ideas

-some improvement ideas
+list of dreams which will probably never happen

 * the JS is a mess -- a ~~preact~~ rewrite would be nice
 * preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
@@ -54,8 +57,8 @@ quick outline of the up2k protocol, see [uploading](https://github.com/9001/cop
 * server creates the `wark`, an identifier for this upload
 * `sha512( salt + filesize + chunk_hashes )`
 * and a sparse file is created for the chunks to drop into
-* client uploads each chunk
-* header entries for the chunk-hash and wark
+* client sends a series of POSTs, with one or more consecutive chunks in each
+* header entries for the chunk-hashes (comma-separated) and wark
 * server writes chunks into place based on the hash
 * client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload

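For illustration only, a minimal sketch of the chunk-POST step described above, not the actual up2k client: it assumes `purl`, `wark` and `chunkHash` were obtained from an earlier handshake response, and it simplifies the stitched (multi-chunk) header format.

```js
// illustrative sketch; names and the exact joined-hash format are simplified
function sendChunk(purl, wark, chunkHash, blob) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', purl, true);
    // identifies which chunk this POST carries; the real client may join
    // several consecutive chunk-hashes (comma-separated) into one request
    xhr.setRequestHeader('X-Up2k-Hash', chunkHash);
    xhr.setRequestHeader('X-Up2k-Wark', wark);
    xhr.setRequestHeader('Content-Type', 'application/octet-stream');
    xhr.onload = function () {
        if (xhr.status != 200)
            console.log('chunk rejected, will be retried:', xhr.responseText);
    };
    xhr.send(blob); // the raw bytes for this chunk range
}
```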
@@ -133,10 +136,19 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
 | GET | `?zip=utf-8` | ...as a zip file |
 | GET | `?zip` | ...as a WinXP-compatible zip file |
 | GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
+| GET | `?tar&w` | pregenerate webp thumbnails |
+| GET | `?tar&j` | pregenerate jpg thumbnails |
+| GET | `?tar&p` | pregenerate audio waveforms |
+| GET | `?shares` | list your shared files/folders |
+| GET | `?dls` | show active downloads (do this as admin) |
 | GET | `?ups` | show recent uploads from your IP |
 | GET | `?ups&filter=f` | ...where URL contains `f` |
+| GET | `?ru` | show all recent uploads |
+| GET | `?ru&filter=f` | ...where URL contains `f` |
+| GET | `?ru&j` | ...as json |
 | GET | `?mime=foo` | specify return mimetype `foo` |
 | GET | `?v` | render markdown file at URL |
+| GET | `?v` | open image/video/audio in mediaplayer |
 | GET | `?txt` | get file at URL as plaintext |
 | GET | `?txt=iso-8859-1` | ...with specific charset |
 | GET | `?th` | get image/video at URL as thumbnail |
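As a rough usage sketch of the read-only parameters in the table above; the volume path, filter string and password below are placeholders:

```js
// show recent uploads from your IP where the URL contains "flac",
// authenticating with the &pw= url param mentioned above
fetch('/music/?ups&filter=flac&pw=hunter2')
    .then(function (r) { return r.text(); })
    .then(function (body) { console.log(body); });
```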
@@ -155,18 +167,27 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`

| method | params | result |
|--|--|--|
| POST | `?copy=/foo/bar` | copy the file/folder at URL to /foo/bar |
| POST | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |

| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
| PUT | `?ck` | (binary data) | upload without checksum gen (faster) |
| PUT | `?ck=md5` | (binary data) | return md5 instead of sha512 |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
| mPOST | `?ck` | `f=FILE` | ...and disable checksum gen (faster) |
| mPOST | `?ck=md5` | `f=FILE` | ...and return md5 instead of sha512 |
| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| POST | `?delete` | | delete URL recursively |
| POST | `?eshare=rm` | | stop sharing a file/folder |
| POST | `?eshare=3` | | set expiration to 3 minutes |
| jPOST | `?share` | (complicated) | create temp URL for file/folder |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
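a sketch of how some of these look from the command line; host, paths and password are placeholders, and the explicit json content-type on the last line is an assumption (the table above only says the body is json):

```
# PUT a file, asking for md5 instead of sha512
curl -T song.flac 'http://127.0.0.1:3923/music/song.flac?ck=md5&pw=hunter2'

# multipart upload into the folder at URL, with a json reply
curl -F f=@song.flac 'http://127.0.0.1:3923/music/?j&pw=hunter2'

# create a subfolder, then delete two paths with a json body
curl -F act=mkdir -F name=demos 'http://127.0.0.1:3923/music/?pw=hunter2'
curl -X POST -H 'Content-Type: application/json' -d '["/music/demos","/music/song.flac"]' 'http://127.0.0.1:3923/?delete&pw=hunter2'
```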
@@ -178,6 +199,12 @@ upload modifiers:
| `Accept: url` | `want=url` | return just the file URL |
| `Rand: 4` | `rand=4` | generate random filename with 4 characters |
| `Life: 30` | `life=30` | delete file after 30 seconds |
| `CK: no` | `ck` | disable serverside checksum (maybe faster) |
| `CK: md5` | `ck=md5` | return md5 checksum instead of sha512 |
| `CK: sha1` | `ck=sha1` | return sha1 checksum |
| `CK: sha256` | `ck=sha256` | return sha256 checksum |
| `CK: b2` | `ck=b2` | return blake2b checksum |
| `CK: b2s` | `ck=b2s` | return blake2s checksum |

* `life` only has an effect if the volume has a lifetime, and the volume lifetime must be greater than the file's
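a hedged example combining a few of these modifiers in one request (placeholder host/path/password; remember that `Life` only applies if the volume has a lifetime):

```
# upload, reply with just the URL, use an 8-char random filename,
# and self-destruct after an hour
curl -H 'Accept: url' -H 'Rand: 8' -H 'Life: 3600' -F f=@screenshot.png 'http://127.0.0.1:3923/tmp/?pw=hunter2'
```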
@@ -196,6 +223,38 @@ upload modifiers:
| method | params | result |
|--|--|--|
| GET | `?pw=x` | logout |
| GET | `?grid` | ui: show grid-view |
| GET | `?imgs` | ui: show grid-view with thumbnails |
| GET | `?grid=0` | ui: show list-view |
| GET | `?imgs=0` | ui: show list-view |
| GET | `?thumb` | ui, grid-mode: show thumbnails |
| GET | `?thumb=0` | ui, grid-mode: show icons |


# event hooks

on writing your own [hooks](../README.md#event-hooks)


## hook effects

hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout

* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
* `idx` informs copyparty about a new file to index as a consequence of this upload
* `del` tells copyparty to delete an unrelated file by vpath

for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)

a subset of effect types are available for a subset of hook types,

* most hook types (xbu/xau/xbr/xar/xbd/xad/xm) support `idx` and `del` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* most hook types will abort/reject the action if the hook returns nonzero, assuming flag `c` is given, see examples [reject-extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) and [reject-mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `xbu` supports `reloc` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* `xau` supports `reloc` for basic-uploader / webdav only, not up2k or ftp/tftp/smb
  * so clients like sharex are supported, but not dragdrop into browser

to trigger indexing of files `/foo/1.txt` and `/foo/bar/2.txt`, a hook can `print(json.dumps({"idx":{"vp":["/foo/1.txt","/foo/bar/2.txt"]}}))` (and replace "idx" with "del" to delete instead)

* note: paths starting with `/` are absolute URLs, but you can also do `../3.txt` relative to the destination folder of each uploaded file
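to make the above concrete, a minimal sketch of such a hook; it assumes the hook is defined with the `c1` flag and receives the uploaded file's filesystem path as its first argument, and the `/w/pub` to `/pub` volume mapping plus the logfile name are made up for the example (see the linked hook scripts for real invocations):

```
#!/bin/sh
# on every upload, append a line to a logfile inside the volume...
# (assumption: /w/pub on disk is the volume mounted at /pub in copyparty)
echo "$(date -u +%Y-%m-%dT%H:%M:%SZ) $1" >> /w/pub/upload-log.txt

# ...and tell copyparty to (re)index it, using an absolute vpath
# in the json-on-stdout format described above
echo '{"idx":{"vp":["/pub/upload-log.txt"]}}'
```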
# assumptions
@@ -241,6 +300,7 @@ the rest is mostly optional; if you need a working env for vscode or similar
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 strip_hints # MANDATORY
pip install argon2-cffi # password hashing
pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install partftpy # tftp server
@@ -301,19 +361,26 @@ in the `scripts` folder:
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx


# todo
# debugging

roughly sorted by priority
## music playback halting on phones

* nothing! currently
mostly fine on android, but still haven't found a way to massage iphones into behaving well

* conditionally starting/stopping mp.fau according to mp.au.readyState <3 or <4 doesn't help
* loop=true doesn't work, and manually looping mp.fau from an onended also doesn't work (it does nothing)
* assigning fau.currentTime in a timer doesn't work, as safari merely pretends to assign it
* on ios 16.7.7, mp.fau can sometimes make everything visibly work correctly, but no audio is actually hitting the speakers

can be reproduced with `--no-sendfile --s-wr-sz 8192 --s-wr-slp 0.3 --rsp-slp 6` and then play a collection of small audio files with the screen off, `ffmpeg -i track01.cdda.flac -c:a libopus -b:a 128k -segment_time 12 -f segment smol-%02d.opus`


## discarded ideas

* reduce up2k roundtrips
* optimization attempts which didn't improve performance
* start from a chunk index and just go
* remove brokers / multiprocessing stuff; https://github.com/9001/copyparty/tree/no-broker
* terminate client on bad data
* reduce the nesting / indirections in `HttpCli` / `httpcli.py`
* not worth the effort, just throw enough connections at it
* nearly zero benefit from stuff like replacing all the `self.conn.hsrv` with a local `hsrv` variable
* single sha512 across all up2k chunks?
  * crypto.subtle cannot into streaming, would have to use hashwasm, expensive
* separate sqlite table per tag
45	docs/examples/docker/portainer.md	Normal file
@@ -0,0 +1,45 @@
the following setup appears to work (copyparty starts, accepts uploads, is able to persist config)

tested on debian 12 using [portainer-ce](https://docs.portainer.io/start/install-ce/server/docker/linux) with [docker-ce](https://docs.docker.com/engine/install/debian/) as root (not rootless)

before making the container, first `mkdir /etc/copyparty /srv/pub` which will be bind-mounted into the container

> both `/etc/copyparty` and `/srv/pub` are examples; you can change them if you'd like

put your copyparty config files directly into `/etc/copyparty` and the files to share inside `/srv/pub`

on first startup, copyparty will create a subfolder inside `/etc/copyparty` called `copyparty` where it puts some runtime state; for example, replacing `/etc/copyparty/copyparty/cert.pem` with another TLS certificate is a quick and dirty way to get valid HTTPS (if you really want copyparty to handle that and not a reverse-proxy)


## in portainer:

```
environments -> local -> containers -> add container:

name = copyparty-ac
registry = docker hub
image = copyparty/ac
always pull = no

manual network port publishing:
3923 to 3923 [TCP]

advanced -> command & logging:
console = interactive & tty

advanced -> volumes -> map additional volume:
container = /cfg [Bind]
host = /etc/copyparty [Writable]

advanced -> volumes -> map additional volume:
container = /w [Bind]
host = /srv/pub [Writable]
```

notes:

* `/cfg` is where copyparty expects to find its config files; `/etc/copyparty` is just an example mapping to that
* `/w` is where copyparty expects to find the folder to share; `/srv/pub` is just an example mapping to that
* the volumes must be bind-mounts to avoid permission issues (or so the theory goes)
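for comparison, a plain docker-cli equivalent of the portainer walkthrough above might look roughly like this (same image, port and bind-mounts):

```
docker run -d \
  --name copyparty-ac \
  -p 3923:3923 \
  -v /etc/copyparty:/cfg \
  -v /srv/pub:/w \
  copyparty/ac
```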
@@ -16,7 +16,7 @@ open up notepad and save the following as `c:\users\you\documents\party.conf` (f
```yaml
[global]
lo: ~/logs/cpp-%Y-%m%d.xz # log to c:\users\you\logs\
e2dsa, e2ts, no-dedup, z # sets 4 flags; see expl.
e2dsa, e2ts, z # sets 3 flags; see explanation
p: 80, 443 # listen on ports 80 and 443, not 3923
theme: 2 # default theme: protonmail-monokai
lang: nor # default language: viking
@@ -46,11 +46,10 @@ open up notepad and save the following as `c:\users\you\documents\party.conf` (f

### config explained: [global]

the `[global]` section accepts any config parameters you can see when running copyparty (either the exe or the sfx.py) with `--help`, so this is the same as running copyparty with arguments `--lo c:\users\you\logs\copyparty-%Y-%m%d.xz -e2dsa -e2ts --no-dedup -z -p 80,443 --theme 2 --lang nor`
the `[global]` section accepts any config parameters [listed here](https://ocv.me/copyparty/helptext.html), also viewable by running copyparty (either the exe or the sfx.py) with `--help`, so this is the same as running copyparty with arguments `--lo c:\users\you\logs\copyparty-%Y-%m%d.xz -e2dsa -e2ts -z -p 80,443 --theme 2 --lang nor`
* `lo: ~/logs/cpp-%Y-%m%d.xz` writes compressed logs (the compression will make them delayed)
* `e2dsa` enables the upload deduplicator and file indexer, which enables searching
* `e2dsa` enables the file indexer, which enables searching and upload-undo
* `e2ts` enables music metadata indexing, making albums / titles etc. searchable too
* `no-dedup` writes full dupes to disk instead of symlinking, since lots of windows software doesn't handle symlinks well
  * but the improved upload speed from `e2dsa` is not affected
* `z` enables zeroconf, making the server available at `http://HOSTNAME.local/` from any other machine in the LAN
* `p: 80,443` listens on the ports `80` and `443` instead of the default `3923`
210	docs/logo.svg	Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg
|
||||||
|
width="300mm"
|
||||||
|
height="207mm"
|
||||||
|
viewBox="0 0 300 207"
|
||||||
|
version="1.1"
|
||||||
|
id="svg1"
|
||||||
|
inkscape:version="1.3.2 (091e20ef0f, 2023-11-25)"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||||
|
xmlns:cc="http://creativecommons.org/ns#"
|
||||||
|
xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||||
|
<title
|
||||||
|
id="title1">copyparty_logo</title>
|
||||||
|
<defs
|
||||||
|
id="defs1">
|
||||||
|
<linearGradient
|
||||||
|
inkscape:collect="always"
|
||||||
|
id="linearGradient1">
|
||||||
|
<stop
|
||||||
|
style="stop-color:#ffcc55;stop-opacity:1"
|
||||||
|
offset="0"
|
||||||
|
id="stop1" />
|
||||||
|
<stop
|
||||||
|
style="stop-color:#ffcc00;stop-opacity:1"
|
||||||
|
offset="0.2"
|
||||||
|
id="stop2" />
|
||||||
|
<stop
|
||||||
|
style="stop-color:#ff8800;stop-opacity:1"
|
||||||
|
offset="1"
|
||||||
|
id="stop3" />
|
||||||
|
</linearGradient>
|
||||||
|
<linearGradient
|
||||||
|
inkscape:collect="always"
|
||||||
|
xlink:href="#linearGradient1"
|
||||||
|
id="linearGradient2"
|
||||||
|
x1="15"
|
||||||
|
y1="15"
|
||||||
|
x2="15"
|
||||||
|
y2="143"
|
||||||
|
gradientUnits="userSpaceOnUse" />
|
||||||
|
</defs>
|
||||||
|
<metadata
|
||||||
|
id="metadata5">
|
||||||
|
<rdf:RDF>
|
||||||
|
<cc:Work
|
||||||
|
rdf:about="">
|
||||||
|
<dc:format>image/svg+xml</dc:format>
|
||||||
|
<dc:type
|
||||||
|
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||||
|
<dc:title>copyparty_logo</dc:title>
|
||||||
|
<dc:source>github.com/9001/copyparty</dc:source>
|
||||||
|
</cc:Work>
|
||||||
|
</rdf:RDF>
|
||||||
|
</metadata>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer1"
|
||||||
|
inkscape:label="kassett">
|
||||||
|
<rect
|
||||||
|
style="fill:#333333"
|
||||||
|
id="rect1"
|
||||||
|
width="300"
|
||||||
|
height="205"
|
||||||
|
x="0"
|
||||||
|
y="0"
|
||||||
|
rx="12"
|
||||||
|
ry="12" />
|
||||||
|
<rect
|
||||||
|
style="fill:url(#linearGradient2)"
|
||||||
|
id="rect2"
|
||||||
|
width="270"
|
||||||
|
height="128"
|
||||||
|
x="15"
|
||||||
|
y="15"
|
||||||
|
rx="8"
|
||||||
|
ry="8" />
|
||||||
|
<rect
|
||||||
|
style="fill:#333333"
|
||||||
|
id="rect3"
|
||||||
|
width="172"
|
||||||
|
height="52"
|
||||||
|
x="64"
|
||||||
|
y="72"
|
||||||
|
rx="26"
|
||||||
|
ry="26" />
|
||||||
|
<circle
|
||||||
|
style="fill:#cccccc"
|
||||||
|
id="circle1"
|
||||||
|
cx="91"
|
||||||
|
cy="98"
|
||||||
|
r="18" />
|
||||||
|
<circle
|
||||||
|
style="fill:#cccccc"
|
||||||
|
id="circle2"
|
||||||
|
cx="209"
|
||||||
|
cy="98"
|
||||||
|
r="18" />
|
||||||
|
<path
|
||||||
|
style="fill:#737373;stroke-width:1px"
|
||||||
|
d="m 48,207 10,-39 c 1.79,-6.2 5.6,-7.8 12,-8 60,-1 100,-1 160,0 6.4,0.2 10,1.8 12,8 l 10,39 z"
|
||||||
|
id="path1"
|
||||||
|
sodipodi:nodetypes="ccccccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer3"
|
||||||
|
inkscape:label="tekst"
|
||||||
|
style="display:none">
|
||||||
|
<text
|
||||||
|
xml:space="preserve"
|
||||||
|
style="font-size:38.8056px;line-height:1.25;font-family:Akbar;-inkscape-font-specification:Akbar;letter-spacing:3.70417px;word-spacing:0px;fill:#333333"
|
||||||
|
x="47.153069"
|
||||||
|
y="55.548954"
|
||||||
|
id="text1"><tspan
|
||||||
|
sodipodi:role="line"
|
||||||
|
id="tspan1"
|
||||||
|
x="47.153069"
|
||||||
|
y="55.548954"
|
||||||
|
style="-inkscape-font-specification:Akbar"
|
||||||
|
rotate="0 0">copyparty</tspan></text>
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer4"
|
||||||
|
inkscape:label="stensatt">
|
||||||
|
<path
|
||||||
|
d="m 63.5,50.9 q -0.85,0.93 -4.73,2.3 -3.6,1.3 -4.4,1.3 -3.3,0 -5.1,-2.1 -1.75,-2 -1.75,-5.36 0,-4.6 3.76,-7.64 3.3,-2.7 7.3,-2.7 0.4,0 0.93,0.74 0.54,0.7 0.54,1.16 0,2.06 -2.2,2.7 -1.36,0.4 -4.04,1.16 -2.2,1.16 -2.2,4.4 0,3.2 2.9,3.2 0.85,0 0.85,0 0.54,0 1.44,-0.16 1.1,-0.23 2.9,-0.74 1.8,-0.54 2.13,-0.54 0.4,0 1.75,0.6 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path11" />
|
||||||
|
<path
|
||||||
|
d="m 87.6,45 q 0,4.2 -3.7,6.95 -3.2,2.3 -6.87,2.3 -3.4,0 -6,-2.6 -2.5,-2.6 -2.5,-6 0,-3.6 3.14,-6.64 3.2,-3 6.8,-3 3.5,0 6.3,2.76 2.83,2.76 2.83,6.25 z m -3.4,0.16 q 0,-2.25 -1.75,-3.7 -1.7,-1.5 -4,-1.5 -0.1,0 -1.6,1.6 -1.44,1.55 -2.44,1.55 -0.6,0 -0.8,-0.3 -1.16,2.3 -1.16,3 0,2.25 2.13,3.4 1.6,0.9 3.6,0.9 2,0 3.76,-1.1 2.25,-1.4 2.25,-3.84 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path12" />
|
||||||
|
<path
|
||||||
|
d="m 112.8,46.8 q 0,2.8 -1.9,4.4 -1.8,1.5 -4.7,1.5 -0.7,0 -2.7,-0.4 -1.9,-0.4 -2.6,-0.4 -2.1,0 -2.1,2.64 0,0.85 0.23,2.6 0.2,1.75 0.2,2.6 0,1.9 -0.77,2.83 -1.44,0 -3,-0.85 -1.46,-9.5 -1.46,-12 0,-3.65 1.75,-8.1 2.37,-6.05 6.45,-6.05 3.7,0 7.3,4.1 3.3,3.84 3.3,7.14 z m -3.8,0.2 q -0.6,-2.2 -2.6,-4.4 -2.3,-2.5 -4.3,-2.5 -1.3,0 -2.33,2.2 -0.9,1.8 -0.9,3.26 0,0.47 0.38,1.24 0.43,0.8 0.85,0.8 1.1,0 3.2,0.3 2.1,0.3 3.2,0.3 0.3,0 1.3,-0.4 1,-0.47 1.3,-0.74 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path13" />
|
||||||
|
<path
|
||||||
|
d="m 133,40 q -2.1,4.1 -3.2,7 -0.1,0.3 -1.6,4.5 -0.4,1.36 -1,4.2 -0.5,2.83 -1,4.2 -1,2.83 -2.3,2.64 -1.4,-0.2 -1.6,-1.6 0,-0.2 0,-0.5 0,-0.16 0.3,-1.5 1,-5.04 1,-6.44 0,-0.54 -0.1,-0.74 -1.4,-2.44 -4.1,-7.4 -2.7,-4.97 -2.4,-7.7 1.5,-1.36 2.1,-1.36 0.4,0 1.1,0.6 0.6,0.6 0.7,1.1 0.8,6.2 4.9,11.1 1,-1.8 1.8,-4.04 0.5,-1.4 1.6,-4.15 1.9,-4.46 3.4,-4.46 0.2,0 0.4,0.1 0.9,0.3 1.3,2.8 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path14" />
|
||||||
|
<path
|
||||||
|
d="m 157.5,48 q 0,2.8 -1.9,4.4 -1.8,1.5 -4.7,1.5 -0.7,0 -2.7,-0.4 -1.9,-0.4 -2.6,-0.4 -2,0 -2,2.64 0,0.85 0.2,2.6 0.2,1.75 0.2,2.6 0,1.9 -0.7,2.83 -1.5,0 -3,-0.85 -1.5,-9.5 -1.5,-11.95 0,-3.65 1.8,-8.1 2.3,-6.05 6.4,-6.05 3.7,0 7.2,4.1 3.3,3.84 3.3,7.14 z m -3.8,0.2 q -0.6,-2.2 -2.6,-4.4 -2.3,-2.5 -4.3,-2.5 -1.3,0 -2.3,2.2 -0.9,1.8 -0.9,3.26 0,0.47 0.4,1.24 0.4,0.8 0.8,0.8 1.1,0 3.2,0.3 2.1,0.3 3.2,0.3 0.3,0 1.3,-0.4 1,-0.47 1.3,-0.74 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path15" />
|
||||||
|
<path
|
||||||
|
d="m 182,53.3 q 0,0.9 -0.6,1.5 -0.6,0.6 -1.4,0.6 -1.6,0 -3,-0.9 -1.4,-0.93 -2.1,-2.3 -0.7,-0.1 -1.5,0.85 -0.9,1.16 -1.1,1.24 -1.2,0.54 -3.9,0.54 -2.2,0 -3.9,-2.44 -1.5,-2.13 -1.5,-4 0,-3.4 3.4,-6.4 3.2,-2.9 6.7,-2.9 0.9,0 1.7,0.6 0.8,0.6 0.8,1.44 0,0.54 -0.4,1.1 2.4,0.9 2.4,2.83 0,0.35 -0.1,1.05 -0.1,0.7 -0.1,1.05 0,0.4 0.1,0.6 0.5,1.3 2.5,3.4 1.9,1.9 1.9,2.2 z m -8.1,-10.1 q -0.4,0 -1.1,-0.1 -0.8,-0.16 -1.1,-0.16 -1.3,0 -3.2,1.94 -1.9,1.94 -1.9,3.3 0,0.8 0.7,1.8 0.9,1.3 2.2,1.3 2.6,0 3.5,-2.9 0.5,-2.6 1,-5.16 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path16" />
|
||||||
|
<path
|
||||||
|
d="m 203.8,42.4 q -0.4,0.4 -1.5,0.4 -0.9,0 -2.5,-0.3 -1.7,-0.3 -2.5,-0.3 -4.7,0 -5.5,6.9 -0.3,3.1 -0.4,3.3 -0.4,1 -1.7,2.3 h -1.1 q -0.7,-1.2 -1.3,-4.1 -0.6,-2.76 -0.6,-4.27 0,-1.16 0.1,-1.5 0.2,-0.54 1,-0.54 0.3,0 0.6,0.3 0.4,0.3 0.4,0.3 1.9,-3.53 3.1,-4.6 1.8,-1.7 5.1,-1.7 1.4,0 3.6,0.9 2.8,1.16 3.3,2.8 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path17" />
|
||||||
|
<path
|
||||||
|
d="m 229.5,37.16 q 0.3,0.8 0.3,1.44 0,1.86 -2.4,1.86 -1,0 -3.5,-0.5 -2.5,-0.54 -3.4,-0.54 -1.3,0 -1.5,0.1 -0.4,0.2 -0.4,1.2 0,2.2 0.6,6.9 0.7,5.86 1.6,6.13 -0.4,0.35 -0.4,1.1 -1.2,0.7 -2.6,0.7 -1.4,0 -2,-3.9 -0.2,-1.36 -0.5,-7.76 -0.2,-4.6 -0.8,-5.5 -0.3,-0.47 -4.3,-0.35 -1,0 -1.6,0.1 -0.5,0 -0.3,0 -0.8,0 -1.2,-0.7 -0.5,-1.3 -0.5,-1.4 0,-1.44 4.1,-2 1.6,-0.16 4.7,-0.5 0,-0.85 -0.1,-2.56 0,-1.75 0,-2.6 0,-4.35 2.1,-4.35 0.5,0 1.1,0.6 0.6,0.6 0.6,1.1 v 7.9 q 1.1,1.2 5,1.7 3.9,0.5 5.3,1.86 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path18" />
|
||||||
|
<path
|
||||||
|
d="m 251.2,40.2 q -2,4.1 -3.2,7 -0.1,0.3 -1.5,4.5 -0.5,1.36 -1,4.2 -0.5,2.83 -1,4.2 -1,2.83 -2.4,2.64 -1.4,-0.2 -1.5,-1.6 -0.1,-0.2 -0.1,-0.5 0,-0.16 0.3,-1.5 1.1,-5.04 1.1,-6.44 0,-0.54 -0.1,-0.74 -1.4,-2.44 -4.1,-7.4 -2.7,-4.97 -2.4,-7.7 1.4,-1.36 2.1,-1.36 0.4,0 1,0.6 0.6,0.6 0.7,1.1 0.9,6.2 4.9,11.1 1,-1.8 1.9,-4.04 0.5,-1.4 1.6,-4.15 1.8,-4.46 3.4,-4.46 0.2,0 0.4,0.1 0.8,0.3 1.2,2.8 z"
|
||||||
|
style="fill:#333333"
|
||||||
|
id="path19" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer5"
|
||||||
|
inkscape:label="tagger">
|
||||||
|
<g
|
||||||
|
id="g1">
|
||||||
|
<path
|
||||||
|
id="path4"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="m 111.4,83.335 -9.526,5.5 2.5,4.33 9.526,-5.5 z m -33.775,19.5 -9.526,5.5 2.5,4.33 9.526,-5.5 z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
<path
|
||||||
|
id="path5"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="M 88.5,73 V 84 h 5 V 73 Z m 0,39 v 11 h 5 V 112 Z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
<path
|
||||||
|
id="path6"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="m 68.1,87.665 9.526,5.5 2.5,-4.33 -9.526,-5.5 z m 33.775,19.5 9.527,5.5 2.5,-4.33 -9.527,-5.5 z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
</g>
|
||||||
|
<g
|
||||||
|
id="g2"
|
||||||
|
transform="rotate(30,150,318.19)">
|
||||||
|
<path
|
||||||
|
id="path7"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="m 111.4,83.335 -9.526,5.5 2.5,4.33 9.526,-5.5 z m -33.775,19.5 -9.526,5.5 2.5,4.33 9.526,-5.5 z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
<path
|
||||||
|
id="path8"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="M 88.5,73 V 84 h 5 V 73 Z m 0,39 v 11 h 5 V 112 Z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
<path
|
||||||
|
id="path9"
|
||||||
|
style="fill:#333333"
|
||||||
|
d="m 68.1,87.665 9.526,5.5 2.5,-4.33 -9.526,-5.5 z m 33.775,19.5 9.527,5.5 2.5,-4.33 -9.527,-5.5 z"
|
||||||
|
sodipodi:nodetypes="cccccccccc" />
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 8.3 KiB |
@@ -141,6 +141,9 @@ find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv
# unique stacks in a stackdump
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24

# find unused css variables
cat browser.css | sed -r 's/(var\()/\n\1/g' | awk '{sub(/:/," ")} $1~/^--/{d[$1]=1} /var\(/{sub(/.*var\(/,"");sub(/\).*/,"");u[$1]=1} END{for (x in u) delete d[x]; for (x in d) print x}' | tr '\n' '|'


##
## sqlite3 stuff
@@ -221,6 +224,11 @@ sox -DnV -r8000 -b8 -c1 /dev/shm/a.wav synth 1.1 sin 400 vol 0.02
# play icon calibration pics
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done

# compress chiptune modules
mkdir gz; for f in *.*; do pigz -c11 -I100 <"$f" >gz/"$f"gz; touch -r "$f" gz/"$f"gz; done
mkdir xz; for f in *.*; do xz -cz9 <"$f" >xz/"$f"xz; touch -r "$f" xz/"$f"xz; done
mkdir z; for f in *.*; do 7z a -tzip -mx=9 -mm=lzma "z/${f}z" "$f" && touch -r "$f" z/"$f"z; done


##
## vscode
@@ -247,6 +255,11 @@ cat copyparty/httpcli.py | awk '/^[^a-zA-Z0-9]+def / {printf "%s\n%s\n\n", f, pl
# create a folder with symlinks to big files
for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS= read -r x; do ln -s "$x" big/; done

# up2k worst-case testfiles: create 64 GiB (256 x 256 MiB) of sparse files; each file takes 1 MiB disk space; each 1 MiB chunk is globally unique
for f in {0..255}; do echo $f; truncate -s 256M $f; b1=$(printf '%02x' $f); for o in {0..255}; do b2=$(printf '%02x' $o); printf "\x$b1\x$b2" | dd of=$f bs=2 seek=$((o*1024*1024)) conv=notrunc 2>/dev/null; done; done

# create 6.06G file with 16 bytes of unique data at start+end of each 32M chunk
sz=6509559808; truncate -s $sz f; csz=33554432; sz=$((sz/16)); step=$((csz/16)); ofs=0; while [ $ofs -lt $sz ]; do dd if=/dev/urandom of=f bs=16 count=2 seek=$ofs conv=notrunc iflag=fullblock; [ $ofs = 0 ] && ofs=$((ofs+step-1)) || ofs=$((ofs+step)); done

# py2 on osx
brew install python@2
pip install virtualenv
129	docs/nuitka.txt
@@ -1,82 +1,71 @@
|
|||||||
# recipe for building an exe with nuitka (extreme jank edition)
|
# recipe for building an exe with nuitka (extreme jank edition)
|
||||||
#
|
|
||||||
# NOTE: win7 and win10 builds both work on win10 but
|
NOTE: copyparty runs SLOWER when compiled with nuitka;
|
||||||
# on win7 they immediately c0000005 in kernelbase.dll
|
just use copyparty-sfx.py and/or pyinstaller instead
|
||||||
#
|
|
||||||
# first install python-3.6.8-amd64.exe
|
( the sfx and the pyinstaller EXEs are equally fast if you
|
||||||
# [x] add to path
|
have the latest jinja2 installed, but the older jinja that
|
||||||
#
|
comes bundled with the sfx is slightly faster yet )
|
||||||
|
|
||||||
|
roughly, copyparty-sfx.py is 6% faster than copyparty.exe
|
||||||
|
(win10-pyinstaller), and copyparty.exe is 10% faster than
|
||||||
|
nuitka, making copyparty-sfx.py 17% faster than nuitka
|
||||||
|
|
||||||
|
NOTE: every time a nuitka-compiled copyparty.exe is launched,
|
||||||
|
it will show the windows firewall prompt since nuitka will
|
||||||
|
pick a new unique location in %TEMP% to unpack an exe into,
|
||||||
|
unlike pyinstaller which doesn't fork itself on startup...
|
||||||
|
might be fixable by configuring nuitka differently, idk
|
||||||
|
|
||||||
|
NOTE: nuitka EXEs are larger than pyinstaller ones;
|
||||||
|
a minimal nuitka build of just the sfx (with its bundled
|
||||||
|
dependencies) was already the same size as the pyinstaller
|
||||||
|
copyparty.exe which also includes Mutagen and Pillow
|
||||||
|
|
||||||
|
NOTE: nuitka takes a lot longer to build than pyinstaller
|
||||||
|
(due to actual compilation of course, but still)
|
||||||
|
|
||||||
|
NOTE: binaries built with nuitka cannot run on windows7,
|
||||||
|
even when compiled with python 3.6 on windows 7 itself
|
||||||
|
|
||||||
|
NOTE: `--python-flags=-m` is the magic sauce to
|
||||||
|
correctly compile `from .util import Daemon`
|
||||||
|
(which otherwise only explodes at runtime)
|
||||||
|
|
||||||
|
NOTE: `--deployment` doesn't seem to affect performance
|
||||||
|
|
||||||
|
########################################################################
|
||||||
# copypaste the rest of this file into cmd
|
# copypaste the rest of this file into cmd
|
||||||
|
|
||||||
rem from pypi
|
|
||||||
cd \users\ed\downloads
|
|
||||||
python -m pip install --user Nuitka-0.6.14.7.tar.gz
|
|
||||||
|
|
||||||
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
|
|
||||||
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
|
||||||
|
|
||||||
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
|
python -m pip install --user -U nuitka
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
|
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
|
|
||||||
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
|
|
||||||
|
|
||||||
rem https://dependencywalker.com/depends22_x64.zip
|
cd %homedrive%
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
|
cd %homepath%\downloads
|
||||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
|
|
||||||
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
|
|
||||||
|
|
||||||
cd \
|
rd /s /q copypuitka
|
||||||
rd /s /q %appdata%\..\local\temp\pe-copyparty
|
mkdir copypuitka
|
||||||
cd \users\ed\downloads
|
cd copypuitka
|
||||||
python copyparty-sfx.py -h
|
|
||||||
cd %appdata%\..\local\temp\pe-copyparty\copyparty
|
|
||||||
|
|
||||||
python
|
rd /s /q %temp%\pe-copyparty
|
||||||
import os, re
|
python ..\copyparty-sfx.py --version
|
||||||
os.rename('../dep-j2/jinja2', '../jinja2')
|
|
||||||
os.rename('../dep-j2/markupsafe', '../markupsafe')
|
|
||||||
|
|
||||||
print("# nuitka dies if .__init__.stuff is imported")
|
move %temp%\pe-copyparty\copyparty .\
|
||||||
with open('__init__.py','r',encoding='utf-8') as f:
|
move %temp%\pe-copyparty\partftpy .\
|
||||||
t1 = f.read()
|
move %temp%\pe-copyparty\ftp\pyftpdlib .\
|
||||||
|
move %temp%\pe-copyparty\j2\jinja2 .\
|
||||||
|
move %temp%\pe-copyparty\j2\markupsafe .\
|
||||||
|
|
||||||
with open('util.py','r',encoding='utf-8') as f:
|
rd /s /q %temp%\pe-copyparty
|
||||||
t2 = f.read().split('\n')[3:]
|
|
||||||
|
|
||||||
t2 = [x for x in t2 if 'from .__init__' not in x]
|
python -m nuitka ^
|
||||||
t = t1 + '\n'.join(t2)
|
--onefile --deployment --python-flag=-m ^
|
||||||
with open('__init__.py','w',encoding='utf-8') as f:
|
--include-package=markupsafe ^
|
||||||
f.write('\n')
|
--include-package=jinja2 ^
|
||||||
|
--include-package=partftpy ^
|
||||||
|
--include-package=pyftpdlib ^
|
||||||
|
--include-data-dir=copyparty\web=copyparty\web ^
|
||||||
|
--include-data-dir=copyparty\res=copyparty\res ^
|
||||||
|
--run copyparty
|
||||||
|
|
||||||
with open('util.py','w',encoding='utf-8') as f:
|
|
||||||
f.write(t)
|
|
||||||
|
|
||||||
print("# local-imports fail, prefix module names")
|
|
||||||
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
|
|
||||||
for d, _, fs in os.walk('.'):
|
|
||||||
for f in fs:
|
|
||||||
fp = os.path.join(d, f)
|
|
||||||
if not fp.endswith('.py'):
|
|
||||||
continue
|
|
||||||
t = ''
|
|
||||||
with open(fp,'r',encoding='utf-8') as f:
|
|
||||||
for ln in [x.rstrip('\r\n') for x in f]:
|
|
||||||
m = ptn.match(ln)
|
|
||||||
if not m:
|
|
||||||
t += ln + '\n'
|
|
||||||
continue
|
|
||||||
p1, p2 = m.groups()
|
|
||||||
t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
|
|
||||||
with open(fp,'w',encoding='utf-8') as f:
|
|
||||||
f.write(t)
|
|
||||||
|
|
||||||
exit()
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
|
|
||||||
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty
|
|
||||||
|
|||||||
@@ -1,52 +0,0 @@
|
|||||||
pyoxidizer doesn't crosscompile yet so need to build in a windows vm,
|
|
||||||
luckily possible to do mostly airgapped (https-proxy for crates)
|
|
||||||
|
|
||||||
none of this is version-specific but doing absolute links just in case
|
|
||||||
(only exception is py3.8 which is the final win7 ver)
|
|
||||||
|
|
||||||
# deps (download on linux host):
|
|
||||||
https://www.python.org/ftp/python/3.10.7/python-3.10.7-amd64.exe
|
|
||||||
https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.22.0/pyoxidizer-0.22.0-x86_64-pc-windows-msvc.zip
|
|
||||||
https://github.com/upx/upx/releases/download/v3.96/upx-3.96-win64.zip
|
|
||||||
https://static.rust-lang.org/dist/rust-1.61.0-x86_64-pc-windows-msvc.msi
|
|
||||||
https://github.com/indygreg/python-build-standalone/releases/download/20220528/cpython-3.8.13%2B20220528-i686-pc-windows-msvc-static-noopt-full.tar.zst
|
|
||||||
|
|
||||||
# need cl.exe, prefer 2017 -- download on linux host:
|
|
||||||
https://visualstudio.microsoft.com/downloads/?q=build+tools
|
|
||||||
https://docs.microsoft.com/en-us/visualstudio/releases/2022/release-history#release-dates-and-build-numbers
|
|
||||||
https://aka.ms/vs/15/release/vs_buildtools.exe # 2017
|
|
||||||
https://aka.ms/vs/16/release/vs_buildtools.exe # 2019
|
|
||||||
https://aka.ms/vs/17/release/vs_buildtools.exe # 2022
|
|
||||||
https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-build-tools?view=vs-2017
|
|
||||||
|
|
||||||
# use disposable w10 vm to prep offline installer; xfer to linux host with firefox to copyparty
|
|
||||||
vs_buildtools-2017.exe --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --layout c:\msbt2017 --lang en-us
|
|
||||||
|
|
||||||
# need two proxies on host; s5s or ssh for msys2(socks5), and tinyproxy for rust(http)
|
|
||||||
UP=- python3 socks5server.py 192.168.123.1 4321
|
|
||||||
ssh -vND 192.168.123.1:4321 localhost
|
|
||||||
git clone https://github.com/tinyproxy/tinyproxy.git
|
|
||||||
./autogen.sh
|
|
||||||
./configure --prefix=/home/ed/pe/tinyproxy
|
|
||||||
make -j24 install
|
|
||||||
printf '%s\n' >cfg "Port 4380" "Listen 192.168.123.1"
|
|
||||||
./tinyproxy -dccfg
|
|
||||||
|
|
||||||
https://github.com/msys2/msys2-installer/releases/download/2022-09-04/msys2-x86_64-20220904.exe
|
|
||||||
export all_proxy=socks5h://192.168.123.1:4321
|
|
||||||
# if chat dies after auth (2 messages) it probably failed dns, note the h in socks5h to tunnel dns
|
|
||||||
pacman -Syuu
|
|
||||||
pacman -S git patch mingw64/mingw-w64-x86_64-zopfli
|
|
||||||
cd /c && curl -k https://192.168.123.1:3923/ro/ox/msbt2017/?tar | tar -xv
|
|
||||||
|
|
||||||
first install certs from msbt/certificates then admin-cmd `vs_buildtools.exe --noweb`,
|
|
||||||
default selection (vc++2017-v15.9-v14.16, vc++redist, vc++bt-core) += win10sdk (for io.h)
|
|
||||||
|
|
||||||
install rust without documentation, python 3.10, put upx and pyoxidizer into ~/bin,
|
|
||||||
[cmd.exe] python -m pip install --user -U wheel-0.37.1.tar.gz strip-hints-0.1.10.tar.gz
|
|
||||||
p=192.168.123.1:4380; export https_proxy=$p; export http_proxy=$p
|
|
||||||
|
|
||||||
# and with all of the one-time-setup out of the way,
|
|
||||||
mkdir /c/d; cd /c/d && curl -k https://192.168.123.1:3923/cpp/gb?pw=wark > gb && git clone gb copyparty
|
|
||||||
cd /c/d/copyparty/ && curl -k https://192.168.123.1:3923/cpp/patch?pw=wark | patch -p1
|
|
||||||
cd /c/d/copyparty/scripts && CARGO_HTTP_CHECK_REVOKE=false PATH=/c/Users/$USER/AppData/Local/Programs/Python/Python310:/c/Users/$USER/bin:"$(cygpath "C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Tools\MSVC\14.16.27023\bin\Hostx86\x86"):$PATH" ./make-sfx.sh ox ultra
|
|
||||||
@@ -47,3 +47,44 @@ and if you want to have a monospace font in the fancy markdown editor, do this:

NB: `<textarea id="mt">` and `<div id="mtr">` in the regular markdown editor must have the same font; none of the suggestions above will cause any issues but keep it in mind if you're getting creative


# `<head>`

to add stuff to the html `<head>`, for example css `<link>` or `<meta>` tags, use either the global-option `--html-head` or the volflag `html_head`

if you give it the value `@ASDF` it will try to open a file named ASDF and send the text within

if the value starts with `%` it will assume a jinja2 template and expand it; the template has access to the `HttpCli` object through a property named `this` as well as everything in `j2a` and the stuff added by `self.j2s`; see [browser.html](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.html) for inspiration or look under the hood in [httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py)

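a couple of hedged examples of the flag (the css path and filename are placeholders):

```
# inline snippet as the value:
python copyparty-sfx.py --html-head='<link rel="stylesheet" href="/custom.css">'

# or read the snippet from a file, via the @ prefix described above:
python copyparty-sfx.py --html-head=@my-head.html
```
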
# translations

add your own translations by using the english or norwegian one from `browser.js` as a template

the easy way is to open up and modify `browser.js` in your own installation; depending on how you installed copyparty it might be named `browser.js.gz` instead, in which case just decompress it, restart copyparty, and start editing it anyways

you will be delighted to see inline html in the translation strings; to help prevent syntax errors, there is [a very jank linux script](https://github.com/9001/copyparty/blob/hovudstraum/scripts/tlcheck.sh) which is slightly better than nothing -- just beware the false-positives, so even if it complains it's not necessarily wrong/bad

if you're running `copyparty-sfx.py` then you'll find it at `/tmp/pe-copyparty.1000/copyparty/web` (on linux) or `%TEMP%\pe-copyparty\copyparty\web` (on windows)
* make sure to keep backups of your work religiously! since that location is volatile af


## translations (docker-friendly)

if editing `browser.js` is inconvenient in your setup, for example if you're running in docker, then you can instead do this:
* if you have python, go to the `scripts` folder and run `./tl.py fra Français` to generate a `tl.js` which is perfect for translating to French, using the three-letter language code `fra`
* if you do not have python, you can also just grab `tl.js` from the scripts folder, but I'll probably forget to keep that up to date... and then you'll have to find/replace all `"eng"` and `Ls.eng` to your three-letter language code
* put your `tl.js` inside a folder that is being shared by your copyparty, preferably the webroot
* run copyparty with the argument `--html-head='<script src="/tl.js"></script>'`
* if you placed `tl.js` in the webroot then you're all good, but if you put it somewhere else then change `/tl.js` accordingly
* if you are running copyparty with config files, you can do this:

```yaml
[global]
html-head: <script src="/tl.js"></script>
```

you can now edit `tl.js` and press CTRL-SHIFT-R in the browser to see your changes take effect as you go

if you want to contribute your translation back to the project (please do!) then you'll want to...
* grab all of the text inside your `var tl_cpanel = {` and add it to the translations inside `copyparty/web/splash.js` in the repo
* and the text inside your `var tl_browser = {` and add that to the translations inside `copyparty/web/browser.js` in the repo
432	docs/versus.md
@@ -20,6 +20,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
|
|||||||
* 💾 = what copyparty offers as an alternative
|
* 💾 = what copyparty offers as an alternative
|
||||||
* 🔵 = similarities
|
* 🔵 = similarities
|
||||||
* ⚠️ = disadvantages (something copyparty does "better")
|
* ⚠️ = disadvantages (something copyparty does "better")
|
||||||
|
* 🔥 = hazards
|
||||||
|
|
||||||
|
|
||||||
## toc
|
## toc
|
||||||
@@ -37,7 +38,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
|
|||||||
* [another matrix](#another-matrix)
|
* [another matrix](#another-matrix)
|
||||||
* [reviews](#reviews)
|
* [reviews](#reviews)
|
||||||
* [copyparty](#copyparty)
|
* [copyparty](#copyparty)
|
||||||
* [hfs2](#hfs2)
|
* [hfs2](#hfs2) 🔥
|
||||||
* [hfs3](#hfs3)
|
* [hfs3](#hfs3)
|
||||||
* [nextcloud](#nextcloud)
|
* [nextcloud](#nextcloud)
|
||||||
* [seafile](#seafile)
|
* [seafile](#seafile)
|
||||||
@@ -48,6 +49,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
|
|||||||
* [filebrowser](#filebrowser)
|
* [filebrowser](#filebrowser)
|
||||||
* [filegator](#filegator)
|
* [filegator](#filegator)
|
||||||
* [sftpgo](#sftpgo)
|
* [sftpgo](#sftpgo)
|
||||||
|
* [arozos](#arozos)
|
||||||
* [updog](#updog)
|
* [updog](#updog)
|
||||||
* [goshs](#goshs)
|
* [goshs](#goshs)
|
||||||
* [gimme-that](#gimme-that)
|
* [gimme-that](#gimme-that)
|
||||||
@@ -56,7 +58,9 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
|
|||||||
* [h5ai](#h5ai)
|
* [h5ai](#h5ai)
|
||||||
* [autoindex](#autoindex)
|
* [autoindex](#autoindex)
|
||||||
* [miniserve](#miniserve)
|
* [miniserve](#miniserve)
|
||||||
|
* [pingvin-share](#pingvin-share)
|
||||||
* [briefly considered](#briefly-considered)
|
* [briefly considered](#briefly-considered)
|
||||||
|
* [notes](#notes)
|
||||||
|
|
||||||
|
|
||||||
# recommendations
|
# recommendations
|
||||||
@@ -82,8 +86,8 @@ the table headers in the matrixes below are the different softwares, with a quic
|
|||||||
|
|
||||||
the softwares,
|
the softwares,
|
||||||
* `a` = [copyparty](https://github.com/9001/copyparty)
|
* `a` = [copyparty](https://github.com/9001/copyparty)
|
||||||
* `b` = [hfs2](https://rejetto.com/hfs/)
|
* `b` = [hfs2](https://github.com/rejetto/hfs2/) 🔥
|
||||||
* `c` = [hfs3](https://github.com/rejetto/hfs)
|
* `c` = [hfs3](https://rejetto.com/hfs/)
|
||||||
* `d` = [nextcloud](https://github.com/nextcloud/server)
|
* `d` = [nextcloud](https://github.com/nextcloud/server)
|
||||||
* `e` = [seafile](https://github.com/haiwen/seafile)
|
* `e` = [seafile](https://github.com/haiwen/seafile)
|
||||||
* `f` = [rclone](https://github.com/rclone/rclone), specifically `rclone serve webdav .`
|
* `f` = [rclone](https://github.com/rclone/rclone), specifically `rclone serve webdav .`
|
||||||
@@ -93,6 +97,7 @@ the softwares,
|
|||||||
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
|
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
|
||||||
* `k` = [filegator](https://github.com/filegator/filegator)
|
* `k` = [filegator](https://github.com/filegator/filegator)
|
||||||
* `l` = [sftpgo](https://github.com/drakkan/sftpgo)
|
* `l` = [sftpgo](https://github.com/drakkan/sftpgo)
|
||||||
|
* `m` = [arozos](https://github.com/tobychui/arozos)
|
||||||
|
|
||||||
some softwares not in the matrixes,
|
some softwares not in the matrixes,
|
||||||
* [updog](#updog)
|
* [updog](#updog)
|
||||||
@@ -103,6 +108,7 @@ some softwares not in the matrixes,
|
|||||||
* [h5ai](#h5ai)
|
* [h5ai](#h5ai)
|
||||||
* [autoindex](#autoindex)
|
* [autoindex](#autoindex)
|
||||||
* [miniserve](#miniserve)
|
* [miniserve](#miniserve)
|
||||||
|
* [pingvin-share](#pingvin-share)
|
||||||
|
|
||||||
symbol legend,
|
symbol legend,
|
||||||
* `█` = absolutely
|
* `█` = absolutely
|
||||||
@@ -113,22 +119,22 @@ symbol legend,
|
|||||||
|
|
||||||
## general
|
## general
|
||||||
|
|
||||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
|
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
|
||||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
|
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
|
||||||
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ | █ |
|
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ | █ | █ |
|
||||||
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | | █ |
|
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | | █ | █ |
|
||||||
| good documentation | | | | █ | █ | █ | █ | | | █ | █ | ╱ |
|
| good documentation | | | | █ | █ | █ | █ | | | █ | █ | ╱ | ╱ |
|
||||||
| runs on iOS | ╱ | | | | | ╱ | | | | | | |
|
| runs on iOS | ╱ | | | | | ╱ | | | | | | | |
|
||||||
| runs on Android | █ | | | | | █ | | | | | | |
|
| runs on Android | █ | | | | | █ | | | | | | | |
|
||||||
| runs on WinXP | █ | █ | | | | █ | | | | | | |
|
| runs on WinXP | █ | █ | | | | █ | | | | | | | |
|
||||||
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ | █ |
|
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ | █ | ╱ |
|
||||||
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||||
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | |
|
||||||
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ |
|
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ | |
|
||||||
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ |
|
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ | █ |
|
||||||
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | | ╱ |
|
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | | ╱ | █ |
|
||||||
| android app | ╱ | | | █ | █ | | | | | | | |
|
| android app | ╱ | | | █ | █ | | | | | | | | |
|
||||||
| iOS app | ╱ | | | █ | █ | | | | | | | |
|
| iOS app | ╱ | | | █ | █ | | | | | | | | |
|
||||||
|
|
||||||
* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
|
* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
|
||||||
* `a`/copyparty remarks:
|
* `a`/copyparty remarks:
|
||||||
@@ -140,37 +146,41 @@ symbol legend,
|
|||||||
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
|
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
|
||||||
* `h`/chibisafe has undocumented windows support
|
* `h`/chibisafe has undocumented windows support
|
||||||
* `i`/sftpgo must be launched with a command
|
* `i`/sftpgo must be launched with a command
|
||||||
|
* `m`/arozos has partial windows support
|
||||||
|
|
||||||
|
|
||||||
## file transfer
|
## file transfer
|
||||||
|
|
||||||
*the thing that copyparty is actually kinda good at*
|
*the thing that copyparty is actually kinda good at*
|
||||||
|
|
||||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
|
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
|
||||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
|
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
|
||||||
| download folder as zip | █ | █ | █ | █ | ╱ | | █ | | █ | █ | ╱ | █ |
|
| download folder as zip | █ | █ | █ | █ | ╱ | | █ | | █ | █ | ╱ | █ | ╱ |
|
||||||
| download folder as tar | █ | | | | | | | | | █ | | |
|
| download folder as tar | █ | | | | | | | | | | | | |
|
||||||
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ |
|
||||||
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ | |
|
| parallel uploads | █ | | | █ | █ | | • | | █ | █ | █ | | █ |
|
||||||
| resumable uploads | █ | | | | | | | | █ | | █ | ╱ |
|
| resumable uploads | █ | | █ | | | | | | █ | █ | █ | ╱ | |
|
||||||
| upload segmenting | █ | | | | | | | █ | █ | | █ | ╱ |
|
| upload segmenting | █ | | | | | | | █ | █ | █ | █ | ╱ | █ |
|
||||||
| upload acceleration | █ | | | | | | | | █ | | █ | |
|
| upload acceleration | █ | | | | | | | | █ | | █ | | |
|
||||||
| upload verification | █ | | | █ | █ | | | | █ | | | |
|
| upload verification | █ | | | █ | █ | | | | █ | | | | |
|
||||||
| upload deduplication | █ | | | | █ | | | | █ | | | |
|
| upload deduplication | █ | | | | █ | | | | █ | | | | |
|
||||||
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | ╱ |
|
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | ╱ | ╱ |
|
||||||
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ |
|
| CTRL-V from device | █ | | | █ | | | | | | | | | |
|
||||||
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ | ╱ |
|
| race the beam ("p2p") | █ | | | | | | | | | | | | |
|
||||||
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | | █ |
|
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ | |
|
||||||
| ┗ max filesize | █ | | | | | | | █ | | | █ | █ |
|
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ | ╱ | ╱ |
|
||||||
| ┗ max items in folder | █ | | | | | | | | | | | ╱ |
|
| ┗ max disk usage | █ | █ | █ | | █ | | | | █ | | | █ | █ |
|
||||||
| ┗ max file age | █ | | | | | | | | █ | | | |
|
| ┗ max filesize | █ | | | | | | | █ | | | █ | █ | █ |
|
||||||
| ┗ max uploads over time | █ | | | | | | | | | | | ╱ |
|
| ┗ max items in folder | █ | | | | | | | | | | | ╱ | |
|
||||||
| ┗ compress before write | █ | | | | | | | | | | | |
|
| ┗ max file age | █ | | | | | | | | █ | | | | |
|
||||||
| ┗ randomize filename | █ | | | | | | | █ | █ | | | |
|
| ┗ max uploads over time | █ | | | | | | | | | | | ╱ | |
|
||||||
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | | ╱ |
|
| ┗ compress before write | █ | | | | | | | | | | | | |
|
||||||
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | | ╱ |
|
| ┗ randomize filename | █ | | | | | | | █ | █ | | | | |
|
||||||
| checksums provided | | | | █ | █ | | | | █ | ╱ | | |
|
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | | ╱ | • |
|
||||||
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ | █ |
|
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | | ╱ | • |
|
||||||
|
| ┗ upload routing | █ | | | | | | | | | | | | |
|
||||||
|
| checksums provided | | | | █ | █ | | | | █ | ╱ | | | |
|
||||||
|
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ | █ | ╱ |
|
||||||
|
|
||||||
* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example
|
* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example
|
||||||
|
|
||||||
@@ -178,6 +188,13 @@ symbol legend,
|
|||||||
|
|
||||||
* `upload verification` = uploads are checksummed or otherwise confirmed to have been transferred correctly
|
* `upload verification` = uploads are checksummed or otherwise confirmed to have been transferred correctly
|
||||||
|
|
||||||
|
* `CTRL-V from device` = press CTRL-C in Windows Explorer (or whatever) and paste into the webbrowser to upload it
|
||||||
|
|
||||||
|
* `race the beam` = files can be downloaded while they're still uploading; downloaders are slowed down such that the uploader is always ahead
|
||||||
|
|
||||||
|
* `upload routing` = depending on filetype / contents / uploader etc., the file can be redirected to another location or otherwise transformed; mitigates limitations such as [sharex#3992](https://github.com/ShareX/ShareX/issues/3992)
|
||||||
|
* copyparty example: [reloc-by-ext](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#before-upload)
|
||||||
|
|
||||||
* `checksums provided` = when downloading a file from the server, the file's checksum is provided for verification client-side
|
* `checksums provided` = when downloading a file from the server, the file's checksum is provided for verification client-side
|
||||||
|
|
||||||
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge
|
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge
|
||||||
@@ -192,26 +209,27 @@ symbol legend,
|
|||||||
* resumable/segmented uploads only over SFTP, not over HTTP
|
* resumable/segmented uploads only over SFTP, not over HTTP
|
||||||
* upload rules are totals only, not over time
|
* upload rules are totals only, not over time
|
||||||
* can probably do extension/mimetype rejection similar to copyparty
|
* can probably do extension/mimetype rejection similar to copyparty
|
||||||
|
* `m`/arozos download-as-zip is not streaming; it creates the full zipfile before download can start, and fails on big folders
|
||||||
|
|
||||||
|
|
||||||
## protocols and client support
|
## protocols and client support
|
||||||
|
|
||||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
|
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
|
||||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
|
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
|
||||||
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||||
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | | █ |
|
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | | █ | █ |
|
||||||
| serve ftp (tcp) | █ | | | | | █ | | | | | | █ |
|
| serve ftp (tcp) | █ | | | | | █ | | | | | | █ | █ |
|
||||||
| serve ftps (tls) | █ | | | | | █ | | | | | | █ |
|
| serve ftps (tls) | █ | | | | | █ | | | | | | █ | |
|
||||||
| serve tftp (udp) | █ | | | | | | | | | | | |
|
| serve tftp (udp) | █ | | | | | | | | | | | | |
|
||||||
| serve sftp (ssh) | | | | | | █ | | | | | | █ |
|
| serve sftp (ssh) | | | | | | █ | | | | | | █ | █ |
|
||||||
| serve smb/cifs | ╱ | | | | | █ | | | | | | |
|
| serve smb/cifs | ╱ | | | | | █ | | | | | | | |
|
||||||
| serve dlna | | | | | | █ | | | | | | |
|
| serve dlna | | | | | | █ | | | | | | | |
|
||||||
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ | █ |
|
| listen on unix-socket | █ | | | █ | █ | | █ | █ | █ | █ | █ | █ | |
|
||||||
| zeroconf | █ | | | | | | | | | | | |
|
| zeroconf | █ | | | | | | | | | | | | █ |
|
||||||
| supports netscape 4 | ╱ | | | | | █ | | | | | • | |
|
| supports netscape 4 | ╱ | | | | | █ | | | | | • | | ╱ |
|
||||||
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • | |
|
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • | | ╱ |
|
||||||
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | | ╱ |
|
| mojibake filenames | █ | | | • | • | █ | █ | • | █ | • | | ╱ | |
|
||||||
| undecodable filenames | █ | | | • | • | █ | | • | • | | | ╱ |
|
| undecodable filenames | █ | | | • | • | █ | | • | | | | ╱ | |
|
||||||
|
|
||||||
* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
|
* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
|
||||||
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
|
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
|
||||||
@@ -222,61 +240,66 @@ symbol legend,
* extremely minimal samba/cifs server
* netscape 4 / ie6 support is mostly listed as a joke altho some people have actually found it useful ([ie4 tho](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png))
* `l`/sftpgo translates mojibake filenames into valid utf-8 (information loss)
* `m`/arozos has readonly-support for older browsers; no uploading


## server configuration

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
| config from cmd args | █ | | | | | █ | █ | | | █ | | ╱ | ╱ |
| config files | █ | █ | █ | ╱ | ╱ | █ | | █ | | █ | • | ╱ | ╱ |
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ | | █ |
| same-port http / https | █ | | | | | | | | | | | | |
| listen multiple ports | █ | | | | | | | | | | | █ | |
| virtual file system | █ | █ | █ | | | | █ | | | | | █ | |
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • | █ | ╱ |
| folder-rproxy ok | █ | | █ | | █ | █ | | • | • | █ | • | | • |

* `folder-rproxy` = reverse-proxying without dedicating an entire (sub)domain, using a subfolder instead
* `l`/sftpgo:
  * config: users must be added through gui / api calls
* `m`/arozos:
  * configuration is primarily through GUI
  * reverse-proxy is not guaranteed to see the correct client IP


## server capabilities

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| per-account chroot | | | | | | | | | | | | █ | |
| single-sign-on | ╱ | | | █ | █ | | | | • | | | | |
| token auth | ╱ | | | █ | █ | | | █ | | | | | █ |
| 2fa | ╱ | | | █ | █ | | | | | | | █ | ╱ |
| per-volume permissions | █ | █ | █ | █ | █ | █ | █ | | █ | █ | ╱ | █ | █ |
| per-folder permissions | ╱ | | | █ | █ | | █ | | █ | █ | ╱ | █ | █ |
| per-file permissions | | | | █ | █ | | █ | | █ | | | | █ |
| per-file passwords | █ | | | █ | █ | | █ | | █ | | | | █ |
| unmap subfolders | █ | | █ | | | | █ | | | █ | ╱ | • | |
| index.html blocks list | ╱ | | | | | | █ | | | • | | | |
| write-only folders | █ | | █ | | | | | | | | █ | █ | |
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ | █ | █ |
| file versioning | | | | █ | █ | | | | | | | | |
| file encryption | | | | █ | █ | █ | | | | | | █ | |
| file indexing | █ | | █ | █ | █ | | | █ | █ | █ | | | |
| ┗ per-volume db | █ | | • | • | • | | | • | • | | | | |
| ┗ db stored in folder | █ | | | | | | | • | • | █ | | | |
| ┗ db stored out-of-tree | █ | | █ | █ | █ | | | • | • | █ | | | |
| ┗ existing file tree | █ | | █ | | | | | | | █ | | | |
| file action event hooks | █ | | | | | | | | | █ | | █ | • |
| one-way folder sync | █ | | | █ | █ | █ | | | | | | | |
| full sync | | | | █ | █ | | | | | | | | |
| speed throttle | | █ | █ | | | █ | | | █ | | | █ | |
| anti-bruteforce | █ | █ | █ | █ | █ | | | | • | | | █ | • |
| dyndns updater | | █ | | | | | | | | | | | |
| self-updater | | | █ | | | | | | | | | | █ |
| log rotation | █ | | █ | █ | █ | | | • | █ | | | █ | • |
| upload tracking / log | █ | █ | • | █ | █ | | | █ | █ | | | ╱ | █ |
| prometheus metrics | █ | | | █ | | | | | | | | █ | |
| curl-friendly ls | █ | | | | | | | | | | | | |
| curl-friendly upload | █ | | | | | █ | █ | • | | | | | |

* `unmap subfolders` = "shadowing"; mounting a local folder in the middle of an existing filesystem tree in order to disable access below that path
* `files stored as-is` = uploaded files are trivially readable from the server HDD, not sliced into chunks or in weird folder structures or anything like that
@@ -286,6 +309,7 @@ symbol legend,
* `file action event hooks` = run script before/after upload, move, rename, ...
* `one-way folder sync` = like rsync, optionally deleting unexpected files at target
* `full sync` = stateful, dropbox-like sync
* `speed throttle` = rate limiting (per ip, per user, per connection, anything like that)
* `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled
* `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../`
* `a`/copyparty remarks:
@@ -302,49 +326,51 @@ symbol legend,
* `l`/sftpgo:
  * `file action event hooks` also include on-download triggers
  * `upload tracking / log` in main logfile
* `m`/arozos:
  * `2fa` maybe possible through LDAP/Oauth


## client features

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ | | █ |
| themes | █ | █ | █ | █ | | | | | █ | | | | |
| directory tree nav | █ | ╱ | | | █ | | | | █ | | ╱ | | |
| multi-column sorting | █ | | | | | | | | | | | | |
| thumbnails | █ | | | ╱ | ╱ | | | █ | █ | ╱ | | | █ |
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | | | █ |
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | | | █ |
| ┗ audio spectrograms | █ | | | | | | | | | | | | |
| audio player | █ | | ╱ | █ | █ | | | | █ | ╱ | | | █ |
| ┗ gapless playback | █ | | | | | | | | • | | | | |
| ┗ audio equalizer | █ | | | | | | | | | | | | |
| ┗ waveform seekbar | █ | | | | | | | | | | | | |
| ┗ OS integration | █ | | | | | | | | | | | | |
| ┗ transcode to lossy | █ | | | | | | | | | | | | |
| video player | █ | | | █ | █ | | | | █ | █ | | | █ |
| ┗ video transcoding | | | | | | | | | █ | | | | |
| audio BPM detector | █ | | | | | | | | | | | | |
| audio key detector | █ | | | | | | | | | | | | |
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ | | |
| search by date / size | █ | | | | █ | | | █ | █ | | | | |
| search by bpm / key | █ | | | | | | | | | | | | |
| search by custom tags | | | | | | | | █ | █ | | | | |
| search in file contents | | | | █ | █ | | | | █ | | | | |
| search by custom parser | █ | | | | | | | | | | | | |
| find local file | █ | | | | | | | | | | | | |
| undo recent uploads | █ | | | | | | | | | | | | |
| create directories | █ | | █ | █ | █ | ╱ | █ | █ | █ | █ | █ | █ | █ |
| image viewer | █ | | █ | █ | █ | | | | █ | █ | █ | | █ |
| markdown viewer | █ | | | | █ | | | | █ | ╱ | ╱ | | █ |
| markdown editor | █ | | | | █ | | | | █ | ╱ | ╱ | | █ |
| readme.md in listing | █ | | | █ | | | | | | | | | |
| rename files | █ | █ | █ | █ | █ | ╱ | █ | | █ | █ | █ | █ | █ |
| batch rename | █ | | | | | | | | █ | | | | |
| cut / paste files | █ | █ | | █ | █ | | | | █ | | | | █ |
| move files | █ | █ | █ | █ | █ | | █ | | █ | █ | █ | | █ |
| delete files | █ | █ | █ | █ | █ | ╱ | █ | █ | █ | █ | █ | █ | █ |
| copy files | | | | | █ | | | | █ | █ | █ | | █ |

* `single-page app` = multitasking; possible to continue navigating while uploading
* `audio player » os-integration` = use the [lockscreen](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) or [media hotkeys](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) to play/pause, prev/next song
@@ -353,21 +379,25 @@ symbol legend,
* `undo recent uploads` = accounts without delete permissions have a time window where they can undo their own uploads
* `a`/copyparty has teeny-tiny skips playing gapless albums depending on audio codec (opus best)
* `b`/hfs2 has a very basic directory tree view, not showing sibling folders
* `c`/hfs3 remarks:
  * audio playback does not continue into next song
* `f`/rclone can do some file management (mkdir, rename, delete) when hosting through webdav
* `j`/filebrowser remarks:
  * audio playback does not continue into next song
  * plaintext viewer/editor
* `k`/filegator directory tree is a modal window


## integration

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l | m |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - | - |
| OS alert on upload | █ | | | | | | | | | ╱ | | ╱ | |
| discord | █ | | | | | | | | | ╱ | | ╱ | |
| ┗ announce uploads | █ | | | | | | | | | | | ╱ | |
| ┗ custom embeds | | | | | | | | | | | | ╱ | |
| sharex | █ | | | █ | | █ | ╱ | █ | | | | | |
| flameshot | | | | | | █ | | | | | | | |

* sharex `╱` = yes, but does not provide example sharex config
* `a`/copyparty remarks:
@@ -393,11 +423,16 @@ symbol legend,
| filebrowser | go | █ apl2 | 20 MB |
| filegator | php | █ mit | • |
| sftpgo | go | ‼ agpl | 44 MB |
| arozos | go | ░ gpl3 | 531 MB |
| updog | python | █ mit | 17 MB |
| goshs | go | █ mit | 11 MB |
| gimme-that | python | █ mit | 4.8 MB |
| ass | ts | █ isc | • |
| linx | go | ░ gpl3 | 20 MB |
| h5ai | php | █ mit | • |
| autoindex | go | █ mpl2 | 11 MB |
| miniserve | rust | █ mit | 2 MB |
| pingvin-share | go | █ bsd2 | 487 MB |

* `size` = binary (if available) or installed size of program and its dependencies
* copyparty size is for the [standalone python](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) file; the [windows exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is **6 MiB**
@@ -409,6 +444,7 @@ symbol legend,
* 💾 are what copyparty offers as an alternative
* 🔵 are similarities
* ⚠️ are disadvantages (something copyparty does "better")
* 🔥 are hazards


## [copyparty](https://github.com/9001/copyparty)
* resumable uploads which are verified server-side
@@ -416,8 +452,9 @@ symbol legend,
* both of the above are surprisingly uncommon features
* very cross-platform (python, no dependencies)


## [hfs2](https://github.com/rejetto/hfs2/)
* the OG, the legend (now replaced by [hfs3](#hfs3))
* 🔥 hfs2 is dead and dangerous! unfixed RCE: [info](https://github.com/rejetto/hfs2/issues/44), [info](https://github.com/drapid/hfs/issues/3), [info](https://asec.ahnlab.com/en/67650/)
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ windows-only
@@ -425,18 +462,30 @@ symbol legend,
* vfs with gui config, per-volume permissions
* starting to show its age, hence the rewrite:


## [hfs3](https://rejetto.com/hfs/)
* nodejs; cross-platform
* vfs with gui config, per-volume permissions
* tested locally, v0.53.2 on archlinux
* 🔵 uploads are resumable
* ⚠️ uploads are not segmented; max upload size 100 MiB on cloudflare
* ⚠️ uploads are not accelerated (copyparty is 3x faster across the atlantic)
* ⚠️ uploads are not integrity-checked
* ⚠️ copies the file after upload; needs twice the filesize in free disk space
* ⚠️ uploading small files is decent; `107` files per sec (copyparty does `670`/sec, 6x faster)
* ⚠️ doesn't support crazy filenames
* ✅ config GUI
* ✅ download counter
* ✅ watch active connections
* ✅ plugins


## [nextcloud](https://github.com/nextcloud/server)
* php, mariadb
* tested locally, [linuxserver/nextcloud](https://hub.docker.com/r/linuxserver/nextcloud) v30.0.2 (sqlite)
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
* not that bad, can probably be remedied with bindmounts or maybe symlinks
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ uploading small files is slow; `4` files per sec (copyparty does `670`/sec, 160x faster)
* ⚠️ no write-only / upload-only folders
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
@@ -452,11 +501,12 @@ symbol legend,

## [seafile](https://github.com/haiwen/seafile)
* c, mariadb
* tested locally, [official container](https://manual.seafile.com/latest/docker/deploy_seafile_with_docker/) v11.0.13
* ⚠️ [isolated on-disk file hierarchy](https://manual.seafile.com/maintain/seafile_fsck/), incompatible with other software
* *much worse than nextcloud* in that regard
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ uploading small files is slow; `4.7` files per sec (copyparty does `670`/sec, 140x faster)
* ⚠️ no write-only / upload-only folders
* ⚠️ big folders cannot be zip-downloaded
* ⚠️ http/webdav only; no ftp, zeroconf
@@ -480,8 +530,11 @@ symbol legend,

## [dufs](https://github.com/sigoden/dufs)
* rust; cross-platform (windows, linux, macos)
* tested locally, v0.43.0 on archlinux (plain binary)
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ across the atlantic, copyparty is 3x faster
* ⚠️ uploading small files is decent; `97` files per sec (copyparty does `670`/sec, 7x faster)
* ⚠️ doesn't support crazy filenames
* ✅ per-url access control (copyparty is per-volume)
* 🔵 basic but really snappy ui
@@ -504,12 +557,14 @@ symbol legend,
* ✅ token auth (api keys)


## [kodbox](https://github.com/kalcaddle/kodbox)
* this thing is insane (but is getting competition from [arozos](#arozos))
* php; [docker](https://hub.docker.com/r/kodcloud/kodbox)
* 🔵 *upload segmenting, acceleration, and integrity checking!*
* ⚠️ but uploads are not resumable(?)
* ⚠️ not portable
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
* ⚠️ uploading small files to copyparty is 16x faster
* ⚠️ uploading large files to copyparty is 3x faster
* ⚠️ http/webdav only; no ftp or zeroconf
* ⚠️ some parts of the GUI are in chinese
* ✅ fantastic ui/ux
@@ -522,8 +577,12 @@ symbol legend,

## [filebrowser](https://github.com/filebrowser/filebrowser)
* go; cross-platform (windows, linux, mac)
* tested locally, v2.31.2 on archlinux (plain binary)
* 🔵 uploads are resumable and segmented
* 🔵 multiple files are uploaded in parallel, but...
* ⚠️ big files are not accelerated (copyparty is 5x faster across the atlantic)
* ⚠️ uploads are not integrity-checked
* ⚠️ uploading small files is decent; `69` files per sec (copyparty does `670`/sec, 9x faster)
* ⚠️ http only; no webdav / ftp / zeroconf
* ⚠️ doesn't support crazy filenames
* ⚠️ no directory tree nav
@@ -533,12 +592,14 @@ symbol legend,
* ⚠️ but no directory tree for navigation
* ✅ user signup
* ✅ command runner / remote shell
* ✅ more efficient; can handle around twice as much simultaneous traffic


## [filegator](https://github.com/filegator/filegator)
* php; cross-platform (windows, linux, mac)
* 🔵 *it has upload segmenting and acceleration*
* ⚠️ but uploads are still not integrity-checked
* ⚠️ on copyparty, uploads are 40x faster
  * compared to the official filegator docker example which might be bad
* ⚠️ http only; no webdav / ftp / zeroconf
* ⚠️ does not support symlinks
* ⚠️ expensive download-as-zip feature
@@ -549,6 +610,7 @@ symbol legend,
* go; cross-platform (windows, linux, mac)
* ⚠️ http uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ across the atlantic, copyparty is 2.5x faster
* 🔵 sftp uploads are resumable
* ⚠️ web UI is very minimal + a bit slow
* ⚠️ no thumbnails / image viewer / audio player
@@ -556,7 +618,9 @@ symbol legend,
* ⚠️ no filesystem indexing / search
* ⚠️ doesn't run on phones, tablets
* ⚠️ no zeroconf (mdns/ssdp)
* ⚠️ impractical directory URLs
* ⚠️ AGPL licensed
* 🔵 uploading small files is fast; `340` files per sec (copyparty does `670`/sec)
* 🔵 ftp, ftps, webdav
* ✅ sftp server
* ✅ settings gui
@@ -569,6 +633,26 @@ symbol legend,
* ✅ on-download event hook (otherwise same as copyparty)
* ✅ more extensive permissions control


## [arozos](https://github.com/tobychui/arozos)
* big suite of applications similar to [kodbox](#kodbox), copyparty is better at downloading/uploading/music/indexing but arozos has other advantages
* go; primarily linux (limited support for windows)
* ⚠️ needs root
* ⚠️ uploads not resumable / integrity-checked
* ⚠️ uploading small files to copyparty is 2.7x faster
* ⚠️ uploading large files to copyparty is at least 10% faster
  * arozos is websocket-based, 512 KiB chunks; writes each chunk to separate files and then merges
  * copyparty splices directly into the final file; faster and better for the HDD and filesystem (see the sketch below this list)
* ⚠️ across the atlantic, uploading to copyparty is 6x faster
* ⚠️ no directory tree navpane; not as easy to navigate
* ⚠️ download-as-zip is not streaming; creates a temp.file on the server
* ⚠️ not self-contained (pulls from jsdelivr)
* ⚠️ has an audio player, but supports fewer filetypes
* ⚠️ limited support for configuring real-ip detection
* ✅ sftp server
* ✅ settings gui
* ✅ good-looking gui
* ✅ an IDE, msoffice viewer, rich host integration, much more

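to make the chunk-handling difference above a bit more concrete, here is a rough sketch of the two strategies; a simplified illustration, not the actual arozos or copyparty code:

```python
import os

CHUNK = 512 * 1024  # illustrative chunk size

def assemble_by_merging(chunk_paths, dst):
    # one temp file per received chunk, concatenated at the end,
    # so every byte is written to disk twice
    with open(dst, "wb") as out:
        for p in chunk_paths:
            with open(p, "rb") as c:
                out.write(c.read())
            os.remove(p)

def preallocate(dst, total_size):
    # create the final file at full size up front
    with open(dst, "wb") as out:
        out.truncate(total_size)

def splice_chunk(dst, offset, chunk):
    # write the chunk straight into the final file at its offset;
    # the data only touches the disk once
    with open(dst, "r+b") as out:
        out.seek(offset)
        out.write(chunk)
```
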
## [updog](https://github.com/sc0tfree/updog)
* python; cross-platform
* basic directory listing with upload feature
@@ -651,7 +735,31 @@ symbol legend,
* 🔵 upload, tar/zip download, qr-code
* ✅ faster at loading huge folders


## [pingvin-share](https://github.com/stonith404/pingvin-share)
* node; linux (docker)
* mainly for uploads, not a general file server
* 🔵 uploads are segmented (avoids cloudflare size limit)
* 🔵 segments are written directly to target file (HDD-friendly)
* ⚠️ uploads not resumable after a browser or laptop crash
* ⚠️ uploads are not accelerated / integrity-checked
* ⚠️ across the atlantic, copyparty is 3x faster
  * measured with chunksize 96 MiB; pingvin's default 10 MiB is much slower
* ⚠️ can't upload folders with subfolders
* ⚠️ no upload ETA
* 🔵 expiration times, shares, upload-undo
* ✅ config + user-registration gui
* ✅ built-in OpenID and LDAP support
  * 💾 [IdP middleware](https://github.com/9001/copyparty#identity-providers) and config-files
* ✅ probably more than one person who understands the code


# briefly considered
* [pydio](https://github.com/pydio/cells): python/agpl3, looks great, fantastic ux -- but needs mariadb, systemwide install
* [gossa](https://github.com/pldubouilh/gossa): go/mit, minimalistic, basic file upload, text editor, mkdir and rename (no delete/move)


# notes

* high-latency connections (cross-atlantic uploads) can be accurately simulated with `tc qdisc add dev eth0 root netem delay 100ms`
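  * to undo it afterwards, the usual `tc qdisc del dev eth0 root` should remove the delay again
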
@@ -1,48 +0,0 @@
# builds win7-i386 exe on win10-ltsc-1809(17763.316)
# see docs/pyoxidizer.txt

def make_exe():
    dist = default_python_distribution(flavor="standalone_static", python_version="3.8")
    policy = dist.make_python_packaging_policy()
    policy.allow_files = True
    policy.allow_in_memory_shared_library_loading = True
    #policy.bytecode_optimize_level_zero = True
    #policy.include_distribution_sources = False # error instantiating embedded Python interpreter: during initializing Python main: init_fs_encoding: failed to get the Python codec of the filesystem encoding
    policy.include_distribution_resources = False
    policy.include_non_distribution_sources = False
    policy.include_test = False
    python_config = dist.make_python_interpreter_config()
    #python_config.module_search_paths = ["$ORIGIN/lib"]

    # run `python -m copyparty` when the exe starts
    python_config.run_module = "copyparty"
    exe = dist.to_python_executable(
        name="copyparty",
        config=python_config,
        packaging_policy=policy,
    )
    exe.windows_runtime_dlls_mode = "never"
    exe.windows_subsystem = "console"
    # bundle copyparty and its dependencies from the ./sfx directory
    exe.add_python_resources(exe.read_package_root(
        path="sfx",
        packages=[
            "copyparty",
            "jinja2",
            "markupsafe",
            "pyftpdlib",
            "python-magic",
        ]
    ))
    return exe

def make_embedded_resources(exe):
    return exe.to_embedded_resources()

def make_install(exe):
    files = FileManifest()
    files.add_python_resource("copyparty", exe)
    return files

# build targets exposed to `pyoxidizer build`
register_target("exe", make_exe)
register_target("resources", make_embedded_resources, depends=["exe"], default_build_script=True)
register_target("install", make_install, depends=["exe"], default=True)
resolve_targets()