Compare commits
413 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e9d962f273 | ||
|
|
b5405174ec | ||
|
|
6eee601521 | ||
|
|
2fac2bee7c | ||
|
|
c140eeee6b | ||
|
|
c5988a04f9 | ||
|
|
a2e0f98693 | ||
|
|
1111153f06 | ||
|
|
e5a836cb7d | ||
|
|
b0de84cbc5 | ||
|
|
cbb718e10d | ||
|
|
b5ad9369fe | ||
|
|
4401de0413 | ||
|
|
6e671c5245 | ||
|
|
08848be784 | ||
|
|
b599fbae97 | ||
|
|
a8dabc99f6 | ||
|
|
f1130db131 | ||
|
|
735ec35546 | ||
|
|
5a009a2a64 | ||
|
|
d9e9526247 | ||
|
|
5a8c3b8be0 | ||
|
|
1c9c17fb9b | ||
|
|
7f82449179 | ||
|
|
e455ec994e | ||
|
|
c111027420 | ||
|
|
abcdf479e6 | ||
|
|
ad2371f810 | ||
|
|
c4e2b0f95f | ||
|
|
3da62ec234 | ||
|
|
01233991f3 | ||
|
|
ee35974273 | ||
|
|
7037e7365e | ||
|
|
03b13e8a1c | ||
|
|
cdd2da0208 | ||
|
|
cec0e0cf02 | ||
|
|
8122ddedfe | ||
|
|
55a77c5e89 | ||
|
|
461f31582d | ||
|
|
f356faa278 | ||
|
|
9f034d9c4c | ||
|
|
ba52590ae4 | ||
|
|
92edea1de5 | ||
|
|
7ff46966da | ||
|
|
fca70b3508 | ||
|
|
70009cd984 | ||
|
|
8d8b88c4fd | ||
|
|
c4b0cccefd | ||
|
|
7c2beba555 | ||
|
|
7d8d94388b | ||
|
|
0b46b1a614 | ||
|
|
5153db6bff | ||
|
|
b0af4b3712 | ||
|
|
c8f4aeaefa | ||
|
|
00da74400c | ||
|
|
83fb569d61 | ||
|
|
5a62cb4869 | ||
|
|
687df2fabd | ||
|
|
cdd0794d6e | ||
|
|
dcc988135e | ||
|
|
3db117d85f | ||
|
|
ee9aad82dd | ||
|
|
2d6eb63fce | ||
|
|
ca001c8504 | ||
|
|
4e581c59da | ||
|
|
dbd42bc6bf | ||
|
|
c862ec1b64 | ||
|
|
f709140571 | ||
|
|
ef1c4b7a20 | ||
|
|
6c94a63f1c | ||
|
|
20669c73d3 | ||
|
|
0da719f4c2 | ||
|
|
373194c38a | ||
|
|
3d245431fc | ||
|
|
250c8c56f0 | ||
|
|
e136231c8e | ||
|
|
98ffaadf52 | ||
|
|
ebb1981803 | ||
|
|
72361c99e1 | ||
|
|
d5c9c8ebbd | ||
|
|
746229846d | ||
|
|
ffd7cd3ca8 | ||
|
|
b3cecabca3 | ||
|
|
662541c64c | ||
|
|
225bd80ea8 | ||
|
|
85e54980cc | ||
|
|
a19a0fa9f3 | ||
|
|
9bb6e0dc62 | ||
|
|
15ddcf53e7 | ||
|
|
6b54972ec0 | ||
|
|
0219eada23 | ||
|
|
8916bce306 | ||
|
|
99edba4fd9 | ||
|
|
64de3e01e8 | ||
|
|
8222ccc40b | ||
|
|
dc449bf8b0 | ||
|
|
ef0ecf878b | ||
|
|
53f1e3c91d | ||
|
|
eeef80919f | ||
|
|
987bce2182 | ||
|
|
b511d686f0 | ||
|
|
132a83501e | ||
|
|
e565ad5f55 | ||
|
|
f955d2bd58 | ||
|
|
5953399090 | ||
|
|
d26a944d95 | ||
|
|
50dac15568 | ||
|
|
ac1e11e4ce | ||
|
|
d749683d48 | ||
|
|
84e8e1ddfb | ||
|
|
6e58514b84 | ||
|
|
803e156509 | ||
|
|
c06aa683eb | ||
|
|
6644ceef49 | ||
|
|
bd3b3863ae | ||
|
|
ffd4f9c8b9 | ||
|
|
760ff2db72 | ||
|
|
f37187a041 | ||
|
|
1cdb170290 | ||
|
|
d5de3f2fe0 | ||
|
|
d76673e62d | ||
|
|
c549f367c1 | ||
|
|
927c3bce96 | ||
|
|
d75a2c77da | ||
|
|
e6c55d7ff9 | ||
|
|
4c2cb26991 | ||
|
|
dfe7f1d9af | ||
|
|
666297f6fb | ||
|
|
55a011b9c1 | ||
|
|
27aff12a1e | ||
|
|
9a87ee2fe4 | ||
|
|
0a9f4c6074 | ||
|
|
7219331057 | ||
|
|
2fd12a839c | ||
|
|
8c73e0cbc2 | ||
|
|
52e06226a2 | ||
|
|
452592519d | ||
|
|
c9281f8912 | ||
|
|
36d6d29a0c | ||
|
|
db6059e100 | ||
|
|
aab57cb24b | ||
|
|
f00b939402 | ||
|
|
bef9617638 | ||
|
|
692175f5b0 | ||
|
|
5ad65450c4 | ||
|
|
60c96f990a | ||
|
|
07b2bf1104 | ||
|
|
ac1bc232a9 | ||
|
|
5919607ad0 | ||
|
|
07ea629ca5 | ||
|
|
b629d18df6 | ||
|
|
566cbb6507 | ||
|
|
400d700845 | ||
|
|
82ce6862ee | ||
|
|
38e4fdfe03 | ||
|
|
c04662798d | ||
|
|
19d156ff4e | ||
|
|
87c60a1ec9 | ||
|
|
2c92dab165 | ||
|
|
5c1e23907d | ||
|
|
925c7f0a57 | ||
|
|
feed08deb2 | ||
|
|
560d7b6672 | ||
|
|
565daee98b | ||
|
|
e396c5c2b5 | ||
|
|
1ee2cdd089 | ||
|
|
beacedab50 | ||
|
|
25139a4358 | ||
|
|
f8491970fd | ||
|
|
da091aec85 | ||
|
|
e9eb5affcd | ||
|
|
c1918bc36c | ||
|
|
fdda567f50 | ||
|
|
603d0ed72b | ||
|
|
b15a4ef79f | ||
|
|
48a6789d36 | ||
|
|
36f2c446af | ||
|
|
69517e4624 | ||
|
|
ea270ab9f2 | ||
|
|
b6cf2d3089 | ||
|
|
e8db3dd37f | ||
|
|
27485a4cb1 | ||
|
|
253a414443 | ||
|
|
f6e693f0f5 | ||
|
|
c5f7cfc355 | ||
|
|
bc2c1e427a | ||
|
|
95d9e693c6 | ||
|
|
70a3cf36d1 | ||
|
|
aa45fccf11 | ||
|
|
42d00050c1 | ||
|
|
4bb0e6e75a | ||
|
|
2f7f9de3f5 | ||
|
|
f31ac90932 | ||
|
|
439cb7f85b | ||
|
|
af193ee834 | ||
|
|
c06126cc9d | ||
|
|
897ffbbbd0 | ||
|
|
8244d3b4fc | ||
|
|
74266af6d1 | ||
|
|
8c552f1ad1 | ||
|
|
bf5850785f | ||
|
|
feecb3e0b8 | ||
|
|
08d8c82167 | ||
|
|
5239e7ac0c | ||
|
|
9937c2e755 | ||
|
|
f1e947f37d | ||
|
|
a70a49b9c9 | ||
|
|
fe700dcf1a | ||
|
|
c8e3ed3aae | ||
|
|
b8733653a3 | ||
|
|
b772a4f8bb | ||
|
|
9e5253ef87 | ||
|
|
7b94e4edf3 | ||
|
|
da26ec36ca | ||
|
|
443acf2f8b | ||
|
|
6c90e3893d | ||
|
|
ea002ee71d | ||
|
|
ab18893cd2 | ||
|
|
844d16b9e5 | ||
|
|
989cc613ef | ||
|
|
4f0cad5468 | ||
|
|
f89de6b35d | ||
|
|
e0bcb88ee7 | ||
|
|
a0022805d1 | ||
|
|
853adb5d04 | ||
|
|
7744226b5c | ||
|
|
d94b5b3fc9 | ||
|
|
e6ba065bc2 | ||
|
|
59a53ba9ac | ||
|
|
b88cc7b5ce | ||
|
|
5ab54763c6 | ||
|
|
59f815ff8c | ||
|
|
9c42cbec6f | ||
|
|
f471b05aa4 | ||
|
|
34c32e3e89 | ||
|
|
a080759a03 | ||
|
|
0ae12868e5 | ||
|
|
ef52e2c06c | ||
|
|
32c912bb16 | ||
|
|
20870fda79 | ||
|
|
bdfe2c1a5f | ||
|
|
cb99fbf442 | ||
|
|
bccc44dc21 | ||
|
|
2f20d29edd | ||
|
|
c6acd3a904 | ||
|
|
2b24c50eb7 | ||
|
|
d30ae8453d | ||
|
|
8e5c436bef | ||
|
|
f500e55e68 | ||
|
|
9700a12366 | ||
|
|
2b6a34dc5c | ||
|
|
ee80cdb9cf | ||
|
|
2def4cd248 | ||
|
|
0287c7baa5 | ||
|
|
51d31588e6 | ||
|
|
32553e4520 | ||
|
|
211a30da38 | ||
|
|
bdbcbbb002 | ||
|
|
e78af02241 | ||
|
|
115020ba60 | ||
|
|
66abf17bae | ||
|
|
b377791be7 | ||
|
|
78919e65d6 | ||
|
|
84b52ea8c5 | ||
|
|
fd89f7ecb9 | ||
|
|
2ebfdc2562 | ||
|
|
dbf1cbc8af | ||
|
|
a259704596 | ||
|
|
04b55f1a1d | ||
|
|
206af8f151 | ||
|
|
645bb5c990 | ||
|
|
f8966222e4 | ||
|
|
d71f844b43 | ||
|
|
e8b7f65f82 | ||
|
|
f193f398c1 | ||
|
|
b6554a7f8c | ||
|
|
3f05b6655c | ||
|
|
51a83b04a0 | ||
|
|
0c03921965 | ||
|
|
2527e90325 | ||
|
|
7f08f10c37 | ||
|
|
1c011ff0bb | ||
|
|
a1ad608267 | ||
|
|
547a486387 | ||
|
|
7741870dc7 | ||
|
|
8785d2f9fe | ||
|
|
d744f3ff8f | ||
|
|
8ca996e2f7 | ||
|
|
096de50889 | ||
|
|
bec3fee9ee | ||
|
|
8413ed6d1f | ||
|
|
055302b5be | ||
|
|
8016e6711b | ||
|
|
c8ea4066b1 | ||
|
|
6cc7101d31 | ||
|
|
263adec70a | ||
|
|
ac96fd9c96 | ||
|
|
e5582605cd | ||
|
|
1b52ef1f8a | ||
|
|
503face974 | ||
|
|
13e77777d7 | ||
|
|
89c6c2e0d9 | ||
|
|
14af136fcd | ||
|
|
d39a99c929 | ||
|
|
43ee6b9f5b | ||
|
|
8a38101e48 | ||
|
|
5026b21226 | ||
|
|
d07859e8e6 | ||
|
|
df7219d3b6 | ||
|
|
ad9be54f55 | ||
|
|
eeecc50757 | ||
|
|
8ff7094e4d | ||
|
|
58ae38c613 | ||
|
|
7f1c992601 | ||
|
|
fbfdd8338b | ||
|
|
bbc379906a | ||
|
|
33f41f3e61 | ||
|
|
655f6d00f8 | ||
|
|
fd552842d4 | ||
|
|
6bd087ddc5 | ||
|
|
0504b010a1 | ||
|
|
39cc92d4bc | ||
|
|
a0da0122b9 | ||
|
|
879e83e24f | ||
|
|
64ad585318 | ||
|
|
f262aee800 | ||
|
|
d4da386172 | ||
|
|
5d92f4df49 | ||
|
|
6f8a588c4d | ||
|
|
7c8e368721 | ||
|
|
f7a43a8e46 | ||
|
|
02879713a2 | ||
|
|
acbb8267e1 | ||
|
|
8796c09f56 | ||
|
|
d636316a19 | ||
|
|
a96d9ac6cb | ||
|
|
643e222986 | ||
|
|
ed524d84bb | ||
|
|
f0cdd9f25d | ||
|
|
4e797a7156 | ||
|
|
136c0fdc2b | ||
|
|
35165f8472 | ||
|
|
cab999978e | ||
|
|
fabeebd96b | ||
|
|
b1cf588452 | ||
|
|
c354a38b4c | ||
|
|
a17c267d87 | ||
|
|
c1180d6f9c | ||
|
|
d3db6d296f | ||
|
|
caf7e93f5e | ||
|
|
eefa0518db | ||
|
|
945170e271 | ||
|
|
6c2c6090dc | ||
|
|
b2e233403d | ||
|
|
e397ec2e48 | ||
|
|
fade751a3e | ||
|
|
0f386c4b08 | ||
|
|
14bccbe45f | ||
|
|
55eb692134 | ||
|
|
b32d65207b | ||
|
|
64cac003d8 | ||
|
|
6dbfcddcda | ||
|
|
b4e0a34193 | ||
|
|
01c82b54a7 | ||
|
|
4ef3106009 | ||
|
|
aa3a971961 | ||
|
|
b9d0c8536b | ||
|
|
3313503ea5 | ||
|
|
d999d3a921 | ||
|
|
e7d00bae39 | ||
|
|
650e41c717 | ||
|
|
140f6e0389 | ||
|
|
5e111ba5ee | ||
|
|
95a599961e | ||
|
|
a55e0d6eb8 | ||
|
|
2fd2c6b948 | ||
|
|
7a936ea01e | ||
|
|
226c7c3045 | ||
|
|
a4239a466b | ||
|
|
d0eb014c38 | ||
|
|
e01ba8552a | ||
|
|
024303592a | ||
|
|
86419b8f47 | ||
|
|
f1358dbaba | ||
|
|
e8a653ca0c | ||
|
|
9bc09ce949 | ||
|
|
dc8e621d7c | ||
|
|
dee0950f74 | ||
|
|
143f72fe36 | ||
|
|
a7889fb6a2 | ||
|
|
987caec15d | ||
|
|
ab40ff5051 | ||
|
|
bed133d3dd | ||
|
|
829c8fca96 | ||
|
|
5b26ab0096 | ||
|
|
39554b4bc3 | ||
|
|
97d9c149f1 | ||
|
|
59688bc8d7 | ||
|
|
a18f63895f | ||
|
|
27433d6214 | ||
|
|
374c535cfa | ||
|
|
ac7815a0ae | ||
|
|
10bc2d9205 | ||
|
|
0c50ea1757 | ||
|
|
c057c5e8e8 | ||
|
|
46d667716e | ||
|
|
cba2e10d29 | ||
|
|
b1693f95cb | ||
|
|
3f00073256 | ||
|
|
d15000062d | ||
|
|
6cb3b35a54 | ||
|
|
b4031e8d43 | ||
|
|
a3ca0638cb |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -12,6 +12,7 @@ copyparty.egg-info/
|
||||
/dist/
|
||||
/py2/
|
||||
/sfx*
|
||||
/pyz/
|
||||
/unt/
|
||||
/log/
|
||||
|
||||
|
||||
3
.vscode/launch.json
vendored
3
.vscode/launch.json
vendored
@@ -19,8 +19,7 @@
|
||||
"-emp",
|
||||
"-e2dsa",
|
||||
"-e2ts",
|
||||
"-mtp",
|
||||
".bpm=f,bin/mtag/audio-bpm.py",
|
||||
"-mtp=.bpm=f,bin/mtag/audio-bpm.py",
|
||||
"-aed:wark",
|
||||
"-vsrv::r:rw,ed:c,dupe",
|
||||
"-vdist:dist:r"
|
||||
|
||||
24
.vscode/settings.json
vendored
24
.vscode/settings.json
vendored
@@ -22,6 +22,9 @@
|
||||
"terminal.ansiBrightCyan": "#9cf0ed",
|
||||
"terminal.ansiBrightWhite": "#ffffff",
|
||||
},
|
||||
"python.terminal.activateEnvironment": false,
|
||||
"python.analysis.enablePytestSupport": false,
|
||||
"python.analysis.typeCheckingMode": "standard",
|
||||
"python.testing.pytestEnabled": false,
|
||||
"python.testing.unittestEnabled": true,
|
||||
"python.testing.unittestArgs": [
|
||||
@@ -31,23 +34,8 @@
|
||||
"-p",
|
||||
"test_*.py"
|
||||
],
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.banditEnabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"python.linting.flake8Args": [
|
||||
"--max-line-length=120",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
|
||||
],
|
||||
"python.linting.banditArgs": [
|
||||
"--ignore=B104,B110,B112"
|
||||
],
|
||||
// python3 -m isort --py=27 --profile=black copyparty/
|
||||
"python.formatting.provider": "none",
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
},
|
||||
"editor.formatOnSave": true,
|
||||
// python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
|
||||
"editor.formatOnSave": false,
|
||||
"[html]": {
|
||||
"editor.formatOnSave": false,
|
||||
"editor.autoIndent": "keep",
|
||||
@@ -58,6 +46,4 @@
|
||||
"files.associations": {
|
||||
"*.makefile": "makefile"
|
||||
},
|
||||
"python.linting.enabled": true,
|
||||
"python.pythonPath": "/usr/bin/python3"
|
||||
}
|
||||
2
LICENSE
2
LICENSE
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 ed
|
||||
Copyright (c) 2019 ed <oss@ocv.me>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
@@ -2,7 +2,7 @@ standalone programs which are executed by copyparty when an event happens (uploa
|
||||
|
||||
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
||||
|
||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
|
||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbr/xar/xbd/xad/xban)
|
||||
|
||||
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
||||
|
||||
@@ -13,6 +13,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
|
||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
|
||||
|
||||
|
||||
# upload batches
|
||||
@@ -23,7 +24,10 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin
|
||||
|
||||
# before upload
|
||||
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
||||
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension
|
||||
|
||||
|
||||
# on message
|
||||
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
||||
* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
|
||||
* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
|
||||
|
||||
@@ -12,19 +12,28 @@ announces a new upload on discord
|
||||
example usage as global config:
|
||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork; don't delay other hooks while this is running
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
running this plugin on all uploads with the params explained above)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute after upload
|
||||
f = fork; don't wait for it to finish
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = provide upload information as json; not just the filename
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xau: f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
||||
|
||||
|
||||
140
bin/hooks/into-the-cache-it-goes.py
Normal file
140
bin/hooks/into-the-cache-it-goes.py
Normal file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import platform
|
||||
import subprocess as sp
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
_ = r"""
|
||||
try to avoid race conditions in caching proxies
|
||||
(primarily cloudflare, but probably others too)
|
||||
by means of the most obvious solution possible:
|
||||
|
||||
just as each file has finished uploading, use
|
||||
the server's external URL to download the file
|
||||
so that it ends up in the cache, warm and snug
|
||||
|
||||
this intentionally delays the upload response
|
||||
as it waits for the file to finish downloading
|
||||
before copyparty is allowed to return the URL
|
||||
|
||||
NOTE: you must edit this script before use,
|
||||
replacing https://example.com with your URL
|
||||
|
||||
NOTE: if the files are only accessible with a
|
||||
password and/or filekey, you must also add
|
||||
a cromulent password in the PASSWORD field
|
||||
|
||||
NOTE: needs either wget, curl, or "requests":
|
||||
python3 -m pip install --user -U requests
|
||||
|
||||
|
||||
example usage as global config:
|
||||
--xau j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
t10 = abort download and continue if it takes longer than 10sec
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xau: j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
"""
|
||||
|
||||
|
||||
# replace this with your site's external URL
|
||||
# (including the :portnumber if necessary)
|
||||
SITE_URL = "https://example.com"
|
||||
|
||||
# if downloading is protected by passwords or filekeys,
|
||||
# specify a valid password between the quotes below:
|
||||
PASSWORD = ""
|
||||
|
||||
# if file is larger than this, skip download
|
||||
MAX_MEGABYTES = 8
|
||||
|
||||
# =============== END OF CONFIG ===============
|
||||
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
|
||||
|
||||
def main():
|
||||
fun = download_with_python
|
||||
if shutil.which("curl"):
|
||||
fun = download_with_curl
|
||||
elif shutil.which("wget"):
|
||||
fun = download_with_wget
|
||||
|
||||
inf = json.loads(sys.argv[1])
|
||||
|
||||
if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
|
||||
print("[into-the-cache] file is too large; will not download")
|
||||
return
|
||||
|
||||
file_url = "/"
|
||||
if inf["vp"]:
|
||||
file_url += inf["vp"] + "/"
|
||||
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
|
||||
file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")
|
||||
|
||||
print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
|
||||
fun(file_url, PASSWORD.strip())
|
||||
|
||||
print("[into-the-cache] Download OK")
|
||||
|
||||
|
||||
def download_with_curl(url, pw):
|
||||
cmd = ["curl"]
|
||||
|
||||
if pw:
|
||||
cmd += ["-HPW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_wget(url, pw):
|
||||
cmd = ["wget", "-O"]
|
||||
|
||||
cmd += ["nul" if WINDOWS else "/dev/null"]
|
||||
|
||||
if pw:
|
||||
cmd += ["--header=PW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_python(url, pw):
|
||||
import requests
|
||||
|
||||
headers = {}
|
||||
if pw:
|
||||
headers["PW"] = pw
|
||||
|
||||
with requests.get(url, headers=headers, stream=True) as r:
|
||||
r.raise_for_status()
|
||||
for _ in r.iter_content(chunk_size=1024 * 256):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -14,19 +14,32 @@ except:
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
"""
|
||||
_ = r"""
|
||||
use copyparty as a dumb messaging server / guestbook thing;
|
||||
accepts guestbook entries from 📟 (message-to-server-log) in the web-ui
|
||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||
|
||||
Sample usage:
|
||||
|
||||
example usage as global config:
|
||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||
|
||||
Where:
|
||||
parameters explained,
|
||||
xm = execute on message (📟)
|
||||
j = this hook needs message information as json (not just the message-text)
|
||||
|
||||
xm = execute on message-to-server-log
|
||||
j = provide message information as json; not just the text - this script REQUIRES json
|
||||
t10 = timeout and kill download after 10 secs
|
||||
example usage as a volflag (per-volume config):
|
||||
python copyparty-sfx.py -v srv/log:log:r:c,xm=j,bin/hooks/msg-log.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/log as volume /log, readable by everyone,
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/log]
|
||||
srv/log
|
||||
accs:
|
||||
r: *
|
||||
flags:
|
||||
xm: j,bin/hooks/msg-log.py
|
||||
"""
|
||||
|
||||
|
||||
|
||||
128
bin/hooks/qbittorrent-magnet.py
Executable file
128
bin/hooks/qbittorrent-magnet.py
Executable file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
start downloading a torrent by POSTing a magnet URL to copyparty,
|
||||
for example using 📟 (message-to-server-log) in the web-ui
|
||||
|
||||
by default it will download the torrent to the folder you were in
|
||||
when you pasted the magnet into the message-to-server-log field
|
||||
|
||||
you can optionally specify another location by adding a whitespace
|
||||
after the magnet URL followed by the name of the subfolder to DL into,
|
||||
or for example "anime/airing" would download to /srv/media/anime/airing
|
||||
because the keyword "anime" is in the DESTS config below
|
||||
|
||||
needs python3
|
||||
|
||||
example usage as global config (not a good idea):
|
||||
python copyparty-sfx.py --xm aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message (📟)
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
t60 = abort if qbittorrent has to think about it for more than 1 min
|
||||
|
||||
example usage as a volflag (per-volume config, much better):
|
||||
-v srv/qb:qb:A,ed:c,xm=aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/qb]
|
||||
srv/qb
|
||||
accs:
|
||||
A: ed
|
||||
flags:
|
||||
xm: aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
the volflag examples only kicks in if you send the torrent magnet
|
||||
while you're in the /qb folder (or any folder below there)
|
||||
"""
|
||||
|
||||
|
||||
# list of usernames to allow
|
||||
ALLOWLIST = [ "ed", "morpheus" ]
|
||||
|
||||
|
||||
# list of destination aliases to translate into full filesystem
|
||||
# paths; takes effect if the first folder component in the
|
||||
# custom download location matches anything in this dict
|
||||
DESTS = {
|
||||
"iso": "/srv/pub/linux-isos",
|
||||
"anime": "/srv/media/anime",
|
||||
}
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
url = inf["txt"]
|
||||
if not url.lower().startswith("magnet:?"):
|
||||
# not a magnet, abort
|
||||
return
|
||||
|
||||
if inf["user"] not in ALLOWLIST:
|
||||
print("🧲 denied for user", inf["user"])
|
||||
return
|
||||
|
||||
# might as well run the command inside the filesystem folder
|
||||
# which matches the URL that the magnet message was sent to
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
# is there is a custom download location in the url?
|
||||
dst = ""
|
||||
if " " in url:
|
||||
url, dst = url.split(" ", 1)
|
||||
|
||||
# is the location in the predefined list of locations?
|
||||
parts = dst.replace("\\", "/").split("/")
|
||||
if parts[0] in DESTS:
|
||||
dst = os.path.join(DESTS[parts[0]], *(parts[1:]))
|
||||
|
||||
else:
|
||||
# nope, so download to the current folder instead;
|
||||
# comment the dst line below to instead use the default
|
||||
# download location from your qbittorrent settings
|
||||
dst = inf["ap"]
|
||||
pass
|
||||
|
||||
# archlinux has a -nox suffix for qbittorrent if headless
|
||||
# so check if we should be using that
|
||||
if shutil.which("qbittorrent-nox"):
|
||||
torrent_bin = "qbittorrent-nox"
|
||||
else:
|
||||
torrent_bin = "qbittorrent"
|
||||
|
||||
# the command to add a new torrent, adjust if necessary
|
||||
cmd = [torrent_bin, url]
|
||||
if dst:
|
||||
cmd += ["--save-path=%s" % (dst,)]
|
||||
|
||||
# if copyparty and qbittorrent are running as different users
|
||||
# you may have to do something like the following
|
||||
# (assuming qbittorrent* is nopasswd-allowed in sudoers):
|
||||
#
|
||||
# cmd = ["sudo", "-u", "qbitter"] + cmd
|
||||
|
||||
print("🧲", cmd)
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
print("🧲 FAILED TO ADD", url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
127
bin/hooks/reloc-by-ext.py
Normal file
127
bin/hooks/reloc-by-ext.py
Normal file
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
relocate/redirect incoming uploads according to file extension or name
|
||||
|
||||
example usage as global config:
|
||||
--xbu j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xbu: j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
note: this could also work as an xau hook (after-upload), but
|
||||
because it doesn't need to read the file contents its better
|
||||
as xbu (before-upload) since that's safer / less buggy,
|
||||
and only xbu works with up2k (dragdrop into browser)
|
||||
"""
|
||||
|
||||
|
||||
PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
|
||||
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
|
||||
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
vdir, fn = os.path.split(inf["vp"])
|
||||
|
||||
try:
|
||||
fn, ext = fn.rsplit(".", 1)
|
||||
except:
|
||||
# no file extension; pretend it's "bin"
|
||||
ext = "bin"
|
||||
|
||||
ext = ext.lower()
|
||||
|
||||
# this function must end by printing the action to perform;
|
||||
# that's handled by the print(json.dumps(... at the bottom
|
||||
#
|
||||
# the action can contain the following keys:
|
||||
# "vp" is the folder URL to move the upload to,
|
||||
# "ap" is the filesystem-path to move it to (but "vp" is safer),
|
||||
# "fn" overrides the final filename to use
|
||||
|
||||
##
|
||||
## some example actions to take; pick one by
|
||||
## selecting it inside the print at the end:
|
||||
##
|
||||
|
||||
# create a subfolder named after the filetype and move it into there
|
||||
into_subfolder = {"vp": ext}
|
||||
|
||||
# move it into a toplevel folder named after the filetype
|
||||
into_toplevel = {"vp": "/" + ext}
|
||||
|
||||
# move it into a filetype-named folder next to the target folder
|
||||
into_sibling = {"vp": "../" + ext}
|
||||
|
||||
# move images into "/just/pics", vids into "/just/vids",
|
||||
# music into "/just/tunes", and anything else as-is
|
||||
if ext in PICS.split():
|
||||
by_category = {"vp": "/just/pics"}
|
||||
elif ext in VIDS.split():
|
||||
by_category = {"vp": "/just/vids"}
|
||||
elif ext in MUSIC.split():
|
||||
by_category = {"vp": "/just/tunes"}
|
||||
else:
|
||||
by_category = {} # no action
|
||||
|
||||
# now choose the default effect to apply; can be any of these:
|
||||
# into_subfolder into_toplevel into_sibling by_category
|
||||
effect = {"vp": "/junk"}
|
||||
|
||||
##
|
||||
## but we can keep going, adding more speicifc rules
|
||||
## which can take precedence, replacing the fallback
|
||||
## effect we just specified:
|
||||
##
|
||||
|
||||
fn = fn.lower() # lowercase filename to make this easier
|
||||
|
||||
if "screenshot" in fn:
|
||||
effect = {"vp": "/ss"}
|
||||
if "mpv_" in fn:
|
||||
effect = {"vp": "/anishots"}
|
||||
elif "debian" in fn or "biebian" in fn:
|
||||
effect = {"vp": "/linux-ISOs"}
|
||||
elif re.search(r"ep(isode |\.)?[0-9]", fn):
|
||||
effect = {"vp": "/podcasts"}
|
||||
|
||||
# regex lets you grab a part of the matching
|
||||
# text and use that in the upload path:
|
||||
m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
|
||||
if m:
|
||||
# the regex matched; use "anime-op" or "anime-ed"
|
||||
effect = {"vp": "/anime-" + m[1]}
|
||||
|
||||
# aaand DO IT
|
||||
print(json.dumps({"reloc": effect}))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -9,25 +9,38 @@ import subprocess as sp
|
||||
_ = r"""
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
message/pager function on the website)
|
||||
📟 message-to-server-log in the web-ui)
|
||||
|
||||
example usage as global config:
|
||||
--xm f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all messages with the params listed below)
|
||||
--xm aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message-to-server-log
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide message information as json; not just the text
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
c3 = mute all output
|
||||
t3600 = timeout and kill download after 1 hour
|
||||
t3600 = timeout and abort download after 1 hour
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=aw,f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xm: aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
the volflag examples only kicks in if you send the message
|
||||
while you're in the /inc folder (or any folder below there)
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -223,12 +223,15 @@ install_vamp() {
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
$pybin -m pip install --user vamp || {
|
||||
printf '\n\033[7malright, trying something else...\033[0m\n'
|
||||
$pybin -m pip install --user --no-build-isolation vamp
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||
(dl_files yolo https://ocv.me/mirror/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
||||
) <vamp-plugin-sdk-2.10.0.tar.gz
|
||||
@@ -244,7 +247,7 @@ install_vamp() {
|
||||
cd "$td"
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
(dl_files yolo https://ocv.me/mirror/beatroot-vamp-v1.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
|
||||
384
bin/u2c.py
384
bin/u2c.py
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
S_VERSION = "1.11"
|
||||
S_BUILD_DT = "2023-11-11"
|
||||
S_VERSION = "1.24"
|
||||
S_BUILD_DT = "2024-09-05"
|
||||
|
||||
"""
|
||||
u2c.py: upload to copyparty
|
||||
@@ -20,6 +20,7 @@ import sys
|
||||
import stat
|
||||
import math
|
||||
import time
|
||||
import json
|
||||
import atexit
|
||||
import signal
|
||||
import socket
|
||||
@@ -29,7 +30,7 @@ import platform
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
EXE = sys.executable.endswith("exe")
|
||||
EXE = bool(getattr(sys, "frozen", False))
|
||||
|
||||
try:
|
||||
import argparse
|
||||
@@ -40,19 +41,25 @@ except:
|
||||
|
||||
try:
|
||||
import requests
|
||||
|
||||
req_ses = requests.Session()
|
||||
except ImportError as ex:
|
||||
if EXE:
|
||||
if "-" in sys.argv or "-h" in sys.argv:
|
||||
m = ""
|
||||
elif EXE:
|
||||
raise
|
||||
elif sys.version_info > (2, 7):
|
||||
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
|
||||
m = "\nERROR: need 'requests'{0}; please run this command:\n {1} -m pip install --user requests\n"
|
||||
else:
|
||||
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
|
||||
m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
m = "\n ERROR: need these{0}:\n" + "\n".join(m) + "\n"
|
||||
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
|
||||
|
||||
print(m.format(sys.executable), "\nspecifically,", ex)
|
||||
sys.exit(1)
|
||||
if m:
|
||||
t = " when not running with '-h' or url '-'"
|
||||
print(m.format(t, sys.executable), "\nspecifically,", ex)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
@@ -75,16 +82,40 @@ else:
|
||||
VT100 = platform.system() != "Windows"
|
||||
|
||||
|
||||
req_ses = requests.Session()
|
||||
try:
|
||||
UTC = datetime.timezone.utc
|
||||
except:
|
||||
TD_ZERO = datetime.timedelta(0)
|
||||
|
||||
class _UTC(datetime.tzinfo):
|
||||
def utcoffset(self, dt):
|
||||
return TD_ZERO
|
||||
|
||||
def tzname(self, dt):
|
||||
return "UTC"
|
||||
|
||||
def dst(self, dt):
|
||||
return TD_ZERO
|
||||
|
||||
UTC = _UTC()
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
def __init__(self, target, name=None, a=None):
|
||||
# type: (Any, Any, Any) -> None
|
||||
threading.Thread.__init__(self, target=target, args=a or (), name=name)
|
||||
threading.Thread.__init__(self, name=name)
|
||||
self.a = a or ()
|
||||
self.fun = target
|
||||
self.daemon = True
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGINT, signal.SIGTERM])
|
||||
except:
|
||||
pass
|
||||
|
||||
self.fun(*self.a)
|
||||
|
||||
|
||||
class File(object):
|
||||
"""an up2k upload task; represents a single file"""
|
||||
@@ -101,18 +132,22 @@ class File(object):
|
||||
# set by get_hashlist
|
||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||
self.t_hash = 0.0 # type: float
|
||||
|
||||
# set by handshake
|
||||
self.recheck = False # duplicate; redo handshake after all files done
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = "" # type: str
|
||||
self.url = "" # type: str
|
||||
self.nhs = 0
|
||||
self.nhs = 0 # type: int
|
||||
|
||||
# set by upload
|
||||
self.t0_up = 0.0 # type: float
|
||||
self.t1_up = 0.0 # type: float
|
||||
self.nojoin = 0 # type: int
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
self.cd = 0
|
||||
self.cd = 0 # type: int
|
||||
|
||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
@@ -121,10 +156,20 @@ class File(object):
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
def __init__(self, file, cids):
|
||||
# type: (File, str) -> None
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.file = file
|
||||
self.cids = cids
|
||||
|
||||
self.car, tlen = file.kchunks[cids[0]]
|
||||
for cid in cids[1:]:
|
||||
ofs, clen = file.kchunks[cid]
|
||||
if ofs != self.car + tlen:
|
||||
raise Exception(9)
|
||||
tlen += clen
|
||||
|
||||
self.len = tlen
|
||||
self.cdr = self.car + self.len
|
||||
self.ofs = 0 # type: int
|
||||
self.f = open(file.abs, "rb", 512 * 1024)
|
||||
@@ -247,6 +292,12 @@ class MTHash(object):
|
||||
_print = print
|
||||
|
||||
|
||||
def safe_print(*a, **ka):
|
||||
ka["end"] = ""
|
||||
zs = " ".join([unicode(x) for x in a])
|
||||
_print(zs + "\n", **ka)
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
@@ -260,18 +311,17 @@ def eprint(*a, **ka):
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
try:
|
||||
_print(*a, **ka)
|
||||
safe_print(*a, **ka)
|
||||
except:
|
||||
v = " ".join(str(x) for x in a)
|
||||
v = v.encode("ascii", "replace").decode("ascii")
|
||||
_print(v, **ka)
|
||||
safe_print(v, **ka)
|
||||
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
print = safe_print if VT100 else flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
@@ -348,7 +398,7 @@ def undns(url):
|
||||
usp = urlsplit(url)
|
||||
hn = usp.hostname
|
||||
gai = None
|
||||
eprint("resolving host [{0}] ...".format(hn), end="")
|
||||
eprint("resolving host [%s] ..." % (hn,))
|
||||
try:
|
||||
gai = socket.getaddrinfo(hn, None)
|
||||
hn = gai[0][4][0]
|
||||
@@ -366,7 +416,7 @@ def undns(url):
|
||||
|
||||
usp = usp._replace(netloc=hn)
|
||||
url = urlunsplit(usp)
|
||||
eprint(" {0}".format(url))
|
||||
eprint(" %s\n" % (url,))
|
||||
return url
|
||||
|
||||
|
||||
@@ -509,6 +559,8 @@ def get_hashlist(file, pcb, mth):
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
t0 = time.time()
|
||||
|
||||
if mth and file.size >= 1024 * 512:
|
||||
ret = mth.hash(f, file.size, chunk_sz, pcb, file)
|
||||
file_rem = 0
|
||||
@@ -535,10 +587,12 @@ def get_hashlist(file, pcb, mth):
|
||||
if pcb:
|
||||
pcb(file, file_ofs)
|
||||
|
||||
file.t_hash = time.time() - t0
|
||||
file.cids = ret
|
||||
file.kchunks = {}
|
||||
for k, v1, v2 in ret:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
if k not in file.kchunks:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(ar, file, search):
|
||||
@@ -560,8 +614,11 @@ def handshake(ar, file, search):
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
elif ar.dr:
|
||||
req["replace"] = True
|
||||
else:
|
||||
if ar.touch:
|
||||
req["umod"] = True
|
||||
if ar.ow:
|
||||
req["replace"] = True
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
||||
if pw:
|
||||
@@ -577,7 +634,8 @@ def handshake(ar, file, search):
|
||||
sc = 600
|
||||
txt = ""
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
zs = json.dumps(req, separators=(",\n", ": "))
|
||||
r = req_ses.post(url, headers=headers, data=zs)
|
||||
sc = r.status_code
|
||||
txt = r.text
|
||||
if sc < 400:
|
||||
@@ -624,13 +682,20 @@ def handshake(ar, file, search):
|
||||
return r["hash"], r["sprs"]
|
||||
|
||||
|
||||
def upload(file, cid, pw, stats):
|
||||
# type: (File, str, str, str) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
def upload(fsl, pw, stats):
|
||||
# type: (FileSlice, str, str) -> None
|
||||
"""upload a range of file data, defined by one or more `cid` (chunk-hash)"""
|
||||
|
||||
ctxt = fsl.cids[0]
|
||||
if len(fsl.cids) > 1:
|
||||
n = 192 // len(fsl.cids)
|
||||
n = 9 if n > 9 else 2 if n < 2 else n
|
||||
zsl = [zs[:n] for zs in fsl.cids[1:]]
|
||||
ctxt += ",%d,%s" % (n, "".join(zsl))
|
||||
|
||||
headers = {
|
||||
"X-Up2k-Hash": cid,
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"X-Up2k-Hash": ctxt,
|
||||
"X-Up2k-Wark": fsl.file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
|
||||
@@ -640,15 +705,24 @@ def upload(file, cid, pw, stats):
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
f = FileSlice(file, cid)
|
||||
try:
|
||||
r = req_ses.post(file.url, headers=headers, data=f)
|
||||
r = req_ses.post(fsl.file.url, headers=headers, data=fsl)
|
||||
|
||||
if r.status_code == 400:
|
||||
txt = r.text
|
||||
if (
|
||||
"already being written" in txt
|
||||
or "already got that" in txt
|
||||
or "only sibling chunks" in txt
|
||||
):
|
||||
fsl.file.nojoin = 1
|
||||
|
||||
if not r:
|
||||
raise Exception(repr(r))
|
||||
|
||||
_ = r.content
|
||||
finally:
|
||||
f.f.close()
|
||||
fsl.f.close()
|
||||
|
||||
|
||||
class Ctl(object):
|
||||
@@ -712,6 +786,9 @@ class Ctl(object):
|
||||
if ar.safe:
|
||||
self._safe()
|
||||
else:
|
||||
self.at_hash = 0.0
|
||||
self.at_up = 0.0
|
||||
self.at_upr = 0.0
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
@@ -719,8 +796,6 @@ class Ctl(object):
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
self.serialized = False
|
||||
|
||||
@@ -730,8 +805,11 @@ class Ctl(object):
|
||||
self.eta = "99:99:99"
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.exit_cond = threading.Condition()
|
||||
self.uploader_alive = ar.j
|
||||
self.handshaker_alive = ar.j
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
self.q_upload = Queue() # type: Queue[FileSlice]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
@@ -776,7 +854,8 @@ class Ctl(object):
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
|
||||
upload(file, cid, self.ar.a, stats)
|
||||
fslice = FileSlice(file, [cid])
|
||||
upload(fslice, self.ar.a, stats)
|
||||
|
||||
print(" ok!")
|
||||
if file.recheck:
|
||||
@@ -785,7 +864,7 @@ class Ctl(object):
|
||||
if not self.recheck:
|
||||
return
|
||||
|
||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||
eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
|
||||
for file in self.recheck:
|
||||
handshake(self.ar, file, search)
|
||||
|
||||
@@ -799,27 +878,21 @@ class Ctl(object):
|
||||
Daemon(self.handshaker)
|
||||
Daemon(self.uploader)
|
||||
|
||||
idles = 0
|
||||
while idles < 3:
|
||||
time.sleep(0.07)
|
||||
while True:
|
||||
with self.exit_cond:
|
||||
self.exit_cond.wait(0.07)
|
||||
with self.mutex:
|
||||
if (
|
||||
self.q_handshake.empty()
|
||||
and self.q_upload.empty()
|
||||
and not self.hasher_busy
|
||||
and not self.handshaker_busy
|
||||
and not self.uploader_busy
|
||||
):
|
||||
idles += 1
|
||||
else:
|
||||
idles = 0
|
||||
if not self.handshaker_alive and not self.uploader_alive:
|
||||
break
|
||||
st_hash = self.st_hash[:]
|
||||
st_up = self.st_up[:]
|
||||
|
||||
if VT100 and not self.ar.ns:
|
||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||
txt = "\033[s\033[{0}H".format(ss.g)
|
||||
for y, k, st, f in [
|
||||
[0, "hash", self.st_hash, self.hash_f],
|
||||
[1, "send", self.st_up, self.up_f],
|
||||
[0, "hash", st_hash, self.hash_f],
|
||||
[1, "send", st_up, self.up_f],
|
||||
]:
|
||||
txt += "\033[{0}H{1}:".format(ss.g + y, k)
|
||||
file, arg = st
|
||||
@@ -843,12 +916,12 @@ class Ctl(object):
|
||||
txt = " "
|
||||
|
||||
if not self.up_br:
|
||||
spd = self.hash_b / (time.time() - self.t0)
|
||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||
spd = self.hash_b / ((time.time() - self.t0) or 1)
|
||||
eta = (self.nbytes - self.hash_b) / (spd or 1)
|
||||
else:
|
||||
spd = self.up_br / (time.time() - self.t0_up)
|
||||
spd = self.up_br / ((time.time() - self.t0_up) or 1)
|
||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
eta = (self.nbytes - self.up_b) / (spd or 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
@@ -859,10 +932,17 @@ class Ctl(object):
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||
|
||||
if self.hash_b and self.at_hash:
|
||||
spd = humansize(self.hash_b / self.at_hash)
|
||||
eprint("\nhasher: %.2f sec, %s/s\n" % (self.at_hash, spd))
|
||||
if self.up_b and self.at_up:
|
||||
spd = humansize(self.up_b / self.at_up)
|
||||
eprint("upload: %.2f sec, %s/s\n" % (self.at_up, spd))
|
||||
|
||||
if not self.recheck:
|
||||
return
|
||||
|
||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||
eprint("finalizing %d duplicate files\n" % (len(self.recheck),))
|
||||
for file in self.recheck:
|
||||
handshake(self.ar, file, False)
|
||||
|
||||
@@ -874,6 +954,8 @@ class Ctl(object):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
ptn = re.compile(self.ar.x.encode("utf-8"), re.I) if self.ar.x else None
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
@@ -906,7 +988,12 @@ class Ctl(object):
|
||||
if self.ar.drd:
|
||||
dp = os.path.join(top, rd)
|
||||
lnodes = set(os.listdir(dp))
|
||||
bnames = [x for x in ls if x not in lnodes]
|
||||
if ptn:
|
||||
zs = dp.replace(sep, b"/").rstrip(b"/") + b"/"
|
||||
zls = [zs + x for x in lnodes]
|
||||
zls = [x for x in zls if not ptn.match(x)]
|
||||
lnodes = [x.split(b"/")[-1] for x in zls]
|
||||
bnames = [x for x in ls if x not in lnodes and x != b".hist"]
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
@@ -961,11 +1048,42 @@ class Ctl(object):
|
||||
self.hash_f += 1
|
||||
self.hash_c += len(file.cids)
|
||||
self.hash_b += file.size
|
||||
if self.ar.wlist:
|
||||
self.up_f = self.hash_f
|
||||
self.up_c = self.hash_c
|
||||
self.up_b = self.hash_b
|
||||
|
||||
if self.ar.wlist:
|
||||
zsl = [self.ar.wsalt, str(file.size)] + [x[0] for x in file.kchunks]
|
||||
zb = hashlib.sha512("\n".join(zsl).encode("utf-8")).digest()[:33]
|
||||
wark = base64.urlsafe_b64encode(zb).decode("utf-8")
|
||||
vp = file.rel.decode("utf-8")
|
||||
if self.ar.jw:
|
||||
print("%s %s" % (wark, vp))
|
||||
else:
|
||||
zd = datetime.datetime.fromtimestamp(file.lmod, UTC)
|
||||
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
||||
zd.year,
|
||||
zd.month,
|
||||
zd.day,
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
)
|
||||
print("%s %12d %s %s" % (dt, file.size, wark, vp))
|
||||
continue
|
||||
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.hasher_busy = 0
|
||||
self.st_hash = [None, "(finished)"]
|
||||
self._check_if_done()
|
||||
|
||||
def _check_if_done(self):
|
||||
with self.mutex:
|
||||
if self.nfiles - self.up_f:
|
||||
return
|
||||
for _ in range(self.ar.j):
|
||||
self.q_handshake.put(None)
|
||||
|
||||
def handshaker(self):
|
||||
search = self.ar.s
|
||||
@@ -973,8 +1091,10 @@ class Ctl(object):
|
||||
while True:
|
||||
file = self.q_handshake.get()
|
||||
if not file:
|
||||
with self.mutex:
|
||||
self.handshaker_alive -= 1
|
||||
self.q_upload.put(None)
|
||||
break
|
||||
return
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
if not VT100:
|
||||
@@ -986,9 +1106,6 @@ class Ctl(object):
|
||||
self.errs += 1
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
while time.time() < file.cd:
|
||||
time.sleep(0.1)
|
||||
|
||||
@@ -996,17 +1113,17 @@ class Ctl(object):
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
t = "found: {0}\n {1}{2}\n"
|
||||
print(t.format(upath, burl, hit["rp"]), end="")
|
||||
t = "found: {0}\n {1}{2}"
|
||||
print(t.format(upath, burl, hit["rp"]))
|
||||
else:
|
||||
print("NOT found: {0}\n".format(upath), end="")
|
||||
print("NOT found: {0}".format(upath))
|
||||
|
||||
with self.mutex:
|
||||
self.up_f += 1
|
||||
self.up_c += len(file.cids)
|
||||
self.up_b += file.size
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
self._check_if_done()
|
||||
continue
|
||||
|
||||
if file.recheck:
|
||||
@@ -1038,58 +1155,110 @@ class Ctl(object):
|
||||
file.up_b -= sz
|
||||
|
||||
file.ucids = hs
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
if not hs:
|
||||
kw = "uploaded" if file.up_b else " found"
|
||||
print("{0} {1}".format(kw, upath))
|
||||
for cid in hs:
|
||||
self.q_upload.put([file, cid])
|
||||
self.at_hash += file.t_hash
|
||||
|
||||
if self.ar.spd:
|
||||
if VT100:
|
||||
c1 = "\033[36m"
|
||||
c2 = "\033[0m"
|
||||
else:
|
||||
c1 = c2 = ""
|
||||
|
||||
spd_h = humansize(file.size / file.t_hash, True)
|
||||
if file.up_b:
|
||||
t_up = file.t1_up - file.t0_up
|
||||
spd_u = humansize(file.size / t_up, True)
|
||||
|
||||
t = "uploaded %s %s(h:%.2fs,%s/s,up:%.2fs,%s/s)%s"
|
||||
print(t % (upath, c1, file.t_hash, spd_h, t_up, spd_u, c2))
|
||||
else:
|
||||
t = " found %s %s(%.2fs,%s/s)%s"
|
||||
print(t % (upath, c1, file.t_hash, spd_h, c2))
|
||||
else:
|
||||
kw = "uploaded" if file.up_b else " found"
|
||||
print("{0} {1}".format(kw, upath))
|
||||
|
||||
self._check_if_done()
|
||||
continue
|
||||
|
||||
chunksz = up2k_chunksize(file.size)
|
||||
njoin = (self.ar.sz * 1024 * 1024) // chunksz
|
||||
cs = hs[:]
|
||||
while cs:
|
||||
fsl = FileSlice(file, cs[:1])
|
||||
try:
|
||||
if file.nojoin:
|
||||
raise Exception()
|
||||
for n in range(2, min(len(cs), njoin + 1)):
|
||||
fsl = FileSlice(file, cs[:n])
|
||||
except:
|
||||
pass
|
||||
cs = cs[len(fsl.cids) :]
|
||||
self.q_upload.put(fsl)
|
||||
|
||||
def uploader(self):
|
||||
while True:
|
||||
task = self.q_upload.get()
|
||||
if not task:
|
||||
self.st_up = [None, "(finished)"]
|
||||
break
|
||||
fsl = self.q_upload.get()
|
||||
if not fsl:
|
||||
done = False
|
||||
with self.mutex:
|
||||
self.uploader_alive -= 1
|
||||
if not self.uploader_alive:
|
||||
done = not self.handshaker_alive
|
||||
self.st_up = [None, "(finished)"]
|
||||
if done:
|
||||
with self.exit_cond:
|
||||
self.exit_cond.notify_all()
|
||||
return
|
||||
|
||||
file = fsl.file
|
||||
cids = fsl.cids
|
||||
|
||||
with self.mutex:
|
||||
if not self.uploader_busy:
|
||||
self.at_upr = time.time()
|
||||
self.uploader_busy += 1
|
||||
self.t0_up = self.t0_up or time.time()
|
||||
if not file.t0_up:
|
||||
file.t0_up = time.time()
|
||||
if not self.t0_up:
|
||||
self.t0_up = file.t0_up
|
||||
|
||||
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
|
||||
stats = zs.format(
|
||||
stats = "%d/%d/%d/%d %d/%d %s" % (
|
||||
self.up_f,
|
||||
len(self.recheck),
|
||||
self.uploader_busy,
|
||||
self.nfiles - self.up_f,
|
||||
int(self.nbytes / (1024 * 1024)),
|
||||
int((self.nbytes - self.up_b) / (1024 * 1024)),
|
||||
self.nbytes // (1024 * 1024),
|
||||
(self.nbytes - self.up_b) // (1024 * 1024),
|
||||
self.eta,
|
||||
)
|
||||
|
||||
file, cid = task
|
||||
try:
|
||||
upload(file, cid, self.ar.a, stats)
|
||||
upload(fsl, self.ar.a, stats)
|
||||
except Exception as ex:
|
||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||
eprint(t.format(file.name, cid[:8], ex))
|
||||
t = "upload failed, retrying: %s #%s+%d (%s)\n"
|
||||
eprint(t % (file.name, cids[0][:8], len(cids) - 1, ex))
|
||||
file.cd = time.time() + self.ar.cd
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
sz = file.kchunks[cid][1]
|
||||
file.ucids = [x for x in file.ucids if x != cid]
|
||||
sz = fsl.len
|
||||
file.ucids = [x for x in file.ucids if x not in cids]
|
||||
if not file.ucids:
|
||||
file.t1_up = time.time()
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.st_up = [file, cid]
|
||||
self.st_up = [file, cids[0]]
|
||||
file.up_b += sz
|
||||
self.up_b += sz
|
||||
self.up_br += sz
|
||||
file.up_c += 1
|
||||
self.up_c += 1
|
||||
self.uploader_busy -= 1
|
||||
if not self.uploader_busy:
|
||||
self.at_up += time.time() - self.at_upr
|
||||
|
||||
def up_done(self, file):
|
||||
if self.ar.dl:
|
||||
@@ -1126,10 +1295,13 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-a", metavar="PASSWD", help="password or $filepath")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
|
||||
ap.add_argument("-x", type=unicode, metavar="REGEX", action="append", help="skip file if filesystem-abspath matches REGEX (option can be repeated), example: '.*/\\.hist/.*'")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
|
||||
ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
|
||||
ap.add_argument("--spd", action="store_true", help="print speeds for each file")
|
||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||
|
||||
ap = app.add_argument_group("compatibility")
|
||||
@@ -1138,12 +1310,17 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
|
||||
ap = app.add_argument_group("folder sync")
|
||||
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
|
||||
ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally")
|
||||
ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally (implies --ow)")
|
||||
ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")
|
||||
|
||||
ap = app.add_argument_group("file-ID calculator; enable with url '-' to list warks (file identifiers) instead of upload/search")
|
||||
ap.add_argument("--wsalt", type=unicode, metavar="S", default="hunter2", help="salt to use when creating warks; must match server config")
|
||||
ap.add_argument("--jw", action="store_true", help="just identifier+filepath, not mtime/size too")
|
||||
|
||||
ap = app.add_argument_group("performance tweaks")
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-j", type=int, metavar="CONNS", default=2, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="CORES", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("--sz", type=int, metavar="MiB", default=64, help="try to make each POST this big")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
||||
ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
|
||||
@@ -1151,7 +1328,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
ap = app.add_argument_group("tls")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-te", metavar="PATH", help="path to ca.pem or cert.pem to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
# fmt: on
|
||||
|
||||
@@ -1168,7 +1345,14 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
if ar.drd:
|
||||
ar.dr = True
|
||||
|
||||
for k in "dl dr drd".split():
|
||||
if ar.dr:
|
||||
ar.ow = True
|
||||
|
||||
ar.x = "|".join(ar.x or [])
|
||||
|
||||
setattr(ar, "wlist", ar.url == "-")
|
||||
|
||||
for k in "dl dr drd wlist".split():
|
||||
errs = []
|
||||
if ar.safe and getattr(ar, k):
|
||||
errs.append(k)
|
||||
@@ -1186,6 +1370,14 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
if "https://" in ar.url.lower():
|
||||
try:
|
||||
import ssl, zipfile
|
||||
except:
|
||||
t = "ERROR: https is not available for some reason; please use http"
|
||||
print("\n\n %s\n\n" % (t,))
|
||||
raise
|
||||
|
||||
if ar.a and ar.a.startswith("$"):
|
||||
fn = ar.a[1:]
|
||||
print("reading password from file [{0}]".format(fn))
|
||||
|
||||
@@ -16,11 +16,13 @@
|
||||
* sharex config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
* the `act:bput` thing is optional since copyparty v1.9.29
|
||||
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
||||
|
||||
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
|
||||
* `pw`: password (remove `Parameters` if anon-write)
|
||||
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
||||
* browser integration, kind of? custom rightclick actions and stuff
|
||||
* rightclick a pic and send it to copyparty straight from your browser
|
||||
* for the [contextlet](https://addons.mozilla.org/en-US/firefox/addon/contextlets/) firefox extension
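under the hood the contextlet (included further down in this diff) just does a urlencoded message-POST; a hedged python equivalent, assuming the same `partybox.local:3923` url and `wark` password as in the json:

    # illustration only -- needs the third-party requests package
    import requests

    requests.post(
        "http://partybox.local:3923/",
        headers={"PW": "wark"},                       # password header
        data={"msg": "https://example.com/pic.jpg"},  # the rightclicked link / image url
    )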
|
||||
|
||||
### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
|
||||
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
|
||||
|
||||
@@ -11,6 +11,14 @@
|
||||
# (5'000 requests per second, or 20gbps upload/download in parallel)
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
#
|
||||
# if you are behind cloudflare (or another protection service),
|
||||
# remember to reject all connections which are not coming from your
|
||||
# protection service -- for cloudflare in particular, you can
|
||||
# generate the list of permitted IP ranges like so:
|
||||
# (curl -s https://www.cloudflare.com/ips-v{4,6} | sed 's/^/allow /; s/$/;/'; echo; echo "deny all;") > /etc/nginx/cloudflare-only.conf
|
||||
#
|
||||
# and then enable it below by uncommenting the cloudflare-only.conf line
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923 fail_timeout=1s;
|
||||
@@ -21,7 +29,10 @@ server {
|
||||
listen [::]:443 ssl;
|
||||
|
||||
server_name fs.example.com;
|
||||
|
||||
|
||||
# uncomment the following line to reject non-cloudflare connections, ensuring client IPs cannot be spoofed:
|
||||
#include /etc/nginx/cloudflare-only.conf;
|
||||
|
||||
location / {
|
||||
proxy_pass http://cpp;
|
||||
proxy_redirect off;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Maintainer: icxes <dev.null@need.moe>
|
||||
pkgname=copyparty
|
||||
pkgver="1.9.25"
|
||||
pkgver="1.15.0"
|
||||
pkgrel=1
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, zeroconf, media indexer, thumbnails++"
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}.d/init" )
|
||||
sha256sums=("2bb876f8f3bcb0f1edf0eb2e14155e41de89b6c3c7637cd82f4919f9706b3cdf")
|
||||
sha256sums=("cd082e1dc93ef0bd8b6115155f467e14bf450874d0a822567416f5e30fc55618")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.9.25/copyparty-sfx.py",
|
||||
"version": "1.9.25",
|
||||
"hash": "sha256-6xI/YDw53aFt4g8FiCVKG60/oiZfsRW7WoxEF9k65A8="
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.15.0/copyparty-sfx.py",
|
||||
"version": "1.15.0",
|
||||
"hash": "sha256-4W7GMdukwG6CNaVrCCOF12tdQ/12XZz/orHAoB/3G8U="
|
||||
}
|
||||
@@ -20,6 +20,13 @@ point `--js-browser` to one of these by URL:
|
||||
|
||||
|
||||
|
||||
## example any-js
|
||||
point `--js-browser` and/or `--js-other` to one of these by URL:
|
||||
|
||||
* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)
|
||||
|
||||
|
||||
|
||||
## example browser-css
|
||||
point `--css-browser` to one of these by URL:
|
||||
|
||||
|
||||
93
contrib/plugins/banner.js
Normal file
@@ -0,0 +1,93 @@
|
||||
(function() {
|
||||
|
||||
// usage: copy this to '.banner.js' in your webroot,
|
||||
// and run copyparty with the following arguments:
|
||||
// --js-browser /.banner.js --js-other /.banner.js
|
||||
|
||||
|
||||
|
||||
// had to pick the most chuuni one as the default
|
||||
var bannertext = '' +
|
||||
'<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
|
||||
'<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
|
||||
'<ul>' +
|
||||
'<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
|
||||
'<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
|
||||
'<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
|
||||
'<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
|
||||
'<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
|
||||
'</ul>';
|
||||
|
||||
|
||||
|
||||
// fancy div to insert into pages
|
||||
function bannerdiv(border) {
|
||||
var ret = mknod('div', null, bannertext);
|
||||
if (border)
|
||||
ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// keep all of these false and then selectively enable them in the if-blocks below
|
||||
var show_msgbox = false,
|
||||
login_top = false,
|
||||
top = false,
|
||||
bottom = false,
|
||||
top_bordered = false,
|
||||
bottom_bordered = false;
|
||||
|
||||
if (QS("h1#cc") && QS("a#k")) {
|
||||
// this is the controlpanel
|
||||
// (you probably want to keep just one of these enabled)
|
||||
show_msgbox = true;
|
||||
login_top = true;
|
||||
bottom = true;
|
||||
}
|
||||
else if (ebi("swin") && ebi("smac")) {
|
||||
// this is the connect-page, same deal here
|
||||
show_msgbox = true;
|
||||
top_bordered = true;
|
||||
bottom_bordered = true;
|
||||
}
|
||||
else if (ebi("op_cfg") || ebi("div#mw") ) {
|
||||
// we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
|
||||
// fragile pages which break if you do something too fancy
|
||||
show_msgbox = true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// shows a fullscreen messagebox; works on all pages
|
||||
if (show_msgbox) {
|
||||
var now = Math.floor(Date.now() / 1000),
|
||||
last_shown = sread("bannerts") || 0;
|
||||
|
||||
// 60 * 60 * 17 = 17 hour cooldown
|
||||
if (now - last_shown > 60 * 60 * 17) {
|
||||
swrite("bannerts", now);
|
||||
modal.confirm(bannertext, null, function () {
|
||||
location = 'https://this-page-intentionally-left-blank.org/';
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// show a message at the top of the page; only works on the connect-page
|
||||
if (top || top_bordered) {
|
||||
var dst = ebi('wrap');
|
||||
dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
|
||||
}
|
||||
|
||||
// show a message on the page footer; only works on the controlpanel and connect-page
|
||||
if (bottom || bottom_bordered) {
|
||||
ebi('wrap').appendChild(bannerdiv(bottom_bordered));
|
||||
}
|
||||
|
||||
// show a message on the top of the page; only works on the controlpanel
|
||||
if (login_top) {
|
||||
var dst = QS('h1');
|
||||
dst.parentNode.insertBefore(bannerdiv(false), dst);
|
||||
}
|
||||
|
||||
})();
|
||||
11
contrib/send-to-cpp.contextlet.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"code": "// https://addons.mozilla.org/en-US/firefox/addon/contextlets/\n// https://github.com/davidmhammond/contextlets\n\nvar url = 'http://partybox.local:3923/';\nvar pw = 'wark';\n\nvar xhr = new XMLHttpRequest();\nxhr.msg = this.info.linkUrl || this.info.srcUrl;\nxhr.open('POST', url, true);\nxhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');\nxhr.setRequestHeader('PW', pw);\nxhr.send('msg=' + xhr.msg);\n",
|
||||
"contexts": [
|
||||
"link"
|
||||
],
|
||||
"icons": null,
|
||||
"patterns": "",
|
||||
"scope": "background",
|
||||
"title": "send to cpp",
|
||||
"type": "normal"
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"Version": "13.5.0",
|
||||
"Name": "copyparty-html",
|
||||
"DestinationType": "ImageUploader",
|
||||
"RequestMethod": "POST",
|
||||
"RequestURL": "http://127.0.0.1:3923/sharex",
|
||||
"Parameters": {
|
||||
"pw": "wark"
|
||||
},
|
||||
"Body": "MultipartFormData",
|
||||
"Arguments": {
|
||||
"act": "bput"
|
||||
},
|
||||
"FileFormName": "f",
|
||||
"RegexList": [
|
||||
"bytes // <a href=\"/([^\"]+)\""
|
||||
],
|
||||
"URL": "http://127.0.0.1:3923/$regex:1|1$"
|
||||
}
|
||||
@@ -1,17 +1,19 @@
|
||||
{
|
||||
"Version": "13.5.0",
|
||||
"Version": "15.0.0",
|
||||
"Name": "copyparty",
|
||||
"DestinationType": "ImageUploader",
|
||||
"RequestMethod": "POST",
|
||||
"RequestURL": "http://127.0.0.1:3923/sharex",
|
||||
"Parameters": {
|
||||
"pw": "wark",
|
||||
"j": null
|
||||
},
|
||||
"Headers": {
|
||||
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
|
||||
},
|
||||
"Body": "MultipartFormData",
|
||||
"Arguments": {
|
||||
"act": "bput"
|
||||
},
|
||||
"FileFormName": "f",
|
||||
"URL": "$json:files[0].url$"
|
||||
"URL": "{json:files[0].url}"
|
||||
}
|
||||
|
||||
13
contrib/sharex12.sxcu
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"Name": "copyparty",
|
||||
"DestinationType": "ImageUploader, TextUploader, FileUploader",
|
||||
"RequestURL": "http://127.0.0.1:3923/sharex",
|
||||
"FileFormName": "f",
|
||||
"Arguments": {
|
||||
"act": "bput"
|
||||
},
|
||||
"Headers": {
|
||||
"accept": "url",
|
||||
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE"
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,7 @@
|
||||
#
|
||||
# installation:
|
||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||
# useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
|
||||
# useradd -r -s /sbin/nologin -m -d /var/lib/copyparty copyparty
|
||||
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
|
||||
# firewall-cmd --reload
|
||||
# cp -pv copyparty.service /etc/systemd/system/
|
||||
@@ -12,11 +12,18 @@
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||
#
|
||||
# every time you edit this file, you must "systemctl daemon-reload"
|
||||
# for the changes to take effect and then "systemctl restart copyparty"
|
||||
#
|
||||
# if it fails to start, first check this: systemctl status copyparty
|
||||
# then try starting it while viewing logs:
|
||||
# journalctl -fan 100
|
||||
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
|
||||
#
|
||||
# if you run into any issues, for example thumbnails not working,
|
||||
# try removing the "some quick hardening" section and then please
|
||||
# let me know if that actually helped so we can look into it
|
||||
#
|
||||
# you may want to:
|
||||
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
|
||||
# - edit /etc/copyparty.conf to configure copyparty
|
||||
|
||||
116
contrib/themes/bsod.css
Normal file
@@ -0,0 +1,116 @@
|
||||
/* copy bsod.* into a folder named ".themes" in your webroot and then
|
||||
--themes=10 --theme=9 --css-browser=/.themes/bsod.css
|
||||
*/
|
||||
|
||||
html.ey {
|
||||
--w2: #3d7bbc;
|
||||
--w3: #5fcbec;
|
||||
|
||||
--fg: #fff;
|
||||
--fg-max: #fff;
|
||||
--fg-weak: var(--w3);
|
||||
|
||||
--bg: #2067b2;
|
||||
--bg-d3: var(--bg);
|
||||
--bg-d2: var(--w2);
|
||||
--bg-d1: var(--fg-weak);
|
||||
--bg-u2: var(--bg);
|
||||
--bg-u3: var(--bg);
|
||||
--bg-u5: var(--w2);
|
||||
|
||||
--tab-alt: var(--fg-weak);
|
||||
--row-alt: var(--w2);
|
||||
|
||||
--scroll: var(--w3);
|
||||
|
||||
--a: #fff;
|
||||
--a-b: #fff;
|
||||
--a-hil: #fff;
|
||||
--a-h-bg: var(--fg-weak);
|
||||
--a-dark: var(--a);
|
||||
--a-gray: var(--fg-weak);
|
||||
|
||||
--btn-fg: var(--a);
|
||||
--btn-bg: var(--w2);
|
||||
--btn-h-fg: var(--w2);
|
||||
--btn-1-fg: var(--bg);
|
||||
--btn-1-bg: var(--a);
|
||||
--txt-sh: a;
|
||||
--txt-bg: var(--w2);
|
||||
|
||||
--u2-b1-bg: var(--w2);
|
||||
--u2-b2-bg: var(--w2);
|
||||
--u2-txt-bg: var(--w2);
|
||||
--u2-tab-bg: a;
|
||||
--u2-tab-1-bg: var(--w2);
|
||||
|
||||
--sort-1: var(--a);
|
||||
--sort-1: var(--fg-weak);
|
||||
|
||||
--tree-bg: var(--bg);
|
||||
|
||||
--g-b1: a;
|
||||
--g-b2: a;
|
||||
--g-f-bg: var(--w2);
|
||||
|
||||
--f-sh1: 0.1;
|
||||
--f-sh2: 0.02;
|
||||
--f-sh3: 0.1;
|
||||
--f-h-b1: a;
|
||||
|
||||
--srv-1: var(--a);
|
||||
--srv-3: var(--a);
|
||||
|
||||
--mp-sh: a;
|
||||
}
|
||||
|
||||
html.ey {
|
||||
background: url('bsod.png') top 5em right 4.5em no-repeat fixed var(--bg);
|
||||
}
|
||||
html.ey body#b {
|
||||
background: var(--bg); /*sandbox*/
|
||||
}
|
||||
html.ey #ops {
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
border-radius: .3em;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
html.ey #ops a {
|
||||
text-shadow: 1px 1px 0 rgba(0,0,0,0.5);
|
||||
}
|
||||
html.ey .opbox {
|
||||
margin: 1.5em 0 0 0;
|
||||
}
|
||||
html.ey #tree {
|
||||
box-shadow: none;
|
||||
}
|
||||
html.ey #tt {
|
||||
border-color: var(--w2);
|
||||
background: var(--w2);
|
||||
}
|
||||
html.ey .mdo a {
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
}
|
||||
html.ey .mdo pre,
|
||||
html.ey .mdo code {
|
||||
color: #fff;
|
||||
background: var(--w2);
|
||||
border: none;
|
||||
}
|
||||
html.ey .mdo h1,
|
||||
html.ey .mdo h2 {
|
||||
background: none;
|
||||
border-color: var(--w2);
|
||||
}
|
||||
html.ey .mdo ul ul,
|
||||
html.ey .mdo ul ol,
|
||||
html.ey .mdo ol ul,
|
||||
html.ey .mdo ol ol {
|
||||
border-color: var(--w2);
|
||||
}
|
||||
html.ey .mdo p>em,
|
||||
html.ey .mdo li>em,
|
||||
html.ey .mdo td>em {
|
||||
color: #fd0;
|
||||
}
|
||||
BIN
contrib/themes/bsod.png
Normal file
Binary file not shown (1.2 KiB).
@@ -56,7 +56,6 @@ class EnvParams(object):
|
||||
self.t0 = time.time()
|
||||
self.mod = ""
|
||||
self.cfg = ""
|
||||
self.ox = getattr(sys, "oxidized", None)
|
||||
|
||||
|
||||
E = EnvParams()
|
||||
|
||||
508
copyparty/__main__.py
Executable file → Normal file
@@ -13,32 +13,49 @@ import base64
|
||||
import locale
|
||||
import os
|
||||
import re
|
||||
import select
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import uuid
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__init__ import (
|
||||
ANYWIN,
|
||||
CORES,
|
||||
EXE,
|
||||
MACOS,
|
||||
PY2,
|
||||
VT100,
|
||||
WINDOWS,
|
||||
E,
|
||||
EnvParams,
|
||||
unicode,
|
||||
)
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .authsrv import expand_config_file, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .cfg import flagcats, onedash
|
||||
from .svchub import SvcHub
|
||||
from .util import (
|
||||
APPLESAN_TXT,
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
HAVE_IPV6,
|
||||
IMPLICATIONS,
|
||||
JINJA_VER,
|
||||
MIMES,
|
||||
PARTFTPY_VER,
|
||||
PY_DESC,
|
||||
PYFTPD_VER,
|
||||
SQLITE_VER,
|
||||
UNPLICATIONS,
|
||||
Daemon,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
dedent,
|
||||
min_ex,
|
||||
py_desc,
|
||||
pybin,
|
||||
termsize,
|
||||
wrap,
|
||||
@@ -50,7 +67,13 @@ if True: # pylint: disable=using-constant-test
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_TLS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
@@ -144,7 +167,8 @@ def warn(msg: str) -> None:
|
||||
|
||||
|
||||
def init_E(EE: EnvParams) -> None:
|
||||
# __init__ runs 18 times when oxidized; do expensive stuff here
|
||||
# some cpython alternatives (such as pyoxidizer) can
|
||||
# __init__ several times, so do expensive stuff here
|
||||
|
||||
E = EE # pylint: disable=redefined-outer-name
|
||||
|
||||
@@ -157,8 +181,10 @@ def init_E(EE: EnvParams) -> None:
|
||||
(os.environ.get, "TMP"),
|
||||
(unicode, "/tmp"),
|
||||
]
|
||||
errs = []
|
||||
for chk in [os.listdir, os.mkdir]:
|
||||
for pf, pa in paths:
|
||||
for npath, (pf, pa) in enumerate(paths):
|
||||
p = ""
|
||||
try:
|
||||
p = pf(pa)
|
||||
# print(chk.__name__, p, pa)
|
||||
@@ -171,40 +197,26 @@ def init_E(EE: EnvParams) -> None:
|
||||
if not os.path.isdir(p):
|
||||
os.mkdir(p)
|
||||
|
||||
if npath > 1:
|
||||
t = "Using [%s] for config; filekeys/dirkeys will change on every restart. Consider setting XDG_CONFIG_HOME or giving the unix-user a ~/.config/"
|
||||
errs.append(t % (p,))
|
||||
elif errs:
|
||||
errs.append("Using [%s] instead" % (p,))
|
||||
|
||||
if errs:
|
||||
print("WARNING: " + ". ".join(errs))
|
||||
|
||||
return p # type: ignore
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
if p and npath < 2:
|
||||
t = "Unable to store config in [%s] due to %r"
|
||||
errs.append(t % (p, ex))
|
||||
|
||||
raise Exception("could not find a writable path for config")
|
||||
|
||||
def _unpack() -> str:
|
||||
import atexit
|
||||
import tarfile
|
||||
import tempfile
|
||||
from importlib.resources import open_binary
|
||||
|
||||
td = tempfile.TemporaryDirectory(prefix="")
|
||||
atexit.register(td.cleanup)
|
||||
tdn = td.name
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
try:
|
||||
tf.extractall(tdn, filter="tar")
|
||||
except TypeError:
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
|
||||
return tdn
|
||||
|
||||
try:
|
||||
E.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if E.mod.endswith("__init__"):
|
||||
E.mod = os.path.dirname(E.mod)
|
||||
except:
|
||||
if not E.ox:
|
||||
raise
|
||||
|
||||
E.mod = _unpack()
|
||||
E.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if E.mod.endswith("__init__"):
|
||||
E.mod = os.path.dirname(E.mod)
|
||||
|
||||
if sys.platform == "win32":
|
||||
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
|
||||
@@ -261,6 +273,19 @@ def get_fk_salt() -> str:
|
||||
return ret.decode("utf-8")
|
||||
|
||||
|
||||
def get_dk_salt() -> str:
|
||||
fp = os.path.join(E.cfg, "dk-salt.txt")
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
ret = base64.b64encode(os.urandom(30))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
return ret.decode("utf-8")
|
||||
|
||||
|
||||
def get_ah_salt() -> str:
|
||||
fp = os.path.join(E.cfg, "ah-salt.txt")
|
||||
try:
|
||||
@@ -275,6 +300,9 @@ def get_ah_salt() -> str:
|
||||
|
||||
|
||||
def ensure_locale() -> None:
|
||||
if ANYWIN and PY2:
|
||||
return # maybe XP, so busted 65001
|
||||
|
||||
safe = "en_US.UTF-8"
|
||||
for x in [
|
||||
safe,
|
||||
@@ -322,7 +350,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
assert ssl
|
||||
assert ssl # type: ignore
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
@@ -356,7 +384,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
assert ssl
|
||||
assert ssl # type: ignore
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -382,7 +410,7 @@ def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
|
||||
def args_from_cfg(cfg_path: str) -> list[str]:
|
||||
lines: list[str] = []
|
||||
expand_config_file(lines, cfg_path, "")
|
||||
expand_config_file(None, lines, cfg_path, "")
|
||||
lines = upgrade_cfg_fmt(None, argparse.Namespace(vc=False), lines, "")
|
||||
|
||||
ret: list[str] = []
|
||||
@@ -436,7 +464,7 @@ def disable_quickedit() -> None:
|
||||
if PY2:
|
||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||
|
||||
k32.GetStdHandle.errcheck = ecb # type: ignore
|
||||
k32.GetStdHandle.errcheck = ecb # type: ignore
|
||||
k32.GetConsoleMode.errcheck = ecb # type: ignore
|
||||
k32.SetConsoleMode.errcheck = ecb # type: ignore
|
||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||
@@ -468,6 +496,25 @@ def disable_quickedit() -> None:
|
||||
cmode(True, mode | 4)
|
||||
|
||||
|
||||
def sfx_tpoke(top: str):
|
||||
if os.environ.get("PRTY_NO_TPOKE"):
|
||||
return
|
||||
|
||||
files = [top] + [
|
||||
os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df
|
||||
]
|
||||
while True:
|
||||
t = int(time.time())
|
||||
for f in list(files):
|
||||
try:
|
||||
os.utime(f, (t, t))
|
||||
except Exception as ex:
|
||||
lprint("<TPOKE> [%s] %r" % (f, ex))
|
||||
files.remove(f)
|
||||
|
||||
time.sleep(78123)
|
||||
|
||||
|
||||
def showlic() -> None:
|
||||
p = os.path.join(E.mod, "res", "COPYING.txt")
|
||||
if not os.path.exists(p):
|
||||
@@ -480,6 +527,41 @@ def showlic() -> None:
|
||||
|
||||
def get_sects():
|
||||
return [
|
||||
[
|
||||
"bind",
|
||||
"configure listening",
|
||||
dedent(
|
||||
"""
|
||||
\033[33m-i\033[0m takes a comma-separated list of interfaces to listen on;
|
||||
IP-addresses and/or unix-sockets (Unix Domain Sockets)
|
||||
|
||||
the default (\033[32m-i ::\033[0m) means all IPv4 and IPv6 addresses
|
||||
|
||||
\033[32m-i 0.0.0.0\033[0m listens on all IPv4 NICs/subnets
|
||||
\033[32m-i 127.0.0.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1,192.168.123.1\033[0m = IPv4 localhost and 192.168.123.1
|
||||
|
||||
\033[33m-p\033[0m takes a comma-separated list of tcp ports to listen on;
|
||||
the default is \033[32m-p 3923\033[0m but as root you can \033[32m-p 80,443,3923\033[0m
|
||||
|
||||
when running behind a reverse-proxy, it's recommended to
|
||||
use unix-sockets for improved performance and security;
|
||||
|
||||
\033[32m-i unix:770:www:\033[33m/tmp/a.sock\033[0m listens on \033[33m/tmp/a.sock\033[0m with
|
||||
permissions \033[33m0770\033[0m; only accessible to members of the \033[33mwww\033[0m
|
||||
group. This is the best approach. Alternatively,
|
||||
|
||||
\033[32m-i unix:777:\033[33m/tmp/a.sock\033[0m sets perms \033[33m0777\033[0m so anyone can
|
||||
access it; bad unless it's inside a restricted folder
|
||||
|
||||
\033[32m-i unix:\033[33m/tmp/a.sock\033[0m keeps umask-defined permissions
|
||||
(usually \033[33m0600\033[0m) and the same user/group as copyparty
|
||||
|
||||
\033[33m-p\033[0m (tcp ports) is ignored for unix sockets
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
@@ -490,6 +572,10 @@ def get_sects():
|
||||
* "\033[33mperm\033[0m" is "permissions,username1,username2,..."
|
||||
* "\033[32mvolflag\033[0m" is config flags to set on this volume
|
||||
|
||||
--grp takes groupname:username1,username2,...
|
||||
and groupnames can be used instead of usernames in -v
|
||||
by prefixing the groupname with %
|
||||
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
@@ -498,7 +584,9 @@ def get_sects():
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"h" (html): "get", but folders return their index.html
|
||||
"." (dots): user can ask to show dotfiles in listings
|
||||
"a" (admin): can see uploader IPs, config-reload
|
||||
"A" ("all"): same as "rwmda." (read/write/move/delete/admin/dotfiles)
|
||||
|
||||
too many volflags to list here, see --help-flags
|
||||
|
||||
@@ -596,12 +684,12 @@ def get_sects():
|
||||
\033[36mxban\033[35m executes CMD if someone gets banned
|
||||
\033[0m
|
||||
can be defined as --args or volflags; for example \033[36m
|
||||
--xau notify-send
|
||||
-v .::r:c,xau=notify-send
|
||||
--xau foo.py
|
||||
-v .::r:c,xau=bar.py
|
||||
\033[0m
|
||||
commands specified as --args are appended to volflags;
|
||||
each --arg and volflag can be specified multiple times,
|
||||
each command will execute in order unless one returns non-zero
|
||||
hooks specified as commandline --args are appended to volflags;
|
||||
each commandline --arg and volflag can be specified multiple times,
|
||||
each hook will execute in order unless one returns non-zero
|
||||
|
||||
optionally prefix the command with comma-sep. flags similar to -mtp:
|
||||
|
||||
@@ -612,6 +700,10 @@ def get_sects():
|
||||
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
|
||||
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
|
||||
|
||||
\033[36mar\033[35m only run hook if user has read-access
|
||||
\033[36marw\033[35m only run hook if user has read-write-access
|
||||
\033[36marwmd\033[35m ...and so on... (doesn't work for xiu or xban)
|
||||
|
||||
\033[36mkt\033[35m kills the entire process tree on timeout (default),
|
||||
\033[36mkm\033[35m kills just the main process
|
||||
\033[36mkn\033[35m lets it continue running until copyparty is terminated
|
||||
@@ -621,6 +713,21 @@ def get_sects():
|
||||
\033[36mc2\033[35m show only stdout
|
||||
\033[36mc3\033[35m mute all process output
|
||||
\033[0m
|
||||
examples:
|
||||
|
||||
\033[36m--xm some.py\033[35m runs \033[33msome.py msgtxt\033[35m on each 📟 message;
|
||||
\033[33mmsgtxt\033[35m is the message that was written into the web-ui
|
||||
|
||||
\033[36m--xm j,some.py\033[35m runs \033[33msome.py jsontext\033[35m on each 📟 message;
|
||||
\033[33mjsontext\033[35m is the message info (ip, user, ..., msg-text)
|
||||
|
||||
\033[36m--xm aw,j,some.py\033[35m requires user to have write-access
|
||||
|
||||
\033[36m--xm aw,,notify-send,hey,--\033[35m shows an OS alert on linux;
|
||||
the \033[33m,,\033[35m stops copyparty from reading the rest as flags and
|
||||
the \033[33m--\033[35m stops notify-send from reading the message as args
|
||||
and the alert will be "hey" followed by the messagetext
|
||||
\033[0m
|
||||
each hook is executed once for each event, except for \033[36mxiu\033[0m
|
||||
which builds up a backlog of uploads, running the hook just once
|
||||
as soon as the volume has been idle for iN seconds (5 by default)
|
||||
@@ -632,6 +739,11 @@ def get_sects():
|
||||
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
||||
if the program returns 0 (true/OK) then the ban will NOT happen
|
||||
|
||||
effects can be used to redirect uploads into other
|
||||
locations, and to delete or index other files based
|
||||
on new uploads, but with certain limitations. See
|
||||
bin/hooks/reloc* and docs/devnotes.md#hook-effects
|
||||
|
||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||
@@ -647,7 +759,10 @@ def get_sects():
|
||||
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
||||
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
||||
\033[36mprint,get\033[35m prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
(leave out the ",get" to return an error instead)\033[0m
|
||||
|
||||
note that the \033[35m--xm\033[0m hook will only run if \033[35m--urlform\033[0m
|
||||
is either \033[36mprint\033[0m or the default \033[36mprint,get\033[0m
|
||||
"""
|
||||
),
|
||||
],
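# aside: a minimal sketch of what a hook script like the "some.py" mentioned
# above could look like, assuming it was registered with the j flag
# (for example --xm j,some.py) so argv[1] is a json blob; names are illustrative
import json
import sys


def main() -> int:
    arg = sys.argv[1] if len(sys.argv) > 1 else ""
    try:
        ev = json.loads(arg)  # with the j flag: dict with ip, user, msg-text, ...
    except ValueError:
        ev = {"txt": arg}  # without j: plain message-text / filepath
    sys.stderr.write("hook got %r\n" % (ev,))
    return 0  # non-zero would stop any remaining hooks in the chain


if __name__ == "__main__":
    sys.exit(main())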
|
||||
@@ -705,6 +820,7 @@ def get_sects():
|
||||
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
||||
\033[36mp\033[0m exits 1 if any such symlinks are found
|
||||
\033[36mr\033[0m resumes startup after the listing
|
||||
|
||||
examples:
|
||||
--ls '**' # list all files which are possible to read
|
||||
--ls '**,*,ln' # check for dangerous symlinks
|
||||
@@ -738,9 +854,12 @@ def get_sects():
|
||||
"""
|
||||
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
|
||||
|
||||
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (password which do not start with '+') and then terminate afterwards
|
||||
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but
|
||||
copyparty will also hash and print any passwords that are non-hashed
|
||||
(passwords which do not start with '+') and then terminate afterwards
|
||||
|
||||
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
|
||||
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a
|
||||
list of optional comma-separated arguments:
|
||||
|
||||
\033[36m--ah-alg argon2\033[0m # which is the same as:
|
||||
\033[36m--ah-alg argon2,3,256,4,19\033[0m
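# aside: a rough illustration of the default "argon2,3,256,4,19", assuming the
# four values map to time-cost, memory in MiB, parallelism and argon2 version;
# this uses the third-party argon2-cffi package rather than copyparty's own
# hasher, so treat it as a sketch, not a reference implementation
from argon2.low_level import Type, hash_secret

digest = hash_secret(
    b"hunter2",              # the password
    b"0123456789abcdef",     # fixed salt, for illustration only
    time_cost=3,
    memory_cost=256 * 1024,  # argon2 counts KiB, so 256 MiB
    parallelism=4,
    hash_len=24,
    type=Type.ID,
    version=19,
)
print(digest.decode())       # $argon2id$v=19$m=262144,t=3,p=4$...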
|
||||
@@ -808,7 +927,7 @@ def build_flags_desc():
|
||||
v = v.replace("\n", "\n ")
|
||||
ret += "\n \033[36m{}\033[35m {}".format(k, v)
|
||||
|
||||
return ret + "\033[0m"
|
||||
return ret
|
||||
|
||||
|
||||
# fmt: off
|
||||
@@ -820,11 +939,14 @@ def add_general(ap, nc, srvname):
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=nc, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m], see --help-accounts")
|
||||
ap2.add_argument("--grp", metavar="G:N,N", type=u, action="append", help="add group, \033[33mNAME\033[0m:\033[33mUSER1\033[0m,\033[33mUSER2\033[0m,\033[33m...\033[0m; example [\033[32madmins:ed,foo,bar\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||
ap2.add_argument("--mime", metavar="EXT=MIME", type=u, action="append", help="map file \033[33mEXT\033[0mension to \033[33mMIME\033[0mtype, for example [\033[32mjpg=image/jpeg\033[0m]")
|
||||
ap2.add_argument("--mimes", action="store_true", help="list default mimetype mapping and exit")
|
||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||
|
||||
@@ -841,53 +963,77 @@ def add_qr(ap, tty):
|
||||
ap2.add_argument("--qrz", metavar="N", type=int, default=0, help="[\033[32m1\033[0m]=1x, [\033[32m2\033[0m]=2x, [\033[32m0\033[0m]=auto (try [\033[32m2\033[0m] on broken fonts)")
|
||||
|
||||
|
||||
def add_fs(ap):
|
||||
ap2 = ap.add_argument_group("filesystem options")
|
||||
rm_re_def = "15/0.1" if ANYWIN else "0/0"
|
||||
ap2.add_argument("--rm-retry", metavar="T/R", type=u, default=rm_re_def, help="if a file cannot be deleted because it is busy, continue trying for \033[33mT\033[0m seconds, retry every \033[33mR\033[0m seconds; disable with 0/0 (volflag=rm_retry)")
|
||||
ap2.add_argument("--mv-retry", metavar="T/R", type=u, default=rm_re_def, help="if a file cannot be renamed because it is busy, continue trying for \033[33mT\033[0m seconds, retry every \033[33mR\033[0m seconds; disable with 0/0 (volflag=mv_retry)")
|
||||
ap2.add_argument("--iobuf", metavar="BYTES", type=int, default=256*1024, help="file I/O buffer-size; if your volumes are on a network drive, try increasing to \033[32m524288\033[0m or even \033[32m4194304\033[0m (and let me know if that improves your performance)")
|
||||
ap2.add_argument("--mtab-age", metavar="SEC", type=int, default=60, help="rebuild mountpoint cache every \033[33mSEC\033[0m to keep track of sparse-files support; keep low on servers with removable media")
|
||||
|
||||
|
||||
def add_share(ap):
|
||||
db_path = os.path.join(E.cfg, "shares.db")
|
||||
ap2 = ap.add_argument_group('share-url options')
|
||||
ap2.add_argument("--shr", metavar="DIR", type=u, default="", help="toplevel virtual folder for shared files/folders, for example [\033[32m/share\033[0m]")
|
||||
ap2.add_argument("--shr-db", metavar="FILE", type=u, default=db_path, help="database to store shares in")
|
||||
ap2.add_argument("--shr-adm", metavar="U,U", type=u, default="", help="comma-separated list of users allowed to view/delete any share")
|
||||
ap2.add_argument("--shr-rt", metavar="MIN", type=int, default=1440, help="shares can be revived by their owner if they expired less than MIN minutes ago; [\033[32m60\033[0m]=hour, [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week")
|
||||
ap2.add_argument("--shr-v", action="store_true", help="debug")
|
||||
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled, default=12h")
|
||||
ap2.add_argument("--u2abort", metavar="NUM", type=int, default=1, help="clients can abort incomplete uploads by using the unpost tab (requires \033[33m-e2d\033[0m). [\033[32m0\033[0m] = never allowed (disable feature), [\033[32m1\033[0m] = allow if client has the same IP as the upload AND is using the same account, [\033[32m2\033[0m] = just check the IP, [\033[32m3\033[0m] = just check account-name (volflag=u2abort)")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
||||
ap2.add_argument("--dedup", action="store_true", help="enable symlink-based upload deduplication (volflag=dedup)")
|
||||
ap2.add_argument("--safe-dedup", metavar="N", type=int, default=50, help="how careful to be when deduplicating files; [\033[32m1\033[0m] = just verify the filesize, [\033[32m50\033[0m] = verify file contents have not been altered (volflag=safededup)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="enable hardlink-based dedup; will fallback on symlinks when that is impossible (across filesystems) (volflag=hardlink)")
|
||||
ap2.add_argument("--hardlink-only", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=hardlinkonly)")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440.0, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--df", metavar="GiB", type=u, default="0", help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests; assumes gigabytes unless a unit suffix is given: [\033[32m256m\033[0m], [\033[32m4\033[0m], [\033[32m2T\033[0m] (volflag=df)")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
|
||||
ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for this size. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
|
||||
def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="IPs and/or unix-sockets to listen on (see \033[33m--help-bind\033[0m). Default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to listen on (comma/range); ignored for unix-sockets")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from (argument must be lowercase, but not the actual header)")
|
||||
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (\033[33m--xff-hdr\033[0m) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
|
||||
ap2.add_argument("--ipa", metavar="PREFIX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPREFIX\033[0m; example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||
ap2.add_argument("--xff-src", metavar="CIDR", type=u, default="127.0.0.0/8, ::1/128", help="comma-separated list of trusted reverse-proxy CIDRs; only accept the real-ip header (\033[33m--xff-hdr\033[0m) and IdP headers if the incoming connection is from an IP within either of these subnets. Specify [\033[32mlan\033[0m] to allow all LAN / private / non-internet IPs. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
|
||||
ap2.add_argument("--ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
|
||||
if ANYWIN:
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0.0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0.0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0.0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||
|
||||
|
||||
def add_tls(ap, cert_path):
|
||||
@@ -895,10 +1041,10 @@ def add_tls(ap, cert_path):
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, default="", help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, default="", help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, default="", help="log master secrets for later decryption in wireshark")
|
||||
|
||||
|
||||
def add_cert(ap, cert_path):
|
||||
@@ -911,18 +1057,37 @@ def add_cert(ap, cert_path):
|
||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
|
||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
|
||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650.0, help="ca-certificate expiration time in days")
|
||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365.0, help="server-cert expiration time in days")
|
||||
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
|
||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72.0, help="backdate in hours")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
||||
|
||||
|
||||
def add_auth(ap):
|
||||
ap2 = ap.add_argument_group('user authentication options')
|
||||
ap2.add_argument("--hdr-au-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy. Also, the argument must be lowercase, but not the actual header")
|
||||
ses_db = os.path.join(E.cfg, "sessions.db")
|
||||
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||
ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
|
||||
ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
|
||||
ap2.add_argument("--no-bauth", action="store_true", help="disable basic-authentication support; do not accept passwords from the 'Authenticate' header at all. NOTE: This breaks support for the android app")
|
||||
ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
|
||||
ap2.add_argument("--ses-db", metavar="PATH", type=u, default=ses_db, help="where to store the sessions database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||
ap2.add_argument("--ses-len", metavar="CHARS", type=int, default=20, help="session key length; default is 120 bits ((20//4)*4*6)")
|
||||
ap2.add_argument("--no-ses", action="store_true", help="disable sessions; use plaintext passwords in cookies")
|
||||
|
||||
|
||||
def add_chpw(ap):
|
||||
db_path = os.path.join(E.cfg, "chpw.json")
|
||||
ap2 = ap.add_argument_group('user-changeable passwords options')
|
||||
ap2.add_argument("--chpw", action="store_true", help="allow users to change their own passwords")
|
||||
ap2.add_argument("--chpw-no", metavar="U,U,U", type=u, action="append", help="do not allow password-changes for this comma-separated list of usernames")
|
||||
ap2.add_argument("--chpw-db", metavar="PATH", type=u, default=db_path, help="where to store the passwords database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||
ap2.add_argument("--chpw-len", metavar="N", type=int, default=8, help="minimum password length")
|
||||
ap2.add_argument("--chpw-v", metavar="LVL", type=int, default=2, help="verbosity of summary on config load [\033[32m0\033[0m] = nothing at all, [\033[32m1\033[0m] = number of users, [\033[32m2\033[0m] = list users with default-pw, [\033[32m3\033[0m] = list all users")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
@@ -952,7 +1117,7 @@ def add_zc_mdns(ap):
|
||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0.0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||
|
||||
|
||||
def add_zc_ssdp(ap):
|
||||
@@ -966,15 +1131,16 @@ def add_zc_ssdp(ap):
|
||||
|
||||
|
||||
def add_ftp(ap):
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||
ap2 = ap.add_argument_group('FTP options (TCP only)')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, default=0, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, default=0, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--ftp-ipa", metavar="PFX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPFX\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||
ap2.add_argument("--ftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--ftp-no-ow", action="store_true", help="if target file exists, reject upload instead of overwrite")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, default="", help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, default="", help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
|
||||
|
||||
def add_webdav(ap):
|
||||
@@ -986,6 +1152,19 @@ def add_webdav(ap):
|
||||
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
||||
|
||||
|
||||
def add_tftp(ap):
|
||||
ap2 = ap.add_argument_group('TFTP options (UDP only)')
|
||||
ap2.add_argument("--tftp", metavar="PORT", type=int, default=0, help="enable TFTP server on \033[33mPORT\033[0m, for example \033[32m69 \033[0mor \033[32m3969")
|
||||
ap2.add_argument("--tftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--tftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--tftpvv", action="store_true", help="verboser")
|
||||
ap2.add_argument("--tftp-no-fast", action="store_true", help="debug: disable optimizations")
|
||||
ap2.add_argument("--tftp-lsf", metavar="PTN", type=u, default="\\.?(dir|ls)(\\.txt)?", help="return a directory listing if a file with this name is requested and it does not exist; defaults matches .ls, dir, .dir.txt, ls.txt, ...")
|
||||
ap2.add_argument("--tftp-nols", action="store_true", help="if someone tries to download a directory, return an error instead of showing its directory listing")
|
||||
ap2.add_argument("--tftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||
ap2.add_argument("--tftp-pr", metavar="P-P", type=u, default="", help="the range of UDP ports to use for data transfer, for example \033[32m12000-13000")
|
||||
|
||||
|
||||
def add_smb(ap):
|
||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless \033[33m--smb-port\033[0m is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is DANGEROUS and buggy! Never expose to the internet!")
|
||||
@@ -1018,6 +1197,7 @@ def add_hooks(ap):
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||
ap2.add_argument("--hook-v", action="store_true", help="verbose hooks")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
@@ -1050,6 +1230,8 @@ def add_optouts(ap):
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
|
||||
ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
|
||||
ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader IPs into the database")
|
||||
|
||||
|
||||
def add_safety(ap):
|
||||
@@ -1057,7 +1239,7 @@ def add_safety(ap):
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, default="", help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
@@ -1067,7 +1249,7 @@ def add_safety(ap):
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default=8086.0, help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||
@@ -1075,19 +1257,21 @@ def add_safety(ap):
|
||||
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to permissions g/G/h (decent replacement for \033[33m--ban-404\033[0m if that can't be used)")
|
||||
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--early-ban", action="store_true", help="if a client is banned, reject its connection as soon as possible; not a good idea to enable when proxied behind cloudflare since it could ban your reverse-proxy")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for \033[33mMIN\033[0m minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for \033[33mB\033[0m minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)")
|
||||
|
||||
|
||||
def add_salt(ap, fk_salt, ah_salt):
|
||||
def add_salt(ap, fk_salt, dk_salt, ah_salt):
|
||||
ap2 = ap.add_argument_group('salting options')
|
||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)")
|
||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)")
|
||||
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
||||
ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
||||
ap2.add_argument("--dk-salt", metavar="SALT", type=u, default=dk_salt, help="per-directory accesskey salt; used to generate unpredictable URLs to share folders with users who only have the 'get' permission")
|
||||
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||
|
||||
|
||||
@@ -1101,11 +1285,12 @@ def add_shutdown(ap):
|
||||
def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, default="", help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-utc", action="store_true", help="do not use local timezone; assume the TZ env-var is UTC (tiny bit faster)")
|
||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
@@ -1128,8 +1313,10 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image by default (client can override in UI) (volflag=nocrop)")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60.0, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=6.0, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
||||
ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32my\033[0m]=crop, [\033[32mn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
|
||||
ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32my\033[0m]=yes, [\033[32mn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
@@ -1138,7 +1325,7 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than \033[33m--th-poke\033[0m seconds will get deleted every \033[33m--th-clean\033[0m seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling \033[33m-e2d\033[0m will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling \033[33m-e2d\033[0m will make these case-insensitive, and try them as dotfiles (.folder.jpg), and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
@@ -1146,35 +1333,39 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
|
||||
ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz", help="audio formats to decompress before passing to ffmpeg")
|
||||
|
||||
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--q-opus", metavar="KBPS", type=int, default=128, help="target bitrate for transcoding to opus; set 0 to disable")
|
||||
ap2.add_argument("--q-mp3", metavar="QUALITY", type=u, default="q2", help="target quality for transcoding to mp3, for example [\033[32m192k\033[0m] (CBR) or [\033[32mq0\033[0m] (CQ/CRF, q0=maxquality, q9=smallest); set 0 to disable")
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
noidx = APPLESAN_TXT if MACOS else ""
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database; this enables file search, upload-undo, improves deduplication")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="force a cache rebuild on startup; enable this once if it gets out of sync (should never be necessary)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (probably buggy, not recommended) (volflag=xlink)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10.0, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
||||
@@ -1206,19 +1397,40 @@ def add_txt(ap):
|
||||
ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
|
||||
|
||||
|
||||
def add_og(ap):
|
||||
ap2 = ap.add_argument_group('og / open graph / discord-embed options')
|
||||
ap2.add_argument("--og", action="store_true", help="disable hotlinking and return an html document instead; this is required by open-graph, but can also be useful on its own (volflag=og)")
|
||||
ap2.add_argument("--og-ua", metavar="RE", type=u, default="", help="only disable hotlinking / engage OG behavior if the useragent matches regex \033[33mRE\033[0m (volflag=og_ua)")
|
||||
ap2.add_argument("--og-tpl", metavar="PATH", type=u, default="", help="do not return the regular copyparty html, but instead load the jinja2 template at \033[33mPATH\033[0m (if path contains 'EXT' then EXT will be replaced with the requested file's extension) (volflag=og_tpl)")
|
||||
ap2.add_argument("--og-no-head", action="store_true", help="do not automatically add OG entries into <head> (useful if you're doing this yourself in a template or such) (volflag=og_no_head)")
|
||||
ap2.add_argument("--og-th", metavar="FMT", type=u, default="jf3", help="thumbnail format; j=jpeg, jf=jpeg-uncropped, jf3=jpeg-uncropped-large, w=webm, ... (volflag=og_th)")
|
||||
ap2.add_argument("--og-title", metavar="TXT", type=u, default="", help="fallback title if there is nothing in the \033[33m-e2t\033[0m database (volflag=og_title)")
|
||||
ap2.add_argument("--og-title-a", metavar="T", type=u, default="🎵 {{ artist }} - {{ title }}", help="audio title format; takes any metadata key (volflag=og_title_a)")
|
||||
ap2.add_argument("--og-title-v", metavar="T", type=u, default="{{ title }}", help="video title format; takes any metadata key (volflag=og_title_v)")
|
||||
ap2.add_argument("--og-title-i", metavar="T", type=u, default="{{ title }}", help="image title format; takes any metadata key (volflag=og_title_i)")
|
||||
ap2.add_argument("--og-s-title", action="store_true", help="force default title; do not read from tags (volflag=og_s_title)")
|
||||
ap2.add_argument("--og-desc", metavar="TXT", type=u, default="", help="description text; same for all files, disable with [\033[32m-\033[0m] (volflag=og_desc)")
|
||||
ap2.add_argument("--og-site", metavar="TXT", type=u, default="", help="sitename; defaults to \033[33m--name\033[0m, disable with [\033[32m-\033[0m] (volflag=og_site)")
|
||||
ap2.add_argument("--tcolor", metavar="RGB", type=u, default="333", help="accent color (3 or 6 hex digits); may also affect safari and/or android-chrome (volflag=tcolor)")
|
||||
ap2.add_argument("--uqe", action="store_true", help="query-string parceling; translate a request for \033[33m/foo/.uqe/BASE64\033[0m into \033[33m/foo?TEXT\033[0m, or \033[33m/foo/?TEXT\033[0m if the first character in \033[33mTEXT\033[0m is a slash. Automatically enabled for \033[33m--og\033[0m")
|
||||
|
||||
|
||||
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||
ap2.add_argument("--gsel", action="store_true", help="select files in grid by ctrl-click (volflag=gsel)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor chi\033[0m")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
|
||||
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-other", metavar="L", type=u, default="", help="URL to additional JS to include in all other pages")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages (except for basic-browser); can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
@@ -1226,6 +1438,7 @@ def add_ui(ap, retry):
|
||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
@@ -1236,14 +1449,18 @@ def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--deps", action="store_true", help="list information about detected optional dependencies")
|
||||
if hasattr(select, "poll"):
|
||||
ap2.add_argument("--no-poll", action="store_true", help="kernel-bug workaround: disable poll; use select instead (limits max num clients to ~700)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--rm-sck", action="store_true", help="when listening on unix-sockets, do a basic delete+bind instead of the default atomic bind")
|
||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, default="", help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, default=0.0, help="list active threads every \033[33mSEC\033[0m")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||
@@ -1258,6 +1475,7 @@ def run_argparse(
) -> argparse.Namespace:
ap = argparse.ArgumentParser(
formatter_class=formatter,
usage=argparse.SUPPRESS,
prog="copyparty",
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
)
@@ -1265,6 +1483,7 @@ def run_argparse(
cert_path = os.path.join(E.cfg, "cert.pem")

fk_salt = get_fk_salt()
dk_salt = get_dk_salt()
ah_salt = get_ah_salt()

# alpine peaks at 5 threads for some reason,
@@ -1281,10 +1500,13 @@ def run_argparse(
add_tls(ap, cert_path)
add_cert(ap, cert_path)
add_auth(ap)
add_chpw(ap)
add_qr(ap, tty)
add_zeroconf(ap)
add_zc_mdns(ap)
add_zc_ssdp(ap)
add_fs(ap)
add_share(ap)
add_upload(ap)
add_db_general(ap, hcores)
add_db_metadata(ap)
@@ -1292,9 +1514,10 @@ def run_argparse(
add_transcoding(ap)
add_ftp(ap)
add_webdav(ap)
add_tftp(ap)
add_smb(ap)
add_safety(ap)
add_salt(ap, fk_salt, ah_salt)
add_salt(ap, fk_salt, dk_salt, ah_salt)
add_optouts(ap)
add_shutdown(ap)
add_yolo(ap)
@@ -1302,6 +1525,7 @@ def run_argparse(
add_hooks(ap)
add_stats(ap)
add_txt(ap)
add_og(ap)
add_ui(ap, retry)
add_admin(ap)
add_logging(ap)
@@ -1330,30 +1554,35 @@ def run_argparse(
k2 = "help_" + k.replace("-", "_")
if vars(ret)[k2]:
lprint("# %s help page (%s)" % (k, h))
lprint(t + "\033[0m")
lprint(t.rstrip() + "\033[0m")
sys.exit(0)

return ret


def main(argv: Optional[list[str]] = None) -> None:
def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("rem") # enables colors

init_E(E)

if rsrc: # pyz
E.mod = rsrc

if argv is None:
argv = sys.argv

f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite v{} | jinja2 v{} | pyftpd v{}\n\033[0m'
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite {} | jinja {} | pyftpd {} | tftp {}\n\033[0m'
f = f.format(
S_VERSION,
CODENAME,
S_BUILD_DT,
py_desc().replace("[", "\033[90m["),
PY_DESC.replace("[", "\033[90m["),
SQLITE_VER,
JINJA_VER,
PYFTPD_VER,
PARTFTPY_VER,
)
lprint(f)
@@ -1364,9 +1593,19 @@ def main(argv: Optional[list[str]] = None) -> None:
showlic()
sys.exit(0)

if "--mimes" in argv:
print("\n".join("%8s %s" % (k, v) for k, v in sorted(MIMES.items())))
sys.exit(0)

if EXE:
print("pybin: {}\n".format(pybin), end="")

for n, zs in enumerate(argv):
if zs.startswith("--sfx-tpoke="):
Daemon(sfx_tpoke, "sfx-tpoke", (zs.split("=", 1)[1],))
argv.pop(n)
break

ensure_locale()

ensure_webdeps()
@@ -1382,7 +1621,13 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v)
argv.extend(supp)

deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
deprecated: list[tuple[str, str]] = [
("--salt", "--warksalt"),
("--hdr-au-usr", "--idp-h-usr"),
("--idp-h-sep", "--idp-gsep"),
("--th-no-crop", "--th-crop=n"),
("--never-symlink", "--hardlink-only"),
]
for dk, nk in deprecated:
idx = -1
ov = ""
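# Minimal sketch of the idea behind the deprecated-flags table above (the rest of
# the remap loop is outside this hunk): each old switch found on argv is replaced
# by its new name, keeping any "=value" suffix. Not copyparty's exact code.
def remap_deprecated(argv, deprecated):
    out = []
    for a in argv:
        for dk, nk in deprecated:
            if a == dk or a.startswith(dk + "="):
                a = nk + a[len(dk):]
                break
        out.append(a)
    return out

# remap_deprecated(["--salt=x", "-v", "a:b:r"], [("--salt", "--warksalt")])
# -> ["--warksalt=x", "-v", "a:b:r"]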
@@ -1406,7 +1651,7 @@ def main(argv: Optional[list[str]] = None) -> None:
argv.extend(["--qr"])
if ANYWIN or not os.geteuid():
# win10 allows symlinks if admin; can be unexpected
argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
argv.extend(["-p80,443,3923", "--ign-ebind"])
except:
pass

@@ -1420,9 +1665,9 @@ def main(argv: Optional[list[str]] = None) -> None:

_, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
if hard > 0: # -1 == infinite
nc = min(nc, hard // 4)
nc = min(nc, int(hard / 4))
except:
nc = 512
nc = 486 # mdns/ssdp restart headroom; select() maxfd is 512 on windows

retry = False
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
@@ -1457,40 +1702,6 @@ def main(argv: Optional[list[str]] = None) -> None:
if al.ansi:
al.wintitle = ""

nstrs: list[str] = []
anymod = False
for ostr in al.v or []:
m = re_vol.match(ostr)
if not m:
# not our problem
nstrs.append(ostr)
continue

src, dst, perms = m.groups()
na = [src, dst]
mod = False
for opt in perms.split(":"):
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
elif re.sub("^[rwmdgGha]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
na.append(perm + "," + opt[1:])
else:
na.append(opt)

nstr = ":".join(na)
nstrs.append(nstr if mod else ostr)
if mod:
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
lprint(msg.format(ostr, nstr))
anymod = True

if anymod:
al.v = nstrs
time.sleep(2)
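# Standalone illustration of what the rewrite loop above does to legacy volume
# syntax (re_vol itself is defined elsewhere and is assumed here): a volflag such
# as "cnodupe", or a permission glued onto its arguments, gets an explicit comma.
import re

def _legacy_fixup(perms: str) -> str:
    na = []
    for opt in perms.split(":"):
        if re.match("c[^,]", opt):
            na.append("c," + opt[1:])
        elif re.sub("^[rwmdgGha]*", "", opt) and "," not in opt:
            na.append(opt[0] + "," + opt[1:])
        else:
            na.append(opt)
    return ":".join(na)

# _legacy_fixup("r:cnodupe") -> "r:c,nodupe"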
# propagate implications
for k1, k2 in IMPLICATIONS:
if getattr(al, k1):
@@ -1501,6 +1712,9 @@ def main(argv: Optional[list[str]] = None) -> None:
if getattr(al, k1):
setattr(al, k2, False)

if not HAVE_IPV6 and al.i == "::":
al.i = "0.0.0.0"

al.i = al.i.split(",")
try:
if "-" in al.p:
@@ -1546,6 +1760,12 @@ def main(argv: Optional[list[str]] = None) -> None:
if sys.version_info < (3, 6):
al.no_scandir = True

if not hasattr(os, "sendfile"):
al.no_sendfile = True

if not hasattr(select, "poll"):
al.no_poll = True

# signal.signal(signal.SIGINT, sighandler)

SvcHub(al, dal, argv, "".join(printed)).run()

@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 9, 26)
CODENAME = "prometheable"
BUILD_DT = (2023, 12, 8)
VERSION = (1, 15, 1)
CODENAME = "fill the drives"
BUILD_DT = (2024, 9, 9)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py (1086): file diff suppressed because it is too large
@@ -57,11 +57,8 @@ class BrokerMp(object):
def shutdown(self) -> None:
self.log("broker", "shutting down")
for n, proc in enumerate(self.procs):
thr = threading.Thread(
target=proc.q_pend.put((0, "shutdown", [])),
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
)
thr.start()
name = "mp-shut-%d-%d" % (n, len(self.procs))
Daemon(proc.q_pend.put, name, ((0, "shutdown", []),))

with self.mutex:
procs = self.procs
@@ -79,6 +76,10 @@ class BrokerMp(object):
for _, proc in enumerate(self.procs):
proc.q_pend.put((0, "reload", []))

def reload_sessions(self) -> None:
for _, proc in enumerate(self.procs):
proc.q_pend.put((0, "reload_sessions", []))

def collector(self, proc: MProcess) -> None:
"""receive message from hub in other process"""
while True:

@@ -76,7 +76,7 @@ class MpWorker(BrokerCli):
pass

def logw(self, msg: str, c: Union[int, str] = 0) -> None:
self.log("mp{}".format(self.n), msg, c)
self.log("mp%d" % (self.n,), msg, c)

def main(self) -> None:
while True:
@@ -94,6 +94,10 @@ class MpWorker(BrokerCli):
self.asrv.reload()
self.logw("mpw.asrv reloaded")

elif dest == "reload_sessions":
with self.asrv.mutex:
self.asrv.load_sessions()

elif dest == "listen":
self.httpsrv.listen(args[0], args[1])

@@ -34,6 +34,7 @@ class BrokerThr(BrokerCli):
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
self.httpsrv = HttpSrv(self, None)
self.reload = self.noop
self.reload_sessions = self.noop

def shutdown(self) -> None:
# self.log("broker", "shutting down")

@@ -28,7 +28,7 @@ class ExceptionalQueue(Queue, object):
if rv[1] == "pebkac":
raise Pebkac(*rv[2:])
else:
raise Exception(rv[2])
raise rv[2]

return rv

@@ -65,8 +65,8 @@ def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:

return ["exception", "pebkac", ex.code, str(ex)]

except:
except Exception as ex:
if not want_retval:
raise

return ["exception", "stack", traceback.format_exc()]
return ["exception", "stack", ex]
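# Why the try_exec change above matters (a simplified sketch, not the real
# broker plumbing): by shipping the exception object itself instead of a
# pre-formatted traceback string, the receiving side of the queue can simply
# re-raise it ("raise rv[2]") and keep the original exception type, instead of
# raising a generic Exception wrapping a string.
def try_exec_sketch(want_retval, func, *args):
    try:
        return func(*args)
    except Exception as ex:
        if not want_retval:
            raise
        return ["exception", "stack", ex]

rv = try_exec_sketch(True, int, "not a number")
assert isinstance(rv[2], ValueError)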
@@ -6,12 +6,19 @@ import os
import shutil
import time

from .util import Netdev, runcmd
from .__init__ import ANYWIN
from .util import Netdev, runcmd, wrename, wunlink

HAVE_CFSSL = True
HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")

if True: # pylint: disable=using-constant-test
from .util import RootLogger
from .util import NamedLogger, RootLogger


if ANYWIN:
VF = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}
else:
VF = {"mv_re_t": 0, "rm_re_t": 0}


def ensure_cert(log: "RootLogger", args) -> None:
@@ -76,6 +83,8 @@ def _read_crt(args, fn):


def _gen_ca(log: "RootLogger", args):
nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-ca", msg, c)

expiry = _read_crt(args, "ca.pem")[0]
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
return
@@ -105,13 +114,19 @@ def _gen_ca(log: "RootLogger", args):
raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)

bname = os.path.join(args.crt_dir, "ca")
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
try:
wunlink(nlog, bname + ".key", VF)
except:
pass
wrename(nlog, bname + "-key.pem", bname + ".key", VF)
wunlink(nlog, bname + ".csr", VF)

log("cert", "new ca OK", 2)


def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-srv", msg, c)

names = args.crt_ns.split(",") if args.crt_ns else []
if not args.crt_exact:
for n in names[:]:
@@ -185,11 +200,11 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):

bname = os.path.join(args.crt_dir, "srv")
try:
os.unlink(bname + ".key")
wunlink(nlog, bname + ".key", VF)
except:
pass
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
wrename(nlog, bname + "-key.pem", bname + ".key", VF)
wunlink(nlog, bname + ".csr", VF)

with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
ca = f.read()
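# The os.rename/os.unlink to wrename/wunlink swap above is about busy files on
# Windows. A minimal sketch of such a retry wrapper (assumed behavior; the real
# helpers live in copyparty's util module and also take a logger and a VF dict):
import os
import time

def rename_with_retries(src: str, dst: str, timeout: float = 5.0, delay: float = 0.1) -> None:
    t0 = time.time()
    while True:
        try:
            os.replace(src, dst)
            return
        except OSError:
            if time.time() - t0 > timeout:
                raise
            time.sleep(delay)  # file busy (antivirus, indexer, ...); try again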
@@ -9,19 +9,20 @@ onedash = set(zs.split())
def vf_bmap() -> dict[str, str]:
"""argv-to-volflag: simple bools"""
ret = {
"never_symlink": "neversymlink",
"no_dedup": "copydupes",
"dav_auth": "davauth",
"dav_rt": "davrt",
"ed": "dots",
"hardlink_only": "hardlinkonly",
"no_dupe": "nodupe",
"no_forget": "noforget",
"no_pipe": "nopipe",
"no_robots": "norobots",
"no_thumb": "dthumb",
"no_vthumb": "dvthumb",
"no_athumb": "dathumb",
"th_no_crop": "nocrop",
"dav_auth": "davauth",
"dav_rt": "davrt",
}
for k in (
"dedup",
"dotsrch",
"e2d",
"e2ds",
@@ -34,10 +35,14 @@ def vf_bmap() -> dict[str, str]:
"e2vp",
"exp",
"grid",
"gsel",
"hardlink",
"magic",
"no_sb_md",
"no_sb_lg",
"og",
"og_no_head",
"og_s_title",
"rand",
"xdev",
"xlink",
@@ -53,16 +58,33 @@ def vf_vmap() -> dict[str, str]:
"no_hash": "nohash",
"no_idx": "noidx",
"re_maxage": "scan",
"safe_dedup": "safededup",
"th_convt": "convt",
"th_size": "thsize",
"th_crop": "crop",
"th_x3": "th3x",
}
for k in (
"dbd",
"html_head",
"lg_sbf",
"md_sbf",
"nrand",
"og_desc",
"og_site",
"og_th",
"og_title",
"og_title_a",
"og_title_v",
"og_title_i",
"og_tpl",
"og_ua",
"mv_retry",
"rm_retry",
"sort",
"tcolor",
"unlist",
"u2abort",
"u2ts",
):
ret[k] = k
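# Sketch of how the tables above are consumed (an assumption based on their
# names, not code shown in this diff): a global argv switch such as --no-thumb
# maps to the volflag "dthumb", so per-volume flags and global defaults can
# share one namespace.
def argv_to_volflags(args_ns_dict, bmap):
    # args_ns_dict: vars() of an argparse Namespace; bmap: result of vf_bmap()
    return {volflag: bool(args_ns_dict.get(argkey)) for argkey, volflag in bmap.items()}

# argv_to_volflags({"no_thumb": True, "e2d": False}, {"no_thumb": "dthumb", "e2d": "e2d"})
# -> {"dthumb": True, "e2d": False}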
@@ -75,7 +97,6 @@ def vf_cmap() -> dict[str, str]:
for k in (
"exp_lg",
"exp_md",
"html_head",
"mte",
"mth",
"mtp",
@@ -98,19 +119,23 @@ permdescs = {
"w": 'write; upload files; need "r" to see the uploads',
"m": 'move; move files and folders; need "w" at destination',
"d": "delete; permanently delete files and folders",
".": "dots; user can ask to show dotfiles in listings",
"g": "get; download files, but cannot see folder contents",
"G": 'upget; same as "g" but can see filekeys of their own uploads',
"h": 'html; same as "g" but folders return their index.html',
"a": "admin; can see uploader IPs, config-reload",
"A": "all; same as 'rwmda.' (read/write/move/delete/dotfiles)",
}


flagcats = {
"uploads, general": {
"dedup": "enable symlink-based file deduplication",
"hardlink": "enable hardlink-based file deduplication,\nwith fallback on symlinks when that is impossible",
"hardlinkonly": "dedup with hardlink only, never symlink;\nmake a full copy if hardlink is impossible",
"safededup": "verify on-disk data before using it for dedup",
"nodupe": "rejects existing files (instead of symlinking them)",
"hardlink": "does dedup with hardlinks instead of symlinks",
"neversymlink": "disables symlink fallback; full copy instead",
"copydupes": "disables dedup, always saves full copies of dupes",
"sparse": "force use of sparse files, mainly for s3-backed storage",
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
"nosub": "forces all uploads into the top folder of the vfs",
"magic": "enables filetype detection for nameless uploads",
@@ -122,9 +147,11 @@ flagcats = {
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
"medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
"rand": "force randomized filenames, 9 chars long by default",
"nrand=N": "randomized filenames are N chars long",
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
"u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
"df=1g": "ensure 1 GiB free disk space",
},
@@ -134,7 +161,7 @@ flagcats = {
"lifetime=3600": "uploads are deleted after 1 hour",
},
"database, general": {
"e2d": "enable database; makes files searchable + enables upload dedup",
"e2d": "enable database; makes files searchable + enables upload-undo",
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
"e2t": "enable multimedia indexing; makes it possible to search for tags",
@@ -152,7 +179,7 @@ flagcats = {
"noforget": "don't forget files when deleted from disk",
"fat32": "avoid excessive reindexing on android sdcardfs",
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
"xlink": "cross-volume dupe detection / linking",
"xlink": "cross-volume dupe detection / linking (dangerous)",
"xdev": "do not descend into other filesystems",
"xvol": "do not follow symlinks leaving the volume root",
"dotsrch": "show dotfiles in search results",
@@ -167,8 +194,10 @@ flagcats = {
"dvthumb": "disables video thumbnails",
"dathumb": "disables audio thumbnails (spectrograms)",
"dithumb": "disables image thumbnails",
"pngquant": "compress audio waveforms 33% better",
"thsize": "thumbnail res; WxH",
"nocrop": "disable center-cropping by default",
"crop": "center-cropping (y/n/fy/fn)",
"th3x": "3x resolution (y/n/fy/fn)",
"convt": "conversion timeout in seconds",
},
"handlers\n(better explained in --help-handlers)": {
@@ -188,9 +217,10 @@ flagcats = {
},
"client and ux": {
"grid": "show grid/thumbnails by default",
"gsel": "select files in grid by ctrl-click",
"sort": "default sort order",
"unlist": "dont list files matching REGEX",
"html_head=TXT": "includes TXT in the <head>",
"html_head=TXT": "includes TXT in the <head>, or @PATH for file at PATH",
"robots": "allows indexing by search engines (default)",
"norobots": "kindly asks search engines to leave",
"no_sb_md": "disable js sandbox for markdown files",
@@ -202,8 +232,11 @@ flagcats = {
"nohtml": "return html and markdown as text/html",
},
"others": {
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
"mv_retry": "ms-windows: timeout for renaming busy files",
"rm_retry": "ms-windows: timeout for deleting busy files",
"davauth": "ask webdav clients to login for all folders",
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
},
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import argparse
import os
import re
import time
@@ -8,29 +9,35 @@ import time
from .__init__ import ANYWIN, MACOS
from .authsrv import AXS, VFS
from .bos import bos
from .util import chkcmd, min_ex
from .util import chkcmd, min_ex, undot

if True: # pylint: disable=using-constant-test
from typing import Optional, Union

from .util import RootLogger
from .util import RootLogger, undot


class Fstab(object):
def __init__(self, log: "RootLogger"):
def __init__(self, log: "RootLogger", args: argparse.Namespace):
self.log_func = log

self.warned = False
self.trusted = False
self.tab: Optional[VFS] = None
self.oldtab: Optional[VFS] = None
self.srctab = "a"
self.cache: dict[str, str] = {}
self.age = 0.0
self.maxage = args.mtab_age

def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func("fstab", msg, c)

def get(self, path: str) -> str:
if len(self.cache) > 9000:
self.age = time.time()
now = time.time()
if now - self.age > self.maxage or len(self.cache) > 9000:
self.age = now
self.oldtab = self.tab or self.oldtab
self.tab = None
self.cache = {}
@@ -45,7 +52,7 @@ class Fstab(object):
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
return fs
|
||||
|
||||
path = path.lstrip("/")
|
||||
path = undot(path)
|
||||
try:
|
||||
return self.cache[path]
|
||||
except:
|
||||
@@ -75,7 +82,7 @@ class Fstab(object):
|
||||
self.trusted = False
|
||||
|
||||
def build_tab(self) -> None:
|
||||
self.log("building tab")
|
||||
self.log("inspecting mtab for changes")
|
||||
|
||||
sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
|
||||
if MACOS:
|
||||
@@ -84,6 +91,7 @@ class Fstab(object):
|
||||
ptn = re.compile(sptn)
|
||||
so, _ = chkcmd(["mount"])
|
||||
tab1: list[tuple[str, str]] = []
|
||||
atab = []
|
||||
for ln in so.split("\n"):
|
||||
m = ptn.match(ln)
|
||||
if not m:
|
||||
@@ -91,6 +99,15 @@ class Fstab(object):
|
||||
|
||||
zs1, zs2 = m.groups()
|
||||
tab1.append((str(zs1), str(zs2)))
|
||||
atab.append(ln)
|
||||
|
||||
# keep empirically-correct values if mounttab unchanged
|
||||
srctab = "\n".join(sorted(atab))
|
||||
if srctab == self.srctab:
|
||||
self.tab = self.oldtab
|
||||
return
|
||||
|
||||
self.log("mtab has changed; reevaluating support for sparse files")
|
||||
|
||||
tab1.sort(key=lambda x: (len(x[0]), x[0]))
|
||||
path1, fs1 = tab1[0]
|
||||
@@ -99,6 +116,7 @@ class Fstab(object):
|
||||
tab.add(fs, path.lstrip("/"))
|
||||
|
||||
self.tab = tab
|
||||
self.srctab = srctab
|
||||
|
||||
def relabel(self, path: str, nval: str) -> None:
|
||||
assert self.tab
|
||||
@@ -106,7 +124,7 @@ class Fstab(object):
|
||||
if ANYWIN:
|
||||
path = self._winpath(path)
|
||||
|
||||
path = path.lstrip("/")
|
||||
path = undot(path)
|
||||
ptn = re.compile(r"^[^\\/]*")
|
||||
vn, rem = self.tab._find(path)
|
||||
if not self.trusted:
|
||||
@@ -133,7 +151,9 @@ class Fstab(object):
|
||||
self.trusted = True
|
||||
except:
|
||||
# prisonparty or other restrictive environment
|
||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||
if not self.warned:
|
||||
self.warned = True
|
||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
|
||||
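Fstab.get now resets its path-to-filesystem cache either when it grows past 9000 entries or when it is older than args.mtab_age. A minimal standalone sketch of that time-and-size bounded cache pattern is below; the class and parameter names are made up for illustration and are not copyparty's API.

```python
import time

class BoundedCache(object):
    """drop everything when the cache gets too big or too old"""

    def __init__(self, maxage, maxlen=9000):
        self.maxage = maxage
        self.maxlen = maxlen
        self.age = 0.0
        self.cache = {}

    def get(self, key, compute):
        now = time.time()
        if now - self.age > self.maxage or len(self.cache) > self.maxlen:
            # full reset, like Fstab.get: cheaper than per-entry expiry
            self.age = now
            self.cache = {}
        try:
            return self.cache[key]
        except KeyError:
            val = self.cache[key] = compute(key)
            return val

# usage sketch:
# fscache = BoundedCache(maxage=300)
# fstype = fscache.get("/mnt/media", lambda p: "ext4")
```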
@@ -19,7 +19,9 @@ from .__init__ import PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
VF_CAREFUL,
|
||||
Daemon,
|
||||
ODict,
|
||||
Pebkac,
|
||||
exclude_dotfiles,
|
||||
fsenc,
|
||||
@@ -29,6 +31,7 @@ from .util import (
|
||||
runhook,
|
||||
sanitize_fn,
|
||||
vjoin,
|
||||
wunlink,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -36,7 +39,10 @@ if TYPE_CHECKING:
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class FSE(FilesystemError):
|
||||
@@ -73,6 +79,7 @@ class FtpAuth(DummyAuthorizer):
|
||||
asrv = self.hub.asrv
|
||||
uname = "*"
|
||||
if username != "anonymous":
|
||||
uname = ""
|
||||
for zs in (password, username):
|
||||
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||
if zs:
|
||||
@@ -132,11 +139,14 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
self.can_read = self.can_write = self.can_move = False
|
||||
self.can_delete = self.can_get = self.can_upget = False
|
||||
self.can_admin = False
|
||||
self.can_admin = self.can_dot = False
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.hub.log("ftpd", msg, c)
|
||||
|
||||
def v2a(
|
||||
self,
|
||||
vpath: str,
|
||||
@@ -167,7 +177,7 @@ class FtpFs(AbstractedFS):
|
||||
if not avfs:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
|
||||
cr, cw, cm, cd, _, _, _, _ = avfs.can_access("", self.h.uname)
|
||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
@@ -205,18 +215,38 @@ class FtpFs(AbstractedFS):
|
||||
w = "w" in mode or "a" in mode or "+" in mode
|
||||
|
||||
ap = self.rv2a(filename, r, w)[0]
|
||||
self.validpath(ap)
|
||||
if w:
|
||||
try:
|
||||
st = bos.stat(ap)
|
||||
td = time.time() - st.st_mtime
|
||||
need_unlink = True
|
||||
except:
|
||||
need_unlink = False
|
||||
td = 0
|
||||
|
||||
if td < -1 or td > self.args.ftp_wt:
|
||||
raise FSE("Cannot open existing file for writing")
|
||||
if w and need_unlink:
|
||||
if td >= -1 and td <= self.args.ftp_wt:
|
||||
# within permitted timeframe; unlink and accept
|
||||
do_it = True
|
||||
elif self.args.no_del or self.args.ftp_no_ow:
|
||||
# file too old, or overwrite not allowed; reject
|
||||
do_it = False
|
||||
else:
|
||||
# allow overwrite if user has delete permission
|
||||
# (avoids win2000 freaking out and deleting the server copy without uploading its own)
|
||||
try:
|
||||
self.rv2a(filename, False, True, False, True)
|
||||
do_it = True
|
||||
except:
|
||||
do_it = False
|
||||
|
||||
self.validpath(ap)
|
||||
return open(fsenc(ap), mode)
|
||||
if not do_it:
|
||||
raise FSE("File already exists")
|
||||
|
||||
wunlink(self.log, ap, VF_CAREFUL)
|
||||
|
||||
return open(fsenc(ap), mode, self.args.iobuf)
|
||||
|
||||
def chdir(self, path: str) -> None:
|
||||
nwd = join(self.cwd, path)
|
||||
@@ -243,6 +273,7 @@ class FtpFs(AbstractedFS):
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
self.can_admin,
|
||||
self.can_dot,
|
||||
) = avfs.can_access("", self.h.uname)
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
@@ -265,7 +296,7 @@ class FtpFs(AbstractedFS):
|
||||
vfs_ls = [x[0] for x in vfs_ls1]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
if not self.args.ed:
|
||||
if not self.can_dot:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
vfs_ls.sort()
|
||||
@@ -279,9 +310,20 @@ class FtpFs(AbstractedFS):
|
||||
# display write-only folders as empty
|
||||
return []
|
||||
|
||||
# return list of volumes
|
||||
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
|
||||
return list(sorted(list(r.keys())))
|
||||
# return list of accessible volumes
|
||||
ret = []
|
||||
for vn in self.hub.asrv.vfs.all_vols.values():
|
||||
if "/" in vn.vpath or not vn.vpath:
|
||||
continue # only include toplevel-mounted vols
|
||||
|
||||
try:
|
||||
self.hub.asrv.vfs.get(vn.vpath, self.uname, True, False)
|
||||
ret.append(vn.vpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
ret.sort()
|
||||
return ret
|
||||
|
||||
def rmdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, d=True)[0]
|
||||
@@ -297,7 +339,7 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False, False)
|
||||
except Exception as ex:
|
||||
raise FSE(str(ex))
|
||||
|
||||
@@ -311,7 +353,7 @@ class FtpFs(AbstractedFS):
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||
self.hub.up2k.handle_mv(self.uname, self.h.cli_ip, svp, dvp)
|
||||
except Exception as ex:
|
||||
raise FSE(str(ex))
|
||||
|
||||
@@ -407,7 +449,7 @@ class FtpHandler(FTPHandler):
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
|
||||
if self.args.ftp_ipa_re and not self.args.ftp_ipa_re.match(cip):
|
||||
if self.args.ftp_ipa_nm and not self.args.ftp_ipa_nm.map(cip):
|
||||
logging.warning("client rejected (--ftp-ipa): %s", cip)
|
||||
self.connected = False
|
||||
conn.close()
|
||||
@@ -429,15 +471,19 @@ class FtpHandler(FTPHandler):
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
None,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.ftpd",
|
||||
xbu,
|
||||
ap,
|
||||
vfs.canonical(rem),
|
||||
vp,
|
||||
"",
|
||||
self.uname,
|
||||
self.hub.asrv.vfs.get_perms(vp, self.uname),
|
||||
0,
|
||||
0,
|
||||
self.cli_ip,
|
||||
0,
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
raise FSE("Upload blocked by xbu server config")
|
||||
@@ -540,9 +586,17 @@ class Ftpd(object):
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
ips = [x for x in ips if "unix:" not in x]
|
||||
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
if not ips:
|
||||
lgr.fatal("cannot start ftp-server; no compatible IPs in -i")
|
||||
return
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in ips:
|
||||
for h, lp in hs:
|
||||
|
||||
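The FtpFs.open() change earlier in this file's diff decides whether an FTP upload may replace an existing file: overwrite is accepted if the file was modified within the last ftp_wt seconds, otherwise only if overwrites are not disabled and the user also holds delete permission. A hedged sketch of that decision follows, with plain booleans standing in for copyparty's volume and permission objects.

```python
import os
import time

def may_overwrite(ap, ftp_wt, no_overwrite, has_delete_perm):
    """decide whether an upload may replace the existing file at ap;
    simplified stand-in for the permission checks above"""
    try:
        st = os.stat(ap)
    except OSError:
        return True  # nothing there; plain create

    td = time.time() - st.st_mtime
    if -1 <= td <= ftp_wt:
        # freshly written (e.g. a resumed or duplicate upload); replace it
        return True
    if no_overwrite:
        return False
    # older file: only replace it if the user could have deleted it anyway
    return has_delete_perm
```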
copyparty/httpcli.py: 1872 changes (file diff suppressed because it is too large)
@@ -9,6 +9,9 @@ import threading # typechk
|
||||
import time
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_TLS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
@@ -23,7 +26,7 @@ from .mtag import HAVE_FFMPEG
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS
|
||||
from .u2idx import U2idx
|
||||
from .util import HMaccas, shut_socket
|
||||
from .util import HMaccas, NetMap, shut_socket
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Optional, Pattern, Union
|
||||
@@ -50,11 +53,15 @@ class HttpConn(object):
|
||||
self.addr = addr
|
||||
self.hsrv = hsrv
|
||||
|
||||
self.mutex: threading.Lock = hsrv.mutex # mypy404
|
||||
self.u2mutex: threading.Lock = hsrv.u2mutex # mypy404
|
||||
self.args: argparse.Namespace = hsrv.args # mypy404
|
||||
self.E: EnvParams = self.args.E
|
||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||
self.pipes: Util.CachedDict = hsrv.pipes # mypy404
|
||||
self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
|
||||
self.xff_nm: Optional[NetMap] = hsrv.xff_nm
|
||||
self.xff_lan: NetMap = hsrv.xff_lan # type: ignore
|
||||
self.iphash: HMaccas = hsrv.broker.iphash
|
||||
self.bans: dict[str, int] = hsrv.bans
|
||||
self.aclose: dict[str, int] = hsrv.aclose
|
||||
@@ -93,7 +100,7 @@ class HttpConn(object):
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name: str) -> str:
|
||||
@@ -112,32 +119,30 @@ class HttpConn(object):
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self) -> bool:
|
||||
method = None
|
||||
if True:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
len(method)
|
||||
)
|
||||
if method:
|
||||
self.log(err)
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
len(method)
|
||||
)
|
||||
if method:
|
||||
self.log(err)
|
||||
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return False
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return False
|
||||
|
||||
return not method or not bool(PTN_HTTP.match(method))
|
||||
|
||||
@@ -178,7 +183,7 @@ class HttpConn(object):
|
||||
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
"\033[1;3%dm%s" % (c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
@@ -12,7 +12,7 @@ import time
|
||||
|
||||
import queue
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, PY2, TYPE_CHECKING, EnvParams, unicode
|
||||
|
||||
try:
|
||||
MNFE = ModuleNotFoundError
|
||||
@@ -61,12 +61,14 @@ from .u2idx import U2idx
|
||||
from .util import (
|
||||
E_SCK,
|
||||
FHC,
|
||||
CachedDict,
|
||||
Daemon,
|
||||
Garda,
|
||||
Magician,
|
||||
Netdev,
|
||||
NetMap,
|
||||
absreal,
|
||||
build_netmap,
|
||||
ipnorm,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
@@ -82,6 +84,12 @@ if TYPE_CHECKING:
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
setattr(socket, "AF_UNIX", -9001)
|
||||
|
||||
|
||||
class HttpSrv(object):
|
||||
"""
|
||||
@@ -103,7 +111,7 @@ class HttpSrv(object):
|
||||
self.t0 = time.time()
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
self.magician = Magician()
|
||||
self.nm = NetMap([], {})
|
||||
self.nm = NetMap([], [])
|
||||
self.ssdp: Optional["SSDPr"] = None
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
@@ -117,6 +125,7 @@ class HttpSrv(object):
|
||||
self.bound: set[tuple[str, int]] = set()
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.u2mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
|
||||
self.tp_nthr = 0 # actual
|
||||
@@ -128,6 +137,7 @@ class HttpSrv(object):
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.pipes = CachedDict(0.2)
|
||||
self.metrics = Metrics(self)
|
||||
self.nreq = 0
|
||||
self.nsus = 0
|
||||
@@ -144,11 +154,25 @@ class HttpSrv(object):
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
jn = [
|
||||
"splash",
|
||||
"shares",
|
||||
"svcs",
|
||||
"browser",
|
||||
"browser2",
|
||||
"msg",
|
||||
"md",
|
||||
"mde",
|
||||
"cf",
|
||||
]
|
||||
self.j2 = {x: env.get_template(x + ".html") for x in jn}
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
self.ipa_nm = build_netmap(self.args.ipa)
|
||||
self.xff_nm = build_netmap(self.args.xff_src)
|
||||
self.xff_lan = build_netmap("lan")
|
||||
|
||||
self.statics: set[str] = set()
|
||||
self._build_statics()
|
||||
|
||||
@@ -190,7 +214,7 @@ class HttpSrv(object):
|
||||
for fn in df:
|
||||
ap = absreal(os.path.join(dp, fn))
|
||||
self.statics.add(ap)
|
||||
if ap.endswith(".gz") or ap.endswith(".br"):
|
||||
if ap.endswith(".gz"):
|
||||
self.statics.add(ap[:-3])
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
@@ -198,7 +222,7 @@ class HttpSrv(object):
|
||||
for ip, _ in self.bound:
|
||||
ips.add(ip)
|
||||
|
||||
self.nm = NetMap(list(ips), netdevs)
|
||||
self.nm = NetMap(list(ips), list(netdevs))
|
||||
|
||||
def start_threads(self, n: int) -> None:
|
||||
self.tp_nthr += n
|
||||
@@ -220,7 +244,7 @@ class HttpSrv(object):
|
||||
def periodic(self) -> None:
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
with self.u2mutex, self.mutex:
|
||||
self.u2fh.clean()
|
||||
if self.tp_q:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
@@ -232,15 +256,24 @@ class HttpSrv(object):
|
||||
return
|
||||
|
||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||
tcp = sck.family != socket.AF_UNIX
|
||||
|
||||
if self.args.j != 1:
|
||||
# lost in the pickle; redefine
|
||||
if not ANYWIN or self.args.reuseaddr:
|
||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
|
||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
if tcp:
|
||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
|
||||
sck.settimeout(None) # < does not inherit, ^ opts above do
|
||||
|
||||
ip, port = sck.getsockname()[:2]
|
||||
if tcp:
|
||||
ip, port = sck.getsockname()[:2]
|
||||
else:
|
||||
ip = re.sub(r"\.[0-9]+$", "", sck.getsockname().split("/")[-1])
|
||||
port = 0
|
||||
|
||||
self.srvs.append(sck)
|
||||
self.bound.add((ip, port))
|
||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||
@@ -252,16 +285,22 @@ class HttpSrv(object):
|
||||
|
||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||
"""listens on a shared tcp server"""
|
||||
ip, port = srv_sck.getsockname()[:2]
|
||||
fno = srv_sck.fileno()
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
if srv_sck.family == socket.AF_UNIX:
|
||||
ip = re.sub(r"\.[0-9]+$", "", srv_sck.getsockname())
|
||||
msg = "subscribed @ %s f%d p%d" % (ip, fno, os.getpid())
|
||||
ip = ip.split("/")[-1]
|
||||
port = 0
|
||||
tcp = False
|
||||
else:
|
||||
tcp = True
|
||||
ip, port = srv_sck.getsockname()[:2]
|
||||
hip = "[%s]" % (ip,) if ":" in ip else ip
|
||||
msg = "subscribed @ %s:%d f%d p%d" % (hip, port, fno, os.getpid())
|
||||
|
||||
self.log(self.name, msg)
|
||||
|
||||
def fun() -> None:
|
||||
self.broker.say("cb_httpsrv_up")
|
||||
|
||||
threading.Thread(target=fun, name="sig-hsrv-up1").start()
|
||||
Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
|
||||
|
||||
while not self.stopping:
|
||||
if self.args.log_conn:
|
||||
@@ -330,11 +369,13 @@ class HttpSrv(object):
|
||||
|
||||
try:
|
||||
sck, saddr = srv_sck.accept()
|
||||
cip, cport = saddr[:2]
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
|
||||
addr = (cip, cport)
|
||||
if tcp:
|
||||
cip = unicode(saddr[0])
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
addr = (cip, saddr[1])
|
||||
else:
|
||||
addr = ("127.8.3.7", sck.fileno())
|
||||
except (OSError, socket.error) as ex:
|
||||
if self.stopping:
|
||||
break
|
||||
@@ -366,7 +407,7 @@ class HttpSrv(object):
|
||||
if not self.t_periodic:
|
||||
name = "hsrv-pt"
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
name += "-%d" % (self.nid,)
|
||||
|
||||
self.t_periodic = Daemon(self.periodic, name)
|
||||
|
||||
@@ -385,7 +426,7 @@ class HttpSrv(object):
|
||||
|
||||
Daemon(
|
||||
self.thr_client,
|
||||
"httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
|
||||
"httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1]),
|
||||
(sck, addr),
|
||||
)
|
||||
|
||||
@@ -402,9 +443,7 @@ class HttpSrv(object):
|
||||
try:
|
||||
sck, addr = task
|
||||
me = threading.current_thread()
|
||||
me.name = "httpconn-{}-{}".format(
|
||||
addr[0].split(".", 2)[-1][-6:], addr[1]
|
||||
)
|
||||
me.name = "httpconn-%s-%d" % (addr[0].split(".", 2)[-1][-6:], addr[1])
|
||||
self.thr_client(sck, addr)
|
||||
me.name = self.name + "-poolw"
|
||||
except Exception as ex:
|
||||
|
||||
@@ -8,7 +8,7 @@ import re
|
||||
|
||||
from .__init__ import PY2
|
||||
from .th_srv import HAVE_PIL, HAVE_PILF
|
||||
from .util import BytesIO # type: ignore
|
||||
from .util import BytesIO, html_escape # type: ignore
|
||||
|
||||
|
||||
class Ico(object):
|
||||
@@ -27,14 +27,13 @@ class Ico(object):
|
||||
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
|
||||
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
||||
c = "".join(["{:02x}".format(x) for x in ci])
|
||||
c = "".join(["%02x" % (x,) for x in ci])
|
||||
|
||||
w = 100
|
||||
h = 30
|
||||
if not self.args.th_no_crop and as_thumb:
|
||||
if as_thumb:
|
||||
sw, sh = self.args.th_size.split("x")
|
||||
h = int(100 / (float(sw) / float(sh)))
|
||||
w = 100
|
||||
h = int(100.0 / (float(sw) / float(sh)))
|
||||
|
||||
if chrome:
|
||||
# cannot handle more than ~2000 unique SVGs
|
||||
@@ -47,12 +46,12 @@ class Ico(object):
|
||||
# [.lt] are hard to see lowercase / unspaced
|
||||
ext2 = re.sub("(.)", "\\1 ", ext).upper()
|
||||
|
||||
h = int(128 * h / w)
|
||||
h = int(128.0 * h / w)
|
||||
w = 128
|
||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||
pb = ImageDraw.Draw(img)
|
||||
_, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
|
||||
xy = ((w - tw) // 2, (h - th) // 2)
|
||||
xy = (int((w - tw) / 2), int((h - th) / 2))
|
||||
pb.text(xy, ext2, fill="#" + c[6:], font_size=16)
|
||||
|
||||
img = img.resize((w * 2, h * 2), Image.NEAREST)
|
||||
@@ -68,14 +67,14 @@ class Ico(object):
|
||||
# svg: 3s, cache: 6s, this: 8s
|
||||
from PIL import Image, ImageDraw
|
||||
|
||||
h = int(64 * h / w)
|
||||
h = int(64.0 * h / w)
|
||||
w = 64
|
||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||
pb = ImageDraw.Draw(img)
|
||||
try:
|
||||
_, _, tw, th = pb.textbbox((0, 0), ext)
|
||||
except:
|
||||
tw, th = pb.textsize(ext)
|
||||
tw, th = pb.textsize(ext) # type: ignore
|
||||
|
||||
tw += len(ext)
|
||||
cw = tw // len(ext)
|
||||
@@ -99,6 +98,6 @@ class Ico(object):
|
||||
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
|
||||
</g></svg>
|
||||
"""
|
||||
svg = svg.format(h, c[:6], c[6:], ext)
|
||||
svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))
|
||||
|
||||
return "image/svg+xml", svg.encode("utf-8")
|
||||
|
||||
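The Ico changes above keep deriving the icon colors from a byte of the filename hash via colorsys before rendering the SVG. As a small illustrative aside (not copyparty code; the helper name is invented), the hue-byte to hex-color step looks like this:

```python
import colorsys

def hsv_hex(hue_byte: int, sat: float, val: float) -> str:
    """map a 0-255 hue plus saturation/value to an rrggbb hex string,
    similar to the icon-color derivation above"""
    r, g, b = colorsys.hsv_to_rgb(hue_byte / 256.0, sat, val)
    return "".join("%02x" % (int(x * 255),) for x in (r, g, b))

# hsv_hex(0, 1, 0.3) -> '4c0000' (a dark red background color)
```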
@@ -292,6 +292,22 @@ class MDNS(MCast):
def run2(self) -> None:
last_hop = time.time()
ihop = self.args.mc_hop

try:
if self.args.no_poll:
raise Exception()
fd2sck = {}
srvpoll = select.poll()
for sck in self.srv:
fd = sck.fileno()
fd2sck[fd] = sck
srvpoll.register(fd, select.POLLIN)
except Exception as ex:
srvpoll = None
if not self.args.no_poll:
t = "WARNING: failed to poll(), will use select() instead: %r"
self.log(t % (ex,), 3)

while self.running:
timeout = (
0.02 + random.random() * 0.07
@@ -300,8 +316,13 @@ class MDNS(MCast):
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)
rx: list[socket.socket] = rdy[0] # type: ignore
if srvpoll:
pr = srvpoll.poll(timeout * 1000)
rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
else:
rdy = select.select(self.srv, [], [], timeout)
rx: list[socket.socket] = rdy[0] # type: ignore

self.rx4.cln()
self.rx6.cln()
buf = b""
@@ -340,7 +361,7 @@ class MDNS(MCast):
except:
pass

self.srv = {}
self.srv.clear()

def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
cip = addr[0]

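Both the mDNS responder above and the SSDP responder later in this diff now prefer select.poll() and fall back to select.select() when poll is unavailable or disabled with --no-poll. A self-contained sketch of that fallback pattern is below; the socket setup is illustrative only and not copyparty code.

```python
import select
import socket

def wait_readable(socks, timeout, use_poll=True):
    """return the sockets that become readable within timeout seconds,
    using poll() when possible and select() otherwise"""
    poller = None
    if use_poll and hasattr(select, "poll"):
        fd2sck = {s.fileno(): s for s in socks}
        poller = select.poll()
        for fd in fd2sck:
            poller.register(fd, select.POLLIN)

    if poller:
        # poll() takes its timeout in milliseconds
        events = poller.poll(timeout * 1000)
        return [fd2sck[fd] for fd, ev in events if ev & select.POLLIN]

    rdy, _, _ = select.select(socks, [], [], timeout)
    return rdy

if __name__ == "__main__":
    a, b = socket.socketpair()
    b.send(b"hi")
    print(wait_readable([a], 0.1))  # -> [a]
```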
@@ -179,7 +179,7 @@ class Metrics(object):
tnbytes = 0
tnfiles = 0
for vpath, vol in allvols:
cur = idx.get_cur(vol.realpath)
cur = idx.get_cur(vol)
if not cur:
continue

@@ -206,6 +206,9 @@ class Metrics(object):
try:
x = self.hsrv.broker.ask("up2k.get_unfinished")
xs = x.get()
if not xs:
raise Exception("up2k mutex acquisition timed out")

xj = json.loads(xs)
for ptop, (nbytes, nfiles) in xj.items():
tnbytes += nbytes

@@ -7,12 +7,15 @@ import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
REKOBO_LKEY,
|
||||
VF_CAREFUL,
|
||||
fsenc,
|
||||
min_ex,
|
||||
pybin,
|
||||
@@ -20,12 +23,24 @@ from .util import (
|
||||
runcmd,
|
||||
sfsenc,
|
||||
uncyg,
|
||||
wunlink,
|
||||
)
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
from .util import NamedLogger, RootLogger
|
||||
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_MUTAGEN"):
|
||||
raise Exception()
|
||||
|
||||
from mutagen import version # noqa: F401
|
||||
|
||||
HAVE_MUTAGEN = True
|
||||
except:
|
||||
HAVE_MUTAGEN = False
|
||||
|
||||
|
||||
def have_ff(scmd: str) -> bool:
|
||||
@@ -44,8 +59,8 @@ def have_ff(scmd: str) -> bool:
|
||||
return bool(shutil.which(scmd))
|
||||
|
||||
|
||||
HAVE_FFMPEG = have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = have_ff("ffprobe")
|
||||
HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
@@ -107,6 +122,56 @@ class MParser(object):
|
||||
raise Exception()
|
||||
|
||||
|
||||
def au_unpk(
|
||||
log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
|
||||
) -> str:
|
||||
ret = ""
|
||||
try:
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
au, pk = fmt_map[ext].split(".")
|
||||
|
||||
fd, ret = tempfile.mkstemp("." + au)
|
||||
|
||||
if pk == "gz":
|
||||
import gzip
|
||||
|
||||
fi = gzip.GzipFile(abspath, mode="rb")
|
||||
|
||||
elif pk == "xz":
|
||||
import lzma
|
||||
|
||||
fi = lzma.open(abspath, "rb")
|
||||
|
||||
elif pk == "zip":
|
||||
import zipfile
|
||||
|
||||
zf = zipfile.ZipFile(abspath, "r")
|
||||
zil = zf.infolist()
|
||||
zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
|
||||
fi = zf.open(zil[0])
|
||||
|
||||
else:
|
||||
raise Exception("unknown compression %s" % (pk,))
|
||||
|
||||
with os.fdopen(fd, "wb") as fo:
|
||||
while True:
|
||||
buf = fi.read(32768)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
fo.write(buf)
|
||||
|
||||
return ret
|
||||
|
||||
except Exception as ex:
|
||||
if ret:
|
||||
t = "failed to decompress audio file [%s]: %r"
|
||||
log(t % (abspath, ex))
|
||||
wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
|
||||
|
||||
return abspath
|
||||
|
||||
|
||||
def ffprobe(
|
||||
abspath: str, timeout: int = 60
|
||||
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
|
||||
@@ -118,7 +183,7 @@ def ffprobe(
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
rc, so, se = runcmd(cmd, timeout=timeout, nice=True)
|
||||
rc, so, se = runcmd(cmd, timeout=timeout, nice=True, oom=200)
|
||||
retchk(rc, cmd, se)
|
||||
return parse_ffprobe(so)
|
||||
|
||||
@@ -240,7 +305,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
|
||||
if "/" in fps:
|
||||
fa, fb = fps.split("/")
|
||||
try:
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
fps = float(fa) / float(fb)
|
||||
except:
|
||||
fps = 9001
|
||||
|
||||
@@ -281,16 +346,14 @@ class MTag(object):
|
||||
or_ffprobe = " or FFprobe"
|
||||
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
from mutagen import version # noqa: F401
|
||||
except:
|
||||
self._get = self.get_mutagen
|
||||
if not HAVE_MUTAGEN:
|
||||
self.log("could not load Mutagen, trying FFprobe instead", c=3)
|
||||
self.backend = "ffprobe"
|
||||
|
||||
if self.backend == "ffprobe":
|
||||
self.usable = self.can_ffprobe
|
||||
self.get = self.get_ffprobe
|
||||
self._get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
|
||||
if not HAVE_FFPROBE:
|
||||
@@ -460,6 +523,17 @@ class MTag(object):
|
||||
|
||||
return r1
|
||||
|
||||
def get(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
if ext not in self.args.au_unpk:
|
||||
return self._get(abspath)
|
||||
|
||||
ap = au_unpk(self.log, self.args.au_unpk, abspath)
|
||||
ret = self._get(ap)
|
||||
if ap != abspath:
|
||||
wunlink(self.log, ap, VF_CAREFUL)
|
||||
return ret
|
||||
|
||||
def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
ret: dict[str, tuple[int, Any]] = {}
|
||||
|
||||
@@ -513,7 +587,7 @@ class MTag(object):
|
||||
continue
|
||||
|
||||
if k == ".aq":
|
||||
v /= 1000
|
||||
v /= 1000 # type: ignore
|
||||
|
||||
if k == "ac" and v.startswith("mp4a.40."):
|
||||
v = "aac"
|
||||
@@ -551,19 +625,25 @@ class MTag(object):
|
||||
pypath = str(os.pathsep.join(zsl))
|
||||
env["PYTHONPATH"] = pypath
|
||||
except:
|
||||
if not E.ox and not EXE:
|
||||
raise
|
||||
raise # might be expected outside cpython
|
||||
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
if ext in self.args.au_unpk:
|
||||
ap = au_unpk(self.log, self.args.au_unpk, abspath)
|
||||
else:
|
||||
ap = abspath
|
||||
|
||||
ret: dict[str, Any] = {}
|
||||
for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
|
||||
try:
|
||||
cmd = [parser.bin, abspath]
|
||||
cmd = [parser.bin, ap]
|
||||
if parser.bin.endswith(".py"):
|
||||
cmd = [pybin] + cmd
|
||||
|
||||
args = {
|
||||
"env": env,
|
||||
"nice": True,
|
||||
"oom": 300,
|
||||
"timeout": parser.timeout,
|
||||
"kill": parser.kill,
|
||||
"capture": parser.capture,
|
||||
@@ -593,4 +673,7 @@ class MTag(object):
|
||||
t = "mtag error: tagname {}, parser {}, file {} => {}"
|
||||
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
|
||||
|
||||
if ap != abspath:
|
||||
wunlink(self.log, ap, VF_CAREFUL)
|
||||
|
||||
return ret
|
||||
|
||||
@@ -110,7 +110,7 @@ class MCast(object):
)

ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
ips = find_prefix(ips, netdevs)
ips = find_prefix(ips, list(netdevs))

on = self.on[:]
off = self.off[:]
@@ -206,6 +206,7 @@ class MCast(object):
except:
t = "announce failed on {} [{}]:\n{}"
self.log(t.format(netdev, ip, min_ex()), 3)
sck.close()

if self.args.zm_msub:
for s1 in self.srv.values():

@@ -4,11 +4,21 @@ from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import os
import sys
import threading

from .__init__ import unicode

try:
if os.environ.get("PRTY_NO_ARGON2"):
raise Exception()

HAVE_ARGON2 = True
from argon2 import __version__ as argon2ver
except:
HAVE_ARGON2 = False


class PWHash(object):
def __init__(self, args: argparse.Namespace):

@@ -127,7 +127,7 @@ class SMB(object):
|
||||
self.log("smb", msg, c)
|
||||
|
||||
def start(self) -> None:
|
||||
Daemon(self.srv.start)
|
||||
Daemon(self.srv.start, "smbd")
|
||||
|
||||
def _auth_cb(self, *a, **ka):
|
||||
debug("auth-result: %s %s", a, ka)
|
||||
@@ -187,6 +187,8 @@ class SMB(object):
|
||||
|
||||
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
|
||||
if not vfs.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
|
||||
@@ -195,6 +197,8 @@ class SMB(object):
|
||||
uname = self._uname()
|
||||
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
|
||||
if not vfs.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
_, vfs_ls, vfs_virt = vfs.ls(
|
||||
rem, uname, not self.args.no_scandir, [[False, False]]
|
||||
)
|
||||
@@ -240,7 +244,21 @@ class SMB(object):
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
|
||||
self.nlog,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.smb",
|
||||
xbu,
|
||||
ap,
|
||||
vpath,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"1.7.6.2",
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
|
||||
@@ -297,7 +315,7 @@ class SMB(object):
|
||||
t = "blocked rename (no-move-acc %s): /%s @%s"
|
||||
yeet(t % (vfs1.axs.umove, vp1, uname))
|
||||
|
||||
self.hub.up2k.handle_mv(uname, vp1, vp2)
|
||||
self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
|
||||
try:
|
||||
bos.makedirs(ap2)
|
||||
except:
|
||||
@@ -340,7 +358,7 @@ class SMB(object):
|
||||
yeet("blocked delete (no-del-acc): " + vpath)
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False)
|
||||
self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False, False)
|
||||
|
||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||
if not self.args.smbw:
|
||||
|
||||
@@ -5,11 +5,11 @@ import errno
|
||||
import re
|
||||
import select
|
||||
import socket
|
||||
from email.utils import formatdate
|
||||
import time
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .multicast import MC_Sck, MCast
|
||||
from .util import CachedSet, html_escape, min_ex
|
||||
from .util import CachedSet, formatdate, html_escape, min_ex
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .broker_util import BrokerCli
|
||||
@@ -141,9 +141,29 @@ class SSDPd(MCast):
|
||||
self.log("stopped", 2)
|
||||
|
||||
def run2(self) -> None:
|
||||
try:
|
||||
if self.args.no_poll:
|
||||
raise Exception()
|
||||
fd2sck = {}
|
||||
srvpoll = select.poll()
|
||||
for sck in self.srv:
|
||||
fd = sck.fileno()
|
||||
fd2sck[fd] = sck
|
||||
srvpoll.register(fd, select.POLLIN)
|
||||
except Exception as ex:
|
||||
srvpoll = None
|
||||
if not self.args.no_poll:
|
||||
t = "WARNING: failed to poll(), will use select() instead: %r"
|
||||
self.log(t % (ex,), 3)
|
||||
|
||||
while self.running:
|
||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
if srvpoll:
|
||||
pr = srvpoll.poll((self.args.z_chk or 180) * 1000)
|
||||
rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
|
||||
else:
|
||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
|
||||
self.rxc.cln()
|
||||
buf = b""
|
||||
addr = ("0", 0)
|
||||
@@ -168,7 +188,7 @@ class SSDPd(MCast):
|
||||
except:
|
||||
pass
|
||||
|
||||
self.srv = {}
|
||||
self.srv.clear()
|
||||
|
||||
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
||||
cip = addr[0]
|
||||
@@ -209,7 +229,7 @@ CONFIGID.UPNP.ORG: 1
|
||||
|
||||
"""
|
||||
v4 = srv.ip.replace("::ffff:", "")
|
||||
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
|
||||
zs = zs.format(formatdate(), v4, srv.hport, self.args.zsid)
|
||||
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
||||
srv.sck.sendto(zb, addr[:2])
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import tarfile
|
||||
|
||||
from queue import Queue
|
||||
|
||||
from .authsrv import AuthSrv
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import Daemon, fsenc, min_ex
|
||||
@@ -44,11 +45,12 @@ class StreamTar(StreamArc):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
asrv: AuthSrv,
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
cmp: str = "",
|
||||
**kwargs: Any
|
||||
):
|
||||
super(StreamTar, self).__init__(log, fgen)
|
||||
super(StreamTar, self).__init__(log, asrv, fgen)
|
||||
|
||||
self.ci = 0
|
||||
self.co = 0
|
||||
@@ -65,21 +67,21 @@ class StreamTar(StreamArc):
|
||||
cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
|
||||
|
||||
try:
|
||||
cmp, lv = cmp.replace(":", ",").split(",")
|
||||
lv = int(lv)
|
||||
cmp, zs = cmp.replace(":", ",").split(",")
|
||||
lv = int(zs)
|
||||
except:
|
||||
lv = None
|
||||
lv = -1
|
||||
|
||||
arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
|
||||
if cmp == "gz":
|
||||
fun = tarfile.TarFile.gzopen
|
||||
arg["compresslevel"] = lv if lv is not None else 3
|
||||
arg["compresslevel"] = lv if lv >= 0 else 3
|
||||
elif cmp == "bz2":
|
||||
fun = tarfile.TarFile.bz2open
|
||||
arg["compresslevel"] = lv if lv is not None else 2
|
||||
arg["compresslevel"] = lv if lv >= 0 else 2
|
||||
elif cmp == "xz":
|
||||
fun = tarfile.TarFile.xzopen
|
||||
arg["preset"] = lv if lv is not None else 1
|
||||
arg["preset"] = lv if lv >= 0 else 1
|
||||
else:
|
||||
fun = tarfile.open
|
||||
arg["mode"] = "w|"
|
||||
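The StreamTar hunk above now parses the requested compression as e.g. "gz" or "gz,3"/"gz:3", using -1 to mean "fall back to the codec default". A small sketch of that parsing follows; the function name and defaults table are made up for illustration.

```python
_DEF_LVL = {"gz": 3, "bz2": 2, "xz": 1}

def parse_cmp(cmp: str):
    """split 'gz:3' / 'gz,3' into (codec, level); use the codec default otherwise"""
    try:
        codec, zs = cmp.replace(":", ",").split(",")
        lv = int(zs)
    except ValueError:
        codec, lv = cmp, -1
    if lv < 0:
        lv = _DEF_LVL.get(codec, -1)
    return codec, lv

# parse_cmp("gz")   -> ("gz", 3)
# parse_cmp("xz,6") -> ("xz", 6)
# parse_cmp("gz:9") -> ("gz", 9)
```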
@@ -126,7 +128,7 @@ class StreamTar(StreamArc):
|
||||
inf.gid = 0
|
||||
|
||||
self.ci += inf.size
|
||||
with open(fsenc(src), "rb", 512 * 1024) as fo:
|
||||
with open(fsenc(src), "rb", self.args.iobuf) as fo:
|
||||
self.tar.addfile(inf, fo)
|
||||
|
||||
def _gen(self) -> None:
|
||||
@@ -146,7 +148,7 @@ class StreamTar(StreamArc):
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if errors:
|
||||
self.errf, txt = errdesc(errors)
|
||||
self.errf, txt = errdesc(self.asrv.vfs, errors)
|
||||
self.log("\n".join(([repr(self.errf)] + txt[1:])))
|
||||
self.ser(self.errf)
|
||||
|
||||
|
||||
@@ -12,6 +12,12 @@ from .label import DNSBuffer, DNSLabel
|
||||
from .ranges import IP4, IP6, H, I, check_bytes
|
||||
|
||||
|
||||
try:
|
||||
range = xrange
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class DNSError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@@ -11,7 +11,21 @@ import os
|
||||
|
||||
from ._shared import IP, Adapter
|
||||
|
||||
if os.name == "nt":
|
||||
|
||||
def nope(include_unconfigured=False):
|
||||
return []
|
||||
|
||||
|
||||
try:
|
||||
S390X = os.uname().machine == "s390x"
|
||||
except:
|
||||
S390X = False
|
||||
|
||||
|
||||
if os.environ.get("PRTY_NO_IFADDR") or S390X:
|
||||
# s390x deadlocks at libc.getifaddrs
|
||||
get_adapters = nope
|
||||
elif os.name == "nt":
|
||||
from ._win32 import get_adapters
|
||||
elif os.name == "posix":
|
||||
from ._posix import get_adapters
|
||||
|
||||
@@ -17,6 +17,7 @@ if not PY2:
|
||||
U: Callable[[str], str] = str
|
||||
else:
|
||||
U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||
range = xrange # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||
|
||||
|
||||
class Adapter(object):
|
||||
@@ -61,7 +62,7 @@ class Adapter(object):
|
||||
)
|
||||
|
||||
|
||||
if True:
|
||||
if True: # pylint: disable=using-constant-test
|
||||
# Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
|
||||
_IPv4Address = str
|
||||
|
||||
|
||||
@@ -16,6 +16,11 @@ if True: # pylint: disable=using-constant-test
|
||||
|
||||
from typing import Callable, List, Optional, Tuple, Union
|
||||
|
||||
try:
|
||||
range = xrange
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def num_char_count_bits(ver: int) -> int:
|
||||
return 16 if (ver + 7) // 17 else 8
|
||||
|
||||
@@ -6,9 +6,10 @@ import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import CORES
|
||||
from .authsrv import VFS, AuthSrv
|
||||
from .bos import bos
|
||||
from .th_cli import ThumbCli
|
||||
from .util import UTC, vjoin
|
||||
from .util import UTC, vjoin, vol_san
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Generator, Optional
|
||||
@@ -20,10 +21,13 @@ class StreamArc(object):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
asrv: AuthSrv,
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
**kwargs: Any
|
||||
):
|
||||
self.log = log
|
||||
self.asrv = asrv
|
||||
self.args = asrv.args
|
||||
self.fgen = fgen
|
||||
self.stopped = False
|
||||
|
||||
@@ -78,7 +82,9 @@ def enthumb(
|
||||
) -> dict[str, Any]:
|
||||
rem = f["vp"]
|
||||
ext = rem.rsplit(".", 1)[-1].lower()
|
||||
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
|
||||
if (fmt == "mp3" and ext == "mp3") or (
|
||||
fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|")
|
||||
):
|
||||
raise Exception()
|
||||
|
||||
vp = vjoin(vtop, rem.split("/", 1)[1])
|
||||
@@ -98,15 +104,20 @@ def enthumb(
|
||||
return f
|
||||
|
||||
|
||||
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
||||
def errdesc(
|
||||
vfs: VFS, errors: list[tuple[str, str]]
|
||||
) -> tuple[dict[str, Any], list[str]]:
|
||||
report = ["copyparty failed to add the following files to the archive:", ""]
|
||||
|
||||
for fn, err in errors:
|
||||
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
|
||||
|
||||
btxt = "\r\n".join(report).encode("utf-8", "replace")
|
||||
btxt = vol_san(list(vfs.all_vols.values()), btxt)
|
||||
|
||||
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
|
||||
tf_path = tf.name
|
||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
||||
tf.write(btxt)
|
||||
|
||||
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import calendar
|
||||
import errno
|
||||
import gzip
|
||||
import logging
|
||||
@@ -16,7 +15,7 @@ import string
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
|
||||
# from inspect import currentframe
|
||||
# print(currentframe().f_lineno)
|
||||
@@ -28,18 +27,30 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
||||
from .authsrv import BAD_CFG, AuthSrv
|
||||
from .cert import ensure_cert
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
||||
from .pwhash import HAVE_ARGON2
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .th_srv import (
|
||||
HAVE_AVIF,
|
||||
HAVE_FFMPEG,
|
||||
HAVE_FFPROBE,
|
||||
HAVE_HEIF,
|
||||
HAVE_PIL,
|
||||
HAVE_VIPS,
|
||||
HAVE_WEBP,
|
||||
ThumbSrv,
|
||||
)
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
FFMPEG_URL,
|
||||
HAVE_PSUTIL,
|
||||
HAVE_SQLITE3,
|
||||
UTC,
|
||||
VERSIONS,
|
||||
Daemon,
|
||||
@@ -49,6 +60,7 @@ from .util import (
|
||||
ODict,
|
||||
alltrace,
|
||||
ansi_re,
|
||||
build_netmap,
|
||||
min_ex,
|
||||
mp,
|
||||
odfusion,
|
||||
@@ -64,6 +76,9 @@ if TYPE_CHECKING:
|
||||
except:
|
||||
pass
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
"""
|
||||
@@ -88,20 +103,23 @@ class SvcHub(object):
|
||||
self.argv = argv
|
||||
self.E: EnvParams = args.E
|
||||
self.no_ansi = args.no_ansi
|
||||
self.tz = UTC if args.log_utc else None
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.is_dut = False # running in unittest; always False
|
||||
self.stop_req = False
|
||||
self.stopping = False
|
||||
self.stopped = False
|
||||
self.reload_req = False
|
||||
self.reloading = False
|
||||
self.reloading = 0
|
||||
self.stop_cond = threading.Condition()
|
||||
self.nsigs = 3
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.log_mutex = threading.Lock()
|
||||
self.next_day = 0
|
||||
self.cday = 0
|
||||
self.cmon = 0
|
||||
self.tstack = 0.0
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
@@ -133,7 +151,7 @@ class SvcHub(object):
|
||||
if not self._process_config():
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
# for non-http clients (ftp, tftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
@@ -154,6 +172,8 @@ class SvcHub(object):
|
||||
lg.handlers = [lh]
|
||||
lg.setLevel(logging.DEBUG)
|
||||
|
||||
self._check_env()
|
||||
|
||||
if args.stackmon:
|
||||
start_stackmon(args.stackmon, 0)
|
||||
|
||||
@@ -170,6 +190,43 @@ class SvcHub(object):
|
||||
self.log("root", t.format(args.j), c=3)
|
||||
args.no_fpool = True
|
||||
|
||||
for name, arg in (
|
||||
("iobuf", "iobuf"),
|
||||
("s-rd-sz", "s_rd_sz"),
|
||||
("s-wr-sz", "s_wr_sz"),
|
||||
):
|
||||
zi = getattr(args, arg)
|
||||
if zi < 32768:
|
||||
t = "WARNING: expect very poor performance because you specified a very low value (%d) for --%s"
|
||||
self.log("root", t % (zi, name), 3)
|
||||
zi = 2
|
||||
zi2 = 2 ** (zi - 1).bit_length()
|
||||
if zi != zi2:
|
||||
zi3 = 2 ** ((zi - 1).bit_length() - 1)
|
||||
t = "WARNING: expect poor performance because --%s is not a power-of-two; consider using %d or %d instead of %d"
|
||||
self.log("root", t % (name, zi2, zi3, zi), 3)
|
||||
|
||||
if args.s_rd_sz > args.iobuf:
|
||||
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
||||
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
||||
|
||||
if args.chpw and args.idp_h_usr:
|
||||
t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
|
||||
self.log("root", t, 1)
|
||||
raise Exception(t)
|
||||
|
||||
noch = set()
|
||||
for zs in args.chpw_no or []:
|
||||
zsl = [x.strip() for x in zs.split(",")]
|
||||
noch.update([x for x in zsl if x])
|
||||
args.chpw_no = noch
|
||||
|
||||
if not self.args.no_ses:
|
||||
self.setup_session_db()
|
||||
|
||||
if args.shr:
|
||||
self.setup_share_db()
|
||||
|
||||
bri = "zy"[args.theme % 2 :][:1]
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
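The startup hunk above warns when --iobuf, --s-rd-sz or --s-wr-sz are very small or not powers of two, and suggests the nearest power-of-two values via bit_length(). A worked standalone example of that rounding (not copyparty code, names invented):

```python
def pow2_neighbors(n: int):
    """next power of two at or above n, and the one below it"""
    up = 2 ** (n - 1).bit_length()
    down = up // 2
    return up, down

# pow2_neighbors(49152) -> (65536, 32768): 49152 is not a power of two,
# so the warning suggests 65536 or 32768 instead
# pow2_neighbors(65536) -> (65536, 32768): already a power of two (up == n)
```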
@@ -209,6 +266,8 @@ class SvcHub(object):
|
||||
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
self._feature_test()
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
if not HAVE_VIPS:
|
||||
decs.pop("vips", None)
|
||||
@@ -217,6 +276,10 @@ class SvcHub(object):
|
||||
if not HAVE_FFMPEG or not HAVE_FFPROBE:
|
||||
decs.pop("ff", None)
|
||||
|
||||
# compressed formats; "s3z=s3m.zip, s3gz=s3m.gz, ..."
|
||||
zlss = [x.strip().lower().split("=", 1) for x in args.au_unpk.split(",")]
|
||||
args.au_unpk = {x[0]: x[1] for x in zlss}
|
||||
|
||||
self.args.th_dec = list(decs.keys())
|
||||
self.thumbsrv = None
|
||||
want_ff = False
|
||||
@@ -253,6 +316,13 @@ class SvcHub(object):
|
||||
if want_ff and ANYWIN:
|
||||
self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
|
||||
|
||||
if not args.no_acode:
|
||||
if not re.match("^(0|[qv][0-9]|[0-9]{2,3}k)$", args.q_mp3.lower()):
|
||||
t = "invalid mp3 transcoding quality [%s] specified; only supports [0] to disable, a CBR value such as [192k], or a CQ/CRF value such as [v2]"
|
||||
raise Exception(t % (args.q_mp3,))
|
||||
else:
|
||||
args.au_unpk = {}
|
||||
|
||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||
|
||||
zms = ""
|
||||
@@ -265,9 +335,16 @@ class SvcHub(object):
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd: Optional[Ftpd] = None
|
||||
Daemon(self.start_ftpd, "start_ftpd")
|
||||
zms += "f" if args.ftp else "F"
|
||||
|
||||
if args.tftp:
|
||||
from .tftpd import Tftpd
|
||||
|
||||
self.tftpd: Optional[Tftpd] = None
|
||||
|
||||
if args.ftp or args.ftps or args.tftp:
|
||||
Daemon(self.start_ftpd, "start_tftpd")
|
||||
|
||||
if args.smb:
|
||||
# impacket.dcerpc is noisy about listen timeouts
|
||||
sto = socket.getdefaulttimeout()
|
||||
@@ -295,12 +372,159 @@ class SvcHub(object):
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
if self.ftpd:
|
||||
def setup_session_db(self) -> None:
|
||||
if not HAVE_SQLITE3:
|
||||
self.args.no_ses = True
|
||||
t = "WARNING: sqlite3 not available; disabling sessions, will use plaintext passwords in cookies"
|
||||
self.log("root", t, 3)
|
||||
return
|
||||
|
||||
self.restart_ftpd()
|
||||
import sqlite3
|
||||
|
||||
create = True
|
||||
db_path = self.args.ses_db
|
||||
self.log("root", "opening sessions-db %s" % (db_path,))
|
||||
for n in range(2):
|
||||
try:
|
||||
db = sqlite3.connect(db_path)
|
||||
cur = db.cursor()
|
||||
try:
|
||||
cur.execute("select count(*) from us").fetchone()
|
||||
create = False
|
||||
break
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
if n:
|
||||
raise
|
||||
t = "sessions-db corrupt; deleting and recreating: %r"
|
||||
self.log("root", t % (ex,), 3)
|
||||
try:
|
||||
cur.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
db.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
os.unlink(db_path)
|
||||
|
||||
sch = [
|
||||
r"create table kv (k text, v int)",
|
||||
r"create table us (un text, si text, t0 int)",
|
||||
# username, session-id, creation-time
|
||||
r"create index us_un on us(un)",
|
||||
r"create index us_si on us(si)",
|
||||
r"create index us_t0 on us(t0)",
|
||||
r"insert into kv values ('sver', 1)",
|
||||
]
|
||||
|
||||
assert db # type: ignore
|
||||
assert cur # type: ignore
|
||||
if create:
|
||||
for cmd in sch:
|
||||
cur.execute(cmd)
|
||||
self.log("root", "created new sessions-db")
|
||||
db.commit()
|
||||
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
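setup_session_db above (and setup_share_db just below) follow the same open-or-recreate pattern: probe the expected table, create the schema if it is missing, and if the file turns out to be corrupt, delete it and rebuild on a second attempt. A reduced sketch of that pattern is shown here under assumed names; the real code also tracks schema versions and uses broader exception handling.

```python
import os
import sqlite3

SCHEMA = [
    "create table kv (k text, v int)",
    "create table us (un text, si text, t0 int)",
    "insert into kv values ('sver', 1)",
]

def open_or_recreate(db_path: str) -> sqlite3.Connection:
    """open db_path; add the schema if missing; if the file is corrupt,
    delete it and rebuild from scratch on the second attempt"""
    for attempt in range(2):
        db = sqlite3.connect(db_path)
        cur = db.cursor()
        try:
            cur.execute("select count(*) from us").fetchone()
            return db  # schema already present
        except sqlite3.OperationalError:
            # fresh or empty file: just create the schema
            for cmd in SCHEMA:
                cur.execute(cmd)
            db.commit()
            return db
        except sqlite3.DatabaseError:
            # corrupt container: close, delete, retry once
            if attempt:
                raise
            db.close()
            os.unlink(db_path)
    raise RuntimeError("unreachable")
```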
def setup_share_db(self) -> None:
|
||||
al = self.args
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("root", "sqlite3 not available; disabling --shr", 1)
|
||||
al.shr = ""
|
||||
return
|
||||
|
||||
import sqlite3
|
||||
|
||||
al.shr = al.shr.strip("/")
|
||||
if "/" in al.shr or not al.shr:
|
||||
t = "config error: --shr must be the name of a virtual toplevel directory to put shares inside"
|
||||
self.log("root", t, 1)
|
||||
raise Exception(t)
|
||||
|
||||
al.shr = "/%s/" % (al.shr,)
|
||||
|
||||
create = True
|
||||
modified = False
|
||||
db_path = self.args.shr_db
|
||||
self.log("root", "opening shares-db %s" % (db_path,))
|
||||
for n in range(2):
|
||||
try:
|
||||
db = sqlite3.connect(db_path)
|
||||
cur = db.cursor()
|
||||
try:
|
||||
cur.execute("select count(*) from sh").fetchone()
|
||||
create = False
|
||||
break
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
if n:
|
||||
raise
|
||||
t = "shares-db corrupt; deleting and recreating: %r"
|
||||
self.log("root", t % (ex,), 3)
|
||||
try:
|
||||
cur.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
db.close() # type: ignore
|
||||
except:
|
||||
pass
|
||||
os.unlink(db_path)
|
||||
|
||||
sch1 = [
|
||||
r"create table kv (k text, v int)",
|
||||
r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)",
|
||||
# sharekey, password, src, perms, numFiles, owner, created, expires
|
||||
]
|
||||
sch2 = [
|
||||
r"create table sf (k text, vp text)",
|
||||
r"create index sf_k on sf(k)",
|
||||
r"create index sh_k on sh(k)",
|
||||
r"create index sh_t1 on sh(t1)",
|
||||
]
|
||||
|
||||
assert db # type: ignore
|
||||
assert cur # type: ignore
|
||||
if create:
|
||||
dver = 2
|
||||
modified = True
|
||||
for cmd in sch1 + sch2:
|
||||
cur.execute(cmd)
|
||||
self.log("root", "created new shares-db")
|
||||
else:
|
||||
(dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]
|
||||
|
||||
if dver == 1:
|
||||
modified = True
|
||||
for cmd in sch2:
|
||||
cur.execute(cmd)
|
||||
cur.execute("update sh set st = 0")
|
||||
self.log("root", "shares-db schema upgrade ok")
|
||||
|
||||
if modified:
|
||||
for cmd in [
|
||||
r"delete from kv where k = 'sver'",
|
||||
r"insert into kv values ('sver', %d)" % (2,),
|
||||
]:
|
||||
cur.execute(cmd)
|
||||
db.commit()
|
||||
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
|
||||
if hasattr(self, "ftpd") and not self.ftpd:
|
||||
self.restart_ftpd()
|
||||
|
||||
if hasattr(self, "tftpd") and not self.tftpd:
|
||||
self.restart_tftpd()
|
||||
|
||||
def restart_ftpd(self) -> None:
|
||||
if not hasattr(self, "ftpd"):
|
||||
@@ -317,6 +541,17 @@ class SvcHub(object):
|
||||
self.ftpd = Ftpd(self)
|
||||
self.log("root", "started FTPd")
|
||||
|
||||
def restart_tftpd(self) -> None:
|
||||
if not hasattr(self, "tftpd"):
|
||||
return
|
||||
|
||||
from .tftpd import Tftpd
|
||||
|
||||
if self.tftpd:
|
||||
return # todo
|
||||
|
||||
self.tftpd = Tftpd(self)
|
||||
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
@@ -341,7 +576,7 @@ class SvcHub(object):
|
||||
self.sigterm()
|
||||
|
||||
def sigterm(self) -> None:
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
self.signal_handler(signal.SIGTERM, None)
|
||||
|
||||
def cb_httpsrv_up(self) -> None:
|
||||
self.httpsrv_up += 1
|
||||
@@ -366,6 +601,75 @@ class SvcHub(object):
|
||||
|
||||
Daemon(self.sd_notify, "sd-notify")
|
||||
|
||||
def _feature_test(self) -> None:
|
||||
fok = []
|
||||
fng = []
|
||||
t_ff = "transcode audio, create spectrograms, video thumbnails"
|
||||
to_check = [
|
||||
(HAVE_SQLITE3, "sqlite", "sessions and file/media indexing"),
|
||||
(HAVE_PIL, "pillow", "image thumbnails (plenty fast)"),
|
||||
(HAVE_VIPS, "vips", "image thumbnails (faster, eats more ram)"),
|
||||
(HAVE_WEBP, "pillow-webp", "create thumbnails as webp files"),
|
||||
(HAVE_FFMPEG, "ffmpeg", t_ff + ", good-but-slow image thumbnails"),
|
||||
(HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
|
||||
(HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
|
||||
(HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
|
||||
(HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
|
||||
(HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
|
||||
]
|
||||
if ANYWIN:
|
||||
to_check += [
|
||||
(HAVE_PSUTIL, "psutil", "improved plugin cleanup (rarely useful)")
|
||||
]
|
||||
|
||||
verbose = self.args.deps
|
||||
if verbose:
|
||||
self.log("dependencies", "")
|
||||
|
||||
for have, feat, what in to_check:
|
||||
lst = fok if have else fng
|
||||
lst.append((feat, what))
|
||||
if verbose:
|
||||
zi = 2 if have else 5
|
||||
sgot = "found" if have else "missing"
|
||||
t = "%7s: %s \033[36m(%s)"
|
||||
self.log("dependencies", t % (sgot, feat, what), zi)
|
||||
|
||||
if verbose:
|
||||
self.log("dependencies", "")
|
||||
return
|
||||
|
||||
sok = ", ".join(x[0] for x in fok)
|
||||
sng = ", ".join(x[0] for x in fng)
|
||||
|
||||
t = ""
|
||||
if sok:
|
||||
t += "OK: \033[32m" + sok
|
||||
if sng:
|
||||
if t:
|
||||
t += ", "
|
||||
t += "\033[0mNG: \033[35m" + sng
|
||||
|
||||
t += "\033[0m, see --deps"
|
||||
self.log("dependencies", t, 6)
|
||||
|
||||
def _check_env(self) -> None:
|
||||
try:
|
||||
files = os.listdir(E.cfg)
|
||||
except:
|
||||
files = []
|
||||
|
||||
hits = [x for x in files if x.lower().endswith(".conf")]
|
||||
if hits:
|
||||
t = "WARNING: found config files in [%s]: %s\n config files are not expected here, and will NOT be loaded (unless your setup is intentionally hella funky)"
|
||||
self.log("root", t % (E.cfg, ", ".join(hits)), 3)
|
||||
|
||||
if self.args.no_bauth:
|
||||
t = "WARNING: --no-bauth disables support for the Android app; you may want to use --bauth-last instead"
|
||||
self.log("root", t, 3)
|
||||
if self.args.bauth_last:
|
||||
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
||||
|
||||
def _process_config(self) -> bool:
|
||||
al = self.args
|
||||
|
||||
@@ -405,7 +709,13 @@ class SvcHub(object):
|
||||
if al.rsp_jtr:
|
||||
al.rsp_slp = 0.000001
|
||||
|
||||
al.th_covers = set(al.th_covers.split(","))
|
||||
zsl = al.th_covers.split(",")
|
||||
zsl = [x.strip() for x in zsl]
|
||||
zsl = [x for x in zsl if x]
|
||||
al.th_covers = zsl
|
||||
al.th_coversd = zsl + ["." + x for x in zsl]
|
||||
al.th_covers_set = set(al.th_covers)
|
||||
al.th_coversd_set = set(al.th_coversd)
|
||||
|
||||
for k in "c".split(" "):
|
||||
vl = getattr(al, k)
|
||||
@@ -428,14 +738,25 @@ class SvcHub(object):
|
||||
else:
|
||||
setattr(al, k, re.compile(vs))
|
||||
|
||||
for k in "tftp_lsf".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if not vs or vs == "no":
|
||||
setattr(al, k, None)
|
||||
else:
|
||||
setattr(al, k, re.compile("^" + vs + "$"))
|
||||
|
||||
if not al.sus_urls:
|
||||
al.ban_url = "no"
|
||||
elif al.ban_url == "no":
|
||||
al.sus_urls = None
|
||||
|
||||
al.xff_re = self._ipa2re(al.xff_src)
|
||||
al.ipa_re = self._ipa2re(al.ipa)
|
||||
al.ftp_ipa_re = self._ipa2re(al.ftp_ipa or al.ipa)
|
||||
al.xff_hdr = al.xff_hdr.lower()
|
||||
al.idp_h_usr = al.idp_h_usr.lower()
|
||||
al.idp_h_grp = al.idp_h_grp.lower()
|
||||
al.idp_h_key = al.idp_h_key.lower()
|
||||
|
||||
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
|
||||
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
|
||||
|
||||
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||
al.mte = odfusion(mte, al.mte)
|
||||
@@ -447,17 +768,47 @@ class SvcHub(object):
|
||||
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||
|
||||
for k in ["no_hash", "no_idx"]:
|
||||
for k in ["no_hash", "no_idx", "og_ua"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if ptn:
|
||||
setattr(self.args, k, re.compile(ptn))
|
||||
|
||||
for k in ["idp_gsep"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if "]" in ptn:
|
||||
ptn = "]" + ptn.replace("]", "")
|
||||
if "[" in ptn:
|
||||
ptn = ptn.replace("[", "") + "["
|
||||
if "-" in ptn:
|
||||
ptn = ptn.replace("-", "") + "-"
|
||||
|
||||
ptn = ptn.replace("\\", "\\\\").replace("^", "\\^")
|
||||
setattr(self.args, k, re.compile("[%s]" % (ptn,)))
|
||||
|
||||
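As a rough standalone sketch of the character-class sanitizing in the `idp_gsep` loop above (same logic, just pulled out of `_process_config`): "]", "[" and "-" are moved to positions where they are literal inside a regex character class, and "\" / "^" are escaped, so arbitrary separator strings cannot break out of the class.

```python
import re

def gsep_re(ptn):
    # move/escape the characters that are special inside [...]
    if "]" in ptn:
        ptn = "]" + ptn.replace("]", "")
    if "[" in ptn:
        ptn = ptn.replace("[", "") + "["
    if "-" in ptn:
        ptn = ptn.replace("-", "") + "-"
    ptn = ptn.replace("\\", "\\\\").replace("^", "\\^")
    return re.compile("[%s]" % (ptn,))

r = gsep_re("],;|")
print(r.pattern)                    # [],;|]
print(r.split("grp1;grp2,grp3"))    # ['grp1', 'grp2', 'grp3']
```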
try:
|
||||
zf1, zf2 = self.args.rm_retry.split("/")
|
||||
self.args.rm_re_t = float(zf1)
|
||||
self.args.rm_re_r = float(zf2)
|
||||
except:
|
||||
raise Exception("invalid --rm-retry [%s]" % (self.args.rm_retry,))
|
||||
|
||||
try:
|
||||
zf1, zf2 = self.args.mv_retry.split("/")
|
||||
self.args.mv_re_t = float(zf1)
|
||||
self.args.mv_re_r = float(zf2)
|
||||
except:
|
||||
raise Exception("invalid --mv-retry [%s]" % (self.args.mv_retry,))
|
||||
|
||||
al.tcolor = al.tcolor.lstrip("#")
|
||||
if len(al.tcolor) == 3: # fc5 => ffcc55
|
||||
al.tcolor = "".join([x * 2 for x in al.tcolor])
|
||||
|
||||
return True
|
||||
|
||||
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||
if txt in ("any", "0", ""):
|
||||
return None
|
||||
|
||||
|
||||
zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
|
||||
return re.compile("^(?:" + zs + ")")
|
||||
|
||||
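A minimal standalone version of the same prefix-to-regex idea, showing what `_ipa2re` produces for a typical `--ipa` value (the sample addresses are made up):

```python
import re

def ipa2re(txt):
    if txt in ("any", "0", ""):
        return None  # no filtering
    # "10.89., 192.168." -> ^(?:10\.89\.|192\.168\.)  (plain prefix match)
    zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
    return re.compile("^(?:" + zs + ")")

r = ipa2re("10.89., 192.168.")
print(bool(r.match("10.89.4.2")), bool(r.match("10.8.9.4")))  # True False
```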
@@ -466,7 +817,7 @@ class SvcHub(object):
|
||||
import resource
|
||||
|
||||
soft, hard = [
|
||||
x if x > 0 else 1024 * 1024
|
||||
int(x) if x > 0 else 1024 * 1024
|
||||
for x in list(resource.getrlimit(resource.RLIMIT_NOFILE))
|
||||
]
|
||||
except:
|
||||
@@ -502,7 +853,7 @@ class SvcHub(object):
|
||||
self.args.nc = min(self.args.nc, soft // 2)
|
||||
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.now(UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
@@ -620,21 +971,45 @@ class SvcHub(object):
|
||||
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
||||
|
||||
def reload(self) -> str:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
with self.up2k.mutex:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
self.reloading = 1
|
||||
|
||||
self.reloading = True
|
||||
Daemon(self._reload, "reloading")
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self) -> None:
|
||||
self.log("root", "reload scheduled")
|
||||
def _reload(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
|
||||
with self.up2k.mutex:
|
||||
self.asrv.reload()
|
||||
self.up2k.reload()
|
||||
if self.reloading != 1:
|
||||
return
|
||||
self.reloading = 2
|
||||
self.log("root", "reloading config")
|
||||
self.asrv.reload(9 if up2k else 4)
|
||||
if up2k:
|
||||
self.up2k.reload(rescan_all_vols)
|
||||
else:
|
||||
self.log("root", "reload done")
|
||||
self.broker.reload()
|
||||
self.reloading = 0
|
||||
|
||||
self.reloading = False
|
||||
def _reload_blocking(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
|
||||
while True:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading < 2:
|
||||
self.reloading = 1
|
||||
break
|
||||
time.sleep(0.05)
|
||||
|
||||
# try to handle multiple pending IdP reloads at once:
|
||||
time.sleep(0.2)
|
||||
|
||||
self._reload(rescan_all_vols=rescan_all_vols, up2k=up2k)
|
||||
|
||||
def _reload_sessions(self) -> None:
|
||||
with self.asrv.mutex:
|
||||
self.asrv.load_sessions(True)
|
||||
self.broker.reload_sessions()
|
||||
|
||||
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
@@ -756,12 +1131,12 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
zd = datetime.now(UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
ts = self.log_dfmt % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
dt.year,
|
||||
dt.month * 100 + dt.day,
|
||||
(dt.hour * 100 + dt.minute) * 100 + dt.second,
|
||||
dt.microsecond // self.log_div,
|
||||
)
|
||||
|
||||
if c and not self.args.no_ansi:
|
||||
@@ -778,45 +1153,30 @@ class SvcHub(object):
|
||||
if "\033" in msg:
|
||||
msg += "\033[0m"
|
||||
|
||||
self.logf.write("@%s [%s] %s\n" % (ts, src, msg))
|
||||
self.logf.write("@%s [%-21s] %s\n" % (ts, src, msg))
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
self._set_next_day()
|
||||
if dt.day != self.cday or dt.month != self.cmon:
|
||||
self._set_next_day(dt)
|
||||
|
||||
def _set_next_day(self) -> None:
|
||||
if self.next_day and self.logf and self.logf_base_fn != self._logname():
|
||||
def _set_next_day(self, dt: datetime) -> None:
|
||||
if self.cday and self.logf and self.logf_base_fn != self._logname():
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.now(UTC)
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
while dt.day == day_now:
|
||||
dt += timedelta(hours=12)
|
||||
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
try:
|
||||
tt = dt.utctimetuple()
|
||||
except:
|
||||
# still makes me hella uncomfortable
|
||||
tt = dt.timetuple()
|
||||
|
||||
self.next_day = calendar.timegm(tt)
|
||||
self.cday = dt.day
|
||||
self.cmon = dt.month
|
||||
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.fromtimestamp(now, UTC)
|
||||
dt = datetime.now(self.tz)
|
||||
if dt.day != self.cday or dt.month != self.cmon:
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
self._set_next_day()
|
||||
self._set_next_day(dt)
|
||||
if self.logf:
|
||||
self.logf.write(zs)
|
||||
|
||||
@@ -835,12 +1195,11 @@ class SvcHub(object):
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
zd = datetime.fromtimestamp(now, UTC)
|
||||
ts = self.log_efmt % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
dt.hour,
|
||||
dt.minute,
|
||||
dt.second,
|
||||
dt.microsecond // self.log_div,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
|
||||
@@ -6,6 +6,7 @@ import stat
|
||||
import time
|
||||
import zlib
|
||||
|
||||
from .authsrv import AuthSrv
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
|
||||
@@ -36,9 +37,7 @@ def dostime2unix(buf: bytes) -> int:
|
||||
|
||||
|
||||
def unixtime2dos(ts: int) -> bytes:
|
||||
tt = time.gmtime(ts + 1)
|
||||
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
||||
|
||||
dy, dm, dd, th, tm, ts, _, _, _ = time.gmtime(ts + 1)
|
||||
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||
bt = (th << 11) + (tm << 5) + ts // 2
|
||||
try:
|
||||
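A quick worked example of the DOS date/time packing used by `unixtime2dos` above: the date word is `(year-1980)<<9 | month<<5 | day`, and the time word is `hour<<11 | minute<<5 | seconds//2` (2-second resolution).

```python
import time

# 1700000000 is 2023-11-14 22:13:20 UTC; unixtime2dos adds 1 second before packing
dy, dm, dd, th, tm, ts = time.gmtime(1700000000 + 1)[:6]
bd = ((dy - 1980) << 9) + (dm << 5) + dd   # 0x576e
bt = (th << 11) + (tm << 5) + ts // 2      # 0xb1aa (21s rounds down to 20)
print(hex(bd), hex(bt))
```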
@@ -218,12 +217,13 @@ class StreamZip(StreamArc):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
asrv: AuthSrv,
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
**kwargs: Any
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
super(StreamZip, self).__init__(log, asrv, fgen)
|
||||
|
||||
self.utf8 = utf8
|
||||
self.pre_crc = pre_crc
|
||||
@@ -248,7 +248,7 @@ class StreamZip(StreamArc):
|
||||
|
||||
crc = 0
|
||||
if self.pre_crc:
|
||||
for buf in yieldfile(src):
|
||||
for buf in yieldfile(src, self.args.iobuf):
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
crc &= 0xFFFFFFFF
|
||||
@@ -257,7 +257,7 @@ class StreamZip(StreamArc):
|
||||
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
|
||||
for buf in yieldfile(src):
|
||||
for buf in yieldfile(src, self.args.iobuf):
|
||||
if not self.pre_crc:
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
@@ -300,7 +300,7 @@ class StreamZip(StreamArc):
|
||||
mbuf = b""
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
errf, txt = errdesc(self.asrv.vfs, errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
|
||||
@@ -15,19 +15,25 @@ from .util import (
|
||||
E_ADDR_IN_USE,
|
||||
E_ADDR_NOT_AVAIL,
|
||||
E_UNREACH,
|
||||
HAVE_IPV6,
|
||||
IP6ALL,
|
||||
VF_CAREFUL,
|
||||
Netdev,
|
||||
atomic_move,
|
||||
min_ex,
|
||||
sunpack,
|
||||
termsize,
|
||||
)
|
||||
|
||||
if True:
|
||||
from typing import Generator
|
||||
from typing import Generator, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
setattr(socket, "AF_UNIX", -9001)
|
||||
|
||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||
setattr(socket, "IPPROTO_IPV6", 41)
|
||||
|
||||
@@ -111,8 +117,10 @@ class TcpSrv(object):
|
||||
|
||||
eps = {
|
||||
"127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
|
||||
"::1": Netdev("::1", 0, "", "local only"),
|
||||
}
|
||||
if HAVE_IPV6:
|
||||
eps["::1"] = Netdev("::1", 0, "", "local only")
|
||||
|
||||
nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
|
||||
if nonlocals:
|
||||
try:
|
||||
@@ -214,14 +222,41 @@ class TcpSrv(object):
|
||||
if self.args.qr or self.args.qrs:
|
||||
self.qr = self._qr(qr1, qr2)
|
||||
|
||||
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("tcpsrv", msg, c)
|
||||
|
||||
def _listen(self, ip: str, port: int) -> None:
|
||||
ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
||||
uds_perm = uds_gid = -1
|
||||
if "unix:" in ip:
|
||||
tcp = False
|
||||
ipv = socket.AF_UNIX
|
||||
uds = ip.split(":")
|
||||
ip = uds[-1]
|
||||
if len(uds) > 2:
|
||||
uds_perm = int(uds[1], 8)
|
||||
if len(uds) > 3:
|
||||
try:
|
||||
uds_gid = int(uds[2])
|
||||
except:
|
||||
import grp
|
||||
|
||||
uds_gid = grp.getgrnam(uds[2]).gr_gid
|
||||
|
||||
elif ":" in ip:
|
||||
tcp = True
|
||||
ipv = socket.AF_INET6
|
||||
else:
|
||||
tcp = True
|
||||
ipv = socket.AF_INET
|
||||
|
||||
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
||||
|
||||
if not ANYWIN or self.args.reuseaddr:
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
if tcp:
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
|
||||
srv.settimeout(None) # < does not inherit, ^ opts above do
|
||||
|
||||
try:
|
||||
@@ -233,20 +268,53 @@ class TcpSrv(object):
|
||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
sport = srv.getsockname()[1]
|
||||
if tcp:
|
||||
srv.bind((ip, port))
|
||||
else:
|
||||
if ANYWIN or self.args.rm_sck:
|
||||
if os.path.exists(ip):
|
||||
os.unlink(ip)
|
||||
srv.bind(ip)
|
||||
else:
|
||||
tf = "%s.%d" % (ip, os.getpid())
|
||||
if os.path.exists(tf):
|
||||
os.unlink(tf)
|
||||
srv.bind(tf)
|
||||
if uds_gid != -1:
|
||||
os.chown(tf, -1, uds_gid)
|
||||
if uds_perm != -1:
|
||||
os.chmod(tf, uds_perm)
|
||||
atomic_move(self.nlog, tf, ip, VF_CAREFUL)
|
||||
|
||||
sport = srv.getsockname()[1] if tcp else port
|
||||
if port != sport:
|
||||
# linux 6.0.16 lets you bind a port which is in use
|
||||
# except it just gives you a random port instead
|
||||
raise OSError(E_ADDR_IN_USE[0], "")
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
try:
|
||||
srv.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
e = ""
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
if not tcp:
|
||||
e = "\033[1;31munix-socket {} is busy\033[0m".format(ip)
|
||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
|
||||
if not e:
|
||||
if not tcp:
|
||||
t = "\n\n\n NOTE: this crash may be due to a unix-socket bug; try --rm-sck\n"
|
||||
self.log("tcpsrv", t, 2)
|
||||
raise
|
||||
|
||||
if not tcp and not self.args.rm_sck:
|
||||
e += "; maybe this is a bug? try --rm-sck"
|
||||
|
||||
raise Exception(e)
|
||||
|
||||
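Judging by the parsing in `_listen` above, a unix-socket `-i` value appears to accept an optional octal permission and group between the prefix and the path ("unix:/path", "unix:770:/path", "unix:770:www:/path"); this is inferred from the code, not documented here. A small sketch of that parsing on its own:

```python
def parse_uds(spec):
    # assumed layout: ["unix", <perm?>, <group?>, path] -- path is always last
    parts = spec.split(":")
    path = parts[-1]
    perm = int(parts[1], 8) if len(parts) > 2 else None   # octal socket mode
    group = parts[2] if len(parts) > 3 else None          # gid or group name
    return path, perm, group

print(parse_uds("unix:770:www:/dev/shm/party.sock"))
# ('/dev/shm/party.sock', 504, 'www')
```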
def run(self) -> None:
|
||||
@@ -254,7 +322,14 @@ class TcpSrv(object):
|
||||
bound: list[tuple[str, int]] = []
|
||||
srvs: list[socket.socket] = []
|
||||
for srv in self.srv:
|
||||
ip, port = srv.getsockname()[:2]
|
||||
if srv.family == socket.AF_UNIX:
|
||||
tcp = False
|
||||
ip = re.sub(r"\.[0-9]+$", "", srv.getsockname())
|
||||
port = 0
|
||||
else:
|
||||
tcp = True
|
||||
ip, port = srv.getsockname()[:2]
|
||||
|
||||
if ip == IP6ALL:
|
||||
ip = "::" # jython
|
||||
|
||||
@@ -286,8 +361,12 @@ class TcpSrv(object):
|
||||
bound.append((ip, port))
|
||||
srvs.append(srv)
|
||||
fno = srv.fileno()
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
if tcp:
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
else:
|
||||
msg = "listening @ {} f{} p{}".format(ip, fno, os.getpid())
|
||||
|
||||
self.log("tcpsrv", msg)
|
||||
if self.args.q:
|
||||
print(msg)
|
||||
@@ -304,6 +383,7 @@ class TcpSrv(object):
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
self.hub.restart_ftpd()
|
||||
self.hub.restart_tftpd()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
@@ -339,6 +419,8 @@ class TcpSrv(object):
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||
from .stolen.ifaddr import get_adapters
|
||||
|
||||
listen_ips = [x for x in listen_ips if "unix:" not in x]
|
||||
|
||||
nics = get_adapters(True)
|
||||
eps: dict[str, Netdev] = {}
|
||||
for nic in nics:
|
||||
@@ -457,6 +539,12 @@ class TcpSrv(object):
|
||||
sys.stderr.flush()
|
||||
|
||||
def _qr(self, t1: dict[str, list[int]], t2: dict[str, list[int]]) -> str:
|
||||
t2c = {zs: zli for zs, zli in t2.items() if zs in ("127.0.0.1", "::1")}
|
||||
t2b = {zs: zli for zs, zli in t2.items() if ":" in zs and zs not in t2c}
|
||||
t2 = {zs: zli for zs, zli in t2.items() if zs not in t2b and zs not in t2c}
|
||||
t2.update(t2b) # first ipv4, then ipv6...
|
||||
t2.update(t2c) # ...and finally localhost
|
||||
|
||||
ip = None
|
||||
ips = list(t1) + list(t2)
|
||||
qri = self.args.qri
|
||||
|
||||
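The reordering at the top of `_qr` rebuilds the endpoint dict so plain ipv4 addresses come first, then ipv6, then localhost; it relies on dict insertion order (Python 3.7+). A tiny illustration of the effect, with made-up addresses:

```python
t2 = {"::1": [80], "fe80::5": [80], "10.1.2.3": [80], "127.0.0.1": [80]}
t2c = {k: v for k, v in t2.items() if k in ("127.0.0.1", "::1")}
t2b = {k: v for k, v in t2.items() if ":" in k and k not in t2c}
t2 = {k: v for k, v in t2.items() if k not in t2b and k not in t2c}
t2.update(t2b)  # first ipv4, then ipv6...
t2.update(t2c)  # ...and finally localhost
print(list(t2))  # ['10.1.2.3', 'fe80::5', '::1', '127.0.0.1']
```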
copyparty/tftpd.py (new file, 455 lines)
@@ -0,0 +1,455 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
try:
|
||||
from types import SimpleNamespace
|
||||
except:
|
||||
|
||||
class SimpleNamespace(object):
|
||||
def __init__(self, **attr):
|
||||
self.__dict__.update(attr)
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import stat
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
pass
|
||||
|
||||
from partftpy import (
|
||||
TftpContexts,
|
||||
TftpPacketFactory,
|
||||
TftpPacketTypes,
|
||||
TftpServer,
|
||||
TftpStates,
|
||||
)
|
||||
from partftpy.TftpShared import TftpException
|
||||
|
||||
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
lg = logging.getLogger("tftp")
|
||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||
|
||||
|
||||
def noop(*a, **ka) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _serverInitial(self, pkt: Any, raddress: str, rport: int) -> bool:
|
||||
info("connection from %s:%s", raddress, rport)
|
||||
ret = _sinitial[0](self, pkt, raddress, rport)
|
||||
nm = _hub[0].args.tftp_ipa_nm
|
||||
if nm and not nm.map(raddress):
|
||||
yeet("client rejected (--tftp-ipa): %s" % (raddress,))
|
||||
return ret
|
||||
|
||||
|
||||
# patch ipa-check into partftpy (part 1/2)
|
||||
_hub: list["SvcHub"] = []
|
||||
_sinitial: list[Any] = []
|
||||
|
||||
|
||||
class Tftpd(object):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.asrv = hub.asrv
|
||||
self.log = hub.log
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
_hub[:] = []
|
||||
_hub.append(hub)
|
||||
|
||||
lg.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||
for x in ["partftpy", "partftpy.TftpStates", "partftpy.TftpServer"]:
|
||||
lgr = logging.getLogger(x)
|
||||
lgr.setLevel(logging.DEBUG if self.args.tftpv else logging.INFO)
|
||||
|
||||
if not self.args.tftpv and not self.args.tftpvv:
|
||||
# contexts -> states -> packettypes -> shared
|
||||
# contexts -> packetfactory
|
||||
# packetfactory -> packettypes
|
||||
Cs = [
|
||||
TftpPacketTypes,
|
||||
TftpPacketFactory,
|
||||
TftpStates,
|
||||
TftpContexts,
|
||||
TftpServer,
|
||||
]
|
||||
cbak = []
|
||||
if not self.args.tftp_no_fast and not EXE and not PY2:
|
||||
try:
|
||||
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
||||
for C in Cs:
|
||||
cbak.append(C.__dict__)
|
||||
src1 = inspect.getsource(C).split("\n")
|
||||
src2 = "\n".join([ptn.sub("\\1pass", ln) for ln in src1])
|
||||
cfn = C.__spec__.origin
|
||||
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
||||
except Exception:
|
||||
t = "failed to optimize tftp code; run with --tftp-no-fast if there are issues:\n"
|
||||
self.log("tftp", t + min_ex(), 3)
|
||||
for n, zd in enumerate(cbak):
|
||||
Cs[n].__dict__ = zd
|
||||
|
||||
for C in Cs:
|
||||
C.log.debug = noop
|
||||
|
||||
# patch ipa-check into partftpy (part 2/2)
|
||||
_sinitial[:] = []
|
||||
_sinitial.append(TftpStates.TftpServerState.serverInitial)
|
||||
TftpStates.TftpServerState.serverInitial = _serverInitial
|
||||
|
||||
# patch vfs into partftpy
|
||||
TftpContexts.open = self._open
|
||||
TftpStates.open = self._open
|
||||
|
||||
fos = SimpleNamespace()
|
||||
for k in os.__dict__:
|
||||
try:
|
||||
setattr(fos, k, getattr(os, k))
|
||||
except:
|
||||
pass
|
||||
fos.access = self._access
|
||||
fos.mkdir = self._mkdir
|
||||
fos.unlink = self._unlink
|
||||
fos.sep = "/"
|
||||
TftpContexts.os = fos
|
||||
TftpServer.os = fos
|
||||
TftpStates.os = fos
|
||||
|
||||
fop = SimpleNamespace()
|
||||
for k in os.path.__dict__:
|
||||
try:
|
||||
setattr(fop, k, getattr(os.path, k))
|
||||
except:
|
||||
pass
|
||||
fop.abspath = self._p_abspath
|
||||
fop.exists = self._p_exists
|
||||
fop.isdir = self._p_isdir
|
||||
fop.normpath = self._p_normpath
|
||||
fos.path = fop
|
||||
|
||||
self._disarm(fos)
|
||||
|
||||
self.port = int(self.args.tftp)
|
||||
self.srv = []
|
||||
self.ips = []
|
||||
|
||||
ports = []
|
||||
if self.args.tftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.tftp_pr.split("-")]
|
||||
ports = list(range(p1, p2 + 1))
|
||||
|
||||
ips = self.args.i
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
ips = [x for x in ips if "unix:" not in x]
|
||||
|
||||
if self.args.tftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
if not ips:
|
||||
t = "cannot start tftp-server; no compatible IPs in -i"
|
||||
self.nlog(t, 1)
|
||||
return
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
for ip in ips:
|
||||
name = "tftp_%s" % (ip,)
|
||||
Daemon(self._start, name, [ip, ports])
|
||||
time.sleep(0.2) # give dualstack a chance
|
||||
|
||||
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("tftp", msg, c)
|
||||
|
||||
def _start(self, ip, ports):
|
||||
fam = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
||||
have_been_alive = False
|
||||
while True:
|
||||
srv = TftpServer.TftpServer("/", self._ls)
|
||||
with self.mutex:
|
||||
self.srv.append(srv)
|
||||
self.ips.append(ip)
|
||||
|
||||
try:
|
||||
# this is the listen loop; it should block forever
|
||||
srv.listen(ip, self.port, af_family=fam, ports=ports)
|
||||
except:
|
||||
with self.mutex:
|
||||
self.srv.remove(srv)
|
||||
self.ips.remove(ip)
|
||||
|
||||
try:
|
||||
srv.sock.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
bound = bool(srv.listenport)
|
||||
except:
|
||||
bound = False
|
||||
|
||||
if bound:
|
||||
# this instance has managed to bind at least once
|
||||
have_been_alive = True
|
||||
|
||||
if have_been_alive:
|
||||
t = "tftp server [%s]:%d crashed; restarting in 3 sec:\n%s"
|
||||
error(t, ip, self.port, min_ex())
|
||||
time.sleep(3)
|
||||
continue
|
||||
|
||||
# server failed to start; could be due to dualstack (ipv6 managed to bind and this is ipv4)
|
||||
if ip != "0.0.0.0" or "::" not in self.ips:
|
||||
# nope, it's fatal
|
||||
t = "tftp server [%s]:%d failed to start:\n%s"
|
||||
error(t, ip, self.port, min_ex())
|
||||
|
||||
# yep; ignore
|
||||
# (TODO: move the "listening @ ..." infolog in partftpy to
|
||||
# after the bind attempt so it doesn't print twice)
|
||||
return
|
||||
|
||||
info("tftp server [%s]:%d terminated", ip, self.port)
|
||||
break
|
||||
|
||||
def stop(self):
|
||||
with self.mutex:
|
||||
srvs = self.srv[:]
|
||||
|
||||
for srv in srvs:
|
||||
srv.stop()
|
||||
|
||||
def _v2a(self, caller: str, vpath: str, perms: list, *a: Any) -> tuple[VFS, str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
if not perms:
|
||||
perms = [True, True]
|
||||
|
||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||
if not vfs.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||
# generate file listing if vpath is dir.txt and return as file object
|
||||
if not force:
|
||||
vpath, fn = os.path.split(vpath.replace("\\", "/"))
|
||||
ptn = self.args.tftp_lsf
|
||||
if not ptn or not ptn.match(fn.lower()):
|
||||
return None
|
||||
|
||||
vn, rem = self.asrv.vfs.get(vpath, "*", True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem,
|
||||
"*",
|
||||
not self.args.no_scandir,
|
||||
[[True, False]],
|
||||
)
|
||||
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||
real1 = dirs1 + fils1
|
||||
realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
|
||||
reals = [
|
||||
(
|
||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||
% (
|
||||
zd.year,
|
||||
zd.month,
|
||||
zd.day,
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
),
|
||||
sz,
|
||||
fn,
|
||||
)
|
||||
for zd, sz, fn in realt
|
||||
]
|
||||
virs = [("????-??-?? ??:??:??", 0, k + "/") for k in vfs_virt.keys()]
|
||||
ls = virs + reals
|
||||
|
||||
if "*" not in vn.axs.udot:
|
||||
names = set(exclude_dotfiles([x[2] for x in ls]))
|
||||
ls = [x for x in ls if x[2] in names]
|
||||
|
||||
try:
|
||||
biggest = max([x[1] for x in ls])
|
||||
except:
|
||||
biggest = 0
|
||||
|
||||
perms = []
|
||||
if "*" in vn.axs.uread:
|
||||
perms.append("read")
|
||||
if "*" in vn.axs.udot:
|
||||
perms.append("hidden")
|
||||
if "*" in vn.axs.uwrite:
|
||||
if "*" in vn.axs.udel:
|
||||
perms.append("overwrite")
|
||||
else:
|
||||
perms.append("write")
|
||||
|
||||
fmt = "{{}} {{:{},}} {{}}"
|
||||
fmt = fmt.format(len("{:,}".format(biggest)))
|
||||
retl = ["# permissions: %s" % (", ".join(perms),)]
|
||||
retl += [fmt.format(*x) for x in ls]
|
||||
ret = "\n".join(retl).encode("utf-8", "replace")
|
||||
return BytesIO(ret + b"\n")
|
||||
|
||||
def _open(self, vpath: str, mode: str, *a: Any, **ka: Any) -> Any:
|
||||
rd = wr = False
|
||||
if mode == "rb":
|
||||
rd = True
|
||||
elif mode == "wb":
|
||||
wr = True
|
||||
else:
|
||||
raise Exception("bad mode %s" % (mode,))
|
||||
|
||||
vfs, ap = self._v2a("open", vpath, [rd, wr])
|
||||
if wr:
|
||||
if "*" not in vfs.axs.uwrite:
|
||||
yeet("blocked write; folder not world-writable: /%s" % (vpath,))
|
||||
|
||||
if bos.path.exists(ap) and "*" not in vfs.axs.udel:
|
||||
yeet("blocked write; folder not world-deletable: /%s" % (vpath,))
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.tftpd",
|
||||
xbu,
|
||||
ap,
|
||||
vpath,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"8.3.8.7",
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
|
||||
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||
return self._ls(vpath, "", 0, True)
|
||||
|
||||
if not a:
|
||||
a = (self.args.iobuf,)
|
||||
|
||||
return open(ap, mode, *a, **ka)
|
||||
|
||||
def _mkdir(self, vpath: str, *a) -> None:
|
||||
vfs, ap = self._v2a("mkdir", vpath, [])
|
||||
if "*" not in vfs.axs.uwrite:
|
||||
yeet("blocked mkdir; folder not world-writable: /%s" % (vpath,))
|
||||
|
||||
return bos.mkdir(ap)
|
||||
|
||||
def _unlink(self, vpath: str) -> None:
|
||||
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
|
||||
vfs, ap = self._v2a("delete", vpath, [True, False, False, True])
|
||||
|
||||
try:
|
||||
inf = bos.stat(ap)
|
||||
except:
|
||||
return
|
||||
|
||||
if not stat.S_ISREG(inf.st_mode) or inf.st_size:
|
||||
yeet("attempted delete of non-empty file")
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm("*", "8.3.8.7", [vpath], [], False, False)
|
||||
|
||||
def _access(self, *a: Any) -> bool:
|
||||
return True
|
||||
|
||||
def _p_abspath(self, vpath: str) -> str:
|
||||
return "/" + undot(vpath)
|
||||
|
||||
def _p_normpath(self, *a: Any) -> str:
|
||||
return ""
|
||||
|
||||
def _p_exists(self, vpath: str) -> bool:
|
||||
try:
|
||||
ap = self._v2a("p.exists", vpath, [False, False])[1]
|
||||
bos.stat(ap)
|
||||
return True
|
||||
except:
|
||||
return vpath == "/"
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
st = bos.stat(self._v2a("p.isdir", vpath, [False, False])[1])
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
return ret
|
||||
except:
|
||||
return vpath == "/"
|
||||
|
||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||
src = inspect.currentframe().f_back.f_code.co_name
|
||||
error("\033[31m%s:hook(%s)\033[0m", src, a)
|
||||
raise Exception("nope")
|
||||
|
||||
def _disarm(self, fos: SimpleNamespace) -> None:
|
||||
fos.chmod = self._hook
|
||||
fos.chown = self._hook
|
||||
fos.close = self._hook
|
||||
fos.ftruncate = self._hook
|
||||
fos.lchown = self._hook
|
||||
fos.link = self._hook
|
||||
fos.listdir = self._hook
|
||||
fos.lstat = self._hook
|
||||
fos.open = self._hook
|
||||
fos.remove = self._hook
|
||||
fos.rename = self._hook
|
||||
fos.replace = self._hook
|
||||
fos.scandir = self._hook
|
||||
fos.stat = self._hook
|
||||
fos.symlink = self._hook
|
||||
fos.truncate = self._hook
|
||||
fos.utime = self._hook
|
||||
fos.walk = self._hook
|
||||
|
||||
fos.path.expanduser = self._hook
|
||||
fos.path.expandvars = self._hook
|
||||
fos.path.getatime = self._hook
|
||||
fos.path.getctime = self._hook
|
||||
fos.path.getmtime = self._hook
|
||||
fos.path.getsize = self._hook
|
||||
fos.path.isabs = self._hook
|
||||
fos.path.isfile = self._hook
|
||||
fos.path.islink = self._hook
|
||||
fos.path.realpath = self._hook
|
||||
|
||||
|
||||
def yeet(msg: str) -> None:
|
||||
warning(msg)
|
||||
raise TftpException(msg)
|
||||
@@ -57,9 +57,10 @@ class ThumbCli(object):
|
||||
if is_vid and "dvthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
want_opus = fmt in ("opus", "caf")
|
||||
want_opus = fmt in ("opus", "caf", "mp3")
|
||||
is_au = ext in self.fmt_ffa
|
||||
if is_au:
|
||||
is_vau = want_opus and ext in self.fmt_ffv
|
||||
if is_au or is_vau:
|
||||
if want_opus:
|
||||
if self.args.no_acode:
|
||||
return None
|
||||
@@ -78,16 +79,39 @@ class ThumbCli(object):
|
||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
|
||||
return os.path.join(ptop, rem)
|
||||
|
||||
if fmt == "j" and self.args.th_no_jpg:
|
||||
fmt = "w"
|
||||
if fmt[:1] in "jw":
|
||||
sfmt = fmt[:1]
|
||||
|
||||
if fmt == "w":
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
fmt = "j"
|
||||
if sfmt == "j" and self.args.th_no_jpg:
|
||||
sfmt = "w"
|
||||
|
||||
if sfmt == "w":
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
sfmt = "j"
|
||||
|
||||
vf_crop = dbv.flags["crop"]
|
||||
vf_th3x = dbv.flags["th3x"]
|
||||
|
||||
if "f" in vf_crop:
|
||||
sfmt += "f" if "n" in vf_crop else ""
|
||||
else:
|
||||
sfmt += "f" if "f" in fmt else ""
|
||||
|
||||
if "f" in vf_th3x:
|
||||
sfmt += "3" if "y" in vf_th3x else ""
|
||||
else:
|
||||
sfmt += "3" if "3" in fmt else ""
|
||||
|
||||
fmt = sfmt
|
||||
|
||||
elif fmt[:1] == "p" and not is_au and not is_vid:
|
||||
t = "cannot thumbnail [%s]: png only allowed for waveforms"
|
||||
self.log(t % (rem), 6)
|
||||
return None
|
||||
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
|
||||
@@ -12,13 +12,13 @@ import time
|
||||
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
||||
from .util import BytesIO # type: ignore
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
BytesIO, # type: ignore
|
||||
Cooldown,
|
||||
Daemon,
|
||||
Pebkac,
|
||||
@@ -28,6 +28,8 @@ from .util import (
|
||||
runcmd,
|
||||
statdir,
|
||||
vsplit,
|
||||
wrename,
|
||||
wunlink,
|
||||
)
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
@@ -36,6 +38,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_PILF = False
|
||||
HAVE_HEIF = False
|
||||
@@ -43,22 +48,34 @@ HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL"):
|
||||
raise Exception()
|
||||
|
||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PILF"):
|
||||
raise Exception()
|
||||
|
||||
ImageFont.load_default(size=16)
|
||||
HAVE_PILF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_WEBP"):
|
||||
raise Exception()
|
||||
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_HEIF"):
|
||||
raise Exception()
|
||||
|
||||
from pyheif_pillow_opener import register_heif_opener
|
||||
|
||||
register_heif_opener()
|
||||
@@ -67,6 +84,9 @@ try:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_AVIF"):
|
||||
raise Exception()
|
||||
|
||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||
|
||||
HAVE_AVIF = True
|
||||
@@ -78,6 +98,9 @@ except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_VIPS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
|
||||
@@ -96,26 +119,26 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -
|
||||
|
||||
# spectrograms are never cropped; strip fullsize flag
|
||||
ext = rem.split(".")[-1].lower()
|
||||
if ext in ffa and fmt in ("wf", "jf"):
|
||||
fmt = fmt[:1]
|
||||
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
||||
fmt = fmt.replace("f", "")
|
||||
|
||||
rd += "\n" + fmt
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(afsenc(fn)).digest()
|
||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
|
||||
if fmt in ("opus", "caf"):
|
||||
if fmt in ("opus", "caf", "mp3"):
|
||||
cat = "ac"
|
||||
else:
|
||||
fc = fmt[:1]
|
||||
fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
|
||||
cat = "th"
|
||||
|
||||
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||
return "%s/%s/%s/%s.%x.%s" % (histpath, cat, rd, fn, int(mtime), fmt)
|
||||
|
||||
|
||||
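A small sketch of the cache-path sharding visible in `thumb_path` above (using plain utf-8 in place of afsenc): the name is hashed, base64url-encoded, truncated to 24 chars, and the first two 2-char slices become subdirectories so no single cache directory grows huge.

```python
import base64, hashlib

def shard(name):
    # afsenc in the real code; plain utf-8 here for the sketch
    h = hashlib.sha512(name.encode("utf-8")).digest()
    b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
    return ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64

print(shard("music/album\nwf"))  # always 30 chars: xx/yy/ + 24-char hash
```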
class ThumbSrv(object):
|
||||
@@ -129,6 +152,8 @@ class ThumbSrv(object):
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy: dict[str, list[threading.Condition]] = {}
|
||||
self.ram: dict[str, float] = {}
|
||||
self.memcond = threading.Condition(self.mutex)
|
||||
self.stopping = False
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
@@ -197,9 +222,10 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def getres(self, vn: VFS) -> tuple[int, int]:
|
||||
def getres(self, vn: VFS, fmt: str) -> tuple[int, int]:
|
||||
mul = 3 if "3" in fmt else 1
|
||||
w, h = vn.flags["thsize"].split("x")
|
||||
return int(w), int(h)
|
||||
return int(w) * mul, int(h) * mul
|
||||
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
@@ -214,7 +240,7 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
try:
|
||||
self.busy[tpath].append(cond)
|
||||
self.log("wait {}".format(tpath))
|
||||
self.log("joined waiting room for %s" % (tpath,))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
bos.makedirs(os.path.join(thdir, "w"))
|
||||
@@ -265,6 +291,23 @@ class ThumbSrv(object):
|
||||
"ffa": self.fmt_ffa,
|
||||
}
|
||||
|
||||
def wait4ram(self, need: float, ttpath: str) -> None:
|
||||
ram = self.args.th_ram_max
|
||||
if need > ram * 0.99:
|
||||
t = "file too big; need %.2f GiB RAM, but --th-ram-max is only %.1f"
|
||||
raise Exception(t % (need, ram))
|
||||
|
||||
while True:
|
||||
with self.mutex:
|
||||
used = sum([v for k, v in self.ram.items() if k != ttpath]) + need
|
||||
if used < ram:
|
||||
# self.log("XXX self.ram: %s" % (self.ram,), 5)
|
||||
self.ram[ttpath] = need
|
||||
return
|
||||
with self.memcond:
|
||||
# self.log("at RAM limit; used %.2f GiB, need %.2f more" % (used-need, need), 1)
|
||||
self.memcond.wait(3)
|
||||
|
||||
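The `wait4ram`/`memcond` pair above is essentially a soft memory budget: each conversion reserves an estimated number of GiB and blocks until everyone else's estimates leave enough headroom. A minimal self-contained version of that pattern (names are made up; copyparty keys the dict on the temp thumbnail path rather than a job id):

```python
import threading

class RamGate:
    def __init__(self, budget_gib):
        self.budget = budget_gib
        self.used = {}  # job-id -> reserved GiB
        self.cond = threading.Condition()

    def acquire(self, job, need):
        if need > self.budget * 0.99:
            raise Exception("need %.2f GiB, budget is only %.1f" % (need, self.budget))
        with self.cond:
            # wait until the other reservations leave room for this job
            while sum(v for k, v in self.used.items() if k != job) + need >= self.budget:
                self.cond.wait(3)  # periodic re-check, like wait4ram does
            self.used[job] = need

    def release(self, job):
        with self.cond:
            self.used.pop(job, None)
            self.cond.notify_all()
```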
def worker(self) -> None:
|
||||
while not self.stopping:
|
||||
task = self.q.get()
|
||||
@@ -275,36 +318,52 @@ class ThumbSrv(object):
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
png_ok = False
|
||||
funs = []
|
||||
|
||||
if ext in self.args.au_unpk:
|
||||
ap_unpk = au_unpk(self.log, self.args.au_unpk, abspath, vn)
|
||||
else:
|
||||
ap_unpk = abspath
|
||||
|
||||
if not bos.path.exists(tpath):
|
||||
want_mp3 = tpath.endswith(".mp3")
|
||||
want_opus = tpath.endswith(".opus") or tpath.endswith(".caf")
|
||||
want_png = tpath.endswith(".png")
|
||||
want_au = want_mp3 or want_opus
|
||||
for lib in self.args.th_dec:
|
||||
can_au = lib == "ff" and (
|
||||
ext in self.fmt_ffa or ext in self.fmt_ffv
|
||||
)
|
||||
|
||||
if lib == "pil" and ext in self.fmt_pil:
|
||||
funs.append(self.conv_pil)
|
||||
elif lib == "vips" and ext in self.fmt_vips:
|
||||
funs.append(self.conv_vips)
|
||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
||||
funs.append(self.conv_ffmpeg)
|
||||
elif lib == "ff" and ext in self.fmt_ffa:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
elif can_au and (want_png or want_au):
|
||||
if want_opus:
|
||||
funs.append(self.conv_opus)
|
||||
elif tpath.endswith(".png"):
|
||||
elif want_mp3:
|
||||
funs.append(self.conv_mp3)
|
||||
elif want_png:
|
||||
funs.append(self.conv_waves)
|
||||
png_ok = True
|
||||
else:
|
||||
funs.append(self.conv_spec)
|
||||
|
||||
if not png_ok and tpath.endswith(".png"):
|
||||
raise Pebkac(400, "png only allowed for waveforms")
|
||||
elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
|
||||
funs.append(self.conv_ffmpeg)
|
||||
elif lib == "ff" and ext in self.fmt_ffa and not want_au:
|
||||
funs.append(self.conv_spec)
|
||||
|
||||
tdir, tfn = os.path.split(tpath)
|
||||
ttpath = os.path.join(tdir, "w", tfn)
|
||||
try:
|
||||
bos.unlink(ttpath)
|
||||
wunlink(self.log, ttpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
for fun in funs:
|
||||
try:
|
||||
fun(abspath, ttpath, fmt, vn)
|
||||
if not png_ok and tpath.endswith(".png"):
|
||||
raise Exception("png only allowed for waveforms")
|
||||
|
||||
fun(ap_unpk, ttpath, fmt, vn)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
@@ -318,29 +377,36 @@ class ThumbSrv(object):
|
||||
else:
|
||||
# ffmpeg may spawn empty files on windows
|
||||
try:
|
||||
os.unlink(ttpath)
|
||||
wunlink(self.log, ttpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
if abspath != ap_unpk:
|
||||
wunlink(self.log, ap_unpk, vn.flags)
|
||||
|
||||
try:
|
||||
bos.rename(ttpath, tpath)
|
||||
wrename(self.log, ttpath, tpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
with self.mutex:
|
||||
subs = self.busy[tpath]
|
||||
del self.busy[tpath]
|
||||
self.ram.pop(ttpath, None)
|
||||
|
||||
for x in subs:
|
||||
with x:
|
||||
x.notify_all()
|
||||
|
||||
with self.memcond:
|
||||
self.memcond.notify_all()
|
||||
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
res = self.getres(vn)
|
||||
res = self.getres(vn, fmt)
|
||||
r = max(*res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
try:
|
||||
@@ -355,7 +421,7 @@ class ThumbSrv(object):
|
||||
if rot in rots:
|
||||
im = im.transpose(rots[rot])
|
||||
|
||||
if fmt.endswith("f"):
|
||||
if "f" in fmt:
|
||||
im.thumbnail(res, resample=Image.LANCZOS)
|
||||
else:
|
||||
iw, ih = im.size
|
||||
@@ -366,12 +432,13 @@ class ThumbSrv(object):
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im = self.fancy_pillow(im, fmt, vn)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "90")
|
||||
im.thumbnail(self.getres(vn))
|
||||
im.thumbnail(self.getres(vn, fmt))
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
args = {"quality": 40}
|
||||
@@ -382,7 +449,7 @@ class ThumbSrv(object):
|
||||
# method 0 = pillow-default, fast
|
||||
# method 4 = ffmpeg-default
|
||||
# method 6 = max, slow
|
||||
fmts += ["RGBA", "LA"]
|
||||
fmts.extend(("RGBA", "LA"))
|
||||
args["method"] = 6
|
||||
else:
|
||||
# default q = 75
|
||||
@@ -395,11 +462,12 @@ class ThumbSrv(object):
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
crops = ["centre", "none"]
|
||||
if fmt.endswith("f"):
|
||||
if "f" in fmt:
|
||||
crops = ["none"]
|
||||
|
||||
w, h = self.getres(vn)
|
||||
w, h = self.getres(vn, fmt)
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
@@ -415,6 +483,7 @@ class ThumbSrv(object):
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
self.wait4ram(0.2, tpath)
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if not ret:
|
||||
return
|
||||
@@ -427,12 +496,12 @@ class ThumbSrv(object):
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if fmt.endswith("f"):
|
||||
if "f" in fmt:
|
||||
scale += "decrease,setsar=1:1"
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
res = self.getres(vn)
|
||||
res = self.getres(vn, fmt)
|
||||
bscale = scale.format(*list(res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
@@ -467,9 +536,9 @@ class ThumbSrv(object):
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
||||
def _run_ff(self, cmd: list[bytes], vn: VFS, oom: int = 400) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True)
|
||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=oom)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -517,8 +586,21 @@ class ThumbSrv(object):
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
flt = (
|
||||
b"[0:a:0]"
|
||||
# jt_versi.xm: 405M/839s
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||
need = 0.2 + dur / 3000
|
||||
speedup = b""
|
||||
if need > self.args.th_ram_max * 0.7:
|
||||
self.log("waves too big (need %.2f GiB); trying to optimize" % (need,))
|
||||
need = 0.2 + dur / 4200 # only helps about this much...
|
||||
speedup = b"aresample=8000,"
|
||||
if need > self.args.th_ram_max * 0.96:
|
||||
raise Exception("file too big; cannot waves")
|
||||
|
||||
self.wait4ram(need, tpath)
|
||||
|
||||
flt = b"[0:a:0]" + speedup
|
||||
flt += (
|
||||
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
|
||||
b",volume=2"
|
||||
b",showwavespic=s=2048x64:colors=white"
|
||||
@@ -540,12 +622,44 @@ class ThumbSrv(object):
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
if "pngquant" in vn.flags:
|
||||
wtpath = tpath + ".png"
|
||||
cmd = [
|
||||
b"pngquant",
|
||||
b"--strip",
|
||||
b"--nofs",
|
||||
b"--output",
|
||||
fsenc(wtpath),
|
||||
fsenc(tpath),
|
||||
]
|
||||
ret = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=400)[0]
|
||||
if ret:
|
||||
try:
|
||||
wunlink(self.log, wtpath, vn.flags)
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
wrename(self.log, wtpath, tpath, vn.flags)
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||
# https://trac.ffmpeg.org/ticket/10797
|
||||
# expect 1 GiB every 600 seconds when duration is tricky;
|
||||
# simple filetypes are generally safer so let's special-case those
|
||||
safe = ("flac", "wav", "aif", "aiff", "opus")
|
||||
coeff = 1800 if abspath.split(".")[-1].lower() in safe else 600
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 300
|
||||
need = 0.2 + dur / coeff
|
||||
self.wait4ram(need, tpath)
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s="
|
||||
if "3" in fmt:
|
||||
fc += "1280x1024,crop=1420:1056:70:48[o]"
|
||||
else:
|
||||
fc += "640x512,crop=780:544:70:48[o]"
|
||||
|
||||
if self.args.th_ff_swr:
|
||||
fco = ":filter_size=128:cutoff=0.877"
|
||||
@@ -583,30 +697,71 @@ class ThumbSrv(object):
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
if self.args.no_acode:
|
||||
def conv_mp3(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
quality = self.args.q_mp3.lower()
|
||||
if self.args.no_acode or not quality:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
self.wait4ram(0.2, tpath)
|
||||
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in tags:
|
||||
raise Exception("not audio")
|
||||
|
||||
if quality.endswith("k"):
|
||||
qk = b"-b:a"
|
||||
qv = quality.encode("ascii")
|
||||
else:
|
||||
qk = b"-q:a"
|
||||
qv = quality[1:].encode("ascii")
|
||||
|
||||
# extremely conservative choices for output format
|
||||
# (always 2ch 44k1) because if a device is old enough
|
||||
# to not support opus then it's probably also super picky
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
] + self.big_tags(rawtags) + [
|
||||
b"-map", b"0:a:0",
|
||||
b"-ar", b"44100",
|
||||
b"-ac", b"2",
|
||||
b"-c:a", b"libmp3lame",
|
||||
qk, qv,
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
if self.args.no_acode or not self.args.q_opus:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
self.wait4ram(0.2, tpath)
|
||||
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in tags:
|
||||
raise Exception("not audio")
|
||||
|
||||
try:
|
||||
dur = ret[".dur"][1]
|
||||
dur = tags[".dur"][1]
|
||||
except:
|
||||
dur = 0
|
||||
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
src_opus = abspath.lower().endswith(".opus") or tags["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath + ".opus"
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
wunlink(self.log, tmp_opus, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
caf_src = abspath if src_opus else tmp_opus
|
||||
bq = ("%dk" % (self.args.q_opus,)).encode("ascii")
|
||||
|
||||
if not want_caf or not src_opus:
|
||||
# fmt: off
|
||||
@@ -616,14 +771,14 @@ class ThumbSrv(object):
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map_metadata", b"-1",
|
||||
] + self.big_tags(rawtags) + [
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
b"-b:a", bq,
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
# iOS fails to play some "insufficiently complex" files
|
||||
# (average file shorter than 8 seconds), so of course we
|
||||
@@ -642,12 +797,12 @@ class ThumbSrv(object):
|
||||
b"-map_metadata", b"-1",
|
||||
b"-ac", b"2",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
b"-b:a", bq,
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
elif want_caf:
|
||||
# simple remux should be safe
|
||||
@@ -665,14 +820,24 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn)
|
||||
self._run_ff(cmd, vn, oom=300)
|
||||
|
||||
if tmp_opus != tpath:
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
wunlink(self.log, tmp_opus, vn.flags)
|
||||
except:
|
||||
pass
|
||||
|
||||
def big_tags(self, raw_tags: dict[str, list[str]]) -> list[bytes]:
|
||||
ret = []
|
||||
for k, vs in raw_tags.items():
|
||||
for v in vs:
|
||||
if len(str(v)) >= 1024:
|
||||
bv = k.encode("utf-8", "replace")
|
||||
ret += [b"-metadata", bv + b"="]
|
||||
break
|
||||
return ret
|
||||
|
||||
def poke(self, tdir: str) -> None:
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
@@ -696,7 +861,10 @@ class ThumbSrv(object):
|
||||
else:
|
||||
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
|
||||
|
||||
ndirs += self.clean(histpath)
|
||||
try:
|
||||
ndirs += self.clean(histpath)
|
||||
except Exception as ex:
|
||||
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
|
||||
|
||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||
|
||||
@@ -713,7 +881,7 @@ class ThumbSrv(object):
|
||||
|
||||
def _clean(self, cat: str, thumbpath: str) -> int:
|
||||
# self.log("cln {}".format(thumbpath))
|
||||
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf"]
|
||||
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf", "mp3"]
|
||||
maxage = getattr(self.args, cat + "_maxage")
|
||||
now = time.time()
|
||||
prev_b64 = None
|
||||
|
||||
@@ -8,8 +8,8 @@ import threading
|
||||
import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .authsrv import LEELOO_DALLAS
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||
from .authsrv import LEELOO_DALLAS, VFS
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
from .util import (
|
||||
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
@@ -56,14 +59,27 @@ class U2idx(object):
|
||||
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.sh_cur: Optional["sqlite3.Cursor"] = None
|
||||
|
||||
self.p_end = 0.0
|
||||
self.p_dur = 0.0
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def shutdown(self) -> None:
|
||||
for cur in self.cur.values():
|
||||
db = cur.connection
|
||||
try:
|
||||
db.interrupt()
|
||||
except:
|
||||
pass
|
||||
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
def fsearch(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
||||
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -77,18 +93,36 @@ class U2idx(object):
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
||||
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
|
||||
if not HAVE_SQLITE3:
|
||||
def get_shr(self) -> Optional["sqlite3.Cursor"]:
|
||||
if self.sh_cur:
|
||||
return self.sh_cur
|
||||
|
||||
if not HAVE_SQLITE3 or not self.args.shr:
|
||||
return None
|
||||
|
||||
cur = self.cur.get(ptop)
|
||||
assert sqlite3 # type: ignore
|
||||
|
||||
db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("sh")').fetchall()
|
||||
self.sh_cur = cur
|
||||
return cur
|
||||
|
||||
def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
|
||||
cur = self.cur.get(vn.realpath)
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
if not HAVE_SQLITE3 or "e2d" not in vn.flags:
|
||||
return None
|
||||
|
||||
assert sqlite3 # type: ignore
|
||||
|
||||
ptop = vn.realpath
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
@@ -103,7 +137,7 @@ class U2idx(object):
 uri = ""
 try:
 uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
-db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
+db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
 cur = db.cursor()
 cur.execute('pragma table_info("up")').fetchone()
 self.log("ro: {}".format(db_path))
@@ -115,14 +149,14 @@ class U2idx(object):
 if not cur:
 # on windows, this steals the write-lock from up2k.deferred_init --
 # seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
-cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
+cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
 self.log("opened {}".format(db_path))

 self.cur[ptop] = cur
 return cur

 def search(
-self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
+self, uname: str, vols: list[VFS], uq: str, lim: int
 ) -> tuple[list[dict[str, Any]], list[str], bool]:
 """search by query params"""
 if not HAVE_SQLITE3:
@@ -131,7 +165,6 @@ class U2idx(object):
 q = ""
 v: Union[str, int] = ""
 va: list[Union[str, int]] = []
-have_up = False  # query has up.* operands
 have_mt = False
 is_key = True
 is_size = False
@@ -176,26 +209,21 @@ class U2idx(object):
 if v == "size":
 v = "up.sz"
 is_size = True
-have_up = True

 elif v == "date":
 v = "up.mt"
 is_date = True
-have_up = True

 elif v == "up_at":
 v = "up.at"
 is_date = True
-have_up = True

 elif v == "path":
 v = "trim(?||up.rd,'/')"
 va.append("\nrd")
-have_up = True

 elif v == "name":
 v = "up.fn"
-have_up = True

 elif v == "tags" or ptn_mt.match(v):
 have_mt = True
@@ -271,22 +299,22 @@ class U2idx(object):
 q += " lower({}) {} ? ) ".format(field, oper)

 try:
-return self.run_query(vols, q, va, have_up, have_mt, lim)
+return self.run_query(uname, vols, q, va, have_mt, lim)
 except Exception as ex:
 raise Pebkac(500, repr(ex))

 def run_query(
 self,
-vols: list[tuple[str, str, dict[str, Any]]],
+uname: str,
+vols: list[VFS],
 uq: str,
 uv: list[Union[str, int]],
-have_up: bool,
 have_mt: bool,
 lim: int,
 ) -> tuple[list[dict[str, Any]], list[str], bool]:
 if self.args.srch_dbg:
 t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
-zs = "\n ".join(["/%s = %s" % (x[0], x[1]) for x in vols])
+zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
 self.log(t % (len(vols), zs), 5)

 done_flag: list[bool] = []
@@ -315,11 +343,15 @@ class U2idx(object):
 clamped = False

 taglist = {}
-for (vtop, ptop, flags) in vols:
+for vol in vols:
 if lim < 0:
 break

-cur = self.get_cur(ptop)
+vtop = vol.vpath
+ptop = vol.realpath
+flags = vol.flags
+
+cur = self.get_cur(vol)
 if not cur:
 continue

@@ -343,7 +375,7 @@ class U2idx(object):

 sret = []
 fk = flags.get("fk")
-dots = flags.get("dotsrch")
+dots = flags.get("dotsrch") and uname in vol.axs.udot
 fk_alg = 2 if "fka" in flags else 1
 c = cur.execute(uq, tuple(vuv))
 for hit in c:
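For reference, a minimal standalone sketch of the read-only-first connection strategy changed above (the positional busy-timeout is replaced by the timeout= keyword); the db_path value and the prints are placeholders, not copyparty's actual wiring:

# sketch of the read-only-first sqlite connection seen in get_cur() above;
# db_path is a placeholder value
import sqlite3
from pathlib import Path

db_path = Path("up2k.db").resolve()  # as_uri() requires an absolute path
uri = "{}?mode=ro&nolock=1".format(db_path.as_uri())
try:
    # uri=True makes the ?mode=ro query take effect; timeout is the busy-timeout in seconds
    db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
    cur = db.cursor()
    cur.execute('pragma table_info("up")').fetchone()
    print("ro:", db_path)
except Exception:
    # fall back to a regular read-write handle, like the second hunk does
    cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
    print("opened", db_path)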
1643  copyparty/up2k.py
File diff suppressed because it is too large
@@ -17,8 +17,10 @@ window.baguetteBox = (function () {
 titleTag: false,
 async: false,
 preload: 2,
+refocus: true,
 afterShow: null,
 afterHide: null,
+duringHide: null,
 onChange: null,
 },
 overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
@@ -27,6 +29,8 @@ window.baguetteBox = (function () {
 isOverlayVisible = false,
 touch = {}, // start-pos
 touchFlag = false, // busy
+scrollCSS = ['', ''],
+scrollTimer = 0,
 re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
 re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
 anims = ['slideIn', 'fadeIn', 'none'],
@@ -89,6 +93,30 @@ window.baguetteBox = (function () {
 touchendHandler();
 };

+var overlayWheelHandler = function (e) {
+if (!options.noScrollbars || anymod(e))
+return;
+
+ev(e);
+
+var x = e.deltaX,
+y = e.deltaY,
+d = Math.abs(x) > Math.abs(y) ? x : y;
+
+if (e.deltaMode)
+d *= 10;
+
+if (Date.now() - scrollTimer < (Math.abs(d) > 20 ? 100 : 300))
+return;
+
+scrollTimer = Date.now();
+
+if (d > 0)
+showNextImage();
+else
+showPreviousImage();
+};
+
 var trapFocusInsideOverlay = function (e) {
 if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
 e.stopPropagation();
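The new wheel handler above is mostly a rate-limiter: large deltas may navigate every 100 ms, small ones every 300 ms. The same gate expressed as a tiny Python helper, purely for illustration (names are made up):

# illustrative port of the wheel-handler throttle above: deltas larger
# than 20 may fire every 100 ms, smaller ones every 300 ms
import time

_last_scroll = 0.0

def wheel_should_navigate(delta: float) -> bool:
    global _last_scroll
    now = time.monotonic()
    if now - _last_scroll < (0.1 if abs(delta) > 20 else 0.3):
        return False
    _last_scroll = now
    return True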
@@ -144,7 +172,7 @@ window.baguetteBox = (function () {
 selectorData.galleries.push(gallery);
 });

-return selectorData.galleries;
+return [selectorData.galleries, options];
 }

 function clearCachedData() {
@@ -255,19 +283,19 @@ window.baguetteBox = (function () {
 if (anymod(e, true))
 return;

-var k = e.code + '', v = vid(), pos = -1;
+var k = (e.code || e.key) + '', v = vid(), pos = -1;

 if (k == "BracketLeft")
 setloop(1);
 else if (k == "BracketRight")
 setloop(2);
-else if (e.shiftKey && k != 'KeyR')
+else if (e.shiftKey && k != "KeyR" && k != "R")
 return;
-else if (k == "ArrowLeft" || k == "KeyJ")
+else if (k == "ArrowLeft" || k == "KeyJ" || k == "Left" || k == "j")
 showPreviousImage();
-else if (k == "ArrowRight" || k == "KeyL")
+else if (k == "ArrowRight" || k == "KeyL" || k == "Right" || k == "l")
 showNextImage();
-else if (k == "Escape")
+else if (k == "Escape" || k == "Esc")
 hideOverlay();
 else if (k == "Home")
 showFirstImage(e);
@@ -295,9 +323,9 @@ window.baguetteBox = (function () {
 }
 else if (k == "KeyF")
 tglfull();
-else if (k == "KeyS")
+else if (k == "KeyS" || k == "s")
 tglsel();
-else if (k == "KeyR")
+else if (k == "KeyR" || k == "r" || k == "R")
 rotn(e.shiftKey ? -1 : 1);
 else if (k == "KeyY")
 dlpic();
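The two hunks above accept both the modern KeyboardEvent.code values and the legacy .key names; the accepted aliases collected as a small Python table, for reference only:

# key aliases accepted by the updated handler (KeyboardEvent.code value
# first, legacy .key spellings after)
KEY_ALIASES = {
    "prev":   ("ArrowLeft", "KeyJ", "Left", "j"),
    "next":   ("ArrowRight", "KeyL", "Right", "l"),
    "close":  ("Escape", "Esc"),
    "select": ("KeyS", "s"),
    "rotate": ("KeyR", "r", "R"),
}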
@@ -392,8 +420,7 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function dlpic() {
|
||||
var url = findfile()[3].href;
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
|
||||
var url = addq(findfile()[3].href, 'cache');
|
||||
dl_file(url);
|
||||
}
|
||||
|
||||
@@ -450,6 +477,7 @@ window.baguetteBox = (function () {
|
||||
bind(document, 'keyup', keyUpHandler);
|
||||
bind(document, 'fullscreenchange', onFSC);
|
||||
bind(overlay, 'click', overlayClickHandler);
|
||||
bind(overlay, 'wheel', overlayWheelHandler);
|
||||
bind(btnPrev, 'click', showPreviousImage);
|
||||
bind(btnNext, 'click', showNextImage);
|
||||
bind(btnClose, 'click', hideOverlay);
|
||||
@@ -472,6 +500,7 @@ window.baguetteBox = (function () {
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
unbind(document, 'fullscreenchange', onFSC);
|
||||
unbind(overlay, 'click', overlayClickHandler);
|
||||
unbind(overlay, 'wheel', overlayWheelHandler);
|
||||
unbind(btnPrev, 'click', showPreviousImage);
|
||||
unbind(btnNext, 'click', showNextImage);
|
||||
unbind(btnClose, 'click', hideOverlay);
|
||||
@@ -539,6 +568,12 @@ window.baguetteBox = (function () {
|
||||
|
||||
function showOverlay(chosenImageIndex) {
|
||||
if (options.noScrollbars) {
|
||||
var a = document.documentElement.style.overflowY,
|
||||
b = document.body.style.overflowY;
|
||||
|
||||
if (a != 'hidden' || b != 'scroll')
|
||||
scrollCSS = [a, b];
|
||||
|
||||
document.documentElement.style.overflowY = 'hidden';
|
||||
document.body.style.overflowY = 'scroll';
|
||||
}
|
||||
@@ -582,24 +617,30 @@ window.baguetteBox = (function () {
|
||||
isOverlayVisible = true;
|
||||
}
|
||||
|
||||
function hideOverlay(e) {
|
||||
function hideOverlay(e, dtor) {
|
||||
ev(e);
|
||||
playvid(false);
|
||||
removeFromCache('#files');
|
||||
if (options.noScrollbars) {
|
||||
document.documentElement.style.overflowY = 'auto';
|
||||
document.body.style.overflowY = 'auto';
|
||||
document.documentElement.style.overflowY = scrollCSS[0];
|
||||
document.body.style.overflowY = scrollCSS[1];
|
||||
}
|
||||
if (overlay.style.display === 'none')
|
||||
|
||||
try {
|
||||
if (document.fullscreenElement)
|
||||
document.exitFullscreen();
|
||||
}
|
||||
catch (ex) { }
|
||||
isFullscreen = false;
|
||||
|
||||
if (dtor || overlay.style.display === 'none')
|
||||
return;
|
||||
|
||||
if (options.duringHide)
|
||||
options.duringHide();
|
||||
|
||||
sethash('');
|
||||
unbindEvents();
|
||||
try {
|
||||
document.exitFullscreen();
|
||||
isFullscreen = false;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
// Fade out and hide the overlay
|
||||
overlay.className = '';
|
||||
@@ -613,9 +654,45 @@ window.baguetteBox = (function () {
|
||||
if (options.afterHide)
|
||||
options.afterHide();
|
||||
|
||||
documentLastFocus && documentLastFocus.focus();
|
||||
options.refocus && documentLastFocus && documentLastFocus.focus();
|
||||
isOverlayVisible = false;
|
||||
}, 500);
|
||||
unvid();
|
||||
unfig();
|
||||
}, 250);
|
||||
}
|
||||
|
||||
function unvid(keep) {
|
||||
var vids = QSA('#bbox-overlay video');
|
||||
for (var a = vids.length - 1; a >= 0; a--) {
|
||||
var v = vids[a];
|
||||
if (v == keep)
|
||||
continue;
|
||||
|
||||
v.src = '';
|
||||
v.load();
|
||||
|
||||
var p = v.parentNode;
|
||||
p.removeChild(v);
|
||||
p.parentNode.removeChild(p);
|
||||
}
|
||||
}
|
||||
|
||||
function unfig(keep) {
|
||||
var figs = QSA('#bbox-overlay figure'),
|
||||
npre = options.preload || 0,
|
||||
k = [];
|
||||
|
||||
if (keep === undefined)
|
||||
keep = -9;
|
||||
|
||||
for (var a = keep - npre; a <= keep + npre; a++)
|
||||
k.push('bbox-figure-' + a);
|
||||
|
||||
for (var a = figs.length - 1; a >= 0; a--) {
|
||||
var f = figs[a];
|
||||
if (!has(k, f.getAttribute('id')))
|
||||
f.parentNode.removeChild(f);
|
||||
}
|
||||
}
|
||||
|
||||
function loadImage(index, callback) {
|
||||
@@ -641,7 +718,7 @@ window.baguetteBox = (function () {
|
||||
options.captions.call(currentGallery, imageElement) :
|
||||
imageElement.getAttribute('data-caption') || imageElement.title;
|
||||
|
||||
imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
|
||||
imageSrc = addq(imageSrc, 'cache');
|
||||
|
||||
if (is_vid && index != currentIndex)
|
||||
return; // no preload
|
||||
@@ -670,8 +747,11 @@ window.baguetteBox = (function () {
|
||||
});
|
||||
image.setAttribute('src', imageSrc);
|
||||
if (is_vid) {
|
||||
image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
|
||||
image.setAttribute('controls', 'controls');
|
||||
image.onended = vidEnd;
|
||||
image.onplay = function () { show_buttons(1); };
|
||||
image.onpause = function () { show_buttons(); };
|
||||
}
|
||||
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
||||
if (options.titleTag && imageCaption)
|
||||
@@ -679,6 +759,9 @@ window.baguetteBox = (function () {
|
||||
|
||||
figure.appendChild(image);
|
||||
|
||||
if (is_vid && window.afilt)
|
||||
afilt.apply(undefined, image);
|
||||
|
||||
if (options.async && callback)
|
||||
callback();
|
||||
}
|
||||
@@ -708,6 +791,7 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function show(index, gallery) {
|
||||
gallery = gallery || currentGallery;
|
||||
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
|
||||
prepareOverlay(gallery, options);
|
||||
showOverlay(index);
|
||||
@@ -720,12 +804,10 @@ window.baguetteBox = (function () {
|
||||
if (index >= imagesElements.length)
|
||||
return bounceAnimation('right');
|
||||
|
||||
var v = vid();
|
||||
if (v) {
|
||||
v.src = '';
|
||||
v.load();
|
||||
v.parentNode.removeChild(v);
|
||||
try {
|
||||
vid().pause();
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
currentIndex = index;
|
||||
loadImage(currentIndex, function () {
|
||||
@@ -734,6 +816,15 @@ window.baguetteBox = (function () {
|
||||
});
|
||||
updateOffset();
|
||||
|
||||
if (options.animation == 'none')
|
||||
unvid(vid());
|
||||
else
|
||||
setTimeout(function () {
|
||||
unvid(vid());
|
||||
}, 100);
|
||||
|
||||
unfig(index);
|
||||
|
||||
if (options.onChange)
|
||||
options.onChange(currentIndex, imagesElements.length);
|
||||
|
||||
@@ -906,6 +997,12 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
}
|
||||
|
||||
function show_buttons(v) {
|
||||
clmod(ebi('bbox-btns'), 'off', v);
|
||||
clmod(btnPrev, 'off', v);
|
||||
clmod(btnNext, 'off', v);
|
||||
}
|
||||
|
||||
function bounceAnimation(direction) {
|
||||
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
|
||||
setTimeout(function () {
|
||||
@@ -969,9 +1066,7 @@ window.baguetteBox = (function () {
|
||||
if (fx > 0.7)
|
||||
return showNextImage();
|
||||
|
||||
clmod(ebi('bbox-btns'), 'off', 't');
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
show_buttons('t');
|
||||
|
||||
if (Date.now() - ctime <= 500 && !IPHONE)
|
||||
tglfull();
|
||||
@@ -1013,6 +1108,7 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function destroyPlugin() {
|
||||
hideOverlay(undefined, true);
|
||||
unbindEvents();
|
||||
clearCachedData();
|
||||
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #bbb;
|
||||
|
||||
--bg-u7: #555;
|
||||
--bg-u6: #4c4c4c;
|
||||
--bg-u5: #444;
|
||||
--bg-u4: #383838;
|
||||
@@ -28,6 +27,8 @@
|
||||
--row-alt: #282828;
|
||||
|
||||
--scroll: #eb0;
|
||||
--sel-fg: var(--bg-d1);
|
||||
--sel-bg: var(--fg);
|
||||
|
||||
--a: #fc5;
|
||||
--a-b: #c90;
|
||||
@@ -41,8 +42,14 @@
|
||||
--btn-h-bg: #805;
|
||||
--btn-1-fg: #400;
|
||||
--btn-1-bg: var(--a);
|
||||
--btn-h-bs: var(--btn-bs);
|
||||
--btn-h-bb: var(--btn-bb);
|
||||
--btn-1-bs: var(--btn-bs);
|
||||
--btn-1-bb: var(--btn-bb);
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #fe8;
|
||||
--btn-1h-bs: var(--btn-1-bs);
|
||||
--btn-1h-bb: var(--btn-1-bb);
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
@@ -57,7 +64,7 @@
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||
--u2-tab-1-fg: #fd7;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, #353, var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
--u2-tab-1-b2: #583;
|
||||
--u2-tab-1-sh: #280;
|
||||
@@ -210,22 +217,19 @@ html.y {
|
||||
html.a {
|
||||
--op-aa-sh: 0 0 .2em var(--bg-d3) inset;
|
||||
|
||||
--u2-o-bg: #603;
|
||||
--u2-o-b1: #a16;
|
||||
--u2-o-sh: #a00;
|
||||
--u2-o-h-bg: var(--u2-o-bg);
|
||||
--u2-o-h-b1: #fb0;
|
||||
--u2-o-h-sh: #fb0;
|
||||
--u2-o-1-bg: #6a1;
|
||||
--u2-o-1-b1: #efa;
|
||||
--u2-o-1-sh: #0c0;
|
||||
--u2-o-1h-bg: var(--u2-o-1-bg);
|
||||
--btn-bs: 0 0 .2em var(--bg-d3);
|
||||
}
|
||||
html.az {
|
||||
--btn-1-bs: 0 0 .1em var(--fg) inset;
|
||||
}
|
||||
html.ay {
|
||||
--op-aa-sh: 0 .1em .2em #ccc;
|
||||
--op-aa-bg: var(--bg-max);
|
||||
}
|
||||
html.b {
|
||||
--btn-bs: 0 .05em 0 var(--bg-d3) inset;
|
||||
--btn-1-bs: 0 .05em 0 var(--btn-1h-bg) inset;
|
||||
|
||||
--tree-bg: var(--bg);
|
||||
|
||||
--g-bg: var(--bg);
|
||||
@@ -242,17 +246,13 @@ html.b {
|
||||
--u2-b1-bg: rgba(128,128,128,0.15);
|
||||
--u2-b2-bg: var(--u2-b1-bg);
|
||||
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-h-bg: var(--btn-h-bg);
|
||||
--u2-o-1-bg: var(--a);
|
||||
--u2-o-1h-bg: var(--a-hil);
|
||||
|
||||
--f-sh1: 0.1;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.bz {
|
||||
--fg: #cce;
|
||||
--fg-weak: #bbd;
|
||||
|
||||
--bg-u5: #3b3f58;
|
||||
--bg-u4: #1e2130;
|
||||
--bg-u3: #1e2130;
|
||||
@@ -264,11 +264,14 @@ html.bz {
|
||||
|
||||
--row-alt: #181a27;
|
||||
|
||||
--a-b: #fb4;
|
||||
|
||||
--btn-bg: #202231;
|
||||
--btn-h-bg: #2d2f45;
|
||||
--btn-1-bg: #ba2959;
|
||||
--btn-1-fg: #fff;
|
||||
--btn-1-bg: #eb6;
|
||||
--btn-1-fg: #000;
|
||||
--btn-1h-fg: #000;
|
||||
--btn-1h-bg: #ff9;
|
||||
--txt-sh: a;
|
||||
|
||||
--u2-tab-b1: var(--bg-u5);
|
||||
@@ -303,6 +306,7 @@ html.by {
|
||||
}
|
||||
html.c {
|
||||
font-weight: bold;
|
||||
|
||||
--fg: #fff;
|
||||
--fg-weak: #cef;
|
||||
--bg-u5: #409;
|
||||
@@ -323,15 +327,25 @@ html.c {
|
||||
--chk-fg: #d90;
|
||||
|
||||
--op-aa-bg: #f9dd22;
|
||||
--u2-o-1-bg: #4cf;
|
||||
|
||||
--srv-1: #ea0;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.cz {
|
||||
--bgg: var(--bg-u2);
|
||||
|
||||
--sel-bg: var(--bg-u5);
|
||||
--sel-fg: var(--fg);
|
||||
|
||||
--btn-bb: .2em solid #709;
|
||||
--btn-bs: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
--btn-1-bb: .2em solid #e90;
|
||||
--btn-1-bs: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
|
||||
--srv-3: #fff;
|
||||
|
||||
--u2-tab-b1: var(--bg-d3);
|
||||
--u2-tab-1-bg: a;
|
||||
}
|
||||
html.cy {
|
||||
--fg: #fff;
|
||||
@@ -343,6 +357,8 @@ html.cy {
|
||||
--bg-d3: #f77;
|
||||
--bg-d2: #ff0;
|
||||
|
||||
--sel-bg: #f77;
|
||||
|
||||
--a: #fff;
|
||||
--a-hil: #fff;
|
||||
--a-h-bg: #000;
|
||||
@@ -356,16 +372,20 @@ html.cy {
|
||||
--btn-h-fg: #fff;
|
||||
--btn-1-bg: #ff0;
|
||||
--btn-1-fg: #000;
|
||||
--btn-bs: 0 .25em 0 #f00;
|
||||
--chk-fg: #fd0;
|
||||
|
||||
--txt-bg: #000;
|
||||
--srv-1: #f00;
|
||||
--srv-3: #fff;
|
||||
--op-aa-bg: #fff;
|
||||
|
||||
--u2-b1-bg: #f00;
|
||||
--u2-b2-bg: #f00;
|
||||
--u2-o-bg: #ff0;
|
||||
--u2-o-1-bg: #f00;
|
||||
|
||||
--g-sel-fg: #fff;
|
||||
--g-sel-bg: #aaa;
|
||||
--g-fsel-bg: #aaa;
|
||||
}
|
||||
html.dz {
|
||||
--fg: #4d4;
|
||||
@@ -373,7 +393,6 @@ html.dz {
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #2a2;
|
||||
|
||||
--bg-u7: #020;
|
||||
--bg-u6: #020;
|
||||
--bg-u5: #050;
|
||||
--bg-u4: #020;
|
||||
@@ -406,6 +425,9 @@ html.dz {
|
||||
--btn-1-bg: #4f4;
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #3f3;
|
||||
--btn-bs: 0 0 0 .1em #080 inset;
|
||||
--btn-1-bs: a;
|
||||
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
@@ -420,19 +442,13 @@ html.dz {
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: var(--fg-weak);
|
||||
--u2-tab-1-fg: #fff;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, #151, var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
--u2-tab-1-b2: #583;
|
||||
--u2-tab-1-sh: #280;
|
||||
--u2-b-fg: #fff;
|
||||
--u2-b1-bg: #3a3;
|
||||
--u2-b2-bg: #3a3;
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-b1: var(--bg-u5);
|
||||
--u2-o-h-bg: var(--fg-weak);
|
||||
--u2-o-1-bg: var(--fg-weak);
|
||||
--u2-o-1-b1: var(--a);
|
||||
--u2-o-1h-bg: var(--a);
|
||||
--u2-inf-bg: #07a;
|
||||
--u2-inf-b1: #0be;
|
||||
--u2-ok-bg: #380;
|
||||
@@ -494,6 +510,7 @@ html.dz {
|
||||
|
||||
text-shadow: none;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
html.dy {
|
||||
--fg: #000;
|
||||
@@ -543,10 +560,6 @@ html.dy {
|
||||
--u2-tab-1-bg: a;
|
||||
--u2-b1-bg: #000;
|
||||
--u2-b2-bg: #000;
|
||||
--u2-o-h-bg: #999;
|
||||
--u2-o-1h-bg: #999;
|
||||
--u2-o-bg: #eee;
|
||||
--u2-o-1-bg: #000;
|
||||
|
||||
--ud-b1: a;
|
||||
|
||||
@@ -587,11 +600,11 @@ html.dy {
|
||||
line-height: 1.2em;
|
||||
}
|
||||
::selection {
|
||||
color: var(--bg-d1);
|
||||
background: var(--fg);
|
||||
color: var(--sel-fg);
|
||||
background: var(--sel-bg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html,body,tr,th,td,#files,a {
|
||||
html,body,tr,th,td,#files,a,#blogout {
|
||||
color: inherit;
|
||||
background: none;
|
||||
font-weight: inherit;
|
||||
@@ -603,6 +616,7 @@ html {
|
||||
color: var(--fg);
|
||||
background: var(--bgg);
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
text-shadow: 1px 1px 0px var(--bg-max);
|
||||
}
|
||||
html, body {
|
||||
@@ -611,12 +625,14 @@ html, body {
|
||||
}
|
||||
pre, code, tt, #doc, #doc>code {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
.ayjump {
|
||||
position: fixed;
|
||||
overflow: hidden;
|
||||
width: 0;
|
||||
height: 0;
|
||||
color: var(--bg);
|
||||
}
|
||||
html .ayjump:focus {
|
||||
z-index: 80386;
|
||||
@@ -671,11 +687,15 @@ html.y #path {
|
||||
#files tbody div a {
|
||||
color: var(--tab-alt);
|
||||
}
|
||||
a, #files tbody div a:last-child {
|
||||
a, #blogout, #files tbody div a:last-child {
|
||||
color: var(--a);
|
||||
padding: .2em;
|
||||
text-decoration: none;
|
||||
}
|
||||
#blogout {
|
||||
margin: -.2em;
|
||||
}
|
||||
#blogout:hover,
|
||||
a:hover {
|
||||
color: var(--a-hil);
|
||||
background: var(--a-h-bg);
|
||||
@@ -696,12 +716,12 @@ a:hover {
|
||||
.s0:after,
|
||||
.s1:after {
|
||||
content: '⌄';
|
||||
margin-left: -.1em;
|
||||
margin-left: -.15em;
|
||||
}
|
||||
.s0r:after,
|
||||
.s1r:after {
|
||||
content: '⌃';
|
||||
margin-left: -.1em;
|
||||
margin-left: -.15em;
|
||||
}
|
||||
.s0:after,
|
||||
.s0r:after {
|
||||
@@ -712,7 +732,7 @@ a:hover {
|
||||
color: var(--sort-2);
|
||||
}
|
||||
#files thead th:after {
|
||||
margin-right: -.7em;
|
||||
margin-right: -.5em;
|
||||
}
|
||||
#files tbody tr:hover td,
|
||||
#files tbody tr:hover td+td {
|
||||
@@ -741,6 +761,15 @@ html #files.hhpick thead th {
|
||||
word-wrap: break-word;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr.fade a {
|
||||
color: #999;
|
||||
color: rgba(255, 255, 255, 0.4);
|
||||
font-style: italic;
|
||||
}
|
||||
html.y #files tr.fade a {
|
||||
color: #999;
|
||||
color: rgba(0, 0, 0, 0.4);
|
||||
}
|
||||
#files tr:nth-child(2n) td {
|
||||
background: var(--row-alt);
|
||||
}
|
||||
@@ -759,6 +788,7 @@ html #files.hhpick thead th {
|
||||
}
|
||||
#files tbody td:nth-child(3) {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
text-align: right;
|
||||
padding-right: 1em;
|
||||
white-space: nowrap;
|
||||
@@ -818,6 +848,11 @@ html.y #path a:hover {
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
.logue.raw {
|
||||
white-space: pre;
|
||||
font-family: 'scp', 'consolas', monospace;
|
||||
font-family: var(--font-mono), 'scp', 'consolas', monospace;
|
||||
}
|
||||
#doc>iframe,
|
||||
.logue>iframe {
|
||||
background: var(--bgg);
|
||||
@@ -904,6 +939,9 @@ html.y #path a:hover {
|
||||
color: var(--srv-3);
|
||||
border-bottom: 1px solid var(--srv-3b);
|
||||
}
|
||||
#flogout {
|
||||
display: inline;
|
||||
}
|
||||
#goh+span {
|
||||
color: var(--bg-u5);
|
||||
padding-left: .5em;
|
||||
@@ -938,6 +976,8 @@ html.y #path a:hover {
|
||||
#files tbody tr.play a:hover {
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#ggrid {
|
||||
margin: -.2em -.5em;
|
||||
@@ -946,6 +986,7 @@ html.y #path a:hover {
|
||||
overflow: hidden;
|
||||
display: block;
|
||||
display: -webkit-box;
|
||||
line-clamp: var(--grid-ln);
|
||||
-webkit-line-clamp: var(--grid-ln);
|
||||
-webkit-box-orient: vertical;
|
||||
padding-top: .3em;
|
||||
@@ -981,6 +1022,10 @@ html.y #path a:hover {
|
||||
margin: 0 auto;
|
||||
display: block;
|
||||
}
|
||||
#ggrid.nocrop>a img {
|
||||
max-height: 20em;
|
||||
max-height: calc(var(--grid-sz)*2);
|
||||
}
|
||||
#ggrid>a.dir:before {
|
||||
content: '📂';
|
||||
}
|
||||
@@ -988,9 +1033,6 @@ html.y #path a:hover {
|
||||
color: var(--g-dfg);
|
||||
}
|
||||
#ggrid>a.au:before {
|
||||
content: '💾';
|
||||
}
|
||||
html.np_open #ggrid>a.au:before {
|
||||
content: '▶';
|
||||
}
|
||||
#ggrid>a:before {
|
||||
@@ -1119,6 +1161,7 @@ html.y #widget.open {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#fshr,
|
||||
#wtgrid,
|
||||
#wtico {
|
||||
position: relative;
|
||||
@@ -1147,9 +1190,6 @@ html.y #widget.open {
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
}
|
||||
@media (prefers-reduced-motion) {
|
||||
@keyframes spin { }
|
||||
}
|
||||
@keyframes fadein {
|
||||
0% {opacity: 0}
|
||||
100% {opacity: 1}
|
||||
@@ -1243,6 +1283,13 @@ html.y #widget.open {
|
||||
0% {opacity:0}
|
||||
100% {opacity:1}
|
||||
}
|
||||
#ggrid>a.glow {
|
||||
animation: gexit .6s ease-out;
|
||||
}
|
||||
@keyframes gexit {
|
||||
0% {box-shadow: 0 0 0 2em var(--a)}
|
||||
100% {box-shadow: 0 0 0em 0em var(--a)}
|
||||
}
|
||||
#wzip a {
|
||||
font-size: .4em;
|
||||
margin: -.3em .1em;
|
||||
@@ -1301,6 +1348,7 @@ html.y #widget.open {
|
||||
#widget.cmp #wtoggle {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
#widget.cmp #fshr,
|
||||
#widget.cmp #wtgrid {
|
||||
display: none;
|
||||
}
|
||||
@@ -1401,10 +1449,15 @@ input[type="checkbox"]+label {
|
||||
input[type="radio"]:checked+label,
|
||||
input[type="checkbox"]:checked+label {
|
||||
color: #0e0;
|
||||
color: var(--a);
|
||||
color: var(--btn-1-bg);
|
||||
}
|
||||
input[type="checkbox"]:checked+label {
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
html.dz input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
.opwide div>span>input+label {
|
||||
padding: .3em 0 .3em .3em;
|
||||
@@ -1578,10 +1631,13 @@ html {
|
||||
color: var(--btn-fg);
|
||||
background: #eee;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
border-radius: .3em;
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
margin: .2em;
|
||||
display: inline-block;
|
||||
white-space: pre;
|
||||
position: relative;
|
||||
top: -.12em;
|
||||
@@ -1590,20 +1646,14 @@ html.c .btn,
|
||||
html.a .btn {
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.cz .btn {
|
||||
box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
border-bottom: .2em solid #709;
|
||||
}
|
||||
html.dz .btn {
|
||||
font-size: 1em;
|
||||
box-shadow: 0 0 0 .1em #080 inset;
|
||||
}
|
||||
html.dz .tgl.btn.on {
|
||||
box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
|
||||
}
|
||||
.btn:hover {
|
||||
color: var(--btn-h-fg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
.tgl.btn.on {
|
||||
background: #000;
|
||||
@@ -1611,14 +1661,14 @@ html.dz .tgl.btn.on {
|
||||
color: #fff;
|
||||
color: var(--btn-1-fg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html.cz .tgl.btn.on {
|
||||
box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
border-bottom: .2em solid #e90;
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
.tgl.btn.on:hover {
|
||||
background: var(--btn-1h-bg);
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
@@ -1653,7 +1703,9 @@ html.cz .tgl.btn.on {
|
||||
color: var(--fg-max);
|
||||
}
|
||||
#tree ul a.hl {
|
||||
color: #fff;
|
||||
color: var(--btn-1-fg);
|
||||
background: #000;
|
||||
background: var(--btn-1-bg);
|
||||
text-shadow: none;
|
||||
}
|
||||
@@ -1688,6 +1740,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
.ntree a:first-child {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
font-size: 1.2em;
|
||||
line-height: 0;
|
||||
}
|
||||
@@ -1715,6 +1768,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
#files th span {
|
||||
position: relative;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files>thead>tr>th.min,
|
||||
#files td.min {
|
||||
@@ -1752,9 +1806,6 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
margin: .7em 0 .7em .5em;
|
||||
padding-left: .5em;
|
||||
}
|
||||
.opwide>div>div>a {
|
||||
line-height: 2em;
|
||||
}
|
||||
.opwide>div>h3 {
|
||||
color: var(--fg-weak);
|
||||
margin: 0 .4em;
|
||||
@@ -1769,6 +1820,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 0;
|
||||
}
|
||||
#thumbs,
|
||||
#au_prescan,
|
||||
#au_fullpre,
|
||||
#au_os_seek,
|
||||
#au_osd_cv,
|
||||
@@ -1776,7 +1828,8 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
opacity: .3;
|
||||
}
|
||||
#griden.on+#thumbs,
|
||||
#au_preload.on+#au_fullpre,
|
||||
#au_preload.on+#au_prescan,
|
||||
#au_preload.on+#au_prescan+#au_fullpre,
|
||||
#au_os_ctl.on+#au_os_seek,
|
||||
#au_os_ctl.on+#au_os_seek+#au_osd_cv,
|
||||
#u2turbo.on+#u2tdate {
|
||||
@@ -1816,6 +1869,11 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
#unpost td:nth-child(3),
|
||||
#unpost td:nth-child(4) {
|
||||
text-align: right;
|
||||
}
|
||||
#shui,
|
||||
#rui {
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
@@ -1831,23 +1889,41 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 1em;
|
||||
z-index: 765;
|
||||
}
|
||||
#shui div+div,
|
||||
#rui div+div {
|
||||
margin-top: 1em;
|
||||
}
|
||||
#shui table,
|
||||
#rui table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#shui button {
|
||||
margin: 0 1em 0 0;
|
||||
}
|
||||
#shui .btn {
|
||||
font-size: 1em;
|
||||
}
|
||||
#shui td {
|
||||
padding: .8em 0;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td {
|
||||
padding: .2em 0 .2em .5em;
|
||||
}
|
||||
#rn_vadv input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td,
|
||||
#shui td input[type="text"],
|
||||
#rui td input[type="text"] {
|
||||
width: 100%;
|
||||
}
|
||||
#shui td.exs input[type="text"] {
|
||||
width: 3em;
|
||||
}
|
||||
#rn_f.m td:first-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
@@ -1906,6 +1982,7 @@ html.y #doc {
|
||||
#doc.mdo {
|
||||
white-space: normal;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
}
|
||||
#doc.prism * {
|
||||
line-height: 1.5em;
|
||||
@@ -1965,6 +2042,7 @@ a.btn,
|
||||
}
|
||||
#hkhelp td:first-child {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
html.noscroll,
|
||||
html.noscroll .sbar {
|
||||
@@ -2174,6 +2252,7 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
#bbox-halp {
|
||||
color: var(--fg-max);
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
position: absolute;
|
||||
top: 0;
|
||||
@@ -2473,6 +2552,7 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
#op_up2k.srch td.prog {
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
font-size: 1em;
|
||||
width: auto;
|
||||
}
|
||||
@@ -2487,6 +2567,7 @@ html.y #bbox-overlay figcaption a {
|
||||
white-space: nowrap;
|
||||
display: inline-block;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#u2etas.o {
|
||||
width: 20em;
|
||||
@@ -2556,6 +2637,7 @@ html.y #bbox-overlay figcaption a {
|
||||
#u2cards span {
|
||||
color: var(--fg-max);
|
||||
font-family: 'scp', monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace;
|
||||
}
|
||||
#u2cards > a:nth-child(4) > span {
|
||||
display: inline-block;
|
||||
@@ -2636,23 +2718,25 @@ html.b #u2conf a.b:hover {
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
cursor: pointer;
|
||||
background: var(--u2-o-bg);
|
||||
border-bottom: .2em solid var(--u2-o-b1);
|
||||
box-shadow: 0 .1em .3em var(--u2-o-sh) inset;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: var(--u2-o-1-bg);
|
||||
border-bottom: .2em solid var(--u2-o-1-b1);
|
||||
box-shadow: 0 .1em .5em var(--u2-o-1-sh);
|
||||
background: var(--btn-1-bg);
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em var(--u2-o-h-sh);
|
||||
border-color: var(--u2-o-h-b1);
|
||||
background: var(--u2-o-h-bg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||
background: var(--u2-o-1h-bg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
@@ -2721,6 +2805,7 @@ html.b #u2conf a.b:hover {
|
||||
}
|
||||
.prog {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#u2tab span.inf,
|
||||
#u2tab span.ok,
|
||||
@@ -3040,18 +3125,30 @@ html.by #u2cards a.act {
|
||||
|
||||
|
||||
|
||||
html.cy #wrap {
|
||||
color: #000;
|
||||
}
|
||||
html.cy .mdo a {
|
||||
background: #f00;
|
||||
}
|
||||
html.cy #wrap,
|
||||
html.cy #acc_info a,
|
||||
html.cy #op_up2k,
|
||||
html.cy #files,
|
||||
html.cy #files a,
|
||||
html.cy #files tbody div a:last-child {
|
||||
color: #000;
|
||||
}
|
||||
html.cy #u2tab a,
|
||||
html.cy #u2cards a {
|
||||
color: #f00;
|
||||
}
|
||||
html.cy #unpost a {
|
||||
color: #ff0;
|
||||
}
|
||||
html.cy #barbuf {
|
||||
filter: hue-rotate(267deg) brightness(0.8) contrast(4);
|
||||
}
|
||||
html.cy #pvol {
|
||||
filter: hue-rotate(4deg) contrast(2.2);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -3129,7 +3226,7 @@ html.d #treepar {
|
||||
margin-top: 1.7em;
|
||||
}
|
||||
}
|
||||
@supports (display: grid) {
|
||||
@supports (display: grid) and (gap: 1em) {
|
||||
#ggrid {
|
||||
display: grid;
|
||||
margin: 0em 0.25em;
|
||||
@@ -3154,3 +3251,24 @@ html.d #treepar {
|
||||
padding: 0.2em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
@keyframes spin { }
|
||||
@keyframes gexit { }
|
||||
@keyframes bounce { }
|
||||
@keyframes bounceFromLeft { }
|
||||
@keyframes bounceFromRight { }
|
||||
|
||||
#ggrid>a:before,
|
||||
#widget.anim,
|
||||
#u2tabw,
|
||||
.dropdesc,
|
||||
.dropdesc b,
|
||||
.dropdesc>div>div {
|
||||
transition: none;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,10 +6,10 @@
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
{%- if css %}
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}_={{ ts }}">
|
||||
{%- endif %}
|
||||
@@ -67,14 +67,14 @@
|
||||
<div id="op_up2k" class="opview"></div>
|
||||
|
||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
|
||||
<div id="tree"></div>
|
||||
|
||||
<div id="wrap">
|
||||
@@ -118,11 +118,11 @@
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||
|
||||
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
</div>
|
||||
@@ -148,7 +148,8 @@
|
||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.className = localStorage.cpp_thm || dtheme;
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || dtheme;
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
@@ -160,3 +161,4 @@
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -6,7 +6,6 @@
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<style>
|
||||
html{font-family:sans-serif}
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
@@ -52,12 +51,13 @@
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
{%- if logues[1] %}
|
||||
<div>{{ logues[1] }}</div><br />
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -25,3 +25,4 @@
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
html.y #helpbox a {
|
||||
@@ -67,6 +68,7 @@ a {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
font-weight: bold;
|
||||
font-size: 1.3em;
|
||||
line-height: .1em;
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
|
||||
{%- endif %}
|
||||
{{ html_head }}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn"></div>
|
||||
@@ -49,7 +49,7 @@
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea autocomplete="off">
|
||||
@@ -125,7 +125,7 @@ write markdown (most html is 🙆 too)
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
@@ -139,16 +139,15 @@ var md_opt = {
|
||||
};
|
||||
|
||||
(function () {
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
var l = window.localStorage,
|
||||
drk = (l && l.light) != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.light = drk? 0:1;
|
||||
try { l.light = drk? 0:1; } catch (ex) { }
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
f();
|
||||
})();
|
||||
@@ -160,4 +159,8 @@ l.light = drk? 0:1;
|
||||
{%- if edit %}
|
||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -216,6 +216,11 @@ function convert_markdown(md_text, dest_dom) {
|
||||
md_html = DOMPurify.sanitize(md_html);
|
||||
}
|
||||
catch (ex) {
|
||||
if (IE) {
|
||||
dest_dom.innerHTML = 'IE cannot into markdown ;_;';
|
||||
return;
|
||||
}
|
||||
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
@@ -507,13 +512,6 @@ dom_navtgl.onclick = function () {
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (!HTTPS && location.hostname != '127.0.0.1') try {
|
||||
ebi('edit2').onclick = function (e) {
|
||||
toast.err(0, "the fancy editor is only available over https");
|
||||
return ev(e);
|
||||
}
|
||||
} catch (ex) { }
|
||||
|
||||
if (sread('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 55em);
|
||||
left: max(0em, calc(100% - 55em));
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
@@ -56,6 +56,7 @@
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
|
||||
@@ -163,7 +163,7 @@ redraw = (function () {
|
||||
dom_sbs.onclick = setsbs;
|
||||
dom_nsbs.onclick = modetoggle;
|
||||
|
||||
onresize();
|
||||
(IE ? modetoggle : onresize)();
|
||||
return onresize;
|
||||
})();
|
||||
|
||||
@@ -368,14 +368,14 @@ function save(e) {
|
||||
|
||||
function save_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
@@ -418,7 +418,7 @@ function run_savechk(lastmod, txt, btn, ntry) {
|
||||
|
||||
function savechk_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
@@ -607,10 +607,10 @@ function md_newline() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
|
||||
m2 = /^[ \t]*[>+*-]{0,2}[ \t]/.exec(ln),
|
||||
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
||||
|
||||
var pre = m2[0];
|
||||
var pre = m2 ? m2[0] : '';
|
||||
if (m1 !== null)
|
||||
pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];
|
||||
|
||||
@@ -933,7 +933,7 @@ var set_lno = (function () {
|
||||
var keydown = function (ev) {
|
||||
if (!ev && window.event) {
|
||||
ev = window.event;
|
||||
if (localStorage.dev_fbw == 1) {
|
||||
if (dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
@@ -1009,7 +1009,7 @@ var set_lno = (function () {
|
||||
md_home(ev.shiftKey);
|
||||
return false;
|
||||
}
|
||||
if (!ev.shiftKey && (ev.code.endsWith("Enter") || kc == 13)) {
|
||||
if (!ev.shiftKey && ((ev.code + '').endsWith("Enter") || kc == 13)) {
|
||||
return md_newline();
|
||||
}
|
||||
if (!ev.shiftKey && kc == 8) {
|
||||
|
||||
@@ -17,6 +17,7 @@ html, body {
|
||||
padding: 0;
|
||||
min-height: 100%;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mw">
|
||||
@@ -37,12 +37,12 @@ var md_opt = {
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
var l = window.localStorage,
|
||||
drk = (l && l.light) != 1,
|
||||
f = function (e) {
|
||||
if (e) drk = !drk;
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
l.light = drk? 0:1;
|
||||
try { l.light = drk? 0:1; } catch (ex) { }
|
||||
};
|
||||
f();
|
||||
return f;
|
||||
@@ -53,4 +53,8 @@ l.light = drk? 0:1;
|
||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -134,14 +134,14 @@ function save(mde) {
|
||||
|
||||
function save_cb() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Error! The file was likely NOT saved.\n\nFailed to parse reply from server:\n\n' + unpre(this.responseText));
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
@@ -180,7 +180,7 @@ function save_cb() {
|
||||
|
||||
function save_chk() {
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\nError ' + this.status + ":\n" + unpre(this.responseText));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
:root {
|
||||
--font-main: sans-serif;
|
||||
--font-serif: serif;
|
||||
--font-mono: 'scp';
|
||||
}
|
||||
html,body,tr,th,td,#files,a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
@@ -10,6 +15,7 @@ html {
|
||||
color: #ccc;
|
||||
background: #333;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
text-shadow: 1px 1px 0px #000;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
@@ -23,6 +29,7 @@ html, body {
|
||||
}
|
||||
pre {
|
||||
font-family: monospace, monospace;
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
|
||||
@@ -6,9 +6,9 @@
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -46,6 +46,10 @@
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
</html>
|
||||
|
||||
|
||||
82  copyparty/web/shares.css  Normal file
@@ -0,0 +1,82 @@
|
||||
html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
margin: 2em auto;
|
||||
padding: 0 1em 3em 1em;
|
||||
line-height: 2.3em;
|
||||
}
|
||||
#wrap>span {
|
||||
margin: 0 0 0 1em;
|
||||
border-bottom: 1px solid #999;
|
||||
}
|
||||
li {
|
||||
margin: 1em 0;
|
||||
}
|
||||
a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .6em;
|
||||
margin: 0 .3em;
|
||||
}
|
||||
td a {
|
||||
margin: 0;
|
||||
}
|
||||
#w {
|
||||
color: #fff;
|
||||
background: #940;
|
||||
border-color: #b70;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
left: .2em;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
position: relative;
|
||||
}
|
||||
th {
|
||||
top: -1px;
|
||||
position: sticky;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
td, th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td+td+td+td+td+td+td+td {
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.z {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.z a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.z th {
|
||||
background: #222;
|
||||
}
|
||||
html.bz {
|
||||
color: #bbd;
|
||||
background: #11121d;
|
||||
}
|
||||
76  copyparty/web/shares.html  Normal file
@@ -0,0 +1,76 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a id="a" href="{{ r }}/?shares" class="af">refresh</a>
|
||||
<a id="a" href="{{ r }}/?h" class="af">control-panel</a>
|
||||
|
||||
<span>axs = perms (read,write,move,delet)</span>
|
||||
<span>nf = numFiles (0=dir)</span>
|
||||
<span>min/hrs = time left</span>
|
||||
|
||||
<table id="tab"><thead><tr>
|
||||
<th>delete</th>
|
||||
<th>sharekey</th>
|
||||
<th>pw</th>
|
||||
<th>source</th>
|
||||
<th>axs</th>
|
||||
<th>nf</th>
|
||||
<th>user</th>
|
||||
<th>created</th>
|
||||
<th>expires</th>
|
||||
<th>min</th>
|
||||
<th>hrs</th>
|
||||
<th>add time</th>
|
||||
</tr></thead><tbody>
|
||||
{% for k, pw, vp, pr, st, un, t0, t1 in rows %}
|
||||
<tr>
|
||||
<td><a href="#" k="{{ k }}">delete</a></td>
|
||||
<td><a href="{{ r }}{{ shr }}{{ k }}">{{ k }}</a></td>
|
||||
<td>{{ pw }}</td>
|
||||
<td><a href="{{ r }}/{{ vp|e }}">{{ vp|e }}</a></td>
|
||||
<td>{{ pr }}</td>
|
||||
<td>{{ st }}</td>
|
||||
<td>{{ un|e }}</td>
|
||||
<td>{{ t0 }}</td>
|
||||
<td>{{ t1 }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 60) | round(1) }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 3600) | round(1) }}</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody></table>
|
||||
{% if not rows %}
|
||||
(you don't have any active shares btw)
|
||||
{% endif %}
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
shr="{{ shr }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/shares.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
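The min/hrs columns in the new shares.html template reduce to one expression over unix timestamps; the same logic as plain Python, for reference (t1 is the share's expiry timestamp, falsy meaning it never expires):

# same arithmetic as the "min" / "hrs" columns in shares.html above
import time

def time_left(t1, now=None, unit=60):
    now = time.time() if now is None else now
    if not t1:
        return "inf"
    if t1 < now:
        return "dead"
    return round((t1 - now) / unit, 1)

# time_left(t1) -> minutes remaining; time_left(t1, unit=3600) -> hours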
56  copyparty/web/shares.js  Normal file
@@ -0,0 +1,56 @@
|
||||
var t = QSA('a[k]');
|
||||
for (var a = 0; a < t.length; a++)
|
||||
t[a].onclick = rm;
|
||||
|
||||
function rm() {
|
||||
var u = SR + shr + uricom_enc(this.getAttribute('k')) + '?eshare=rm',
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function bump() {
|
||||
var k = this.closest('tr').getElementsByTagName('a')[0].getAttribute('k'),
|
||||
u = SR + shr + uricom_enc(k) + '?eshare=' + this.value,
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function cb() {
|
||||
if (this.status !== 200)
|
||||
return modal.alert('<h6>server error</h6>' + esc(unpre(this.responseText)));
|
||||
|
||||
document.location = '?shares';
|
||||
}
|
||||
|
||||
(function() {
|
||||
var tab = ebi('tab').tBodies[0],
|
||||
tr = Array.prototype.slice.call(tab.rows, 0);
|
||||
|
||||
var buf = [];
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
for (var b = 7; b < 9; b++)
|
||||
buf.push(parseInt(tr[a].cells[b].innerHTML));
|
||||
|
||||
var ibuf = 0;
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
for (var b = 7; b < 9; b++) {
|
||||
var v = buf[ibuf++];
|
||||
tr[a].cells[b].innerHTML =
|
||||
v ? unix2iso(v).replace(' ', ', ') : 'never';
|
||||
}
|
||||
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
tr[a].cells[11].innerHTML =
|
||||
'<button value="1">1min</button> ' +
|
||||
'<button value="60">1h</button>';
|
||||
|
||||
var btns = QSA('td button'), aa = btns.length;
|
||||
for (var a = 0; a < aa; a++)
|
||||
btns[a].onclick = bump;
|
||||
})();
|
||||
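shares.js above drives the share-management endpoint with bare POST requests; a hypothetical command-line counterpart in Python, assuming a reachable copyparty instance (base_url, the share prefix and the share key are placeholders, and authentication is omitted):

# hypothetical client for the ?eshare endpoint used by shares.js above;
# base_url / shr / share_key are placeholders, auth (password cookie) omitted
import urllib.parse
import urllib.request

def eshare(base_url, shr, share_key, action):
    # action: "rm" to delete the share, or a number of minutes to add
    url = "{}{}{}?eshare={}".format(
        base_url, shr, urllib.parse.quote(share_key, safe=""), action)
    req = urllib.request.Request(url, method="POST")
    with urllib.request.urlopen(req) as r:
        return r.status

# eshare("http://127.0.0.1:3923", "/share/", "abc123", 60)    # extend by 1h
# eshare("http://127.0.0.1:3923", "/share/", "abc123", "rm")  # delete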
@@ -2,6 +2,7 @@ html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
@@ -127,6 +128,7 @@ pre, code {
|
||||
color: #480;
|
||||
background: #fff;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
border: 1px solid rgba(128,128,128,0.3);
|
||||
border-radius: .2em;
|
||||
padding: .15em .2em;
|
||||
@@ -180,19 +182,35 @@ html.z a.g {
|
||||
border-color: #af4;
|
||||
box-shadow: 0 .3em 1em #7d0;
|
||||
}
|
||||
form {
|
||||
line-height: 2.5em;
|
||||
}
|
||||
#x,
|
||||
input {
|
||||
color: #a50;
|
||||
background: #fff;
|
||||
border: 1px solid #a50;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
border-radius: .3em;
|
||||
padding: .25em .6em;
|
||||
margin: 0 .3em 0 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
input::placeholder {
|
||||
font-size: 1.2em;
|
||||
font-style: italic;
|
||||
letter-spacing: .04em;
|
||||
opacity: 0.64;
|
||||
color: #930;
|
||||
}
|
||||
#x,
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
border-color: #c2c;
|
||||
}
|
||||
html.z input::placeholder {
|
||||
color: #fff;
|
||||
}
|
||||
html.z .num {
|
||||
border-color: #777;
|
||||
}
|
||||
|
||||
@@ -6,14 +6,15 @@
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
{%- if not in_shr %}
|
||||
<a id="a" href="{{ r }}/?h" class="af">refresh</a>
|
||||
<a id="v" href="{{ r }}/?hc" class="af">connect</a>
|
||||
|
||||
@@ -21,7 +22,8 @@
|
||||
<p id="b">howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname|e }}</strong></p>
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
@@ -76,29 +78,55 @@
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">client config:</h1>
|
||||
{%- if in_shr %}
|
||||
<h1 id="z">unlock this share:</h1>
|
||||
<div>
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" id="ls" value="Unlock" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- else %}
|
||||
<h1 id="l">login for more:</h1>
|
||||
<div>
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" id="ls" value="Login" />
|
||||
{% if chpw %}
|
||||
<a id="x" href="#">change password</a>
|
||||
{% endif %}
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">other stuff:</h1>
|
||||
<ul>
|
||||
{%- if this.uname != '*' and this.args.shr %}
|
||||
<li><a id="y" href="{{ r }}/?shares">edit shares</a></li>
|
||||
{% endif %}
|
||||
|
||||
{% if k304 or k304vis %}
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
|
||||
{% endif %}
|
||||
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
<div>
|
||||
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
{%- if not this.args.nb %}
|
||||
@@ -110,10 +138,15 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.cpp_thm||"{{ this.args.theme }}";
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -6,10 +6,10 @@ var Ls = {
|
||||
"d1": "tilstand",
|
||||
"d2": "vis tilstanden til alle tråder",
|
||||
"e1": "last innst.",
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer",
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
|
||||
"f1": "du kan betrakte:",
|
||||
"g1": "du kan laste opp til:",
|
||||
"cc1": "klient-konfigurasjon",
|
||||
"cc1": "brytere og sånt",
|
||||
"h1": "skru av k304",
|
||||
"i1": "skru på k304",
|
||||
"j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper mot visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
|
||||
@@ -17,9 +17,9 @@ var Ls = {
|
||||
"l1": "logg inn:",
|
||||
"m1": "velkommen tilbake,",
|
||||
"n1": "404: filen finnes ikke ┐( ´ -`)┌",
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"p1": "403: tilgang nektet ~┻━┻",
|
||||
"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"q1": 'prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"r1": "gå hjem",
|
||||
".s1": "kartlegg",
|
||||
"t1": "handling",
|
||||
@@ -27,15 +27,65 @@ var Ls = {
|
||||
"v1": "koble til",
|
||||
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
|
||||
"w1": "bytt til https",
|
||||
"x1": "bytt passord",
|
||||
"y1": "dine delinger",
|
||||
"z1": "lås opp område",
|
||||
"ta1": "du må skrive et nytt passord først",
|
||||
"ta2": "gjenta for å bekrefte nytt passord:",
|
||||
"ta3": "fant en skrivefeil; vennligst prøv igjen",
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
|
||||
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
|
||||
"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
|
||||
"ta1": "fill in your new password first",
|
||||
"ta2": "repeat to confirm new password:",
|
||||
"ta3": "found a typo; please try again",
|
||||
},
|
||||
|
||||
"chi": {
|
||||
"a1": "更新",
|
||||
"b1": "你好 <small>(你尚未登录)</small>",
|
||||
"c1": "登出",
|
||||
"d1": "状态",
|
||||
"d2": "显示所有活动线程的状态",
|
||||
"e1": "重新加载配置",
|
||||
"e2": "重新加载配置文件(账户/卷/卷标),$N并重新扫描所有 e2ds 卷$N$N注意:任何全局设置的更改$N都需要完全重启才能生效",
|
||||
"f1": "你可以查看:",
|
||||
"g1": "你可以上传到:",
|
||||
"cc1": "开关等",
|
||||
"h1": "关闭 k304",
|
||||
"i1": "开启 k304",
|
||||
"j1": "k304 会在每个 HTTP 304 时断开连接。这有助于避免某些代理服务器卡住或突然停止加载页面,但也会显著降低性能。",
|
||||
"k1": "重置设置",
|
||||
"l1": "登录:",
|
||||
"m1": "欢迎回来,",
|
||||
"n1": "404: 文件不存在 ┐( ´ -`)┌",
|
||||
"o1": '或者你可能没有权限?尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"p1": "403: 访问被拒绝 ~┻━┻",
|
||||
"q1": '尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"r1": "回家",
|
||||
".s1": "映射",
|
||||
"t1": "操作",
|
||||
"u2": "自上次服务器写入的时间$N( 上传 / 重命名 / ... )$N$N17d = 17 天$N1h23 = 1 小时 23 分钟$N4m56 = 4 分钟 56 秒",
|
||||
"v1": "连接",
|
||||
"v2": "将此服务器用作本地硬盘$N$N警告:这将显示你的密码!",
|
||||
"w1": "切换到 https",
|
||||
"x1": "更改密码",
|
||||
"y1": "你的分享",
|
||||
"z1": "解锁区域",
|
||||
"ta1": "请先输入新密码",
|
||||
"ta2": "重复以确认新密码:",
|
||||
"ta3": "发现拼写错误;请重试",
|
||||
}
|
||||
},
|
||||
d = Ls[sread("cpp_lang", ["eng", "nor"]) || lang] || Ls.eng || Ls.nor;
|
||||
};
|
||||
|
||||
if (window.langmod)
|
||||
langmod();
|
||||
|
||||
var d = Ls[sread("cpp_lang", Object.keys(Ls)) || lang] ||
|
||||
Ls.eng || Ls.nor || Ls.chi;
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
@@ -49,6 +99,15 @@ for (var k in (d || {})) {
|
||||
o[a].setAttribute("tt", d[k]);
|
||||
}
|
||||
|
||||
try {
|
||||
if (is_idp) {
|
||||
var z = ['#l+div', '#l', '#c'];
|
||||
for (var a = 0; a < z.length; a++)
|
||||
QS(z[a]).style.display = 'none';
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
tt.init();
|
||||
var o = QS('input[name="cppwd"]');
|
||||
if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
|
||||
@@ -57,3 +116,44 @@ if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
|
||||
o = ebi('u');
|
||||
if (o && /[0-9]+$/.exec(o.innerHTML))
|
||||
o.innerHTML = shumantime(o.innerHTML);
|
||||
|
||||
ebi('uhash').value = '' + location.hash;
|
||||
|
||||
(function() {
|
||||
if (!ebi('x'))
|
||||
return;
|
||||
|
||||
var pwi = ebi('lp');
|
||||
|
||||
function redo(msg) {
|
||||
modal.alert(msg, function() {
|
||||
pwi.value = '';
|
||||
pwi.focus();
|
||||
});
|
||||
}
|
||||
function mok(v) {
|
||||
if (v !== pwi.value)
|
||||
return redo(d.ta3);
|
||||
|
||||
pwi.setAttribute('name', 'pw');
|
||||
ebi('la').value = 'chpw';
|
||||
ebi('lf').submit();
|
||||
}
|
||||
function stars() {
|
||||
var m = ebi('modali');
|
||||
function enstars(n) {
|
||||
setTimeout(function() { m.value = ''; }, n);
|
||||
}
|
||||
m.setAttribute('type', 'password');
|
||||
enstars(17);
|
||||
enstars(32);
|
||||
enstars(69);
|
||||
}
|
||||
ebi('x').onclick = function (e) {
|
||||
ev(e);
|
||||
if (!pwi.value)
|
||||
return redo(d.ta1);
|
||||
|
||||
modal.prompt(d.ta2, "y", mok, null, stars);
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -6,11 +6,11 @@
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -56,7 +56,7 @@
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
|
||||
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<pre>
|
||||
net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
|
||||
@@ -64,16 +64,7 @@
|
||||
</div>
|
||||
|
||||
<div class="os lin">
|
||||
<pre>
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>make it automount on boot:</p>
|
||||
<pre>
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
|
||||
</pre>
|
||||
<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
|
||||
<p>rclone (v1.63 or later) is recommended:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
@@ -85,6 +76,16 @@
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>alternatively use davfs2 (requires root, is slower, forgets lastmodified-timestamp on upload):</p>
|
||||
<pre>
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>make davfs2 automount on boot:</p>
|
||||
<pre>
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
|
||||
</pre>
|
||||
<p>or the emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -104,7 +105,7 @@
|
||||
<pre>
|
||||
http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
|
||||
</pre>
|
||||
|
||||
|
||||
{% if s %}
|
||||
<p><em>replace <code>https</code> with <code>http</code> if it doesn't work</em></p>
|
||||
{% endif %}
|
||||
@@ -238,10 +239,15 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.cpp_thm||"{{ args.theme }}";
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
:root {
|
||||
--font-main: sans-serif;
|
||||
--font-serif: serif;
|
||||
--font-mono: 'scp';
|
||||
|
||||
--fg: #ccc;
|
||||
--fg-max: #fff;
|
||||
--bg-u2: #2b2b2b;
|
||||
@@ -105,6 +109,9 @@ html {
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
#toast.hide {
|
||||
display: none;
|
||||
}
|
||||
#toast.vis {
|
||||
right: 1.3em;
|
||||
transform: inherit;
|
||||
@@ -144,6 +151,10 @@ html {
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#toast code {
|
||||
padding: 0 .2em;
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
#tth {
|
||||
color: #fff;
|
||||
background: #111;
|
||||
@@ -173,6 +184,7 @@ html {
|
||||
padding: 1.5em 2em;
|
||||
border-width: .5em 0;
|
||||
}
|
||||
.logue code,
|
||||
#modalc code,
|
||||
#tt code {
|
||||
color: #eee;
|
||||
@@ -253,7 +265,11 @@ html.y #tth {
|
||||
box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
|
||||
max-width: 50em;
|
||||
max-height: 30em;
|
||||
overflow: auto;
|
||||
overflow-x: auto;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
#modalc.yk {
|
||||
overflow-y: auto;
|
||||
}
|
||||
#modalc td {
|
||||
text-align: unset;
|
||||
@@ -369,8 +385,10 @@ html.y textarea:focus {
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo code[class*="language-"],
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
@@ -440,6 +458,7 @@ html.y textarea:focus {
|
||||
}
|
||||
.mdo blockquote {
|
||||
font-family: serif;
|
||||
font-family: var(--font-serif), serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
@@ -573,3 +592,11 @@ hr {
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
#toast,
|
||||
#toast a#toastc,
|
||||
#tt {
|
||||
transition: none;
|
||||
}
|
||||
}
|
||||
@@ -17,7 +17,7 @@ function goto_up2k() {
|
||||
var up2k = null,
|
||||
up2k_hooks = [],
|
||||
hws = [],
|
||||
sha_js = window.WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||
sha_js = WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
||||
|
||||
try {
|
||||
@@ -152,12 +152,13 @@ function U2pvis(act, btns, uc, st) {
|
||||
r.mod0 = null;
|
||||
|
||||
var markup = {
|
||||
'404': '<span class="err">404</span>',
|
||||
'ERROR': '<span class="err">ERROR</span>',
|
||||
'OS-error': '<span class="err">OS-error</span>',
|
||||
'found': '<span class="inf">found</span>',
|
||||
'YOLO': '<span class="inf">YOLO</span>',
|
||||
'done': '<span class="ok">done</span>',
|
||||
'404': '<span class="err">' + L.utl_404 + '</span>',
|
||||
'ERROR': '<span class="err">' + L.utl_err + '</span>',
|
||||
'OS-error': '<span class="err">' + L.utl_oserr + '</span>',
|
||||
'found': '<span class="inf">' + L.utl_found + '</span>',
|
||||
'defer': '<span class="inf">' + L.utl_defer + '</span>',
|
||||
'YOLO': '<span class="inf">' + L.utl_yolo + '</span>',
|
||||
'done': '<span class="ok">' + L.utl_done + '</span>',
|
||||
};
|
||||
|
||||
r.addfile = function (entry, sz, draw) {
|
||||
@@ -431,7 +432,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
if (sread('potato') === null) {
|
||||
btn.click();
|
||||
toast.inf(30, L.u_gotpot);
|
||||
localStorage.removeItem('potato');
|
||||
sdrop('potato');
|
||||
}
|
||||
|
||||
u2f.appendChild(ode);
|
||||
@@ -445,9 +446,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
return;
|
||||
|
||||
r.npotato = 0;
|
||||
var html = [
|
||||
"<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>".format(
|
||||
r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
|
||||
var html = [L.u_pott.format(r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
|
||||
|
||||
while (r.head < r.tab.length && has(["ok", "ng"], r.tab[r.head].in))
|
||||
r.head++;
|
||||
@@ -602,7 +601,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
if (nf < 9000)
|
||||
return go();
|
||||
|
||||
modal.confirm('about to show ' + nf + ' files\n\nthis may crash your browser, are you sure?', go, null);
|
||||
modal.confirm(L.u_bigtab.format(nf), go, null);
|
||||
};
|
||||
}
|
||||
|
||||
@@ -658,7 +657,9 @@ function Donut(uc, st) {
|
||||
}
|
||||
|
||||
function pos() {
|
||||
return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
|
||||
return uc.fsearch ?
|
||||
Math.max(st.bytes.hashed, st.bytes.finished) :
|
||||
st.bytes.inflight + st.bytes.finished;
|
||||
}
|
||||
|
||||
r.on = function (ya) {
|
||||
@@ -717,7 +718,7 @@ function Donut(uc, st) {
|
||||
sfx();
|
||||
|
||||
// firefox may forget that filedrops are user-gestures so it can skip this:
|
||||
if (uc.upnag && window.Notification && Notification.permission == 'granted')
|
||||
if (uc.upnag && Notification && Notification.permission == 'granted')
|
||||
new Notification(uc.nagtxt);
|
||||
}
|
||||
|
||||
@@ -779,8 +780,8 @@ function up2k_init(subtle) {
|
||||
};
|
||||
|
||||
setTimeout(function () {
|
||||
if (window.WebAssembly && !hws.length)
|
||||
fetch(SR + '/.cpr/w.hash.js' + CB);
|
||||
if (WebAssembly && !hws.length)
|
||||
fetch(SR + '/.cpr/w.hash.js?_=' + TS);
|
||||
}, 1000);
|
||||
|
||||
function showmodal(msg) {
|
||||
@@ -852,7 +853,8 @@ function up2k_init(subtle) {
|
||||
|
||||
setmsg(suggest_up2k, 'msg');
|
||||
|
||||
var parallel_uploads = icfg_get('nthread'),
|
||||
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||
stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz.split(',')[1]),
|
||||
uc = {},
|
||||
fdom_ctr = 0,
|
||||
biggest_file = 0;
|
||||
@@ -861,6 +863,7 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||
bcfg_bind(uc, 'umod', 'umod', false, null, false);
|
||||
bcfg_bind(uc, 'u2ts', 'u2ts', !u2ts.endsWith('u'), set_u2ts, false);
|
||||
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||
|
||||
@@ -868,7 +871,7 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
|
||||
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
|
||||
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
||||
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||
bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
||||
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
||||
|
||||
@@ -894,6 +897,7 @@ function up2k_init(subtle) {
|
||||
"bytes": {
|
||||
"total": 0,
|
||||
"hashed": 0,
|
||||
"inflight": 0,
|
||||
"uploaded": 0,
|
||||
"finished": 0
|
||||
},
|
||||
@@ -1032,7 +1036,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) {
|
||||
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
|
||||
return modal.alert('your browser does not support drag-and-drop uploading');
|
||||
return modal.alert(L.u_nodrop);
|
||||
}
|
||||
if (btn)
|
||||
return;
|
||||
@@ -1099,7 +1103,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
if (!good_files.length && bad_files.length)
|
||||
return toast.err(30, "that's not a folder!\n\nyour browser is too old,\nplease try dragdrop instead");
|
||||
return toast.err(30, L.u_notdir);
|
||||
|
||||
return read_dirs(null, [], [], good_files, nil_files, bad_files);
|
||||
}
|
||||
@@ -1117,7 +1121,7 @@ function up2k_init(subtle) {
|
||||
if (err)
|
||||
return modal.alert('sorry, ' + err);
|
||||
|
||||
toast.inf(0, 'Scanning files...');
|
||||
toast.inf(0, L.u_scan);
|
||||
|
||||
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
|
||||
tgl_fsearch();
|
||||
@@ -1205,7 +1209,7 @@ function up2k_init(subtle) {
|
||||
match = false;
|
||||
|
||||
if (match) {
|
||||
var msg = ['directory iterator got stuck on the following {0} items; good chance your browser is about to spinlock:<ul>'.format(missing.length)];
|
||||
var msg = [L.u_dirstuck.format(missing.length) + '<ul>'];
|
||||
for (var a = 0; a < Math.min(20, missing.length); a++)
|
||||
msg.push('<li>' + esc(missing[a]) + '</li>');
|
||||
|
||||
@@ -1276,7 +1280,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function gotallfiles(good_files, nil_files, bad_files) {
|
||||
if (toast.txt == 'Scanning files...')
|
||||
if (toast.txt == L.u_scan)
|
||||
toast.hide();
|
||||
|
||||
if (uc.fsearch && !uc.turbo)
|
||||
@@ -1332,7 +1336,8 @@ function up2k_init(subtle) {
|
||||
return modal.confirm(msg.join('') + '</ul>', function () {
|
||||
start_actx();
|
||||
up_them(good_files);
|
||||
toast.inf(15, L.u_unpt, L.u_unpt);
|
||||
if (have_up2k_idx)
|
||||
toast.inf(15, L.u_unpt, L.u_unpt);
|
||||
}, null);
|
||||
|
||||
up_them(good_files);
|
||||
@@ -1344,9 +1349,9 @@ function up2k_init(subtle) {
|
||||
var evpath = get_evpath(),
|
||||
draw_each = good_files.length < 50;
|
||||
|
||||
if (window.WebAssembly && !hws.length) {
|
||||
if (WebAssembly && !hws.length) {
|
||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||
hws.push(new Worker(SR + '/.cpr/w.hash.js' + CB));
|
||||
hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));
|
||||
|
||||
console.log(hws.length + " hashers");
|
||||
}
|
||||
@@ -1391,6 +1396,8 @@ function up2k_init(subtle) {
|
||||
entry.rand = true;
|
||||
entry.name = 'a\n' + entry.name;
|
||||
}
|
||||
else if (uc.umod)
|
||||
entry.umod = true;
|
||||
|
||||
if (biggest_file < entry.size)
|
||||
biggest_file = entry.size;
|
||||
@@ -1429,7 +1436,7 @@ function up2k_init(subtle) {
|
||||
if (!actx || actx.state != 'suspended' || toast.visible)
|
||||
return;
|
||||
|
||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
|
||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">" + L.u_actx + "</div>");
|
||||
}, 500);
|
||||
}
|
||||
|
||||
@@ -1471,7 +1478,7 @@ function up2k_init(subtle) {
|
||||
ev(e);
|
||||
var txt = linklist();
|
||||
cliptxt(txt + '\n', function () {
|
||||
toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
|
||||
toast.inf(5, un_clip.format(txt.split('\n').length));
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1539,17 +1546,21 @@ function up2k_init(subtle) {
|
||||
if (uc.fsearch)
|
||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||
}
|
||||
|
||||
var b_up = st.bytes.inflight + st.bytes.uploaded,
|
||||
b_fin = st.bytes.inflight + st.bytes.finished;
|
||||
|
||||
if (nsend) {
|
||||
st.time.uploading += td;
|
||||
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
||||
t.push(['u2etau', b_up, b_fin, st.time.uploading]);
|
||||
}
|
||||
if ((nhash || nsend) && !uc.fsearch) {
|
||||
if (!st.bytes.finished) {
|
||||
if (!b_fin) {
|
||||
ebi('u2etat').innerHTML = L.u_etaprep;
|
||||
}
|
||||
else {
|
||||
st.time.busy += td;
|
||||
t.push(['u2etat', st.bytes.finished, st.bytes.finished, st.time.busy]);
|
||||
t.push(['u2etat', b_fin, b_fin, st.time.busy]);
|
||||
}
|
||||
}
|
||||
for (var a = 0; a < t.length; a++) {
|
||||
@@ -1713,8 +1724,6 @@ function up2k_init(subtle) {
|
||||
ebi('u2etas').style.textAlign = 'left';
|
||||
}
|
||||
etafun();
|
||||
if (pvis.act == 'bz')
|
||||
pvis.changecard('bz');
|
||||
}
|
||||
|
||||
if (flag) {
|
||||
@@ -1729,16 +1738,13 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
|
||||
var mou_ikkai = false;
|
||||
|
||||
if (st.busy.handshake.length &&
|
||||
st.busy.handshake[0].t_busied < now - 30 * 1000
|
||||
) {
|
||||
console.log("retrying stuck handshake");
|
||||
var t = st.busy.handshake.shift();
|
||||
st.todo.handshake.unshift(t);
|
||||
if (st.bytes.inflight && (st.bytes.inflight < 0 || !st.busy.upload.length)) {
|
||||
console.log('insane inflight ' + st.bytes.inflight);
|
||||
st.bytes.inflight = 0;
|
||||
}
|
||||
|
||||
var mou_ikkai = false;
|
||||
|
||||
var nprev = -1;
|
||||
for (var a = 0; a < st.todo.upload.length; a++) {
|
||||
var nf = st.todo.upload[a].nfile;
|
||||
@@ -1850,6 +1856,9 @@ function up2k_init(subtle) {
|
||||
timer.rm(donut.do);
|
||||
ebi('u2tabw').style.minHeight = '0px';
|
||||
utw_minh = 0;
|
||||
|
||||
if (pvis.act == 'bz')
|
||||
pvis.changecard('bz');
|
||||
}
|
||||
|
||||
function chill(t) {
|
||||
@@ -2168,7 +2177,7 @@ function up2k_init(subtle) {
|
||||
st.busy.head.push(t);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
xhr.onerror = xhr.ontimeout = function () {
|
||||
console.log('head onerror, retrying', t.name, t);
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_enethd + "\n\nfile: " + t.name, t);
|
||||
@@ -2212,6 +2221,7 @@ function up2k_init(subtle) {
|
||||
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
};
|
||||
|
||||
xhr.timeout = 34000;
|
||||
xhr.open('HEAD', t.purl + uricom_enc(t.name), true);
|
||||
xhr.send();
|
||||
}
|
||||
@@ -2236,8 +2246,11 @@ function up2k_init(subtle) {
|
||||
if (keepalive)
|
||||
console.log("sending keepalive handshake", t.name, t);
|
||||
|
||||
if (!t.srch && !t.t_handshake)
|
||||
pvis.seth(t.n, 2, L.u_hs);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
xhr.onerror = xhr.ontimeout = function () {
|
||||
if (t.t_busied != me) // t.done ok
|
||||
return console.log('zombie handshake onerror', t.name, t);
|
||||
|
||||
@@ -2247,6 +2260,7 @@ function up2k_init(subtle) {
|
||||
console.log('handshake onerror, retrying', t.name, t);
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
t.keepalive = keepalive;
|
||||
};
|
||||
var orz = function (e) {
|
||||
@@ -2254,16 +2268,27 @@ function up2k_init(subtle) {
|
||||
return console.log('zombie handshake onload', t.name, t);
|
||||
|
||||
if (xhr.status == 200) {
|
||||
try {
|
||||
var response = JSON.parse(xhr.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
apop(st.busy.handshake, t);
|
||||
st.todo.handshake.unshift(t);
|
||||
t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
|
||||
var txt = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
|
||||
return toast.err(0, txt + '\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
|
||||
}
|
||||
|
||||
t.t_handshake = Date.now();
|
||||
if (keepalive) {
|
||||
apop(st.busy.handshake, t);
|
||||
tasker();
|
||||
return;
|
||||
}
|
||||
|
||||
if (toast.tag === t)
|
||||
toast.ok(5, L.u_fixed);
|
||||
|
||||
var response = JSON.parse(xhr.responseText);
|
||||
if (!response.name) {
|
||||
var msg = '',
|
||||
smsg = '';
|
||||
@@ -2353,11 +2378,39 @@ function up2k_init(subtle) {
|
||||
var arr = st.todo.upload,
|
||||
sort = arr.length && arr[arr.length - 1].nfile > t.n;
|
||||
|
||||
for (var a = 0; a < t.postlist.length; a++)
|
||||
if (!t.stitch_sz) {
|
||||
// keep all connections busy
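// bpc = bytes still to upload per connection; grow the candidate join-size (ocs)
// in accelerating steps while it stays below bpc so every connection keeps getting
// work, then cap the result at the configured stitch target (in MiB)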
|
||||
var bpc = (st.bytes.total - st.bytes.finished) / (parallel_uploads || 1),
|
||||
ocs = 1024 * 1024,
|
||||
stp = 1024 * 512,
|
||||
ccs = ocs;
|
||||
while (ccs < bpc) {
|
||||
ocs = ccs;
|
||||
ccs += stp; if (ccs < bpc) ocs = ccs;
|
||||
ccs += stp; stp *= 2;
|
||||
}
|
||||
ocs = Math.floor(ocs / 1024 / 1024);
|
||||
t.stitch_sz = Math.min(ocs, stitch_tgt);
|
||||
}
|
||||
|
||||
for (var a = 0; a < t.postlist.length; a++) {
|
||||
var nparts = [], tbytes = 0, stitch = t.stitch_sz;
|
||||
if (t.nojoin && t.nojoin - t.postlist.length < 6)
|
||||
stitch = 1;
|
||||
|
||||
--a;
|
||||
for (var b = 0; b < stitch; b++) {
|
||||
nparts.push(t.postlist[++a]);
|
||||
tbytes += chunksize;
|
||||
if (tbytes + chunksize > stitch * 1024 * 1024 || t.postlist[a + 1] - t.postlist[a] !== 1)
|
||||
break;
|
||||
}
|
||||
arr.push({
|
||||
'nfile': t.n,
|
||||
'npart': t.postlist[a]
|
||||
'nparts': nparts
|
||||
});
|
||||
}
|
||||
t.nojoin = 0;
|
||||
|
||||
msg = null;
|
||||
done = false;
|
||||
@@ -2366,7 +2419,7 @@ function up2k_init(subtle) {
|
||||
arr.sort(function (a, b) {
|
||||
return a.nfile < b.nfile ? -1 :
|
||||
/* */ a.nfile > b.nfile ? 1 :
|
||||
a.npart < b.npart ? -1 : 1;
|
||||
/* */ a.nparts[0] < b.nparts[0] ? -1 : 1;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2402,6 +2455,7 @@ function up2k_init(subtle) {
|
||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||
|
||||
var err = "",
|
||||
cls = "ERROR",
|
||||
rsp = unpre(xhr.responseText),
|
||||
ofs = rsp.lastIndexOf('\nURL: ');
|
||||
|
||||
@@ -2431,6 +2485,8 @@ function up2k_init(subtle) {
|
||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||
t.rechecks = 0;
|
||||
t.want_recheck = true;
|
||||
err = L.u_dupdefer;
|
||||
cls = 'defer';
|
||||
}
|
||||
}
|
||||
if (rsp.indexOf('server HDD is full') + 1)
|
||||
@@ -2440,7 +2496,7 @@ function up2k_init(subtle) {
|
||||
if (!t.t_uploading)
|
||||
st.bytes.finished += t.size;
|
||||
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 1, cls);
|
||||
pvis.seth(t.n, 2, err);
|
||||
pvis.move(t.n, 'ng');
|
||||
|
||||
@@ -2467,9 +2523,13 @@ function up2k_init(subtle) {
|
||||
req.srch = 1;
|
||||
else if (t.rand)
|
||||
req.rand = true;
|
||||
else if (t.umod)
|
||||
req.umod = true;
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.timeout = 42000 + (t.srch || t.t_uploaded ? 0 :
|
||||
(t.size / (1048 * 20))); // safededup 20M/s hdd
|
||||
xhr.send(JSON.stringify(req));
|
||||
}
|
||||
|
||||
@@ -2511,7 +2571,10 @@ function up2k_init(subtle) {
|
||||
function exec_upload() {
|
||||
var upt = st.todo.upload.shift(),
|
||||
t = st.files[upt.nfile],
|
||||
npart = upt.npart,
|
||||
nparts = upt.nparts,
|
||||
pcar = nparts[0],
|
||||
pcdr = nparts[nparts.length - 1],
|
||||
snpart = pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr),
|
||||
tries = 0;
|
||||
|
||||
if (t.done)
|
||||
@@ -2526,24 +2589,30 @@ function up2k_init(subtle) {
|
||||
pvis.seth(t.n, 1, "🚀 send");
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
car = npart * chunksize,
|
||||
cdr = car + chunksize;
|
||||
car = pcar * chunksize,
|
||||
cdr = (pcdr + 1) * chunksize;
|
||||
|
||||
if (cdr >= t.size)
|
||||
cdr = t.size;
|
||||
|
||||
var orz = function (xhr) {
|
||||
st.bytes.inflight -= xhr.bsent;
|
||||
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||
if (txt.indexOf('upload blocked by x') + 1) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
for (var a = pcar; a <= pcdr; a++)
|
||||
apop(t.postlist, a);
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 2, txt.split(/\n/)[0]);
|
||||
pvis.move(t.n, 'ng');
|
||||
return;
|
||||
}
|
||||
if (xhr.status == 200) {
|
||||
pvis.prog(t, npart, cdr - car);
|
||||
var bdone = cdr - car;
|
||||
for (var a = pcar; a <= pcdr; a++) {
|
||||
pvis.prog(t, a, Math.min(bdone, chunksize));
|
||||
bdone -= chunksize;
|
||||
}
|
||||
st.bytes.finished += cdr - car;
|
||||
st.bytes.uploaded += cdr - car;
|
||||
t.bytes_uploaded += cdr - car;
|
||||
@@ -2552,18 +2621,21 @@ function up2k_init(subtle) {
|
||||
}
|
||||
else if (txt.indexOf('already got that') + 1 ||
|
||||
txt.indexOf('already being written') + 1) {
|
||||
console.log("ignoring dupe-segment error", t.name, t);
|
||||
t.nojoin = t.nojoin || t.postlist.length;
|
||||
console.log("ignoring dupe-segment with backoff", t.nojoin, t.name, t);
|
||||
if (!toast.visible && st.todo.upload.length < 4)
|
||||
toast.inf(10, L.u_cbusy);
|
||||
}
|
||||
else {
|
||||
xhrchk(xhr, L.u_cuerr2.format(npart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
||||
|
||||
xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
|
||||
chill(t);
|
||||
}
|
||||
orz2(xhr);
|
||||
}
|
||||
var orz2 = function (xhr) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
for (var a = pcar; a <= pcdr; a++)
|
||||
apop(t.postlist, a);
|
||||
if (!t.postlist.length) {
|
||||
t.t_uploaded = Date.now();
|
||||
pvis.seth(t.n, 1, 'verifying');
|
||||
@@ -2577,23 +2649,48 @@ function up2k_init(subtle) {
|
||||
btot = Math.floor(st.bytes.total / 1024 / 1024);
|
||||
|
||||
xhr.upload.onprogress = function (xev) {
|
||||
pvis.prog(t, npart, xev.loaded);
|
||||
var nb = xev.loaded,
|
||||
db = nb - xhr.bsent;
|
||||
|
||||
if (!db)
|
||||
return;
|
||||
|
||||
st.bytes.inflight += db;
|
||||
xhr.bsent = nb;
|
||||
xhr.timeout = 64000 + Date.now() - xhr.t0;
|
||||
pvis.prog(t, pcar, nb);
|
||||
};
|
||||
xhr.onload = function (xev) {
|
||||
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
|
||||
};
|
||||
xhr.onerror = function (xev) {
|
||||
xhr.onerror = xhr.ontimeout = function (xev) {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_cuerr.format(npart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
st.bytes.inflight -= (xhr.bsent || 0);
|
||||
xhr.bsent = 0;
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
|
||||
t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
|
||||
console.log('chunkpit onerror,', ++tries, t.name, t);
|
||||
orz2(xhr);
|
||||
};
|
||||
|
||||
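// when several chunks are stitched into one POST, the X-Up2k-Hash header (set below)
// carries the full hash of the first chunk, then plen, then the plen-character
// prefixes of the remaining chunk hashes concatenated together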
var chashes = [],
|
||||
ctxt = t.hash[pcar],
|
||||
plen = Math.floor(192 / nparts.length);
|
||||
|
||||
plen = plen > 9 ? 9 : plen < 2 ? 2 : plen;
|
||||
for (var a = pcar + 1; a <= pcdr; a++)
|
||||
chashes.push(t.hash[a].slice(0, plen));
|
||||
|
||||
if (chashes.length)
|
||||
ctxt += ',' + plen + ',' + chashes.join('');
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
|
||||
xhr.setRequestHeader("X-Up2k-Hash", ctxt);
|
||||
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
|
||||
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
|
||||
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
|
||||
@@ -2602,6 +2699,9 @@ function up2k_init(subtle) {
|
||||
if (xhr.overrideMimeType)
|
||||
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
|
||||
|
||||
xhr.bsent = 0;
|
||||
xhr.t0 = Date.now();
|
||||
xhr.timeout = 42000;
|
||||
xhr.responseType = 'text';
|
||||
xhr.send(t.fobj.slice(car, cdr));
|
||||
}
|
||||
@@ -2685,7 +2785,11 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
parallel_uploads = v;
|
||||
swrite('nthread', v);
|
||||
if (v == u2j)
|
||||
sdrop('nthread');
|
||||
else
|
||||
swrite('nthread', v);
|
||||
|
||||
clmod(obj, 'err');
|
||||
return;
|
||||
}
|
||||
@@ -2698,8 +2802,32 @@ function up2k_init(subtle) {
|
||||
if (parallel_uploads > 16)
|
||||
parallel_uploads = 16;
|
||||
|
||||
if (parallel_uploads > 6)
|
||||
toast.warn(10, L.u_maxconn);
|
||||
else if (toast.txt == L.u_maxconn)
|
||||
toast.hide();
|
||||
|
||||
obj.value = parallel_uploads;
|
||||
bumpthread({ "target": 1 })
|
||||
bumpthread({ "target": 1 });
|
||||
}
|
||||
|
||||
var read_u2sz = function () {
|
||||
var el = ebi('u2szg'), n = parseInt(el.value), dv = u2sz.split(',');
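// u2sz is "min,default,max"; clamp the typed stitch size into that range and
// only persist it when it differs from the default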
|
||||
stitch_tgt = n = (
|
||||
isNaN(n) ? dv[1] :
|
||||
n < dv[0] ? dv[0] :
|
||||
n > dv[2] ? dv[2] : n
|
||||
);
|
||||
if (n == dv[1]) sdrop('u2sz'); else swrite('u2sz', n);
|
||||
if (el.value != n) el.value = n;
|
||||
};
|
||||
ebi('u2szg').addEventListener('blur', read_u2sz);
|
||||
ebi('u2szg').onkeydown = function (e) {
|
||||
if (anymod(e)) return;
|
||||
var n = e.code == 'ArrowUp' ? 1 : e.code == 'ArrowDown' ? -1 : 0;
|
||||
if (!n) return;
|
||||
this.value = parseInt(this.value) + n;
|
||||
read_u2sz();
|
||||
}
|
||||
|
||||
function tgl_fsearch() {
|
||||
@@ -2831,6 +2959,8 @@ function up2k_init(subtle) {
|
||||
new_state = false;
|
||||
fixed = true;
|
||||
}
|
||||
if (new_state === undefined)
|
||||
new_state = can_write ? false : have_up2k_idx ? true : undefined;
|
||||
}
|
||||
|
||||
if (new_state === undefined)
|
||||
@@ -2911,7 +3041,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function set_hashw() {
|
||||
if (!window.WebAssembly) {
|
||||
if (!WebAssembly) {
|
||||
bcfg_set('hashw', uc.hashw = false);
|
||||
toast.err(10, L.u_nowork);
|
||||
}
|
||||
@@ -2928,7 +3058,7 @@ function up2k_init(subtle) {
|
||||
nopenag();
|
||||
}
|
||||
|
||||
if (!window.Notification || !HTTPS)
|
||||
if (!Notification || !HTTPS)
|
||||
return nopenag();
|
||||
|
||||
if (en && Notification.permission == 'default')
|
||||
@@ -2950,7 +3080,7 @@ function up2k_init(subtle) {
|
||||
};
|
||||
}
|
||||
|
||||
if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
|
||||
if (uc.upnag && (!Notification || Notification.permission != 'granted'))
|
||||
bcfg_set('upnag', uc.upnag = false);
|
||||
|
||||
ebi('nthread_add').onclick = function (e) {
|
||||
|
||||
@@ -12,9 +12,10 @@ if (window.CGV)
|
||||
|
||||
|
||||
var wah = '',
|
||||
STG = null,
|
||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||
CB = '?_=' + Date.now(),
|
||||
T0 = Date.now(),
|
||||
R = SR.slice(1),
|
||||
RS = R ? "/" + R : "",
|
||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||
@@ -40,8 +41,16 @@ if (!window.FormData)
|
||||
window.FormData = false;
|
||||
|
||||
try {
|
||||
CB = '?' + document.currentScript.src.split('?').pop();
|
||||
STG = window.localStorage;
|
||||
STG.STG;
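// (the throwaway read above happens inside the try-block on purpose, so storage
// that is blocked -- e.g. in a sandboxed frame -- throws here and STG stays null)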
|
||||
}
|
||||
catch (ex) {
|
||||
STG = null;
|
||||
if ((ex + '').indexOf('sandbox') < 0)
|
||||
console.log('no localStorage: ' + ex);
|
||||
}
|
||||
|
||||
try {
|
||||
if (navigator.userAgentData.mobile)
|
||||
MOBILE = true;
|
||||
|
||||
@@ -118,13 +127,13 @@ if ((document.location + '').indexOf(',rej,') + 1)
|
||||
|
||||
try {
|
||||
console.hist = [];
|
||||
var CMAXHIST = 100;
|
||||
var CMAXHIST = MOBILE ? 9000 : 44000;
|
||||
var hook = function (t) {
|
||||
var orig = console[t].bind(console),
|
||||
cfun = function () {
|
||||
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
|
||||
if (console.hist.length > CMAXHIST)
|
||||
console.hist = console.hist.slice(CMAXHIST / 2);
|
||||
console.hist = console.hist.slice(CMAXHIST / 4);
|
||||
|
||||
orig.apply(console, arguments);
|
||||
};
|
||||
@@ -145,6 +154,10 @@ catch (ex) {
|
||||
}
|
||||
var crashed = false, ignexd = {}, evalex_fatal = false;
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||
if (ignexd[ekey] || crashed)
|
||||
return;
|
||||
|
||||
msg = String(msg);
|
||||
url = String(url);
|
||||
|
||||
@@ -160,11 +173,13 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
if (url.indexOf(' > eval') + 1 && !evalex_fatal)
|
||||
return; // md timer
|
||||
|
||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||
if (ignexd[ekey] || crashed)
|
||||
if (IE && url.indexOf('prism.js') + 1)
|
||||
return;
|
||||
|
||||
if (url.indexOf('deps/marked.js') + 1 && !window.WebAssembly)
|
||||
if (url.indexOf('easymde.js') + 1)
|
||||
return; // clicking the preview pane
|
||||
|
||||
if (url.indexOf('deps/marked.js') + 1 && !WebAssembly)
|
||||
return; // ff<52
|
||||
|
||||
crashed = true;
|
||||
@@ -202,19 +217,24 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
}
|
||||
ignexd[ekey] = true;
|
||||
|
||||
var ls = jcp(localStorage);
|
||||
if (ls.fman_clip)
|
||||
ls.fman_clip = ls.fman_clip.length + ' items';
|
||||
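// snapshot at most 200 localStorage keys for the crash report, truncating long values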
var ls = {},
|
||||
lsk = Object.keys(localStorage),
|
||||
nka = lsk.length,
|
||||
nk = Math.min(200, nka);
|
||||
|
||||
var lsk = Object.keys(ls);
|
||||
lsk.sort();
|
||||
html.push('<p class="b">');
|
||||
for (var a = 0; a < lsk.length; a++) {
|
||||
if (ls[lsk[a]].length > 9000)
|
||||
continue;
|
||||
for (var a = 0; a < nk; a++) {
|
||||
var k = lsk[a],
|
||||
v = localStorage.getItem(k);
|
||||
|
||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||
ls[k] = v.length > 256 ? v.slice(0, 32) + '[...' + v.length + 'b]' : v;
|
||||
}
|
||||
|
||||
lsk = Object.keys(ls);
|
||||
lsk.sort();
|
||||
html.push('<p class="b"><b>' + nka + ': </b>');
|
||||
for (var a = 0; a < nk; a++)
|
||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||
|
||||
html.push('</p>');
|
||||
}
|
||||
catch (e) { }
|
||||
@@ -276,10 +296,11 @@ function anymod(e, shift_ok) {
|
||||
}
|
||||
|
||||
|
||||
var dev_fbw = sread('dev_fbw');
|
||||
function ev(e) {
|
||||
if (!e && window.event) {
|
||||
e = window.event;
|
||||
if (localStorage.dev_fbw == 1) {
|
||||
if (dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
@@ -370,6 +391,22 @@ catch (ex) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!window.Set)
|
||||
window.Set = function () {
|
||||
var r = this;
|
||||
r.size = 0;
|
||||
r.d = {};
|
||||
r.add = function (k) {
|
||||
if (!r.d[k]) {
|
||||
r.d[k] = 1;
|
||||
r.size++;
|
||||
}
|
||||
};
|
||||
r.has = function (k) {
|
||||
return r.d[k];
|
||||
};
|
||||
};
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb, ecb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
@@ -395,6 +432,25 @@ function unsmart(txt) {
|
||||
}
|
||||
|
||||
|
||||
function namesan(txt, win, fslash) {
    // replace characters that are forbidden in windows filenames (and optionally
    // the forward slash) with their fullwidth unicode lookalikes
    if (win)
        txt = (txt.
            replace(/</g, "＜").
            replace(/>/g, "＞").
            replace(/:/g, "：").
            replace(/"/g, "＂").
            replace(/\\/g, "＼").
            replace(/\|/g, "｜").
            replace(/\?/g, "？").
            replace(/\*/g, "＊"));

    if (fslash)
        txt = txt.replace(/\//g, "／");

    return txt;
}
|
||||
|
||||
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
@@ -417,6 +473,24 @@ function crc32(str) {
|
||||
}
|
||||
|
||||
|
||||
function randstr(len) {
|
||||
var ret = '';
|
||||
try {
|
||||
var ar = new Uint32Array(Math.floor((len + 3) / 4));
|
||||
crypto.getRandomValues(ar);
|
||||
for (var a = 0; a < ar.length; a++)
|
||||
ret += ('000' + ar[a].toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log('using unsafe randstr because ' + ex);
|
||||
while (ret.length < len)
|
||||
ret += ('000' + Math.floor(Math.random() * 1679616).toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clmod(el, cls, add) {
|
||||
if (!el)
|
||||
return false;
|
||||
@@ -681,6 +755,15 @@ function vjoin(p1, p2) {
|
||||
}
|
||||
|
||||
|
||||
function addq(url, q) {
|
||||
var uh = url.split('#', 1),
|
||||
u = uh[0],
|
||||
h = uh.length == 1 ? '' : '#' + uh[1];
|
||||
|
||||
return u + (u.indexOf('?') < 0 ? '?' : '&') + (q === undefined ? '' : q) + h;
|
||||
}
|
||||
|
||||
|
||||
function uricom_enc(txt, do_fb_enc) {
|
||||
try {
|
||||
return encodeURIComponent(txt);
|
||||
@@ -881,9 +964,16 @@ function jcp(obj) {
|
||||
}
|
||||
|
||||
|
||||
function sdrop(key) {
|
||||
try {
|
||||
STG.removeItem(key);
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
|
||||
function sread(key, al) {
|
||||
try {
|
||||
var ret = localStorage.getItem(key);
|
||||
var ret = STG.getItem(key);
|
||||
return (!al || has(al, ret)) ? ret : null;
|
||||
}
|
||||
catch (e) {
|
||||
@@ -894,9 +984,9 @@ function sread(key, al) {
|
||||
function swrite(key, val) {
|
||||
try {
|
||||
if (val === undefined || val === null)
|
||||
localStorage.removeItem(key);
|
||||
STG.removeItem(key);
|
||||
else
|
||||
localStorage.setItem(key, val);
|
||||
STG.setItem(key, val);
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
@@ -931,7 +1021,7 @@ function fcfg_get(name, defval) {
|
||||
val = parseFloat(sread(name));
|
||||
|
||||
if (!isNum(val))
|
||||
return parseFloat(o ? o.value : defval);
|
||||
return parseFloat(o && o.value !== '' ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
@@ -1057,7 +1147,7 @@ function dl_file(url) {
|
||||
|
||||
function cliptxt(txt, ok) {
|
||||
var fb = function () {
|
||||
console.log('fb');
|
||||
console.log('clip-fb');
|
||||
var o = mknod('input');
|
||||
o.value = txt;
|
||||
document.body.appendChild(o);
|
||||
@@ -1324,10 +1414,10 @@ var tt = (function () {
|
||||
o = ctr.querySelectorAll('*[tt]');
|
||||
|
||||
for (var a = o.length - 1; a >= 0; a--) {
|
||||
o[a].onfocus = _cshow;
|
||||
o[a].onblur = _hide;
|
||||
o[a].onmouseenter = _dshow;
|
||||
o[a].onmouseleave = _hide;
|
||||
o[a].addEventListener('focus', _cshow);
|
||||
o[a].addEventListener('blur', _hide);
|
||||
o[a].addEventListener('mouseenter', _dshow);
|
||||
o[a].addEventListener('mouseleave', _hide);
|
||||
}
|
||||
r.hide();
|
||||
}
|
||||
@@ -1351,9 +1441,12 @@ function lf2br(txt) {
|
||||
}
|
||||
|
||||
|
||||
function unpre(txt) {
|
||||
function hunpre(txt) {
|
||||
return ('' + txt).replace(/^<pre>/, '');
|
||||
}
|
||||
function unpre(txt) {
|
||||
return esc(hunpre(txt));
|
||||
}
|
||||
|
||||
|
||||
var toast = (function () {
|
||||
@@ -1392,15 +1485,23 @@ var toast = (function () {
|
||||
}
|
||||
|
||||
r.hide = function (e) {
|
||||
ev(e);
|
||||
if (this === ebi('toastc'))
|
||||
ev(e);
|
||||
|
||||
unscroll();
|
||||
clearTimeout(te);
|
||||
clmod(obj, 'vis');
|
||||
r.visible = false;
|
||||
r.tag = obj;
|
||||
if (!WebAssembly)
|
||||
te = setTimeout(function () {
|
||||
obj.className = 'hide';
|
||||
}, 500);
|
||||
};
|
||||
|
||||
r.show = function (cl, sec, txt, tag) {
|
||||
txt = (txt + '').slice(0, 16384);
|
||||
|
||||
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
||||
delta = Date.now() - r.p_t;
|
||||
|
||||
@@ -1453,9 +1554,12 @@ var modal = (function () {
|
||||
var r = {},
|
||||
q = [],
|
||||
o = null,
|
||||
scrolling = null,
|
||||
cb_up = null,
|
||||
cb_ok = null,
|
||||
cb_ng = null,
|
||||
sel_0 = 0,
|
||||
sel_1 = 0,
|
||||
tok, tng, prim, sec, ok_cancel;
|
||||
|
||||
r.load = function () {
|
||||
@@ -1471,6 +1575,7 @@ var modal = (function () {
|
||||
r.nofocus = 0;
|
||||
|
||||
r.show = function (html) {
|
||||
tt.hide();
|
||||
o = mknod('div', 'modal');
|
||||
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
|
||||
document.body.appendChild(o);
|
||||
@@ -1489,11 +1594,12 @@ var modal = (function () {
|
||||
(inp || a).focus();
|
||||
if (inp)
|
||||
setTimeout(function () {
|
||||
inp.setSelectionRange(0, inp.value.length, "forward");
|
||||
inp.setSelectionRange(sel_0, sel_1, "forward");
|
||||
}, 0);
|
||||
|
||||
document.addEventListener('focus', onfocus);
|
||||
document.addEventListener('selectionchange', onselch);
|
||||
timer.add(scrollchk, 1);
|
||||
timer.add(onfocus);
|
||||
if (cb_up)
|
||||
setTimeout(cb_up, 1);
|
||||
@@ -1501,6 +1607,8 @@ var modal = (function () {
|
||||
|
||||
r.hide = function () {
|
||||
timer.rm(onfocus);
|
||||
timer.rm(scrollchk);
|
||||
scrolling = null;
|
||||
try {
|
||||
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||
}
|
||||
@@ -1519,13 +1627,28 @@ var modal = (function () {
|
||||
r.hide();
|
||||
if (cb_ok)
|
||||
cb_ok(v);
|
||||
}
|
||||
};
|
||||
var ng = function (e) {
|
||||
ev(e);
|
||||
r.hide();
|
||||
if (cb_ng)
|
||||
cb_ng(null);
|
||||
}
|
||||
};
|
||||
|
||||
var scrollchk = function () {
|
||||
if (scrolling === true)
|
||||
return;
|
||||
|
||||
var o = ebi('modalc'),
|
||||
vis = o.offsetHeight,
|
||||
all = o.scrollHeight,
|
||||
nsc = 8 + vis < all;
|
||||
|
||||
if (scrolling !== nsc)
|
||||
clmod(o, 'yk', !nsc);
|
||||
|
||||
scrolling = nsc;
|
||||
};
|
||||
|
||||
var onselch = function () {
|
||||
try {
|
||||
@@ -1558,7 +1681,7 @@ var modal = (function () {
|
||||
};
|
||||
|
||||
var onkey = function (e) {
|
||||
var k = e.code,
|
||||
var k = (e.code || e.key) + '',
|
||||
eok = ebi('modal-ok'),
|
||||
eng = ebi('modal-ng'),
|
||||
ae = document.activeElement;
|
||||
@@ -1573,10 +1696,10 @@ var modal = (function () {
|
||||
return ok(e);
|
||||
}
|
||||
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight' || k == 'Left' || k == 'Right') && eng && (ae == eok || ae == eng))
|
||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||
|
||||
if (k == 'Escape')
|
||||
if (k == 'Escape' || k == 'Esc')
|
||||
return ng(e);
|
||||
}
|
||||
|
||||
@@ -1612,16 +1735,18 @@ var modal = (function () {
|
||||
r.show(html);
|
||||
}
|
||||
|
||||
r.prompt = function (html, v, cok, cng, fun) {
|
||||
r.prompt = function (html, v, cok, cng, fun, so0, so1) {
|
||||
q.push(function () {
|
||||
_prompt(lf2br(html), v, cok, cng, fun);
|
||||
_prompt(lf2br(html), v, cok, cng, fun, so0, so1);
|
||||
});
|
||||
next();
|
||||
}
|
||||
var _prompt = function (html, v, cok, cng, fun) {
|
||||
var _prompt = function (html, v, cok, cng, fun, so0, so1) {
|
||||
cb_ok = cok;
|
||||
cb_ng = cng === undefined ? cok : null;
|
||||
cb_up = fun;
|
||||
sel_0 = so0 || 0;
|
||||
sel_1 = so1 === undefined ? v.length : so1;
|
||||
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
|
||||
r.show(html);
|
||||
|
||||
@@ -1808,7 +1933,7 @@ function md_thumbs(md) {
|
||||
float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';
|
||||
|
||||
if (!/[?&]cache/.exec(url))
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';
|
||||
url = addq(url, 'cache=i');
|
||||
|
||||
md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
|
||||
}
|
||||
@@ -1854,21 +1979,17 @@ var favico = (function () {
|
||||
var b64;
|
||||
try {
|
||||
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
|
||||
//console.log('f1');
|
||||
}
|
||||
catch (e1) {
|
||||
try {
|
||||
b64 = btoa(gx(encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||
function x(m, v) { return String.fromCharCode('0x' + v); })));
|
||||
//console.log('f2');
|
||||
}
|
||||
catch (e2) {
|
||||
try {
|
||||
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||
//console.log('f3');
|
||||
}
|
||||
catch (e3) {
|
||||
//console.log('fe');
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -1922,15 +2043,27 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
|
||||
if (xhr.status < 400 && xhr.status >= 200)
|
||||
return true;
|
||||
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
if (tag === undefined)
|
||||
tag = prefix;
|
||||
|
||||
var errtxt = ((xhr.response && xhr.response.err) || xhr.responseText) || '',
|
||||
suf = '',
|
||||
fun = toast[lvl || 'err'],
|
||||
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
|
||||
|
||||
if (errtxt.startsWith('<pre>'))
|
||||
suf = '\n\nerror-details: «' + unpre(errtxt).split('\n')[0].trim() + '»';
|
||||
else
|
||||
errtxt = esc(errtxt).slice(0, 32768);
|
||||
|
||||
if (xhr.status == 403 && !is_cf)
|
||||
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
|
||||
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out") + suf, tag);
|
||||
|
||||
if (xhr.status == 404)
|
||||
return toast.err(0, prefix + e404, tag);
|
||||
return toast.err(0, prefix + e404 + suf, tag);
|
||||
|
||||
if (!xhr.status && !errtxt)
|
||||
return toast.err(0, prefix + L.xhr0);
|
||||
|
||||
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
|
||||
var now = Date.now(), td = now - cf_cha_t;
|
||||
|
||||
@@ -13,6 +13,9 @@
|
||||
|
||||
# other stuff
|
||||
|
||||
## [`TODO.md`](TODO.md)
|
||||
* planned features / fixes / changes
|
||||
|
||||
## [`example.conf`](example.conf)
|
||||
* example config file for `-c`
|
||||
|
||||
|
||||
docs/TODO.md (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
a living list of upcoming features / fixes / changes, very roughly in order of priority
|
||||
|
||||
* download accelerator
|
||||
* definitely download chunks in parallel
|
||||
* maybe resumable downloads (chrome-only, jank api)
|
||||
* maybe checksum validation (return sha512 of requested range in responses, and probably also warks)
|
||||
|
||||
* [github issue #37](https://github.com/9001/copyparty/issues/37) - upload PWA
|
||||
* or [maybe not](https://arstechnica.com/tech-policy/2024/02/apple-under-fire-for-disabling-iphone-web-apps-eu-asks-developers-to-weigh-in/), or [maybe](https://arstechnica.com/gadgets/2024/03/apple-changes-course-will-keep-iphone-eu-web-apps-how-they-are-in-ios-17-4/)
|
||||
|
||||
* [github issue #57](https://github.com/9001/copyparty/issues/57) - config GUI
|
||||
* configs given to -c can be ordered with numerical prefix
|
||||
* autorevert settings if it fails to apply
|
||||
* countdown until session invalidates in settings gui, with refresh-button
|
||||
|
||||
docs/bufsize.txt (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
notes from testing various buffer sizes of files and sockets
|
||||
|
||||
summary:
|
||||
|
||||
download-folder-as-tar: would be 7% faster with --iobuf 65536 (but got 20% faster in v1.11.2)
|
||||
|
||||
download-folder-as-zip: optimal with default --iobuf 262144
|
||||
|
||||
download-file-over-https: optimal with default --iobuf 262144
|
||||
|
||||
put-large-file: optimal with default --iobuf 262144, --s-rd-sz 262144 (and got 14% faster in v1.11.2)
|
||||
|
||||
post-large-file: optimal with default --iobuf 262144, --s-rd-sz 262144 (and got 18% faster in v1.11.2)
|
||||
|
||||
----
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/?tar
|
||||
3.3 req/s 1.11.1
|
||||
4.3 4.0 3.3 req/s 1.12.2
|
||||
64 256 512 --iobuf 256 (prefer smaller)
|
||||
32 32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/?zip
|
||||
2.9 req/s 1.11.1
|
||||
2.5 2.9 2.9 req/s 1.12.2
|
||||
64 256 512 --iobuf 256 (prefer bigger)
|
||||
32 32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/?tar
|
||||
8.3 req/s 1.11.1
|
||||
8.4 8.4 8.5 req/s 1.12.2
|
||||
64 256 512 --iobuf 256 (prefer bigger)
|
||||
32 32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/?zip
|
||||
13.9 req/s 1.11.1
|
||||
14.1 14.0 13.8 req/s 1.12.2
|
||||
64 256 512 --iobuf 256 (prefer smaller)
|
||||
32 32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/pairdupes/987a
|
||||
5260 req/s 1.11.1
|
||||
5246 5246 5280 5268 req/s 1.12.2
|
||||
64 256 512 256 --iobuf dontcare
|
||||
32 32 32 512 --s-rd-sz dontcare
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure https://127.0.0.1:3923/pairdupes/987a
|
||||
4445 req/s 1.11.1
|
||||
4462 4494 4444 req/s 1.12.2
|
||||
64 256 512 --iobuf dontcare
|
||||
32 32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure http://127.0.0.1:3923/bigs/gssc-02-cannonball-skydrift/track10.cdda.flac
|
||||
95 req/s 1.11.1
|
||||
95 97 req/s 1.12.2
|
||||
64 512 --iobuf dontcare
|
||||
32 32 --s-rd-sz
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure https://127.0.0.1:3923/bigs/gssc-02-cannonball-skydrift/track10.cdda.flac
|
||||
15.4 req/s 1.11.1
|
||||
15.4 15.3 14.9 15.4 req/s 1.12.2
|
||||
64 256 512 512 --iobuf 256 (prefer smaller, and smaller than s-wr-sz)
|
||||
32 32 32 32 --s-rd-sz
|
||||
256 256 256 512 --s-wr-sz
|
||||
|
||||
----
|
||||
|
||||
python3 ~/dev/old/copyparty\ v1.11.1\ dont\ ban\ the\ pipes.py -q -i 127.0.0.1 -v .::A --daw
|
||||
python3 ~/dev/copyparty/dist/copyparty-sfx.py -q -i 127.0.0.1 -v .::A --daw --iobuf $((1024*512))
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure -mPUT -r0 -D ~/Music/gssc-02-cannonball-skydrift/track10.cdda.flac http://127.0.0.1:3923/a.bin
|
||||
10.8 req/s 1.11.1
|
||||
10.8 11.5 11.8 12.1 12.2 12.3 req/s new
|
||||
512 512 512 512 512 256 --iobuf 256
|
||||
32 64 128 256 512 256 --s-rd-sz 256 (prefer bigger)
|
||||
|
||||
----
|
||||
|
||||
buildpost() {
|
||||
b=--jeg-er-grensestaven;
|
||||
printf -- "$b\r\nContent-Disposition: form-data; name=\"act\"\r\n\r\nbput\r\n$b\r\nContent-Disposition: form-data; name=\"f\"; filename=\"a.bin\"\r\nContent-Type: audio/mpeg\r\n\r\n"
|
||||
cat "$1"
|
||||
printf -- "\r\n${b}--\r\n"
|
||||
}
|
||||
buildpost ~/Music/gssc-02-cannonball-skydrift/track10.cdda.flac >big.post
|
||||
buildpost ~/Music/bottomtext.txt >smol.post
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure -mPOST -r0 -T 'multipart/form-data; boundary=jeg-er-grensestaven' -D big.post http://127.0.0.1:3923/?replace
|
||||
9.6 11.2 11.3 11.1 10.9 req/s v1.11.2
|
||||
512 512 256 128 256 --iobuf 256
|
||||
32 512 256 128 128 --s-rd-sz 256
|
||||
|
||||
oha -z10s -c1 --ipv4 --insecure -mPOST -r0 -T 'multipart/form-data; boundary=jeg-er-grensestaven' -D smol.post http://127.0.0.1:3923/?replace
|
||||
2445 2414 2401 2437
|
||||
256 128 256 256 --iobuf 256
|
||||
128 128 256 64 --s-rd-sz 128 (but use 256 since big posts are more important)
|
||||
docs/changelog.md (1799 changed lines; diff suppressed because it is too large)
@@ -1,7 +1,7 @@
## devnotes toc

* top
* [future plans](#future-plans) - some improvement ideas
* [future ideas](#future-ideas) - list of dreams which will probably never happen
* [design](#design)
    * [up2k](#up2k) - quick outline of the up2k protocol
    * [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
@@ -12,6 +12,8 @@
    * [write](#write)
    * [admin](#admin)
    * [general](#general)
* [event hooks](#event-hooks) - on writing your own [hooks](../README.md#event-hooks)
    * [hook effects](#hook-effects) - hooks can cause intentional side-effects
* [assumptions](#assumptions)
    * [mdns](#mdns)
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
@@ -20,13 +22,14 @@
    * [just the sfx](#just-the-sfx)
    * [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
    * [complete release](#complete-release)
* [todo](#todo) - roughly sorted by priority
* [debugging](#debugging)
    * [music playback halting on phones](#music-playback-halting-on-phones) - mostly fine on android
* [discarded ideas](#discarded-ideas)


# future plans
# future ideas

some improvement ideas
list of dreams which will probably never happen

* the JS is a mess -- a ~~preact~~ rewrite would be nice
    * preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
@@ -54,8 +57,8 @@ quick outline of the up2k protocol, see [uploading](https://github.com/9001/cop
* server creates the `wark`, an identifier for this upload
    * `sha512( salt + filesize + chunk_hashes )` (loosely sketched below the list)
    * and a sparse file is created for the chunks to drop into
* client uploads each chunk
    * header entries for the chunk-hash and wark
* client sends a series of POSTs, with one or more consecutive chunks in each
    * header entries for the chunk-hashes (comma-separated) and wark
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
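
loose sketch of the `sha512( salt + filesize + chunk_hashes )` idea above; copyparty's actual serialization and digest encoding are not reproduced here, so treat the separator-free concatenation and the base64 truncation as placeholders:

```python
# illustrative only: one way to derive a wark-like upload id from
# sha512(salt + filesize + chunk_hashes); not copyparty's exact encoding
import base64
import hashlib

def wark_sketch(salt: str, filesize: int, chunk_hashes: list[str]) -> str:
    h = hashlib.sha512()
    h.update(salt.encode("utf-8"))
    h.update(str(filesize).encode("utf-8"))
    for ch in chunk_hashes:
        h.update(ch.encode("utf-8"))
    # hypothetical choice: urlsafe-base64, truncated for readability
    return base64.urlsafe_b64encode(h.digest())[:44].decode("ascii")

print(wark_sketch("some-salt", 123456, ["chunkhash1", "chunkhash2"]))
```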

@@ -133,10 +136,15 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?zip=utf-8` | ...as a zip file |
| GET | `?zip` | ...as a WinXP-compatible zip file |
| GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
| GET | `?tar&w` | pregenerate webp thumbnails |
| GET | `?tar&j` | pregenerate jpg thumbnails |
| GET | `?tar&p` | pregenerate audio waveforms |
| GET | `?shares` | list your shared files/folders |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?v` | render markdown file at URL |
| GET | `?v` | open image/video/audio in mediaplayer |
| GET | `?txt` | get file at URL as plaintext |
| GET | `?txt=iso-8859-1` | ...with specific charset |
| GET | `?th` | get image/video at URL as thumbnail |
@@ -162,10 +170,15 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| PUT | | (binary data) | upload into file at URL |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| POST | `?delete` | | delete URL recursively |
| POST | `?eshare=rm` | | stop sharing a file/folder |
| POST | `?eshare=3` | | set expiration to 3 minutes |
| jPOST | `?share` | (complicated) | create temp URL for file/folder |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
@@ -197,6 +210,32 @@ upload modifiers:
| GET | `?pw=x` | logout |
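
a minimal stdlib sketch exercising a few of the rows above (PUT upload, `?txt` download, POST `?delete`); the server address, the password `hunter2` and the file paths are placeholders:

```python
# minimal sketch of a few rows from the tables above (PUT upload, ?txt download,
# POST ?delete); server address, password and paths are placeholders
import urllib.request

BASE = "http://127.0.0.1:3923"      # assumed: local copyparty instance
HDRS = {"Cookie": "cppwd=hunter2"}  # or append ?pw=hunter2 to the URL instead

def put_file(local_path: str, url_path: str) -> None:
    # PUT (binary data) = upload into file at URL
    with open(local_path, "rb") as f:
        req = urllib.request.Request(BASE + url_path, data=f.read(),
                                     headers=HDRS, method="PUT")
        urllib.request.urlopen(req).read()

def get_txt(url_path: str) -> bytes:
    # GET ?txt = get file at URL as plaintext
    req = urllib.request.Request(BASE + url_path + "?txt", headers=HDRS)
    return urllib.request.urlopen(req).read()

def delete(url_path: str) -> None:
    # POST ?delete = delete URL recursively
    req = urllib.request.Request(BASE + url_path + "?delete", data=b"",
                                 headers=HDRS, method="POST")
    urllib.request.urlopen(req).read()

if __name__ == "__main__":
    put_file("notes.txt", "/inc/notes.txt")
    print(get_txt("/inc/notes.txt")[:80])
    delete("/inc/notes.txt")
```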

# event hooks

on writing your own [hooks](../README.md#event-hooks)


## hook effects

hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout

* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
* `idx` informs copyparty about a new file to index as a consequence of this upload
* `del` tells copyparty to delete an unrelated file by vpath

for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)

a subset of effect types are available for a subset of hook types,

* most hook types (xbu/xau/xbr/xar/xbd/xad/xm) support `idx` and `del` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* most hook types will abort/reject the action if the hook returns nonzero, assuming flag `c` is given, see examples [reject-extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) and [reject-mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `xbu` supports `reloc` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* `xau` supports `reloc` for basic-uploader / webdav only, not up2k or ftp/tftp/smb
    * so clients like sharex are supported, but not dragdrop into browser

to trigger indexing of files `/foo/1.txt` and `/foo/bar/2.txt`, a hook can `print(json.dumps({"idx":{"vp":["/foo/1.txt","/foo/bar/2.txt"]}}))` (and replace "idx" with "del" to delete instead)
* note: paths starting with `/` are absolute URLs, but you can also do `../3.txt` relative to the destination folder of each uploaded file
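
the one-liner above, padded out into a complete (hypothetical) hook file; only the `{"idx":{"vp":[...]}}` stdout shape comes from this doc, the argv assumption and the vpaths are placeholders, and the hook still needs the `c1` flag as described above:

```python
#!/usr/bin/env python3
# hypothetical hook body: the {"idx":{"vp":[...]}} stdout shape is from the
# docs above; the argv handling and the vpath values are placeholders
import json
import sys

def main():
    uploaded = sys.argv[1] if len(sys.argv) > 1 else "?"  # assumed: path of the upload
    # pretend we generated two extra files that copyparty should now index
    vpaths = ["/foo/1.txt", "/foo/bar/2.txt"]
    sys.stderr.write("hook ran for %s\n" % (uploaded,))
    print(json.dumps({"idx": {"vp": vpaths}}))  # swap "idx" for "del" to delete

if __name__ == "__main__":
    main()
```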

# assumptions

## mdns
@@ -218,7 +257,7 @@ if you don't need all the features, you can repack the sfx and save a bunch of s
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`

the features you can opt to drop are
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `cm`/easymde, the "fancy" markdown editor, saves ~89k
* `hl`, prism, the syntax highlighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
@@ -242,6 +281,7 @@ python3 -m venv .venv
pip install jinja2 strip_hints # MANDATORY
pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install partftpy # tftp server
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install pyvips # faster thumbnails
@@ -299,19 +339,26 @@ in the `scripts` folder:
* run `./rls.sh 1.2.3` which uploads to pypi + creates github release + sfx


# todo
# debugging

roughly sorted by priority
## music playback halting on phones

* nothing! currently
mostly fine on android, but still haven't found a way to massage iphones into behaving well

* conditionally starting/stopping mp.fau according to mp.au.readyState <3 or <4 doesn't help
* loop=true doesn't work, and manually looping mp.fau from an onended also doesn't work (it does nothing)
* assigning fau.currentTime in a timer doesn't work, as safari merely pretends to assign it
* on ios 16.7.7, mp.fau can sometimes make everything visibly work correctly, but no audio is actually hitting the speakers

can be reproduced with `--no-sendfile --s-wr-sz 8192 --s-wr-slp 0.3 --rsp-slp 6` and then play a collection of small audio files with the screen off, `ffmpeg -i track01.cdda.flac -c:a libopus -b:a 128k -segment_time 12 -f segment smol-%02d.opus`


## discarded ideas

* reduce up2k roundtrips
    * start from a chunk index and just go
    * terminate client on bad data
    * not worth the effort, just throw enough connections at it
* optimization attempts which didn't improve performance
    * remove brokers / multiprocessing stuff; https://github.com/9001/copyparty/tree/no-broker
    * reduce the nesting / indirections in `HttpCli` / `httpcli.py`
        * nearly zero benefit from stuff like replacing all the `self.conn.hsrv` with a local `hsrv` variable
* single sha512 across all up2k chunks?
    * crypto.subtle cannot into streaming, would have to use hashwasm, expensive
* separate sqlite table per tag

@@ -10,7 +10,6 @@

# q, lo: /cfg/log/%Y-%m%d.log # log to file instead of docker

# ftp: 3921 # enable ftp server on port 3921
# p: 3939 # listen on another port
# ipa: 10.89. # only allow connections from 10.89.*
# df: 16 # stop accepting uploads if less than 16 GB free disk space
50 docs/examples/docker/idp-authelia-traefik/README.md (new file)
@@ -0,0 +1,50 @@
> [!WARNING]
> I am unable to guarantee the quality, safety, and security of anything in this folder; it is a combination of examples I found online. Please submit corrections or improvements 🙏

to try this out with minimal adjustments:
* specify what filesystem-path to share with copyparty, replacing the default/example value `/srv/pub` in `docker-compose.yml`
* add `127.0.0.1 fs.example.com traefik.example.com authelia.example.com` to your `/etc/hosts`
* `sudo docker-compose up`
* login to https://fs.example.com/ with username `authelia` password `authelia`

to use this in a safe and secure manner:
* follow a guide on setting up authelia properly (TODO:link) and use the copyparty-specific parts of this folder as inspiration for your own config; namely the `cpp` subfolder and the `copyparty` service in `docker-compose.yml`

this folder is based on:
* https://github.com/authelia/authelia/tree/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite

incomplete list of modifications made:
* support for running with podman as root on fedora (`:z` volumes, `label:disable`)
* explicitly using authelia `v4.38.0-beta3` because the config syntax changed since the last stable release
* disabled automatic letsencrypt certificate signing
* reduced logging from debug to info
* added a warning that traefik is given access to the docker socket (as recommended by the traefik docs), which means traefik is able to break out of the container and has full root access on the host machine


# security

there is probably/definitely room for improvement in this example setup. Some ideas taken from [github issue #62](https://github.com/9001/copyparty/issues/62):

* Add in a redis password to limit attacker lateral movement in the system
* Move redis to a private network shared with just authelia
* Pin to image hashes (or go all in on updates and add `watchtower`)
* Drop bridge networking for just exposing traefik's public ports
* Configure docker for non-root access to docker socket and then move traefik to use [non-root perms](https://docs.docker.com/engine/security/rootless/)

if you manage to improve on any of this, especially in a way that might be useful for other people, consider sending a PR :>


# performance

currently **not optimal,** at least when compared to running the python sfx outside of docker... some numbers from my laptop (ryzen4500u/fedora39):

| req/s | https D/L | http D/L | approach |
| -----:| ----------:|:--------:| -------- |
| 5200 | 1294 MiB/s | 5+ GiB/s | [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) running on host |
| 4370 | 725 MiB/s | 4+ GiB/s | `docker run copyparty/ac` |
| 2420 | 694 MiB/s | n/a | `copyparty/ac` behind traefik |
| 75 | 694 MiB/s | n/a | traefik and authelia **(you are here)** |

authelia is behaving strangely, handling 340 requests per second for a while, but then it suddenly drops to 75 and stays there...

I'm assuming all of the performance issues are due to a misconfiguration of authelia/traefik/docker on my end, but I don't really know where to start
@@ -0,0 +1,66 @@
# based on https://github.com/authelia/authelia/blob/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite/authelia/configuration.yml

# Authelia configuration

# This secret can also be set using the env variables AUTHELIA_JWT_SECRET_FILE
jwt_secret: a_very_important_secret

server:
  address: 'tcp://:9091'

log:
  level: info # debug

totp:
  issuer: authelia.com

authentication_backend:
  file:
    path: /config/users_database.yml

access_control:
  default_policy: deny
  rules:
    # Rules applied to everyone
    - domain: traefik.example.com
      policy: one_factor
    - domain: fs.example.com
      policy: one_factor

session:
  # This secret can also be set using the env variables AUTHELIA_SESSION_SECRET_FILE
  secret: unsecure_session_secret

  cookies:
    - name: authelia_session
      domain: example.com # Should match whatever your root protected domain is
      default_redirection_url: https://fs.example.com
      authelia_url: https://authelia.example.com/
      expiration: 3600 # 1 hour
      inactivity: 300 # 5 minutes

  redis:
    host: redis
    port: 6379
    # This secret can also be set using the env variables AUTHELIA_SESSION_REDIS_PASSWORD_FILE
    # password: authelia

regulation:
  max_retries: 3
  find_time: 120
  ban_time: 300

storage:
  encryption_key: you_must_generate_a_random_string_of_more_than_twenty_chars_and_configure_this
  local:
    path: /config/db.sqlite3

notifier:
  disable_startup_check: true
  smtp:
    username: test
    # This secret can also be set using the env variables AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE
    password: password
    host: mail.example.com
    port: 25
    sender: admin@example.com
@@ -0,0 +1,18 @@
# based on https://github.com/authelia/authelia/blob/39763aaed24c4abdecd884b47357a052b235942d/examples/compose/lite/authelia/users_database.yml

# Users Database

# This file can be used if you do not have an LDAP set up.

# List of users
users:
  authelia:
    disabled: false
    displayname: "Authelia User"
    # Password is authelia
    password: "$6$rounds=50000$BpLnfgDsc2WD8F2q$Zis.ixdg9s/UOJYrs56b5QEZFiZECu0qZVNsIYxBaNJ7ucIL.nlxVCT5tqh8KHG8X4tlwCFm5r6NTOZZ5qRFN/"
    email: authelia@authelia.com
    groups:
      - admins
      - dev
      - su
Some files were not shown because too many files have changed in this diff