Compare commits

56 Commits

| SHA1 |
|---|
| abcdf479e6 |
| ad2371f810 |
| c4e2b0f95f |
| 3da62ec234 |
| 01233991f3 |
| ee35974273 |
| 7037e7365e |
| 03b13e8a1c |
| cdd2da0208 |
| cec0e0cf02 |
| 8122ddedfe |
| 55a77c5e89 |
| 461f31582d |
| f356faa278 |
| 9f034d9c4c |
| ba52590ae4 |
| 92edea1de5 |
| 7ff46966da |
| fca70b3508 |
| 70009cd984 |
| 8d8b88c4fd |
| c4b0cccefd |
| 7c2beba555 |
| 7d8d94388b |
| 0b46b1a614 |
| 5153db6bff |
| b0af4b3712 |
| c8f4aeaefa |
| 00da74400c |
| 83fb569d61 |
| 5a62cb4869 |
| 687df2fabd |
| cdd0794d6e |
| dcc988135e |
| 3db117d85f |
| ee9aad82dd |
| 2d6eb63fce |
| ca001c8504 |
| 4e581c59da |
| dbd42bc6bf |
| c862ec1b64 |
| f709140571 |
| ef1c4b7a20 |
| 6c94a63f1c |
| 20669c73d3 |
| 0da719f4c2 |
| 373194c38a |
| 3d245431fc |
| 250c8c56f0 |
| e136231c8e |
| 98ffaadf52 |
| ebb1981803 |
| 72361c99e1 |
| d5c9c8ebbd |
| 746229846d |
| ffd7cd3ca8 |
**.vscode/settings.json** (vendored; 24 changed lines)

@@ -22,6 +22,9 @@
"terminal.ansiBrightCyan": "#9cf0ed",
"terminal.ansiBrightWhite": "#ffffff",
},
"python.terminal.activateEnvironment": false,
"python.analysis.enablePytestSupport": false,
"python.analysis.typeCheckingMode": "standard",
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"python.testing.unittestArgs": [
@@ -31,23 +34,8 @@
"-p",
"test_*.py"
],
"python.linting.pylintEnabled": true,
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true,
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
],
"python.linting.banditArgs": [
"--ignore=B104,B110,B112"
],
// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"editor.formatOnSave": true,
// python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
"editor.formatOnSave": false,
"[html]": {
"editor.formatOnSave": false,
"editor.autoIndent": "keep",
@@ -58,6 +46,4 @@
"files.associations": {
"*.makefile": "makefile"
},
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}
**README.md** (127 changed lines)

@@ -1,4 +1,6 @@
# 💾🎉 copyparty
<img src="docs/logo.svg" width="250" align="right"/>

### 💾🎉 copyparty

turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser

@@ -42,6 +44,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [self-destruct](#self-destruct) - uploads can be given a lifetime
* [race the beam](#race-the-beam) - download files while they're still uploading ([demo video](http://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm))
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
* [shares](#shares) - share a file or folder by creating a temporary link
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [media player](#media-player) - plays almost every audio format there is
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
@@ -76,6 +79,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
* [user-changeable passwords](#user-changeable-passwords) - if permitted, users can change their own passwords
* [using the cloud as storage](#using-the-cloud-as-storage) - connecting to an aws s3 bucket and similar
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [themes](#themes)
@@ -85,6 +89,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [prometheus](#prometheus) - metrics/stats can be enabled
* [other extremely specific features](#other-extremely-specific-features) - you'll never find a use for these
* [custom mimetypes](#custom-mimetypes) - change the association of a file extension
* [feature chickenbits](#feature-chickenbits) - buggy feature? rip it out
* [packages](#packages) - the party might be closer than you think
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
* [fedora package](#fedora-package) - does not exist yet
@@ -111,6 +116,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [HTTP API](#HTTP-API) - see [devnotes](./docs/devnotes.md#http-api)
* [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [dependency chickenbits](#dependency-chickenbits) - prevent loading an optional dependency
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - the self-contained "binary" (recommended!)
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
@@ -124,7 +130,7 @@ turn almost any device into a file server with resumable uploads/downloads using

just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉

* or install through pypi: `python3 -m pip install --user -U copyparty`
* or install through [pypi](https://pypi.org/project/copyparty/): `python3 -m pip install --user -U copyparty`
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
* or install [on arch](#arch-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
* or if you are on android, [install copyparty in termux](#install-on-android)
@@ -194,7 +200,7 @@ firewall-cmd --reload
also see [comparison to similar software](./docs/versus.md)

* backend stuff
* ☑ IPv6
* ☑ IPv6 + unix-sockets
* ☑ [multiprocessing](#performance) (actual multithreading)
* ☑ volumes (mountpoints)
* ☑ [accounts](#accounts-and-volumes)
@@ -579,9 +585,6 @@ images with the following names (see `--th-covers`) become the thumbnail of the
* the order is significant, so if both `cover.png` and `folder.jpg` exist in a folder, it will pick the first matching `--th-covers` entry (`folder.jpg`)
* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)

in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾

enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
* `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
* the `sel` option can be made default globally with `--gsel` or per-volume with volflag `gsel`
@@ -745,6 +748,42 @@ file selection: click somewhere on the line (not the link itself), then:
you can move files across browser tabs (cut in one tab, paste in another)


## shares

share a file or folder by creating a temporary link

when enabled in the server settings (`--shr`), click the bottom-right `share` button to share the folder you're currently in, or alternatively:
* select a folder first to share that folder instead
* select one or more files to share only those files

this feature was made with [identity providers](#identity-providers) in mind -- configure your reverseproxy to skip the IdP's access-control for a given URL prefix and use that to safely share specific files/folders sans the usual auth checks

when creating a share, the creator can choose any of the following options:

* password-protection
* expire after a certain time; `0` or blank means infinite
* allow visitors to upload (if the user who creates the share has write-access)

semi-intentional limitations:

* cleanup of expired shares only works when global option `e2d` is set, and/or at least one volume on the server has volflag `e2d`
* only folders from the same volume are shared; if you are sharing a folder which contains other volumes, then the contents of those volumes will not be available
* no option to "delete after first access" because tricky
* when linking something to discord (for example) it'll get accessed by their scraper and that would count as a hit
* browsers wouldn't be able to resume a broken download unless the requester's IP gets allowlisted for X minutes (ref. tricky)

specify `--shr /foobar` to enable this feature; a toplevel virtual folder named `foobar` is then created, and that's where all the shares will be served from

* you can name it whatever, `foobar` is just an example
* if you're using config files, put `shr: /foobar` inside the `[global]` section instead

users can delete their own shares in the controlpanel, and a list of privileged users (`--shr-adm`) are allowed to see and/or delete any share on the server

after a share has expired, it remains visible in the controlpanel for `--shr-rt` minutes (default is 1 day), and the owner can revive it by extending the expiration time there

**security note:** using this feature does not mean that you can skip the [accounts and volumes](#accounts-and-volumes) section -- you still need to restrict access to volumes that you do not intend to share with unauthenticated users! it is not sufficient to use rules in the reverseproxy to restrict access to just the `/share` folder.


## batch rename

select some files and press `F2` to bring up the rename UI
@@ -1314,6 +1353,8 @@ you can set hooks before and/or after an event happens, and currently you can ho

there's a bunch of flags and stuff, see `--help-hooks`

if you want to write your own hooks, see [devnotes](./docs/devnotes.md#event-hooks)


### upload events

@@ -1354,6 +1395,29 @@ there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik)

a more complete example of the copyparty configuration options [looks like this](./docs/examples/docker/idp/copyparty.conf)

but if you just want to let users change their own passwords, then you probably want [user-changeable passwords](#user-changeable-passwords) instead


## user-changeable passwords

if permitted, users can change their own passwords in the control-panel

* not compatible with [identity providers](#identity-providers)

* must be enabled with `--chpw` because account-sharing is a popular usecase

* if you want to enable the feature but deny password-changing for a specific list of accounts, you can do that with `--chpw-no name1,name2,name3,...`

* to perform a password reset, edit the server config and give the user another password there, then do a [config reload](#server-config) or server restart

* the custom passwords are kept in a textfile at filesystem-path `--chpw-db`, by default `chpw.json` in the copyparty config folder

* if you run multiple copyparty instances with different users you *almost definitely* want to specify separate DBs for each instance

* if [password hashing](#password-hashing) is enabled, the passwords in the db are also hashed

* ...which means that all user-defined passwords will be forgotten if you change password-hashing settings
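The on-disk format of this password db shows up later in this compare, in the `chpw()` / `setup_chpw()` hunks: the file is read with `json.load` and rewritten as a flat JSON list of `[username, original-password, changed-password]` entries. As a rough sketch only (assuming that layout, and using an example path for `--chpw-db`), forgetting one user's custom password so their config-file password applies again could look like this:

```python
import json

# example path; in practice, wherever --chpw-db points
DB = "/home/copyparty/.config/copyparty/chpw.json"

def forget_user(uname):
    # drop this user's entry from the changed-passwords list
    with open(DB, "r", encoding="utf-8") as f:
        pwdb = json.load(f)  # list of [username, original-pw, changed-pw]

    pwdb = [x for x in pwdb if x[0] != uname]

    with open(DB, "w", encoding="utf-8") as f:
        json.dump(pwdb, f, separators=(",\n", ": "))

forget_user("ed")
```

(do this while copyparty is stopped, or follow it with a config reload, since the server keeps its own copy of the db in memory)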
## using the cloud as storage

@@ -1458,6 +1522,8 @@ some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatical
* **warning:** nginx-QUIC (HTTP/3) is still experimental and can make uploads much slower, so HTTP/1.1 is recommended for now
* depending on server/client, HTTP/1.1 can also be 5x faster than HTTP/2

for improved security (and a 10% performance boost) consider listening on a unix-socket with `-i unix:770:www:/tmp/party.sock` (permission `770` means only members of group `www` can access it)

example webserver configs:

* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
@@ -1558,6 +1624,23 @@ in a config-file, this is the same as:
run copyparty with `--mimes` to list all the default mappings


### feature chickenbits

buggy feature? rip it out by setting any of the following environment variables to disable its associated bell or whistle,

| env-var | what it does |
| -------------------- | ------------ |
| `PRTY_NO_IFADDR` | disable ip/nic discovery by poking into your OS with ctypes |
| `PRTY_NO_IPV6` | disable some ipv6 support (should not be necessary since windows 2000) |
| `PRTY_NO_LZMA` | disable streaming xz compression of incoming uploads |
| `PRTY_NO_MP` | disable all use of the python `multiprocessing` module (actual multithreading, cpu-count for parsers/thumbnailers) |
| `PRTY_NO_SQLITE` | disable all database-related functionality (file indexing, metadata indexing, most file deduplication logic) |
| `PRTY_NO_TLS` | disable native HTTPS support; if you still want to accept HTTPS connections then TLS must now be terminated by a reverse-proxy |
| `PRTY_NO_TPOKE` | disable systemd-tmpfilesd avoider |

example: `PRTY_NO_IFADDR=1 python3 copyparty-sfx.py`
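If copyparty is launched from a wrapper script rather than a shell, the same chickenbits can be set programmatically; a small sketch (the particular variables chosen here are just an example):

```python
import os
import subprocess
import sys

# equivalent to the shell one-liner above
env = dict(os.environ)
env["PRTY_NO_IFADDR"] = "1"  # skip ip/nic discovery via ctypes
env["PRTY_NO_TPOKE"] = "1"   # skip the systemd-tmpfilesd avoider

subprocess.run([sys.executable, "copyparty-sfx.py"], env=env, check=True)
```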
# packages

the party might be closer than you think
@@ -1880,6 +1963,7 @@ some notes on hardening
* cors doesn't work right otherwise
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml`
* this returns html documents as plaintext, and also disables markdown rendering
* when running behind a reverse-proxy, listen on a unix-socket for tighter access control (and more performance); see [reverse-proxy](#reverse-proxy) or `--help-bind`

safety profiles:

@@ -2044,6 +2128,37 @@ enable [smb](#smb-server) support (**not** recommended):
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`


### dependency chickenbits

prevent loading an optional dependency, for example if:

* you have an incompatible version installed and it causes problems
* you just don't want copyparty to use it, maybe to save ram

set any of the following environment variables to disable its associated optional feature,

| env-var | what it does |
| -------------------- | ------------ |
| `PRTY_NO_ARGON2` | disable argon2-cffi password hashing |
| `PRTY_NO_CFSSL` | never attempt to generate self-signed certificates using [cfssl](https://github.com/cloudflare/cfssl) |
| `PRTY_NO_FFMPEG` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips |
| `PRTY_NO_FFPROBE` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips, **metadata-scanning** must be handled by mutagen |
| `PRTY_NO_MUTAGEN` | do not use [mutagen](https://pypi.org/project/mutagen/) for reading metadata from media files; will fallback to ffprobe |
| `PRTY_NO_PIL` | disable all [Pillow](https://pypi.org/project/pillow/)-based thumbnail support; will fallback to libvips or ffmpeg |
| `PRTY_NO_PILF` | disable Pillow `ImageFont` text rendering, used for folder thumbnails |
| `PRTY_NO_PIL_AVIF` | disable 3rd-party Pillow plugin for [AVIF support](https://pypi.org/project/pillow-avif-plugin/) |
| `PRTY_NO_PIL_HEIF` | disable 3rd-party Pillow plugin for [HEIF support](https://pypi.org/project/pyheif-pillow-opener/) |
| `PRTY_NO_PIL_WEBP` | disable use of native webp support in Pillow |
| `PRTY_NO_PSUTIL` | do not use [psutil](https://pypi.org/project/psutil/) for reaping stuck hooks and plugins on Windows |
| `PRTY_NO_VIPS` | disable all [libvips](https://pypi.org/project/pyvips/)-based thumbnail support; will fallback to Pillow or ffmpeg |

example: `PRTY_NO_PIL=1 python3 copyparty-sfx.py`

* `PRTY_NO_PIL` saves ram
* `PRTY_NO_VIPS` saves ram and startup time
* python2.7 on windows: `PRTY_NO_FFMPEG` + `PRTY_NO_FFPROBE` saves startup time


## optional gpl stuff

some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
@@ -24,6 +24,7 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin

# before upload
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension


# on message

@@ -41,8 +41,8 @@ parameters explained,
t10 = abort download and continue if it takes longer than 10sec

example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:xau=j,t10,bin/hooks/into-the-cache-it-goes.py
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
**bin/hooks/reloc-by-ext.py** (new file; 127 lines)

@@ -0,0 +1,127 @@
#!/usr/bin/env python3

import json
import os
import re
import sys


_ = r"""
relocate/redirect incoming uploads according to file extension or name

example usage as global config:
    --xbu j,c1,bin/hooks/reloc-by-ext.py

parameters explained,
    xbu = execute before upload
    j   = this hook needs upload information as json (not just the filename)
    c1  = this hook returns json on stdout, so tell copyparty to read that

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
    readable by everyone, read-write for user 'ed',
    running this plugin on all uploads with the params explained above)

example usage as a volflag in a copyparty config file:
    [/inc]
      srv/inc
      accs:
        r: *
        rw: ed
      flags:
        xbu: j,c1,bin/hooks/reloc-by-ext.py

note: this could also work as an xau hook (after-upload), but
because it doesn't need to read the file contents it's better
as xbu (before-upload) since that's safer / less buggy,
and only xbu works with up2k (dragdrop into browser)
"""


PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"


def main():
    inf = json.loads(sys.argv[1])
    vdir, fn = os.path.split(inf["vp"])

    try:
        fn, ext = fn.rsplit(".", 1)
    except:
        # no file extension; pretend it's "bin"
        ext = "bin"

    ext = ext.lower()

    # this function must end by printing the action to perform;
    # that's handled by the print(json.dumps(... at the bottom
    #
    # the action can contain the following keys:
    # "vp" is the folder URL to move the upload to,
    # "ap" is the filesystem-path to move it to (but "vp" is safer),
    # "fn" overrides the final filename to use

    ##
    ## some example actions to take; pick one by
    ## selecting it inside the print at the end:
    ##

    # create a subfolder named after the filetype and move it into there
    into_subfolder = {"vp": ext}

    # move it into a toplevel folder named after the filetype
    into_toplevel = {"vp": "/" + ext}

    # move it into a filetype-named folder next to the target folder
    into_sibling = {"vp": "../" + ext}

    # move images into "/just/pics", vids into "/just/vids",
    # music into "/just/tunes", and anything else as-is
    if ext in PICS.split():
        by_category = {"vp": "/just/pics"}
    elif ext in VIDS.split():
        by_category = {"vp": "/just/vids"}
    elif ext in MUSIC.split():
        by_category = {"vp": "/just/tunes"}
    else:
        by_category = {}  # no action

    # now choose the default effect to apply; can be any of these:
    # into_subfolder  into_toplevel  into_sibling  by_category
    effect = {"vp": "/junk"}

    ##
    ## but we can keep going, adding more specific rules
    ## which can take precedence, replacing the fallback
    ## effect we just specified:
    ##

    fn = fn.lower()  # lowercase filename to make this easier

    if "screenshot" in fn:
        effect = {"vp": "/ss"}
        if "mpv_" in fn:
            effect = {"vp": "/anishots"}
    elif "debian" in fn or "biebian" in fn:
        effect = {"vp": "/linux-ISOs"}
    elif re.search(r"ep(isode |\.)?[0-9]", fn):
        effect = {"vp": "/podcasts"}

    # regex lets you grab a part of the matching
    # text and use that in the upload path:
    m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
    if m:
        # the regex matched; use "anime-op" or "anime-ed"
        effect = {"vp": "/anime-" + m[1]}

    # aaand DO IT
    print(json.dumps({"reloc": effect}))


if __name__ == "__main__":
    main()
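Because the hook receives its upload-info as a JSON argument and prints its decision on stdout, it can be exercised locally without a running copyparty; a rough sketch (the `vp` value and the relative script path are just examples):

```python
import json
import subprocess
import sys

# fake the upload-info that copyparty would pass as argv[1];
# this hook only looks at "vp", the upload's virtual path
fake_upload = {"vp": "incoming/screenshot-2024.png"}

out = subprocess.run(
    [sys.executable, "bin/hooks/reloc-by-ext.py", json.dumps(fake_upload)],
    capture_output=True, text=True, check=True,
)
print(out.stdout)  # with the rules above: {"reloc": {"vp": "/ss"}}
```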
**bin/u2c.py** (17 changed lines)

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals

S_VERSION = "1.21"
S_BUILD_DT = "2024-07-26"
S_VERSION = "1.23"
S_BUILD_DT = "2024-08-22"

"""
u2c.py: upload to copyparty
@@ -660,8 +660,15 @@ def upload(fsl, pw, stats):
    # type: (FileSlice, str, str) -> None
    """upload a range of file data, defined by one or more `cid` (chunk-hash)"""

    ctxt = fsl.cids[0]
    if len(fsl.cids) > 1:
        n = 192 // len(fsl.cids)
        n = 9 if n > 9 else 2 if n < 2 else n
        zsl = [zs[:n] for zs in fsl.cids[1:]]
        ctxt += ",%d,%s" % (n, "".join(zsl))

    headers = {
        "X-Up2k-Hash": ",".join(fsl.cids),
        "X-Up2k-Hash": ctxt,
        "X-Up2k-Wark": fsl.file.wark,
        "Content-Type": "application/octet-stream",
    }
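The new header packs the first chunk-hash in full, then a character-count and the truncated remaining hashes, instead of sending every hash in full; a standalone sketch of that packing, using placeholder hash strings:

```python
# mirrors the ctxt construction above; cids are placeholder chunk-hashes
cids = ["a" * 44, "b" * 44, "c" * 44, "d" * 44]

ctxt = cids[0]
if len(cids) > 1:
    n = 192 // len(cids)                 # spread ~192 chars across the chunks
    n = 9 if n > 9 else 2 if n < 2 else n
    zsl = [zs[:n] for zs in cids[1:]]
    ctxt += ",%d,%s" % (n, "".join(zsl))

# first hash in full, then ",9," followed by 9 chars of each remaining hash
print(ctxt)
```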
@@ -1229,7 +1236,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-v", action="store_true", help="verbose")
ap.add_argument("-a", metavar="PASSWD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
ap.add_argument("-x", type=unicode, metavar="REGEX", action="append", help="skip file if filesystem-abspath matches REGEX (option can be repeated), example: '.*/\\.hist/.*'")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
@@ -1276,6 +1283,8 @@ source file/folder selection uses rsync syntax, meaning that:
if ar.dr:
    ar.ow = True

ar.x = "|".join(ar.x or [])

for k in "dl dr drd".split():
    errs = []
    if ar.safe and getattr(ar, k):
@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.13.5"
pkgver="1.14.2"
pkgrel=1
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
arch=("any")
@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("83bf52ac03256ee6fe405a912e2767578692760f9554f821dfcab0700dd58082")
sha256sums=("a39f3950c663671d635c453d1a400f6cec6ec827e7dc9d22c3e791b8ab54017b")

build() {
  cd "${srcdir}/${pkgname}-${pkgver}"
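The PKGBUILD bump changes both `pkgver` and `sha256sums`; a hedged sketch of verifying a downloaded release tarball against the new checksum (the URL follows the `source=` template above, and the local filename is an example):

```python
import hashlib

# values from the updated PKGBUILD above
pkgver = "1.14.2"
expected = "a39f3950c663671d635c453d1a400f6cec6ec827e7dc9d22c3e791b8ab54017b"

# e.g. fetched from
# https://github.com/9001/copyparty/releases/download/v1.14.2/copyparty-1.14.2.tar.gz
path = "copyparty-%s.tar.gz" % (pkgver,)

h = hashlib.sha256()
with open(path, "rb") as f:
    for blk in iter(lambda: f.read(1 << 20), b""):
        h.update(blk)

assert h.hexdigest() == expected, "checksum mismatch"
```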
@@ -1,5 +1,5 @@
{
  "url": "https://github.com/9001/copyparty/releases/download/v1.13.5/copyparty-sfx.py",
  "version": "1.13.5",
  "hash": "sha256-I+dqsiScYPcX6JpLgwVoLs7l0FlbXabc/Ofqye9RQI0="
  "url": "https://github.com/9001/copyparty/releases/download/v1.14.2/copyparty-sfx.py",
  "version": "1.14.2",
  "hash": "sha256-n9Dj2MMrvkWhlXAKWOXn5YQsFCxNpgo5HDFQ111a66A="
}
@@ -20,6 +20,13 @@ point `--js-browser` to one of these by URL:


## example any-js
point `--js-browser` and/or `--js-other` to one of these by URL:

* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)


## example browser-css
point `--css-browser` to one of these by URL:
**contrib/plugins/banner.js** (new file; 93 lines)

@@ -0,0 +1,93 @@
(function() {

    // usage: copy this to '.banner.js' in your webroot,
    // and run copyparty with the following arguments:
    // --js-browser /.banner.js --js-other /.banner.js


    // had to pick the most chuuni one as the default
    var bannertext = '' +
        '<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
        '<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
        '<ul>' +
        '<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
        '<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
        '<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
        '<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
        '<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
        '</ul>';


    // fancy div to insert into pages
    function bannerdiv(border) {
        var ret = mknod('div', null, bannertext);
        if (border)
            ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
        return ret;
    }


    // keep all of these false and then selectively enable them in the if-blocks below
    var show_msgbox = false,
        login_top = false,
        top = false,
        bottom = false,
        top_bordered = false,
        bottom_bordered = false;

    if (QS("h1#cc") && QS("a#k")) {
        // this is the controlpanel
        // (you probably want to keep just one of these enabled)
        show_msgbox = true;
        login_top = true;
        bottom = true;
    }
    else if (ebi("swin") && ebi("smac")) {
        // this is the connect-page, same deal here
        show_msgbox = true;
        top_bordered = true;
        bottom_bordered = true;
    }
    else if (ebi("op_cfg") || ebi("div#mw")) {
        // we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
        // fragile pages which break if you do something too fancy
        show_msgbox = true;
    }


    // shows a fullscreen messagebox; works on all pages
    if (show_msgbox) {
        var now = Math.floor(Date.now() / 1000),
            last_shown = sread("bannerts") || 0;

        // 60 * 60 * 17 = 17 hour cooldown
        if (now - last_shown > 60 * 60 * 17) {
            swrite("bannerts", now);
            modal.confirm(bannertext, null, function () {
                location = 'https://this-page-intentionally-left-blank.org/';
            });
        }
    }

    // show a message at the top of the page; only works on the connect-page
    if (top || top_bordered) {
        var dst = ebi('wrap');
        dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
    }

    // show a message on the page footer; only works on the controlpanel and connect-page
    if (bottom || bottom_bordered) {
        ebi('wrap').appendChild(bannerdiv(bottom_bordered));
    }

    // show a message on the top of the page; only works on the controlpanel
    if (login_top) {
        var dst = QS('h1');
        dst.parentNode.insertBefore(bannerdiv(false), dst);
    }

})();
@@ -40,8 +40,6 @@ html.ey {

    --u2-b1-bg: var(--w2);
    --u2-b2-bg: var(--w2);
    --u2-o-bg: var(--w2);
    --u2-o-1-bg: var(--a);
    --u2-txt-bg: var(--w2);
    --u2-tab-bg: a;
    --u2-tab-1-bg: var(--w2);
@@ -67,7 +67,13 @@ if True: # pylint: disable=using-constant-test
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_TLS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
@@ -344,7 +350,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
assert ssl
|
||||
assert ssl # type: ignore
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
@@ -378,7 +384,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
assert ssl
|
||||
assert ssl # type: ignore
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -491,6 +497,9 @@ def disable_quickedit() -> None:
|
||||
|
||||
|
||||
def sfx_tpoke(top: str):
|
||||
if os.environ.get("PRTY_NO_TPOKE"):
|
||||
return
|
||||
|
||||
files = [top] + [
|
||||
os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df
|
||||
]
|
||||
@@ -518,6 +527,41 @@ def showlic() -> None:
|
||||
|
||||
def get_sects():
|
||||
return [
|
||||
[
|
||||
"bind",
|
||||
"configure listening",
|
||||
dedent(
|
||||
"""
|
||||
\033[33m-i\033[0m takes a comma-separated list of interfaces to listen on;
|
||||
IP-addresses and/or unix-sockets (Unix Domain Sockets)
|
||||
|
||||
the default (\033[32m-i ::\033[0m) means all IPv4 and IPv6 addresses
|
||||
|
||||
\033[32m-i 0.0.0.0\033[0m listens on all IPv4 NICs/subnets
|
||||
\033[32m-i 127.0.0.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1\033[0m listens on IPv4 localhost only
|
||||
\033[32m-i 127.1,192.168.123.1\033[0m = IPv4 localhost and 192.168.123.1
|
||||
|
||||
\033[33m-p\033[0m takes a comma-separated list of tcp ports to listen on;
|
||||
the default is \033[32m-p 3923\033[0m but as root you can \033[32m-p 80,443,3923\033[0m
|
||||
|
||||
when running behind a reverse-proxy, it's recommended to
|
||||
use unix-sockets for improved performance and security;
|
||||
|
||||
\033[32m-i unix:770:www:\033[33m/tmp/a.sock\033[0m listens on \033[33m/tmp/a.sock\033[0m with
|
||||
permissions \033[33m0770\033[0m; only accessible to members of the \033[33mwww\033[0m
|
||||
group. This is the best approach. Alternatively,
|
||||
|
||||
\033[32m-i unix:777:\033[33m/tmp/a.sock\033[0m sets perms \033[33m0777\033[0m so anyone can
|
||||
access it; bad unless it's inside a restricted folder
|
||||
|
||||
\033[32m-i unix:\033[33m/tmp/a.sock\033[0m keeps umask-defined permissions
|
||||
(usually \033[33m0600\033[0m) and the same user/group as copyparty
|
||||
|
||||
\033[33m-p\033[0m (tcp ports) is ignored for unix sockets
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
@@ -695,6 +739,11 @@ def get_sects():
|
||||
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
||||
if the program returns 0 (true/OK) then the ban will NOT happen
|
||||
|
||||
effects can be used to redirect uploads into other
|
||||
locations, and to delete or index other files based
|
||||
on new uploads, but with certain limitations. See
|
||||
bin/hooks/reloc* and docs/devnotes.md#hook-effects
|
||||
|
||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||
@@ -923,6 +972,16 @@ def add_fs(ap):
|
||||
ap2.add_argument("--mtab-age", metavar="SEC", type=int, default=60, help="rebuild mountpoint cache every \033[33mSEC\033[0m to keep track of sparse-files support; keep low on servers with removable media")
|
||||
|
||||
|
||||
def add_share(ap):
|
||||
db_path = os.path.join(E.cfg, "shares.db")
|
||||
ap2 = ap.add_argument_group('share-url options')
|
||||
ap2.add_argument("--shr", metavar="DIR", type=u, default="", help="toplevel virtual folder for shared files/folders, for example [\033[32m/share\033[0m]")
|
||||
ap2.add_argument("--shr-db", metavar="FILE", type=u, default=db_path, help="database to store shares in")
|
||||
ap2.add_argument("--shr-adm", metavar="U,U", type=u, default="", help="comma-separated list of users allowed to view/delete any share")
|
||||
ap2.add_argument("--shr-rt", metavar="MIN", type=int, default=1440, help="shares can be revived by their owner if they expired less than MIN minutes ago; [\033[32m60\033[0m]=hour, [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week")
|
||||
ap2.add_argument("--shr-v", action="store_true", help="debug")
|
||||
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||
@@ -955,8 +1014,8 @@ def add_upload(ap):
|
||||
|
||||
def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="IPs and/or unix-sockets to listen on (see \033[33m--help-bind\033[0m). Default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to listen on (comma/range); ignored for unix-sockets")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||
@@ -1016,6 +1075,16 @@ def add_auth(ap):
|
||||
ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
|
||||
|
||||
|
||||
def add_chpw(ap):
|
||||
db_path = os.path.join(E.cfg, "chpw.json")
|
||||
ap2 = ap.add_argument_group('user-changeable passwords options')
|
||||
ap2.add_argument("--chpw", action="store_true", help="allow users to change their own passwords")
|
||||
ap2.add_argument("--chpw-no", metavar="U,U,U", type=u, action="append", help="do not allow password-changes for this comma-separated list of usernames")
|
||||
ap2.add_argument("--chpw-db", metavar="PATH", type=u, default=db_path, help="where to store the passwords database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||
ap2.add_argument("--chpw-len", metavar="N", type=int, default=8, help="minimum password length")
|
||||
ap2.add_argument("--chpw-v", metavar="LVL", type=int, default=2, help="verbosity of summary on config load [\033[32m0\033[0m] = nothing at all, [\033[32m1\033[0m] = number of users, [\033[32m2\033[0m] = list users with default-pw, [\033[32m3\033[0m] = list all users")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf options")
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
@@ -1123,6 +1192,7 @@ def add_hooks(ap):
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||
ap2.add_argument("--hook-v", action="store_true", help="verbose hooks")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
@@ -1343,7 +1413,7 @@ def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--gsel", action="store_true", help="select files in grid by ctrl-click (volflag=gsel)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor chi\033[0m")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
|
||||
@@ -1351,9 +1421,10 @@ def add_ui(ap, retry):
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages; can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include in the filebrowser html")
|
||||
ap2.add_argument("--js-other", metavar="L", type=u, default="", help="URL to additional JS to include in all other pages")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages (except for basic-browser); can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
@@ -1372,12 +1443,14 @@ def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--deps", action="store_true", help="list information about detected optional dependencies")
|
||||
if hasattr(select, "poll"):
|
||||
ap2.add_argument("--no-poll", action="store_true", help="kernel-bug workaround: disable poll; use select instead (limits max num clients to ~700)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--rm-sck", action="store_true", help="when listening on unix-sockets, do a basic delete+bind instead of the default atomic bind")
|
||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, default="", help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
@@ -1420,11 +1493,13 @@ def run_argparse(
|
||||
add_tls(ap, cert_path)
|
||||
add_cert(ap, cert_path)
|
||||
add_auth(ap)
|
||||
add_chpw(ap)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
add_zc_ssdp(ap)
|
||||
add_fs(ap)
|
||||
add_share(ap)
|
||||
add_upload(ap)
|
||||
add_db_general(ap, hcores)
|
||||
add_db_metadata(ap)
|
||||
|
||||
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 13, 6)
CODENAME = "race the beam"
BUILD_DT = (2024, 7, 29)
VERSION = (1, 14, 3)
CODENAME = "one step forward"
BUILD_DT = (2024, 8, 30)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import argparse
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
@@ -12,7 +13,7 @@ import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS, E
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS, E
|
||||
from .bos import bos
|
||||
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
|
||||
from .pwhash import PWHash
|
||||
@@ -34,9 +35,11 @@ from .util import (
|
||||
odfusion,
|
||||
relchk,
|
||||
statdir,
|
||||
ub64enc,
|
||||
uncyg,
|
||||
undot,
|
||||
unhumanize,
|
||||
vjoin,
|
||||
vsplit,
|
||||
)
|
||||
|
||||
@@ -56,6 +59,9 @@ if TYPE_CHECKING:
|
||||
# Vflags: TypeAlias = dict[str, Any]
|
||||
# Mflags: TypeAlias = dict[str, Vflags]
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
LEELOO_DALLAS = "leeloo_dallas"
|
||||
|
||||
@@ -338,6 +344,8 @@ class VFS(object):
|
||||
self.histtab: dict[str, str] = {} # all realpath->histpath
|
||||
self.dbv: Optional[VFS] = None # closest full/non-jump parent
|
||||
self.lim: Optional[Lim] = None # upload limits; only set for dbv
|
||||
self.shr_src: Optional[tuple[VFS, str]] = None # source vfs+rem of a share
|
||||
self.shr_files: set[str] = set() # filenames to include from shr_src
|
||||
self.aread: dict[str, list[str]] = {}
|
||||
self.awrite: dict[str, list[str]] = {}
|
||||
self.amove: dict[str, list[str]] = {}
|
||||
@@ -362,6 +370,9 @@ class VFS(object):
|
||||
self.all_aps = []
|
||||
self.all_vps = []
|
||||
|
||||
self.get_dbv = self._get_dbv
|
||||
self.ls = self._ls
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "VFS(%s)" % (
|
||||
", ".join(
|
||||
@@ -441,7 +452,7 @@ class VFS(object):
|
||||
|
||||
def _find(self, vpath: str) -> tuple["VFS", str]:
|
||||
"""return [vfs,remainder]"""
|
||||
if vpath == "":
|
||||
if not vpath:
|
||||
return self, ""
|
||||
|
||||
if "/" in vpath:
|
||||
@@ -451,7 +462,7 @@ class VFS(object):
|
||||
rem = ""
|
||||
|
||||
if name in self.nodes:
|
||||
return self.nodes[name]._find(undot(rem))
|
||||
return self.nodes[name]._find(rem)
|
||||
|
||||
return self, vpath
|
||||
|
||||
@@ -518,12 +529,20 @@ class VFS(object):
|
||||
t = "{} has no {} in [{}] => [{}] => [{}]"
|
||||
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
|
||||
|
||||
t = 'you don\'t have %s-access in "/%s"'
|
||||
raise Pebkac(err, t % (msg, cvpath))
|
||||
t = 'you don\'t have %s-access in "/%s" or below "/%s"'
|
||||
raise Pebkac(err, t % (msg, cvpath, vn.vpath))
|
||||
|
||||
return vn, rem
|
||||
|
||||
def get_dbv(self, vrem: str) -> tuple["VFS", str]:
|
||||
def _get_share_src(self, vrem: str) -> tuple["VFS", str]:
|
||||
src = self.shr_src
|
||||
if not src:
|
||||
return self._get_dbv(vrem)
|
||||
|
||||
shv, srem = src
|
||||
return shv, vjoin(srem, vrem)
|
||||
|
||||
def _get_dbv(self, vrem: str) -> tuple["VFS", str]:
|
||||
dbv = self.dbv
|
||||
if not dbv:
|
||||
return self, vrem
|
||||
@@ -549,7 +568,26 @@ class VFS(object):
|
||||
ad, fn = os.path.split(ap)
|
||||
return os.path.join(absreal(ad), fn)
|
||||
|
||||
def ls(
|
||||
def _ls_nope(
|
||||
self, *a, **ka
|
||||
) -> tuple[str, list[tuple[str, os.stat_result]], dict[str, "VFS"]]:
|
||||
raise Pebkac(500, "nope.avi")
|
||||
|
||||
def _ls_shr(
|
||||
self,
|
||||
rem: str,
|
||||
uname: str,
|
||||
scandir: bool,
|
||||
permsets: list[list[bool]],
|
||||
lstat: bool = False,
|
||||
) -> tuple[str, list[tuple[str, os.stat_result]], dict[str, "VFS"]]:
|
||||
"""replaces _ls for certain shares (single-file, or file selection)"""
|
||||
vn, rem = self.shr_src # type: ignore
|
||||
abspath, real, _ = vn.ls(rem, "\n", scandir, permsets, lstat)
|
||||
real = [x for x in real if os.path.basename(x[0]) in self.shr_files]
|
||||
return abspath, real, {}
|
||||
|
||||
def _ls(
|
||||
self,
|
||||
rem: str,
|
||||
uname: str,
|
||||
@@ -804,6 +842,7 @@ class AuthSrv(object):
|
||||
self.vfs = VFS(log_func, "", "", AXS(), {})
|
||||
self.acct: dict[str, str] = {}
|
||||
self.iacct: dict[str, str] = {}
|
||||
self.defpw: dict[str, str] = {}
|
||||
self.grps: dict[str, list[str]] = {}
|
||||
self.re_pwd: Optional[re.Pattern] = None
|
||||
|
||||
@@ -1349,7 +1388,7 @@ class AuthSrv(object):
|
||||
flags[name] = vals
|
||||
self._e("volflag [{}] += {} ({})".format(name, vals, desc))
|
||||
|
||||
def reload(self) -> None:
|
||||
def reload(self, verbosity: int = 9) -> None:
|
||||
"""
|
||||
construct a flat list of mountpoints and usernames
|
||||
first from the commandline arguments
|
||||
@@ -1357,9 +1396,9 @@ class AuthSrv(object):
|
||||
before finally building the VFS
|
||||
"""
|
||||
with self.mutex:
|
||||
self._reload()
|
||||
self._reload(verbosity)
|
||||
|
||||
def _reload(self) -> None:
|
||||
def _reload(self, verbosity: int = 9) -> None:
|
||||
acct: dict[str, str] = {} # username:password
|
||||
grps: dict[str, list[str]] = {} # groupname:usernames
|
||||
daxs: dict[str, AXS] = {}
|
||||
@@ -1437,6 +1476,8 @@ class AuthSrv(object):
|
||||
raise
|
||||
|
||||
self.setup_pwhash(acct)
|
||||
defpw = acct.copy()
|
||||
self.setup_chpw(acct)
|
||||
|
||||
# case-insensitive; normalize
|
||||
if WINDOWS:
|
||||
@@ -1452,9 +1493,8 @@ class AuthSrv(object):
|
||||
vfs = VFS(self.log_func, absreal("."), "", axs, {})
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(self.log_func, "", "", AXS(), {})
|
||||
vfs.flags["tcolor"] = self.args.tcolor
|
||||
vfs.flags["d2d"] = True
|
||||
zsd = {"d2d": True, "tcolor": self.args.tcolor}
|
||||
vfs = VFS(self.log_func, "", "", AXS(), zsd)
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -1483,6 +1523,56 @@ class AuthSrv(object):
|
||||
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
|
||||
vol.root = vfs
|
||||
|
||||
enshare = self.args.shr
|
||||
shr = enshare[1:-1]
|
||||
shrs = enshare[1:]
|
||||
if enshare:
|
||||
import sqlite3
|
||||
|
||||
shv = VFS(self.log_func, "", shr, AXS(), {"d2d": True})
|
||||
|
||||
db_path = self.args.shr_db
|
||||
db = sqlite3.connect(db_path)
|
||||
cur = db.cursor()
|
||||
cur2 = db.cursor()
|
||||
now = time.time()
|
||||
for row in cur.execute("select * from sh"):
|
||||
s_k, s_pw, s_vp, s_pr, s_nf, s_un, s_t0, s_t1 = row
|
||||
if s_t1 and s_t1 < now:
|
||||
continue
|
||||
|
||||
if self.args.shr_v:
|
||||
t = "loading %s share [%s] by [%s] => [%s]"
|
||||
self.log(t % (s_pr, s_k, s_un, s_vp))
|
||||
|
||||
if s_pw:
|
||||
# gotta reuse the "account" for all shares with this pw,
|
||||
# so do a light scramble as this appears in the web-ui
|
||||
zs = ub64enc(hashlib.sha512(s_pw.encode("utf-8")).digest())[4:16]
|
||||
sun = "s_%s" % (zs.decode("utf-8"),)
|
||||
acct[sun] = s_pw
|
||||
else:
|
||||
sun = "*"
|
||||
|
||||
s_axs = AXS(
|
||||
[sun] if "r" in s_pr else [],
|
||||
[sun] if "w" in s_pr else [],
|
||||
[sun] if "m" in s_pr else [],
|
||||
[sun] if "d" in s_pr else [],
|
||||
)
|
||||
|
||||
# don't know the abspath yet + wanna ensure the user
|
||||
# still has the privs they granted, so nullmap it
|
||||
shv.nodes[s_k] = VFS(
|
||||
self.log_func, "", "%s/%s" % (shr, s_k), s_axs, shv.flags.copy()
|
||||
)
|
||||
|
||||
vfs.nodes[shr] = vfs.all_vols[shr] = shv
|
||||
for vol in shv.nodes.values():
|
||||
vfs.all_vols[vol.vpath] = vol
|
||||
vol.get_dbv = vol._get_share_src
|
||||
vol.ls = vol._ls_nope
|
||||
|
||||
zss = set(acct)
|
||||
zss.update(self.idp_accs)
|
||||
zss.discard("*")
|
||||
@@ -1501,7 +1591,7 @@ class AuthSrv(object):
for usr in unames:
for vp, vol in vfs.all_vols.items():
zx = getattr(vol.axs, axs_key)
if usr in zx:
if usr in zx and (not enshare or not vp.startswith(shrs)):
umap[usr].append(vp)
umap[usr].sort()
setattr(vfs, "a" + perm, umap)
@@ -1551,6 +1641,8 @@ class AuthSrv(object):

for usr in acct:
if usr not in associated_users:
if enshare and usr.startswith("s_"):
continue
if len(vfs.all_vols) > 1:
# user probably familiar enough that the verbose message is not necessary
t = "account [%s] is not mentioned in any volume definitions; see --help-accounts"
@@ -1895,7 +1987,7 @@ class AuthSrv(object):
self.log(t.format(vol.vpath), 1)
del vol.flags["lifetime"]

needs_e2d = [x for x in hooks if x != "xm"]
needs_e2d = [x for x in hooks if x in ("xau", "xiu")]
drop = [x for x in needs_e2d if vol.flags.get(x)]
if drop:
t = 'removing [{}] from volume "/{}" because e2d is disabled'
@@ -1986,9 +2078,12 @@ class AuthSrv(object):
have_e2t = False
t = "volumes and permissions:\n"
for zv in vfs.all_vols.values():
if not self.warn_anonwrite:
if not self.warn_anonwrite or verbosity < 5:
break

if enshare and (zv.vpath == shr or zv.vpath.startswith(shrs)):
continue

t += '\n\033[36m"/{}" \033[33m{}\033[0m'.format(zv.vpath, zv.realpath)
for txt, attr in [
[" read", "uread"],
@@ -2015,7 +2110,7 @@ class AuthSrv(object):

t += "\n"

if self.warn_anonwrite:
if self.warn_anonwrite and verbosity > 4:
if not self.args.no_voldump:
self.log(t)

@@ -2039,7 +2134,7 @@ class AuthSrv(object):

try:
zv, _ = vfs.get("", "*", False, True, err=999)
if self.warn_anonwrite and os.getcwd() == zv.realpath:
if self.warn_anonwrite and verbosity > 4 and os.getcwd() == zv.realpath:
t = "anyone can write to the current directory: {}\n"
self.log(t.format(zv.realpath), c=1)

@@ -2066,6 +2161,7 @@ class AuthSrv(object):

self.vfs = vfs
self.acct = acct
self.defpw = defpw
self.grps = grps
self.iacct = {v: k for k, v in acct.items()}

@@ -2086,6 +2182,169 @@ class AuthSrv(object):
MIMES[ext] = mime
EXTS.update({v: k for k, v in MIMES.items()})

if enshare:
# hide shares from controlpanel
vfs.all_vols = {
x: y
for x, y in vfs.all_vols.items()
if x != shr and not x.startswith(shrs)
}

assert db and cur and cur2 and shv # type: ignore
for row in cur.execute("select * from sh"):
s_k, s_pw, s_vp, s_pr, s_nf, s_un, s_t0, s_t1 = row
shn = shv.nodes.get(s_k, None)
if not shn:
continue

try:
s_vfs, s_rem = vfs.get(
s_vp, s_un, "r" in s_pr, "w" in s_pr, "m" in s_pr, "d" in s_pr
)
except Exception as ex:
t = "removing share [%s] by [%s] to [%s] due to %r"
self.log(t % (s_k, s_un, s_vp, ex), 3)
shv.nodes.pop(s_k)
continue

fns = []
if s_nf:
q = "select vp from sf where k = ?"
for (s_fn,) in cur2.execute(q, (s_k,)):
fns.append(s_fn)

shn.shr_files = set(fns)
shn.ls = shn._ls_shr
else:
shn.ls = shn._ls

shn.shr_src = (s_vfs, s_rem)
shn.realpath = s_vfs.canonical(s_rem)

if self.args.shr_v:
t = "mapped %s share [%s] by [%s] => [%s] => [%s]"
self.log(t % (s_pr, s_k, s_un, s_vp, shn.realpath))

# transplant shadowing into shares
for vn in shv.nodes.values():
svn, srem = vn.shr_src # type: ignore
if srem:
continue # free branch, safe
ap = svn.canonical(srem)
if bos.path.isfile(ap):
continue # also fine
for zs in svn.nodes.keys():
# hide subvolume
vn.nodes[zs] = VFS(self.log_func, "", "", AXS(), {})

cur2.close()
cur.close()
db.close()

def chpw(self, broker: Optional["BrokerCli"], uname, pw) -> tuple[bool, str]:
if not self.args.chpw:
return False, "feature disabled in server config"

if uname == "*" or uname not in self.defpw:
return False, "not logged in"

if uname in self.args.chpw_no:
return False, "not allowed for this account"

if len(pw) < self.args.chpw_len:
t = "minimum password length: %d characters"
return False, t % (self.args.chpw_len,)

hpw = self.ah.hash(pw) if self.ah.on else pw

if hpw == self.acct[uname]:
return False, "that's already your password my dude"

if hpw in self.iacct:
return False, "password is taken"

with self.mutex:
ap = self.args.chpw_db
if not bos.path.exists(ap):
pwdb = {}
else:
with open(ap, "r", encoding="utf-8") as f:
pwdb = json.load(f)

pwdb = [x for x in pwdb if x[0] != uname]
pwdb.append((uname, self.defpw[uname], hpw))

with open(ap, "w", encoding="utf-8") as f:
json.dump(pwdb, f, separators=(",\n", ": "))

self.log("reinitializing due to password-change for user [%s]" % (uname,))

if not broker:
# only true for tests
self._reload()
return True, "new password OK"

broker.ask("_reload_blocking", False, False).get()
return True, "new password OK"

def setup_chpw(self, acct: dict[str, str]) -> None:
ap = self.args.chpw_db
if not self.args.chpw or not bos.path.exists(ap):
return

with open(ap, "r", encoding="utf-8") as f:
pwdb = json.load(f)

useen = set()
urst = set()
uok = set()
for usr, orig, mod in pwdb:
useen.add(usr)
if usr not in acct:
# previous user, no longer known
continue
if acct[usr] != orig:
urst.add(usr)
continue
uok.add(usr)
acct[usr] = mod

if not self.args.chpw_v:
return

for usr in acct:
if usr not in useen:
urst.add(usr)

for zs in uok:
urst.discard(zs)

if self.args.chpw_v == 1 or (self.args.chpw_v == 2 and not urst):
t = "chpw: %d changed, %d unchanged"
self.log(t % (len(uok), len(urst)))
return

elif self.args.chpw_v == 2:
t = "chpw: %d changed" % (len(uok))
if urst:
t += ", \033[0munchanged:\033[35m %s" % (", ".join(list(urst)))

self.log(t, 6)
return

msg = ""
if uok:
t = "\033[0mchanged: \033[32m%s"
msg += t % (", ".join(list(uok)),)
if urst:
t = "%s\033[0munchanged: \033[35m%s"
msg += t % (
", " if msg else "",
", ".join(list(urst)),
)

self.log("chpw: " + msg, 6)

def setup_pwhash(self, acct: dict[str, str]) -> None:
self.ah = PWHash(self.args)
if not self.ah.on:

@@ -9,7 +9,7 @@ import time
from .__init__ import ANYWIN
from .util import Netdev, runcmd, wrename, wunlink

HAVE_CFSSL = True
HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")

if True: # pylint: disable=using-constant-test
from .util import NamedLogger, RootLogger

@@ -9,12 +9,12 @@ import time
from .__init__ import ANYWIN, MACOS
from .authsrv import AXS, VFS
from .bos import bos
from .util import chkcmd, min_ex
from .util import chkcmd, min_ex, undot

if True: # pylint: disable=using-constant-test
from typing import Optional, Union

from .util import RootLogger
from .util import RootLogger, undot


class Fstab(object):
@@ -52,7 +52,7 @@ class Fstab(object):
self.log(msg.format(path, fs, min_ex()), 3)
return fs

path = path.lstrip("/")
path = undot(path)
try:
return self.cache[path]
except:
@@ -124,7 +124,7 @@ class Fstab(object):
if ANYWIN:
path = self._winpath(path)

path = path.lstrip("/")
path = undot(path)
ptn = re.compile(r"^[^\\/]*")
vn, rem = self.tab._find(path)
if not self.trusted:

@@ -41,6 +41,9 @@ if True: # pylint: disable=using-constant-test
import typing
from typing import Any, Optional, Union

if PY2:
range = xrange # type: ignore


class FSE(FilesystemError):
def __init__(self, msg: str, severity: int = 0) -> None:
@@ -350,7 +353,7 @@ class FtpFs(AbstractedFS):
svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/")
try:
self.hub.up2k.handle_mv(self.uname, svp, dvp)
self.hub.up2k.handle_mv(self.uname, self.h.cli_ip, svp, dvp)
except Exception as ex:
raise FSE(str(ex))

@@ -468,6 +471,9 @@ class FtpHandler(FTPHandler):
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
None,
None,
self.hub.up2k,
"xbu.ftpd",
xbu,
ap,
vp,
@@ -477,7 +483,7 @@ class FtpHandler(FTPHandler):
0,
0,
self.cli_ip,
0,
time.time(),
"",
):
raise FSE("Upload blocked by xbu server config")
@@ -580,9 +586,15 @@ class Ftpd(object):
if "::" in ips:
ips.append("0.0.0.0")

ips = [x for x in ips if "unix:" not in x]

if self.args.ftp4:
ips = [x for x in ips if ":" not in x]

if not ips:
lgr.fatal("cannot start ftp-server; no compatible IPs in -i")
return

ips = list(ODict.fromkeys(ips)) # dedup

ioloop = IOLoop()

@@ -13,18 +13,22 @@ import json
import os
import random
import re
import socket
import stat
import string
import threading # typechk
import time
import uuid
from datetime import datetime
from email.utils import formatdate, parsedate
from email.utils import parsedate
from operator import itemgetter

import jinja2 # typechk

try:
if os.environ.get("PRTY_NO_LZMA"):
raise Exception()

import lzma
except:
pass
@@ -41,6 +45,7 @@ from .util import unquote # type: ignore
from .util import (
APPLESAN_RE,
BITNESS,
HAVE_SQLITE3,
HTTPCODE,
META_NOBOTS,
UTC,
@@ -54,6 +59,7 @@ from .util import (
alltrace,
atomic_move,
exclude_dotfiles,
formatdate,
fsenc,
gen_filekey,
gen_filekey_dbg,
@@ -69,7 +75,9 @@ from .util import (
humansize,
ipnorm,
loadpy,
log_reloc,
min_ex,
pathmod,
quotep,
rand_name,
read_header,
@@ -105,6 +113,9 @@ if True: # pylint: disable=using-constant-test
if TYPE_CHECKING:
from .httpconn import HttpConn

if not hasattr(socket, "AF_UNIX"):
setattr(socket, "AF_UNIX", -9001)

_ = (argparse, threading)

NO_CACHE = {"Cache-Control": "no-cache"}
@@ -222,6 +233,11 @@ class HttpCli(object):
ka["s_doctitle"] = self.args.doctitle
ka["tcolor"] = self.vn.flags["tcolor"]

if self.args.js_other and "js" not in ka:
zs = self.args.js_other
zs += "&" if "?" in zs else "?"
ka["js"] = zs

zso = self.vn.flags.get("html_head")
if zso:
ka["this"] = self
@@ -303,8 +319,11 @@ class HttpCli(object):
)
self.host = self.headers.get("host") or ""
if not self.host:
zs = "%s:%s" % self.s.getsockname()[:2]
self.host = zs[7:] if zs.startswith("::ffff:") else zs
if self.s.family == socket.AF_UNIX:
self.host = self.args.name
else:
zs = "%s:%s" % self.s.getsockname()[:2]
self.host = zs[7:] if zs.startswith("::ffff:") else zs

trusted_xff = False
n = self.args.rproxy
@@ -436,7 +455,7 @@ class HttpCli(object):
t = "incorrect --rp-loc or webserver config; expected vpath starting with [{}] but got [{}]"
self.log(t.format(self.args.R, vpath), 1)

self.ouparam = {k: zs for k, zs in uparam.items()}
self.ouparam = uparam.copy()

if self.args.rsp_slp:
time.sleep(self.args.rsp_slp)
@@ -445,6 +464,9 @@ class HttpCli(object):

zso = self.headers.get("cookie")
if zso:
if len(zso) > 8192:
self.loud_reply("cookie header too big", status=400)
return False
zsll = [x.split("=", 1) for x in zso.split(";") if "=" in x]
cookies = {k.strip(): unescape_cookie(zs) for k, zs in zsll}
cookie_pw = cookies.get("cppws") or cookies.get("cppwd") or ""
@@ -691,6 +713,9 @@ class HttpCli(object):
xban = self.vn.flags.get("xban")
if not xban or not runhook(
self.log,
self.conn.hsrv.broker,
None,
"xban",
xban,
self.vn.canonical(self.rem),
self.vpath,
@@ -787,7 +812,7 @@ class HttpCli(object):

# close if unknown length, otherwise take client's preference
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
response.append("Date: " + formatdate(usegmt=True))
response.append("Date: " + formatdate())

# headers{} overrides anything set previously
if headers:
@@ -811,9 +836,9 @@ class HttpCli(object):
self.cbonk(self.conn.hsrv.gmal, zs, "cc_hdr", "Cc in out-hdr")
raise Pebkac(999)

response.append("\r\n")
try:
# best practice to separate headers and body into different packets
self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
self.s.sendall("\r\n".join(response).encode("utf-8"))
except:
raise Pebkac(400, "client d/c while replying headers")

@@ -947,10 +972,10 @@ class HttpCli(object):
status: int = 200,
use302: bool = False,
) -> bool:
vp = self.args.RS + vpath
vp = self.args.SRS + vpath
html = self.j2s(
"msg",
h2='<a href="/{}">{} /{}</a>'.format(
h2='<a href="{}">{} {}</a>'.format(
quotep(vp) + suf, flavor, html_escape(vp, crlf=True) + suf
),
pre=msg,
@@ -958,7 +983,7 @@ class HttpCli(object):
).encode("utf-8", "replace")

if use302:
self.reply(html, status=302, headers={"Location": "/" + vpath})
self.reply(html, status=302, headers={"Location": vp})
else:
self.reply(html, status=status)

@@ -1120,7 +1145,7 @@ class HttpCli(object):
if "move" in self.uparam:
return self.handle_mv()

if not self.vpath:
if not self.vpath and self.ouparam:
if "reload" in self.uparam:
return self.handle_reload()

@@ -1142,23 +1167,12 @@ class HttpCli(object):
if "hc" in self.uparam:
return self.tx_svcs()

if "shares" in self.uparam:
return self.tx_shares()

if "h" in self.uparam:
return self.tx_mounts()

# conditional redirect to single volumes
if self.vpath == "" and not self.ouparam:
nread = len(self.rvol)
nwrite = len(self.wvol)
if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):
if nread == 1:
vpath = self.rvol[0]
else:
vpath = self.wvol[0]

if self.vpath != vpath:
self.redirect(vpath, flavor="redirecting to", use302=True)
return True

return self.tx_browser()

def handle_propfind(self) -> bool:
@@ -1168,7 +1182,8 @@ class HttpCli(object):
if self.args.no_dav:
raise Pebkac(405, "WebDAV is disabled in server config")

vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False, err=401)
vn = self.vn
rem = self.rem
tap = vn.canonical(rem)

if "davauth" in vn.flags and self.uname == "*":
@@ -1305,7 +1320,7 @@ class HttpCli(object):

pvs: dict[str, str] = {
"displayname": html_escape(rp.split("/")[-1]),
"getlastmodified": formatdate(mtime, usegmt=True),
"getlastmodified": formatdate(mtime),
"resourcetype": '<D:collection xmlns:D="DAV:"/>' if isdir else "",
"supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>',
}
@@ -1552,8 +1567,8 @@ class HttpCli(object):
self.log("PUT %s @%s" % (self.req, self.uname))

if not self.can_write:
t = "user {} does not have write-access here"
raise Pebkac(403, t.format(self.uname))
t = "user %s does not have write-access under /%s"
raise Pebkac(403, t % (self.uname, self.vn.vpath))

if not self.args.no_dav and self._applesan():
return self.headers.get("content-length") == "0"
@@ -1596,6 +1611,9 @@ class HttpCli(object):
if "delete" in self.uparam:
return self.handle_rm([])

if "eshare" in self.uparam:
return self.handle_eshare()

if "application/octet-stream" in ctype:
return self.handle_post_binary()

@@ -1628,6 +1646,9 @@ class HttpCli(object):
if xm:
runhook(
self.log,
self.conn.hsrv.broker,
None,
"xm",
xm,
self.vn.canonical(self.rem),
self.vpath,
@@ -1776,11 +1797,15 @@ class HttpCli(object):

if xbu:
at = time.time() - lifetime
if not runhook(
vp = vjoin(self.vpath, fn) if nameless else self.vpath
hr = runhook(
self.log,
self.conn.hsrv.broker,
None,
"xbu.http.dump",
xbu,
path,
self.vpath,
vp,
self.host,
self.uname,
self.asrv.vfs.get_perms(self.vpath, self.uname),
@@ -1789,10 +1814,25 @@ class HttpCli(object):
self.ip,
at,
"",
):
)
if not hr:
t = "upload blocked by xbu server config"
self.log(t, 1)
raise Pebkac(403, t)
if hr.get("reloc"):
x = pathmod(self.asrv.vfs, path, vp, hr["reloc"])
if x:
if self.args.hook_v:
log_reloc(self.log, hr["reloc"], x, path, vp, fn, vfs, rem)
fdir, self.vpath, fn, (vfs, rem) = x
if self.args.nw:
fn = os.devnull
else:
bos.makedirs(fdir)
path = os.path.join(fdir, fn)
if not nameless:
self.vpath = vjoin(self.vpath, fn)
params["fdir"] = fdir

if is_put and not (self.args.no_dav or self.args.nw) and bos.path.exists(path):
# allow overwrite if...
@@ -1867,24 +1907,45 @@ class HttpCli(object):
fn = fn2
path = path2

if xau and not runhook(
self.log,
xau,
path,
self.vpath,
self.host,
self.uname,
self.asrv.vfs.get_perms(self.vpath, self.uname),
mt,
post_sz,
self.ip,
at,
"",
):
t = "upload blocked by xau server config"
self.log(t, 1)
wunlink(self.log, path, vfs.flags)
raise Pebkac(403, t)
if xau:
vp = vjoin(self.vpath, fn) if nameless else self.vpath
hr = runhook(
self.log,
self.conn.hsrv.broker,
None,
"xau.http.dump",
xau,
path,
vp,
self.host,
self.uname,
self.asrv.vfs.get_perms(self.vpath, self.uname),
mt,
post_sz,
self.ip,
at,
"",
)
if not hr:
t = "upload blocked by xau server config"
self.log(t, 1)
wunlink(self.log, path, vfs.flags)
raise Pebkac(403, t)
if hr.get("reloc"):
x = pathmod(self.asrv.vfs, path, vp, hr["reloc"])
if x:
if self.args.hook_v:
log_reloc(self.log, hr["reloc"], x, path, vp, fn, vfs, rem)
fdir, self.vpath, fn, (vfs, rem) = x
bos.makedirs(fdir)
path2 = os.path.join(fdir, fn)
atomic_move(self.log, path, path2, vfs.flags)
path = path2
if not nameless:
self.vpath = vjoin(self.vpath, fn)
sz = bos.path.getsize(path)
else:
sz = post_sz

vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.say(
@@ -1907,7 +1968,7 @@ class HttpCli(object):
alg,
self.args.fk_salt,
path,
post_sz,
sz,
0 if ANYWIN else bos.stat(path).st_ino,
)[: vfs.flags["fk"]]

@@ -2024,6 +2085,9 @@ class HttpCli(object):
if act == "zip":
return self.handle_zip_post()

if act == "chpw":
return self.handle_chpw()

raise Pebkac(422, 'invalid action "{}"'.format(act))

def handle_zip_post(self) -> bool:
@@ -2082,6 +2146,9 @@ class HttpCli(object):
if "srch" in self.uparam or "srch" in body:
return self.handle_search(body)

if "share" in self.uparam:
return self.handle_share(body)

if "delete" in self.uparam:
return self.handle_rm(body)

@@ -2138,7 +2205,9 @@ class HttpCli(object):
def handle_search(self, body: dict[str, Any]) -> bool:
idx = self.conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
raise Pebkac(500, "server busy, or sqlite3 not available; cannot search")
if not HAVE_SQLITE3:
raise Pebkac(500, "sqlite3 not found on server; search is disabled")
raise Pebkac(500, "server busy, cannot search; please retry in a bit")

vols: list[VFS] = []
seen: dict[VFS, bool] = {}
@@ -2210,13 +2279,21 @@ class HttpCli(object):
raise Pebkac(400, "need hash and wark headers for binary POST")

chashes = [x.strip() for x in chashes]
if len(chashes) == 3 and len(chashes[1]) == 1:
# the first hash, then length of consecutive hashes,
# then a list of stitched hashes as one long string
clen = int(chashes[1])
siblings = chashes[2]
chashes = [chashes[0]]
for n in range(0, len(siblings), clen):
chashes.append(siblings[n : n + clen])

vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
ptop = (vfs.dbv or vfs).realpath

x = self.conn.hsrv.broker.ask("up2k.handle_chunks", ptop, wark, chashes)
response = x.get()
chunksize, cstarts, path, lastmod, sprs = response
chashes, chunksize, cstarts, path, lastmod, sprs = response
maxsize = chunksize * len(chashes)
cstart0 = cstarts[0]

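For reference, a small standalone sketch of how the stitched chunk-hash header above unrolls into individual hashes; the three comma-separated fields are the first hash, the per-hash length, and the remaining hashes joined into one string (the function name and the short sample hashes here are illustrative only, not copyparty's API):

```python
def unstitch_chashes(fields: list[str]) -> list[str]:
    # fields == [first_hash, length_of_each_sibling_hash, stitched_siblings]
    if len(fields) == 3 and len(fields[1]) == 1:
        clen = int(fields[1])
        siblings = fields[2]
        out = [fields[0]]
        out.extend(siblings[n : n + clen] for n in range(0, len(siblings), clen))
        return out
    return fields  # already a plain list of hashes

print(unstitch_chashes(["aaaa1111", "8", "bbbb2222cccc3333"]))
# ['aaaa1111', 'bbbb2222', 'cccc3333']
```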
@@ -2320,6 +2397,22 @@ class HttpCli(object):
self.reply(b"thank")
return True

def handle_chpw(self) -> bool:
assert self.parser
pwd = self.parser.require("pw", 64)
self.parser.drop()

ok, msg = self.asrv.chpw(self.conn.hsrv.broker, self.uname, pwd)
if ok:
ok, msg = self.get_pwd_cookie(pwd)
if ok:
msg = "new password OK"

redir = (self.args.SRS + "?h") if ok else ""
html = self.j2s("msg", h1=msg, h2='<a href="/?h">ack</a>', redir=redir)
self.reply(html.encode("utf-8"))
return True

def handle_login(self) -> bool:
assert self.parser
pwd = self.parser.require("cppwd", 64)
@@ -2344,12 +2437,12 @@ class HttpCli(object):
dst += "&" if "?" in dst else "?"
dst += "_=1#" + html_escape(uhash, True, True)

msg = self.get_pwd_cookie(pwd)
_, msg = self.get_pwd_cookie(pwd)
html = self.j2s("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
self.reply(html.encode("utf-8"))
return True

def get_pwd_cookie(self, pwd: str) -> str:
def get_pwd_cookie(self, pwd: str) -> tuple[bool, str]:
hpwd = self.asrv.ah.hash(pwd)
uname = self.asrv.iacct.get(hpwd)
if uname:
@@ -2381,7 +2474,7 @@ class HttpCli(object):
ck = gencookie(k, pwd, self.args.R, self.is_https, dur, "; HttpOnly")
self.out_headerlist.append(("Set-Cookie", ck))

return msg
return dur > 0, msg

def handle_mkdir(self) -> bool:
assert self.parser
@@ -2420,7 +2513,7 @@ class HttpCli(object):
except:
raise Pebkac(500, min_ex())

self.out_headers["X-New-Dir"] = quotep(vpath)
self.out_headers["X-New-Dir"] = quotep(self.args.RS + vpath)

if dav:
self.reply(b"", 201)
@@ -2524,18 +2617,15 @@ class HttpCli(object):
fname = sanitize_fn(
p_file or "", "", [".prologue.html", ".epilogue.html"]
)
abspath = os.path.join(fdir, fname)
suffix = "-%.6f-%s" % (time.time(), dip)
if p_file and not nullwrite:
if rnd:
fname = rand_name(fdir, fname, rnd)

if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist")

suffix = "-{:.6f}-{}".format(time.time(), dip)
open_args = {"fdir": fdir, "suffix": suffix}

if "replace" in self.uparam:
abspath = os.path.join(fdir, fname)
if not self.can_delete:
self.log("user not allowed to overwrite with ?replace")
elif bos.path.exists(abspath):
@@ -2545,6 +2635,58 @@ class HttpCli(object):
except:
t = "toctou while deleting for ?replace: %s"
self.log(t % (abspath,))
else:
open_args = {}
tnam = fname = os.devnull
fdir = abspath = ""

if xbu:
at = time.time() - lifetime
hr = runhook(
self.log,
self.conn.hsrv.broker,
None,
"xbu.http.bup",
xbu,
abspath,
vjoin(upload_vpath, fname),
self.host,
self.uname,
self.asrv.vfs.get_perms(upload_vpath, self.uname),
at,
0,
self.ip,
at,
"",
)
if not hr:
t = "upload blocked by xbu server config"
self.log(t, 1)
raise Pebkac(403, t)
if hr.get("reloc"):
zs = vjoin(upload_vpath, fname)
x = pathmod(self.asrv.vfs, abspath, zs, hr["reloc"])
if x:
if self.args.hook_v:
log_reloc(
self.log,
hr["reloc"],
x,
abspath,
zs,
fname,
vfs,
rem,
)
fdir, upload_vpath, fname, (vfs, rem) = x
abspath = os.path.join(fdir, fname)
if nullwrite:
fdir = abspath = ""
else:
open_args["fdir"] = fdir

if p_file and not nullwrite:
bos.makedirs(fdir)

# reserve destination filename
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as zfw:
@@ -2560,26 +2702,6 @@ class HttpCli(object):
tnam = fname = os.devnull
fdir = abspath = ""

if xbu:
at = time.time() - lifetime
if not runhook(
self.log,
xbu,
abspath,
self.vpath,
self.host,
self.uname,
self.asrv.vfs.get_perms(self.vpath, self.uname),
at,
0,
self.ip,
at,
"",
):
t = "upload blocked by xbu server config"
self.log(t, 1)
raise Pebkac(403, t)

if lim:
lim.chk_bup(self.ip)
lim.chk_nup(self.ip)
@@ -2622,29 +2744,58 @@ class HttpCli(object):

tabspath = ""

at = time.time() - lifetime
if xau:
hr = runhook(
self.log,
self.conn.hsrv.broker,
None,
"xau.http.bup",
xau,
abspath,
vjoin(upload_vpath, fname),
self.host,
self.uname,
self.asrv.vfs.get_perms(upload_vpath, self.uname),
at,
sz,
self.ip,
at,
"",
)
if not hr:
t = "upload blocked by xau server config"
self.log(t, 1)
wunlink(self.log, abspath, vfs.flags)
raise Pebkac(403, t)
if hr.get("reloc"):
zs = vjoin(upload_vpath, fname)
x = pathmod(self.asrv.vfs, abspath, zs, hr["reloc"])
if x:
if self.args.hook_v:
log_reloc(
self.log,
hr["reloc"],
x,
abspath,
zs,
fname,
vfs,
rem,
)
fdir, upload_vpath, fname, (vfs, rem) = x
ap2 = os.path.join(fdir, fname)
if nullwrite:
fdir = ap2 = ""
else:
bos.makedirs(fdir)
atomic_move(self.log, abspath, ap2, vfs.flags)
abspath = ap2
sz = bos.path.getsize(abspath)

files.append(
(sz, sha_hex, sha_b64, p_file or "(discarded)", fname, abspath)
)
at = time.time() - lifetime
if xau and not runhook(
self.log,
xau,
abspath,
self.vpath,
self.host,
self.uname,
self.asrv.vfs.get_perms(self.vpath, self.uname),
at,
sz,
self.ip,
at,
"",
):
t = "upload blocked by xau server config"
self.log(t, 1)
wunlink(self.log, abspath, vfs.flags)
raise Pebkac(403, t)

dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.say(
"up2k.hash_file",
@@ -2700,13 +2851,14 @@ class HttpCli(object):
for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
vsuf = ""
if (self.can_read or self.can_upget) and "fk" in vfs.flags:
st = bos.stat(ap)
alg = 2 if "fka" in vfs.flags else 1
vsuf = "?k=" + self.gen_fk(
alg,
self.args.fk_salt,
ap,
sz,
0 if ANYWIN or not ap else bos.stat(ap).st_ino,
st.st_size,
0 if ANYWIN or not ap else st.st_ino,
)[: vfs.flags["fk"]]

if "media" in self.uparam or "medialinks" in vfs.flags:
@@ -2873,6 +3025,9 @@ class HttpCli(object):
if xbu:
if not runhook(
self.log,
self.conn.hsrv.broker,
None,
"xbu.http.txt",
xbu,
fp,
self.vpath,
@@ -2912,6 +3067,9 @@ class HttpCli(object):
xau = vfs.flags.get("xau")
if xau and not runhook(
self.log,
self.conn.hsrv.broker,
None,
"xau.http.txt",
xau,
fp,
self.vpath,
@@ -2952,7 +3110,7 @@ class HttpCli(object):
return True

def _chk_lastmod(self, file_ts: int) -> tuple[str, bool]:
file_lastmod = formatdate(file_ts, usegmt=True)
file_lastmod = formatdate(file_ts)
cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod:
try:
@@ -3034,8 +3192,8 @@ class HttpCli(object):
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
if lnames is not None and fn not in lnames:
continue
fn = os.path.join(abspath, fn)
if bos.path.exists(fn):
fn = "%s/%s" % (abspath, fn)
if bos.path.isfile(fn):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
if "exp" in vn.flags:
@@ -3053,7 +3211,7 @@ class HttpCli(object):
fns = []

for fn in fns:
fn = os.path.join(abspath, fn)
fn = "%s/%s" % (abspath, fn)
if bos.path.isfile(fn):
with open(fsenc(fn), "rb") as f:
readme = f.read().decode("utf-8")
@@ -3108,7 +3266,8 @@ class HttpCli(object):
raise Exception("not found in registry")
self.pipes.set(req_path, job)
except Exception as ex:
self.log("will not pipe [%s]; %s" % (ap_data, ex), 6)
if getattr(ex, "errno", 0) != errno.ENOENT:
self.log("will not pipe [%s]; %s" % (ap_data, ex), 6)
ptop = None

#
@@ -3588,7 +3747,7 @@ class HttpCli(object):
# (useragent-sniffing kinshi due to caching proxies)
mime, ico = self.ico.get(txt, not small, "raster" in self.uparam)

lm = formatdate(self.E.t0, usegmt=True)
lm = formatdate(self.E.t0)
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
return True

@@ -3667,6 +3826,11 @@ class HttpCli(object):
"arg_base": arg_base,
}

if self.args.js_other and "js" not in targs:
zs = self.args.js_other
zs += "&" if "?" in zs else "?"
targs["js"] = zs

zfv = self.vn.flags.get("html_head")
if zfv:
targs["this"] = self
@@ -3795,6 +3959,7 @@ class HttpCli(object):
rvol=rvol,
wvol=wvol,
avol=avol,
in_shr=self.args.shr and self.vpath.startswith(self.args.shr[1:]),
vstate=vstate,
scanning=vs["scanning"],
hashq=vs["hashq"],
@@ -3805,6 +3970,7 @@ class HttpCli(object):
k304=self.k304(),
k304vis=self.args.k304 > 0,
ver=S_VERSION if self.args.ver else "",
chpw=self.args.chpw and self.uname != "*",
ahttps="" if self.is_https else "https://" + self.host + self.req,
)
self.reply(html.encode("utf-8"))
@@ -3842,10 +4008,10 @@ class HttpCli(object):
def tx_404(self, is_403: bool = False) -> bool:
rc = 404
if self.args.vague_403:
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try logging in or <a href="{}/?h">go home</a></p>'
pt = "404 not found ┐( ´ -`)┌ (or maybe you don't have access -- try logging in)"
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try a password or <a href="{}/?h">go home</a></p>'
pt = "404 not found ┐( ´ -`)┌ (or maybe you don't have access -- try a password)"
elif is_403:
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">you\'ll have to log in or <a href="{}/?h">go home</a></p>'
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">use a password or <a href="{}/?h">go home</a></p>'
pt = "403 forbiddena ~┻━┻ (you'll have to log in)"
rc = 403
else:
@@ -3862,7 +4028,8 @@ class HttpCli(object):

t = t.format(self.args.SR)
qv = quotep(self.vpaths) + self.ourlq()
html = self.j2s("splash", this=self, qvpath=qv, msg=t)
in_shr = self.args.shr and self.vpath.startswith(self.args.shr[1:])
html = self.j2s("splash", this=self, qvpath=qv, in_shr=in_shr, msg=t)
self.reply(html.encode("utf-8"), status=rc)
return True

@@ -3939,7 +4106,9 @@ class HttpCli(object):
dst = dst[len(top) + 1 :]

ret = self.gen_tree(top, dst, self.uparam.get("k", ""))
if self.is_vproxied:
if self.is_vproxied and not self.uparam["tree"]:
# uparam is '' on initial load, which is
# the only time we gotta fill in the blanks
parents = self.args.R.split("/")
for parent in reversed(parents):
ret = {"k%s" % (parent,): ret, "a": []}
@@ -4014,7 +4183,9 @@ class HttpCli(object):
def tx_ups(self) -> bool:
idx = self.conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
if not HAVE_SQLITE3:
raise Pebkac(500, "sqlite3 not found on server; unpost is disabled")
raise Pebkac(500, "server busy, cannot unpost; please retry in a bit")

filt = self.uparam.get("filter") or ""
lm = "ups [{}]".format(filt)
@@ -4103,6 +4274,187 @@ class HttpCli(object):
self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
return True

def tx_shares(self) -> bool:
if self.uname == "*":
self.loud_reply("you're not logged in")
return True

idx = self.conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
if not HAVE_SQLITE3:
raise Pebkac(500, "sqlite3 not found on server; sharing is disabled")
raise Pebkac(500, "server busy, cannot list shares; please retry in a bit")

cur = idx.get_shr()
if not cur:
raise Pebkac(400, "huh, sharing must be disabled in the server config...")

rows = cur.execute("select * from sh").fetchall()
rows = [list(x) for x in rows]

if self.uname != self.args.shr_adm:
rows = [x for x in rows if x[5] == self.uname]

for x in rows:
x[1] = "yes" if x[1] else ""

html = self.j2s(
"shares", this=self, shr=self.args.shr, rows=rows, now=int(time.time())
)
self.reply(html.encode("utf-8"), status=200)
return True

def handle_eshare(self) -> bool:
idx = self.conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
if not HAVE_SQLITE3:
raise Pebkac(500, "sqlite3 not found on server; sharing is disabled")
raise Pebkac(500, "server busy, cannot create share; please retry in a bit")

if self.args.shr_v:
self.log("handle_eshare: " + self.req)

cur = idx.get_shr()
if not cur:
raise Pebkac(400, "huh, sharing must be disabled in the server config...")

skey = self.vpath.split("/")[-1]

rows = cur.execute("select un, t1 from sh where k = ?", (skey,)).fetchall()
un = rows[0][0] if rows and rows[0] else ""

if not un:
raise Pebkac(400, "that sharekey didn't match anything")

expiry = rows[0][1]

if un != self.uname and self.uname != self.args.shr_adm:
t = "your username (%r) does not match the sharekey's owner (%r) and you're not admin"
raise Pebkac(400, t % (self.uname, un))

reload = False
act = self.uparam["eshare"]
if act == "rm":
cur.execute("delete from sh where k = ?", (skey,))
if skey in self.asrv.vfs.nodes[self.args.shr.strip("/")].nodes:
reload = True
else:
now = time.time()
if expiry < now:
expiry = now
reload = True
expiry += int(act) * 60
cur.execute("update sh set t1 = ? where k = ?", (expiry, skey))

cur.connection.commit()
if reload:
self.conn.hsrv.broker.ask("_reload_blocking", False, False).get()
self.conn.hsrv.broker.ask("up2k.wake_rescanner").get()

self.redirect(self.args.SRS + "?shares")
return True

def handle_share(self, req: dict[str, str]) -> bool:
idx = self.conn.get_u2idx()
if not idx or not hasattr(idx, "p_end"):
if not HAVE_SQLITE3:
raise Pebkac(500, "sqlite3 not found on server; sharing is disabled")
raise Pebkac(500, "server busy, cannot create share; please retry in a bit")

if self.args.shr_v:
self.log("handle_share: " + json.dumps(req, indent=4))

skey = req["k"]
vps = req["vp"]
fns = []
if len(vps) == 1:
vp = vps[0]
if not vp.endswith("/"):
vp, zs = vp.rsplit("/", 1)
fns = [zs]
else:
for zs in vps:
if zs.endswith("/"):
t = "you cannot select more than one folder, or mix flies and folders in one selection"
raise Pebkac(400, t)
vp = vps[0].rsplit("/", 1)[0]
for zs in vps:
vp2, fn = zs.rsplit("/", 1)
fns.append(fn)
if vp != vp2:
t = "mismatching base paths in selection:\n [%s]\n [%s]"
raise Pebkac(400, t % (vp, vp2))

vp = vp.strip("/")
if self.is_vproxied and (vp == self.args.R or vp.startswith(self.args.RS)):
vp = vp[len(self.args.RS) :]

m = re.search(r"([^0-9a-zA-Z_-])", skey)
if m:
raise Pebkac(400, "sharekey has illegal character [%s]" % (m[1],))

if vp.startswith(self.args.shr[1:]):
raise Pebkac(400, "yo dawg...")

cur = idx.get_shr()
if not cur:
raise Pebkac(400, "huh, sharing must be disabled in the server config...")

q = "select * from sh where k = ?"
qr = cur.execute(q, (skey,)).fetchall()
if qr and qr[0]:
self.log("sharekey taken by %r" % (qr,))
raise Pebkac(400, "sharekey [%s] is already in use" % (skey,))

# ensure user has requested perms
s_rd = "read" in req["perms"]
s_wr = "write" in req["perms"]
s_mv = "move" in req["perms"]
s_del = "delete" in req["perms"]
try:
vfs, rem = self.asrv.vfs.get(vp, self.uname, s_rd, s_wr, s_mv, s_del)
except:
raise Pebkac(400, "you dont have all the perms you tried to grant")

ap, reals, _ = vfs.ls(
rem, self.uname, not self.args.no_scandir, [[s_rd, s_wr, s_mv, s_del]]
)
rfns = set([x[0] for x in reals])
for fn in fns:
if fn not in rfns:
raise Pebkac(400, "selected file not found on disk: [%s]" % (fn,))

pw = req.get("pw") or ""
now = int(time.time())
sexp = req["exp"]
exp = int(sexp) if sexp else 0
exp = now + exp * 60 if exp else 0
pr = "".join(zc for zc, zb in zip("rwmd", (s_rd, s_wr, s_mv, s_del)) if zb)

q = "insert into sh values (?,?,?,?,?,?,?,?)"
cur.execute(q, (skey, pw, vp, pr, len(fns), self.uname, now, exp))

q = "insert into sf values (?,?)"
for fn in fns:
cur.execute(q, (skey, fn))

cur.connection.commit()
self.conn.hsrv.broker.ask("_reload_blocking", False, False).get()
self.conn.hsrv.broker.ask("up2k.wake_rescanner").get()

fn = quotep(fns[0]) if len(fns) == 1 else ""

surl = "created share: %s://%s%s%s%s/%s" % (
"https" if self.is_https else "http",
self.host,
self.args.SR,
self.args.shr,
skey,
fn,
)
self.loud_reply(surl, status=201)
return True

def handle_rm(self, req: list[str]) -> bool:
if not req and not self.can_delete:
raise Pebkac(403, "not allowed for user " + self.uname)
@@ -4144,7 +4496,7 @@ class HttpCli(object):
if self.args.no_mv:
raise Pebkac(403, "the rename/move feature is disabled in server config")

x = self.conn.hsrv.broker.ask("up2k.handle_mv", self.uname, vsrc, vdst)
x = self.conn.hsrv.broker.ask("up2k.handle_mv", self.uname, self.ip, vsrc, vdst)
self.loud_reply(x.get(), status=201)
return True

@@ -4501,6 +4853,7 @@ class HttpCli(object):
"have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip),
"have_shr": self.args.shr,
"have_unpost": int(self.args.unpost),
"sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
"dgrid": "grid" in vf,

@@ -9,6 +9,9 @@ import threading # typechk
import time

try:
if os.environ.get("PRTY_NO_TLS"):
raise Exception()

HAVE_SSL = True
import ssl
except:

@@ -12,7 +12,7 @@ import time

import queue

from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
from .__init__ import ANYWIN, CORES, EXE, MACOS, PY2, TYPE_CHECKING, EnvParams, unicode

try:
MNFE = ModuleNotFoundError
@@ -84,6 +84,12 @@ if TYPE_CHECKING:
if True: # pylint: disable=using-constant-test
from typing import Any, Optional

if PY2:
range = xrange # type: ignore

if not hasattr(socket, "AF_UNIX"):
setattr(socket, "AF_UNIX", -9001)


class HttpSrv(object):
"""
@@ -148,7 +154,17 @@ class HttpSrv(object):

env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
jn = [
"splash",
"shares",
"svcs",
"browser",
"browser2",
"msg",
"md",
"mde",
"cf",
]
self.j2 = {x: env.get_template(x + ".html") for x in jn}
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
self.prism = os.path.exists(zs)
@@ -240,15 +256,24 @@ class HttpSrv(object):
return

def listen(self, sck: socket.socket, nlisteners: int) -> None:
tcp = sck.family != socket.AF_UNIX

if self.args.j != 1:
# lost in the pickle; redefine
if not ANYWIN or self.args.reuseaddr:
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if tcp:
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

sck.settimeout(None) # < does not inherit, ^ opts above do

ip, port = sck.getsockname()[:2]
if tcp:
ip, port = sck.getsockname()[:2]
else:
ip = re.sub(r"\.[0-9]+$", "", sck.getsockname().split("/")[-1])
port = 0

self.srvs.append(sck)
self.bound.add((ip, port))
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
@@ -260,10 +285,19 @@ class HttpSrv(object):

def thr_listen(self, srv_sck: socket.socket) -> None:
"""listens on a shared tcp server"""
ip, port = srv_sck.getsockname()[:2]
fno = srv_sck.fileno()
hip = "[{}]".format(ip) if ":" in ip else ip
msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
if srv_sck.family == socket.AF_UNIX:
ip = re.sub(r"\.[0-9]+$", "", srv_sck.getsockname())
msg = "subscribed @ %s f%d p%d" % (ip, fno, os.getpid())
ip = ip.split("/")[-1]
port = 0
tcp = False
else:
tcp = True
ip, port = srv_sck.getsockname()[:2]
hip = "[%s]" % (ip,) if ":" in ip else ip
msg = "subscribed @ %s:%d f%d p%d" % (hip, port, fno, os.getpid())

self.log(self.name, msg)

Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
@@ -335,11 +369,13 @@ class HttpSrv(object):

try:
sck, saddr = srv_sck.accept()
cip = unicode(saddr[0])
if cip.startswith("::ffff:"):
cip = cip[7:]

addr = (cip, saddr[1])
if tcp:
cip = unicode(saddr[0])
if cip.startswith("::ffff:"):
cip = cip[7:]
addr = (cip, saddr[1])
else:
addr = ("127.8.3.7", sck.fileno())
except (OSError, socket.error) as ex:
if self.stopping:
break

@@ -74,7 +74,7 @@ class Ico(object):
try:
_, _, tw, th = pb.textbbox((0, 0), ext)
except:
tw, th = pb.textsize(ext)
tw, th = pb.textsize(ext) # type: ignore

tw += len(ext)
cw = tw // len(ext)

@@ -32,6 +32,17 @@ if True: # pylint: disable=using-constant-test
from .util import NamedLogger, RootLogger


try:
if os.environ.get("PRTY_NO_MUTAGEN"):
raise Exception()

from mutagen import version # noqa: F401

HAVE_MUTAGEN = True
except:
HAVE_MUTAGEN = False


def have_ff(scmd: str) -> bool:
if ANYWIN:
scmd += ".exe"
@@ -48,8 +59,8 @@ def have_ff(scmd: str) -> bool:
return bool(shutil.which(scmd))


HAVE_FFMPEG = have_ff("ffmpeg")
HAVE_FFPROBE = have_ff("ffprobe")
HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")


class MParser(object):
@@ -336,9 +347,7 @@ class MTag(object):

if self.backend == "mutagen":
self._get = self.get_mutagen
try:
from mutagen import version # noqa: F401
except:
if not HAVE_MUTAGEN:
self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe"

@@ -578,7 +587,7 @@ class MTag(object):
continue

if k == ".aq":
v /= 1000
v /= 1000 # type: ignore

if k == "ac" and v.startswith("mp4a.40."):
v = "aac"

@@ -4,11 +4,21 @@ from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import os
import sys
import threading

from .__init__ import unicode

try:
if os.environ.get("PRTY_NO_ARGON2"):
raise Exception()

HAVE_ARGON2 = True
from argon2 import __version__ as argon2ver
except:
HAVE_ARGON2 = False


class PWHash(object):
def __init__(self, args: argparse.Namespace):

@@ -187,6 +187,8 @@ class SMB(object):

debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
if not vfs.realpath:
raise Exception("unmapped vfs")
return vfs, vfs.canonical(rem)

def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
@@ -195,6 +197,8 @@ class SMB(object):
uname = self._uname()
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
if not vfs.realpath:
raise Exception("unmapped vfs")
_, vfs_ls, vfs_virt = vfs.ls(
rem, uname, not self.args.no_scandir, [[False, False]]
)
@@ -240,7 +244,21 @@ class SMB(object):

xbu = vfs.flags.get("xbu")
if xbu and not runhook(
self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "1.7.6.2", 0, ""
self.nlog,
None,
self.hub.up2k,
"xbu.smb",
xbu,
ap,
vpath,
"",
"",
"",
0,
0,
"1.7.6.2",
time.time(),
"",
):
yeet("blocked by xbu server config: " + vpath)

@@ -297,7 +315,7 @@ class SMB(object):
t = "blocked rename (no-move-acc %s): /%s @%s"
yeet(t % (vfs1.axs.umove, vp1, uname))

self.hub.up2k.handle_mv(uname, vp1, vp2)
self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
try:
bos.makedirs(ap2)
except:

@@ -5,11 +5,11 @@ import errno
import re
import select
import socket
from email.utils import formatdate
import time

from .__init__ import TYPE_CHECKING
from .multicast import MC_Sck, MCast
from .util import CachedSet, html_escape, min_ex
from .util import CachedSet, formatdate, html_escape, min_ex

if TYPE_CHECKING:
from .broker_util import BrokerCli
@@ -229,7 +229,7 @@ CONFIGID.UPNP.ORG: 1

"""
v4 = srv.ip.replace("::ffff:", "")
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
zs = zs.format(formatdate(), v4, srv.hport, self.args.zsid)
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
srv.sck.sendto(zb, addr[:2])


@@ -12,6 +12,12 @@ from .label import DNSBuffer, DNSLabel
from .ranges import IP4, IP6, H, I, check_bytes


try:
range = xrange
except:
pass


class DNSError(Exception):
pass


@@ -11,7 +11,21 @@ import os

from ._shared import IP, Adapter

if os.name == "nt":

def nope(include_unconfigured=False):
return []


try:
S390X = os.uname().machine == "s390x"
except:
S390X = False


if os.environ.get("PRTY_NO_IFADDR") or S390X:
# s390x deadlocks at libc.getifaddrs
get_adapters = nope
elif os.name == "nt":
from ._win32 import get_adapters
elif os.name == "posix":
from ._posix import get_adapters

@@ -17,6 +17,7 @@ if not PY2:
U: Callable[[str], str] = str
else:
U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
range = xrange # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable


class Adapter(object):

@@ -16,6 +16,11 @@ if True: # pylint: disable=using-constant-test

from typing import Callable, List, Optional, Tuple, Union

try:
range = xrange
except:
pass


def num_char_count_bits(ver: int) -> int:
return 16 if (ver + 7) // 17 else 8

@@ -6,7 +6,7 @@ import tempfile
from datetime import datetime

from .__init__ import CORES
from .authsrv import AuthSrv, VFS
from .authsrv import VFS, AuthSrv
from .bos import bos
from .th_cli import ThumbCli
from .util import UTC, vjoin, vol_san

@@ -28,18 +28,30 @@ if True: # pylint: disable=using-constant-test
import typing
from typing import Any, Optional, Union

from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, E, EnvParams, unicode
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
from .authsrv import BAD_CFG, AuthSrv
from .cert import ensure_cert
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
from .pwhash import HAVE_ARGON2
from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .th_srv import (
HAVE_AVIF,
HAVE_FFMPEG,
HAVE_FFPROBE,
HAVE_HEIF,
HAVE_PIL,
HAVE_VIPS,
HAVE_WEBP,
ThumbSrv,
)
from .up2k import Up2k
from .util import (
DEF_EXP,
DEF_MTE,
DEF_MTH,
FFMPEG_URL,
HAVE_PSUTIL,
HAVE_SQLITE3,
UTC,
VERSIONS,
Daemon,
@@ -65,6 +77,9 @@ if TYPE_CHECKING:
except:
pass

if PY2:
range = xrange # type: ignore


class SvcHub(object):
"""
@@ -91,6 +106,7 @@ class SvcHub(object):
self.no_ansi = args.no_ansi
self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = ""
self.is_dut = False # running in unittest; always False
self.stop_req = False
self.stopping = False
self.stopped = False
@@ -193,6 +209,20 @@ class SvcHub(object):
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)

if args.chpw and args.idp_h_usr:
t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
self.log("root", t, 1)
raise Exception(t)

noch = set()
for zs in args.chpw_no or []:
zsl = [x.strip() for x in zs.split(",")]
noch.update([x for x in zsl if x])
args.chpw_no = noch

if args.shr:
self.setup_share_db()

bri = "zy"[args.theme % 2 :][:1]
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
@@ -232,6 +262,8 @@ class SvcHub(object):

self.up2k = Up2k(self)

self._feature_test()

decs = {k: 1 for k in self.args.th_dec.split(",")}
if not HAVE_VIPS:
decs.pop("vips", None)
@@ -336,6 +368,93 @@ class SvcHub(object):

self.broker = Broker(self)

def setup_share_db(self) -> None:
al = self.args
if not HAVE_SQLITE3:
self.log("root", "sqlite3 not available; disabling --shr", 1)
al.shr = ""
return

import sqlite3

al.shr = al.shr.strip("/")
if "/" in al.shr or not al.shr:
t = "config error: --shr must be the name of a virtual toplevel directory to put shares inside"
self.log("root", t, 1)
raise Exception(t)

al.shr = "/%s/" % (al.shr,)

create = True
modified = False
db_path = self.args.shr_db
self.log("root", "opening shares-db %s" % (db_path,))
for n in range(2):
try:
db = sqlite3.connect(db_path)
cur = db.cursor()
try:
cur.execute("select count(*) from sh").fetchone()
create = False
break
except:
pass
except Exception as ex:
if n:
raise
t = "shares-db corrupt; deleting and recreating: %r"
self.log("root", t % (ex,), 3)
try:
cur.close() # type: ignore
except:
pass
try:
db.close() # type: ignore
except:
pass
os.unlink(db_path)

sch1 = [
r"create table kv (k text, v int)",
r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)",
# sharekey, password, src, perms, numFiles, owner, created, expires
]
sch2 = [
r"create table sf (k text, vp text)",
r"create index sf_k on sf(k)",
r"create index sh_k on sh(k)",
r"create index sh_t1 on sh(t1)",
]

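As a rough illustration of the schema created above (not part of the diff, and the sample values are made up), each share is one row in `sh` plus one row per selected file in `sf`:

```python
import sqlite3, time

db = sqlite3.connect(":memory:")
db.execute("create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)")
db.execute("create table sf (k text, vp text)")

now = int(time.time())
# sharekey, password, src, perms, numFiles, owner, created, expires (0 = never)
db.execute("insert into sh values (?,?,?,?,?,?,?,?)",
           ("abc123", "", "music/ambient", "r", 1, "ed", now, 0))
db.execute("insert into sf values (?,?)", ("abc123", "track01.opus"))
db.commit()

print(db.execute("select k, vp, pr, un from sh").fetchall())
```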
assert db # type: ignore
assert cur # type: ignore
if create:
dver = 2
modified = True
for cmd in sch1 + sch2:
cur.execute(cmd)
self.log("root", "created new shares-db")
else:
(dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]

if dver == 1:
modified = True
for cmd in sch2:
cur.execute(cmd)
cur.execute("update sh set st = 0")
self.log("root", "shares-db schema upgrade ok")

if modified:
for cmd in [
r"delete from kv where k = 'sver'",
r"insert into kv values ('sver', %d)" % (2,),
]:
cur.execute(cmd)
db.commit()

cur.close()
db.close()

def start_ftpd(self) -> None:
time.sleep(30)

@@ -420,6 +539,58 @@ class SvcHub(object):

Daemon(self.sd_notify, "sd-notify")

def _feature_test(self) -> None:
fok = []
fng = []
t_ff = "transcode audio, create spectrograms, video thumbnails"
to_check = [
(HAVE_SQLITE3, "sqlite", "file and media indexing"),
(HAVE_PIL, "pillow", "image thumbnails (plenty fast)"),
(HAVE_VIPS, "vips", "image thumbnails (faster, eats more ram)"),
(HAVE_WEBP, "pillow-webp", "create thumbnails as webp files"),
(HAVE_FFMPEG, "ffmpeg", t_ff + ", good-but-slow image thumbnails"),
(HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
(HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
(HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
(HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
(HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
]
if ANYWIN:
to_check += [
(HAVE_PSUTIL, "psutil", "improved plugin cleanup (rarely useful)")
]

verbose = self.args.deps
if verbose:
self.log("dependencies", "")

for have, feat, what in to_check:
lst = fok if have else fng
lst.append((feat, what))
if verbose:
zi = 2 if have else 5
sgot = "found" if have else "missing"
t = "%7s: %s \033[36m(%s)"
self.log("dependencies", t % (sgot, feat, what), zi)

if verbose:
self.log("dependencies", "")
return

sok = ", ".join(x[0] for x in fok)
sng = ", ".join(x[0] for x in fng)

t = ""
if sok:
t += "OK: \033[32m" + sok
if sng:
if t:
t += ", "
t += "\033[0mNG: \033[35m" + sng

t += "\033[0m, see --deps"
self.log("dependencies", t, 6)

def _check_env(self) -> None:
try:
files = os.listdir(E.cfg)
@@ -746,18 +917,21 @@ class SvcHub(object):
|
||||
Daemon(self._reload, "reloading")
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self, rescan_all_vols: bool = True) -> None:
|
||||
def _reload(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading != 1:
|
||||
return
|
||||
self.reloading = 2
|
||||
self.log("root", "reloading config")
|
||||
self.asrv.reload()
|
||||
self.up2k.reload(rescan_all_vols)
|
||||
self.asrv.reload(9 if up2k else 4)
|
||||
if up2k:
|
||||
self.up2k.reload(rescan_all_vols)
|
||||
else:
|
||||
self.log("root", "reload done")
|
||||
self.broker.reload()
|
||||
self.reloading = 0
|
||||
|
||||
def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
|
||||
def _reload_blocking(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
|
||||
while True:
|
||||
with self.up2k.mutex:
|
||||
if self.reloading < 2:
|
||||
@@ -768,7 +942,7 @@ class SvcHub(object):
|
||||
# try to handle multiple pending IdP reloads at once:
|
||||
time.sleep(0.2)
|
||||
|
||||
self._reload(rescan_all_vols=rescan_all_vols)
|
||||
self._reload(rescan_all_vols=rescan_all_vols, up2k=up2k)
|
||||
|
||||
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
|
||||
@@ -37,9 +37,7 @@ def dostime2unix(buf: bytes) -> int:


def unixtime2dos(ts: int) -> bytes:
    tt = time.gmtime(ts + 1)
    dy, dm, dd, th, tm, ts = list(tt)[:6]

    dy, dm, dd, th, tm, ts, _, _, _ = time.gmtime(ts + 1)
    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    bt = (th << 11) + (tm << 5) + ts // 2
    try:

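A standalone sketch of the DOS date/time bit-packing used above (not part of the diff), including the decode direction, mirroring the `ts + 1` quirk of the function:

```python
import time

def unix_to_dos_fields(ts: int) -> tuple[int, int]:
    dy, dm, dd, th, tm, ts2, _, _, _ = time.gmtime(ts + 1)
    bd = ((dy - 1980) << 9) + (dm << 5) + dd   # yyyyyyym mmmddddd
    bt = (th << 11) + (tm << 5) + ts2 // 2     # hhhhhmmm mmmsssss (2-second resolution)
    return bd, bt

def dos_fields_to_ymdhms(bd: int, bt: int) -> tuple[int, int, int, int, int, int]:
    return (
        (bd >> 9) + 1980, (bd >> 5) & 0xF, bd & 0x1F,
        bt >> 11, (bt >> 5) & 0x3F, (bt & 0x1F) * 2,
    )

bd, bt = unix_to_dos_fields(1700000000)
print(dos_fields_to_ymdhms(bd, bt))  # (2023, 11, 14, 22, 13, 20)
```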
@@ -17,18 +17,23 @@ from .util import (
|
||||
E_UNREACH,
|
||||
HAVE_IPV6,
|
||||
IP6ALL,
|
||||
VF_CAREFUL,
|
||||
Netdev,
|
||||
atomic_move,
|
||||
min_ex,
|
||||
sunpack,
|
||||
termsize,
|
||||
)
|
||||
|
||||
if True:
|
||||
from typing import Generator
|
||||
from typing import Generator, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
setattr(socket, "AF_UNIX", -9001)
|
||||
|
||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||
setattr(socket, "IPPROTO_IPV6", 41)
|
||||
|
||||
@@ -217,14 +222,41 @@ class TcpSrv(object):
    if self.args.qr or self.args.qrs:
        self.qr = self._qr(qr1, qr2)

def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
    self.log("tcpsrv", msg, c)

def _listen(self, ip: str, port: int) -> None:
    ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
    uds_perm = uds_gid = -1
    if "unix:" in ip:
        tcp = False
        ipv = socket.AF_UNIX
        uds = ip.split(":")
        ip = uds[-1]
        if len(uds) > 2:
            uds_perm = int(uds[1], 8)
        if len(uds) > 3:
            try:
                uds_gid = int(uds[2])
            except:
                import grp

                uds_gid = grp.getgrnam(uds[2]).gr_gid

    elif ":" in ip:
        tcp = True
        ipv = socket.AF_INET6
    else:
        tcp = True
        ipv = socket.AF_INET

    srv = socket.socket(ipv, socket.SOCK_STREAM)

    if not ANYWIN or self.args.reuseaddr:
        srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    if tcp:
        srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

    srv.settimeout(None)  # < does not inherit, ^ opts above do

    try:
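Judging by the parsing above, a unix-socket listener is specified as `unix:[permissions:[group:]]path`. A hedged standalone sketch of that parse; the exact `-i` syntax is inferred from this hunk rather than taken from documentation:

```python
def parse_uds(spec: str) -> tuple[str, int, int]:
    """returns (path, permissions, gid); -1 means "do not change" """
    parts = spec.split(":")          # e.g. ["unix", "770", "1000", "/tmp/a.sock"]
    path = parts[-1]
    perm = gid = -1
    if len(parts) > 2:
        perm = int(parts[1], 8)      # octal file mode
    if len(parts) > 3:
        try:
            gid = int(parts[2])      # numeric group id ...
        except ValueError:
            import grp
            gid = grp.getgrnam(parts[2]).gr_gid  # ... or a group name
    return path, perm, gid

print(parse_uds("unix:/tmp/party.sock"))           # ('/tmp/party.sock', -1, -1)
print(parse_uds("unix:770:1000:/tmp/party.sock"))  # ('/tmp/party.sock', 504, 1000)
```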
@@ -236,8 +268,25 @@ class TcpSrv(object):
|
||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
sport = srv.getsockname()[1]
|
||||
if tcp:
|
||||
srv.bind((ip, port))
|
||||
else:
|
||||
if ANYWIN or self.args.rm_sck:
|
||||
if os.path.exists(ip):
|
||||
os.unlink(ip)
|
||||
srv.bind(ip)
|
||||
else:
|
||||
tf = "%s.%d" % (ip, os.getpid())
|
||||
if os.path.exists(tf):
|
||||
os.unlink(tf)
|
||||
srv.bind(tf)
|
||||
if uds_gid != -1:
|
||||
os.chown(tf, -1, uds_gid)
|
||||
if uds_perm != -1:
|
||||
os.chmod(tf, uds_perm)
|
||||
atomic_move(self.nlog, tf, ip, VF_CAREFUL)
|
||||
|
||||
sport = srv.getsockname()[1] if tcp else port
|
||||
if port != sport:
|
||||
# linux 6.0.16 lets you bind a port which is in use
|
||||
# except it just gives you a random port instead
|
||||
@@ -249,12 +298,23 @@ class TcpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
e = ""
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
if not tcp:
|
||||
e = "\033[1;31munix-socket {} is busy\033[0m".format(ip)
|
||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
|
||||
if not e:
|
||||
if not tcp:
|
||||
t = "\n\n\n NOTE: this crash may be due to a unix-socket bug; try --rm-sck\n"
|
||||
self.log("tcpsrv", t, 2)
|
||||
raise
|
||||
|
||||
if not tcp and not self.args.rm_sck:
|
||||
e += "; maybe this is a bug? try --rm-sck"
|
||||
|
||||
raise Exception(e)
|
||||
|
||||
def run(self) -> None:
|
||||
@@ -262,7 +322,14 @@ class TcpSrv(object):
|
||||
bound: list[tuple[str, int]] = []
|
||||
srvs: list[socket.socket] = []
|
||||
for srv in self.srv:
|
||||
ip, port = srv.getsockname()[:2]
|
||||
if srv.family == socket.AF_UNIX:
|
||||
tcp = False
|
||||
ip = re.sub(r"\.[0-9]+$", "", srv.getsockname())
|
||||
port = 0
|
||||
else:
|
||||
tcp = True
|
||||
ip, port = srv.getsockname()[:2]
|
||||
|
||||
if ip == IP6ALL:
|
||||
ip = "::" # jython
|
||||
|
||||
@@ -294,8 +361,12 @@ class TcpSrv(object):
|
||||
bound.append((ip, port))
|
||||
srvs.append(srv)
|
||||
fno = srv.fileno()
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
if tcp:
|
||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||
else:
|
||||
msg = "listening @ {} f{} p{}".format(ip, fno, os.getpid())
|
||||
|
||||
self.log("tcpsrv", msg)
|
||||
if self.args.q:
|
||||
print(msg)
|
||||
@@ -348,6 +419,8 @@ class TcpSrv(object):
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||
from .stolen.ifaddr import get_adapters
|
||||
|
||||
listen_ips = [x for x in listen_ips if "unix:" not in x]
|
||||
|
||||
nics = get_adapters(True)
|
||||
eps: dict[str, Netdev] = {}
|
||||
for nic in nics:
|
||||
|
||||
@@ -36,7 +36,7 @@ from partftpy.TftpShared import TftpException
|
||||
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
@@ -44,6 +44,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
lg = logging.getLogger("tftp")
|
||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||
@@ -163,9 +166,16 @@ class Tftpd(object):
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
ips = [x for x in ips if "unix:" not in x]
|
||||
|
||||
if self.args.tftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
if not ips:
|
||||
t = "cannot start tftp-server; no compatible IPs in -i"
|
||||
self.nlog(t, 1)
|
||||
return
|
||||
|
||||
ips = list(ODict.fromkeys(ips)) # dedup
|
||||
|
||||
for ip in ips:
|
||||
@@ -241,6 +251,8 @@ class Tftpd(object):
|
||||
|
||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||
if not vfs.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||
@@ -262,7 +274,7 @@ class Tftpd(object):
|
||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||
real1 = dirs1 + fils1
|
||||
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
||||
realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
|
||||
reals = [
|
||||
(
|
||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||
@@ -328,7 +340,21 @@ class Tftpd(object):
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog, xbu, ap, vpath, "", "", "", 0, 0, "8.3.8.7", 0, ""
|
||||
self.nlog,
|
||||
None,
|
||||
self.hub.up2k,
|
||||
"xbu.tftpd",
|
||||
xbu,
|
||||
ap,
|
||||
vpath,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"8.3.8.7",
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
|
||||
@@ -336,7 +362,7 @@ class Tftpd(object):
|
||||
return self._ls(vpath, "", 0, True)
|
||||
|
||||
if not a:
|
||||
a = [self.args.iobuf]
|
||||
a = (self.args.iobuf,)
|
||||
|
||||
return open(ap, mode, *a, **ka)
|
||||
|
||||
@@ -377,7 +403,7 @@ class Tftpd(object):
|
||||
bos.stat(ap)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
return vpath == "/"
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
@@ -385,7 +411,7 @@ class Tftpd(object):
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
return ret
|
||||
except:
|
||||
return False
|
||||
return vpath == "/"
|
||||
|
||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||
src = inspect.currentframe().f_back.f_code.co_name
|
||||
|
||||
@@ -12,7 +12,7 @@ import time
|
||||
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
||||
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_PILF = False
|
||||
HAVE_HEIF = False
|
||||
@@ -45,22 +48,34 @@ HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL"):
|
||||
raise Exception()
|
||||
|
||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PILF"):
|
||||
raise Exception()
|
||||
|
||||
ImageFont.load_default(size=16)
|
||||
HAVE_PILF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_WEBP"):
|
||||
raise Exception()
|
||||
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_HEIF"):
|
||||
raise Exception()
|
||||
|
||||
from pyheif_pillow_opener import register_heif_opener
|
||||
|
||||
register_heif_opener()
|
||||
@@ -69,6 +84,9 @@ try:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PIL_AVIF"):
|
||||
raise Exception()
|
||||
|
||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||
|
||||
HAVE_AVIF = True
|
||||
@@ -80,6 +98,9 @@ except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_VIPS"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ import threading
|
||||
import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||
from .authsrv import LEELOO_DALLAS, VFS
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
@@ -56,6 +59,8 @@ class U2idx(object):
|
||||
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.sh_cur: Optional["sqlite3.Cursor"] = None
|
||||
|
||||
self.p_end = 0.0
|
||||
self.p_dur = 0.0
|
||||
|
||||
@@ -92,17 +97,31 @@ class U2idx(object):
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
|
||||
if not HAVE_SQLITE3:
|
||||
def get_shr(self) -> Optional["sqlite3.Cursor"]:
|
||||
if self.sh_cur:
|
||||
return self.sh_cur
|
||||
|
||||
if not HAVE_SQLITE3 or not self.args.shr:
|
||||
return None
|
||||
|
||||
assert sqlite3 # type: ignore
|
||||
|
||||
db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("sh")').fetchall()
|
||||
self.sh_cur = cur
|
||||
return cur
|
||||
|
||||
def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
|
||||
cur = self.cur.get(vn.realpath)
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
if "e2d" not in vn.flags:
|
||||
if not HAVE_SQLITE3 or "e2d" not in vn.flags:
|
||||
return None
|
||||
|
||||
assert sqlite3 # type: ignore
|
||||
|
||||
ptop = vn.realpath
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
|
||||
@@ -28,8 +28,8 @@ from .fsutil import Fstab
|
||||
from .mtag import MParser, MTag
|
||||
from .util import (
|
||||
HAVE_SQLITE3,
|
||||
VF_CAREFUL,
|
||||
SYMTIME,
|
||||
VF_CAREFUL,
|
||||
Daemon,
|
||||
MTHash,
|
||||
Pebkac,
|
||||
@@ -46,6 +46,7 @@ from .util import (
|
||||
hidedir,
|
||||
humansize,
|
||||
min_ex,
|
||||
pathmod,
|
||||
quotep,
|
||||
rand_name,
|
||||
ren_open,
|
||||
@@ -165,6 +166,7 @@ class Up2k(object):
|
||||
self.xiu_ptn = re.compile(r"(?:^|,)i([0-9]+)")
|
||||
self.xiu_busy = False # currently running hook
|
||||
self.xiu_asleep = True # needs rescan_cond poke to schedule self
|
||||
self.fx_backlog: list[tuple[str, dict[str, str], str]] = []
|
||||
|
||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||
self.mem_cur = None
|
||||
@@ -234,6 +236,9 @@ class Up2k(object):
|
||||
if not self.pp and self.args.exit == "idx":
|
||||
return self.hub.sigterm()
|
||||
|
||||
if self.hub.is_dut:
|
||||
return
|
||||
|
||||
Daemon(self._snapshot, "up2k-snapshot")
|
||||
if have_e2d:
|
||||
Daemon(self._hasher, "up2k-hasher")
|
||||
@@ -430,7 +435,7 @@ class Up2k(object):
|
||||
def _sched_rescan(self) -> None:
|
||||
volage = {}
|
||||
cooldown = timeout = time.time() + 3.0
|
||||
while True:
|
||||
while not self.stop:
|
||||
now = time.time()
|
||||
timeout = max(timeout, cooldown)
|
||||
wait = timeout - time.time()
|
||||
@@ -438,6 +443,9 @@ class Up2k(object):
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.wait(wait)
|
||||
|
||||
if self.stop:
|
||||
return
|
||||
|
||||
now = time.time()
|
||||
if now < cooldown:
|
||||
# self.log("SR: cd - now = {:.2f}".format(cooldown - now), 5)
|
||||
@@ -452,11 +460,18 @@ class Up2k(object):
|
||||
cooldown = now + 3
|
||||
# self.log("SR", 5)
|
||||
|
||||
if self.args.no_lifetime:
|
||||
if self.args.no_lifetime and not self.args.shr:
|
||||
timeout = now + 9001
|
||||
else:
|
||||
# important; not deferred by db_act
|
||||
timeout = self._check_lifetimes()
|
||||
try:
|
||||
if self.args.shr:
|
||||
timeout = min(self._check_shares(), timeout)
|
||||
except Exception as ex:
|
||||
timeout = min(timeout, now + 60)
|
||||
t = "could not check for expiring shares: %r"
|
||||
self.log(t % (ex,), 1)
|
||||
|
||||
try:
|
||||
timeout = min(timeout, now + self._check_xiu())
|
||||
@@ -559,6 +574,60 @@ class Up2k(object):
|
||||
|
||||
return timeout
|
||||
|
||||
def _check_shares(self) -> float:
|
||||
assert sqlite3 # type: ignore
|
||||
|
||||
now = time.time()
|
||||
timeout = now + 9001
|
||||
maxage = self.args.shr_rt * 60
|
||||
low = now - maxage
|
||||
|
||||
vn = self.asrv.vfs.nodes.get(self.args.shr.strip("/"))
|
||||
active = vn and vn.nodes
|
||||
|
||||
db = sqlite3.connect(self.args.shr_db, timeout=2)
|
||||
cur = db.cursor()
|
||||
|
||||
q = "select k from sh where t1 and t1 <= ?"
|
||||
rm = [x[0] for x in cur.execute(q, (now,))] if active else []
|
||||
if rm:
|
||||
assert vn and vn.nodes # type: ignore
|
||||
# self.log("chk_shr: %d" % (len(rm),))
|
||||
zss = set(rm)
|
||||
rm = [zs for zs in vn.nodes if zs in zss]
|
||||
reload = bool(rm)
|
||||
if reload:
|
||||
self.log("disabling expired shares %s" % (rm,))
|
||||
|
||||
rm = [x[0] for x in cur.execute(q, (low,))]
|
||||
if rm:
|
||||
self.log("forgetting expired shares %s" % (rm,))
|
||||
cur.executemany("delete from sh where k=?", [(x,) for x in rm])
|
||||
cur.executemany("delete from sf where k=?", [(x,) for x in rm])
|
||||
db.commit()
|
||||
|
||||
if reload:
|
||||
Daemon(self.hub._reload_blocking, "sharedrop", (False, False))
|
||||
|
||||
q = "select min(t1) from sh where t1 > ?"
|
||||
(earliest,) = cur.execute(q, (1,)).fetchone()
|
||||
if earliest:
|
||||
# deadline for revoking regular access
|
||||
timeout = min(timeout, earliest + maxage)
|
||||
|
||||
(earliest,) = cur.execute(q, (now - 2,)).fetchone()
|
||||
if earliest:
|
||||
# deadline for revival; drop entirely
|
||||
timeout = min(timeout, earliest)
|
||||
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
if self.args.shr_v:
|
||||
self.log("next shr_chk = %d (%d)" % (timeout, timeout - time.time()))
|
||||
|
||||
return timeout
|
||||
|
||||
def _check_xiu(self) -> float:
|
||||
if self.xiu_busy:
|
||||
return 2
|
||||
@@ -1369,7 +1438,7 @@ class Up2k(object):
|
||||
if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
|
||||
continue
|
||||
|
||||
t = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||
t = "reindex [{}] => [{}] mtime({}/{}) size({}/{})".format(
|
||||
top, rp, dts, lmod, dsz, sz
|
||||
)
|
||||
self.log(t)
|
||||
@@ -2533,6 +2602,10 @@ class Up2k(object):
|
||||
|
||||
cur.connection.commit()
|
||||
|
||||
def wake_rescanner(self):
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.notify_all()
|
||||
|
||||
def handle_json(
|
||||
self, cj: dict[str, Any], busy_aps: dict[str, int]
|
||||
) -> dict[str, Any]:
|
||||
@@ -2544,7 +2617,7 @@ class Up2k(object):
|
||||
if self.mutex.acquire(timeout=10):
|
||||
got_lock = True
|
||||
with self.reg_mutex:
|
||||
return self._handle_json(cj)
|
||||
ret = self._handle_json(cj)
|
||||
else:
|
||||
t = "cannot receive uploads right now;\nserver busy with {}.\nPlease wait; the client will retry..."
|
||||
raise Pebkac(503, t.format(self.blocked or "[unknown]"))
|
||||
@@ -2552,12 +2625,20 @@ class Up2k(object):
|
||||
if not PY2:
|
||||
raise
|
||||
with self.mutex, self.reg_mutex:
|
||||
return self._handle_json(cj)
|
||||
ret = self._handle_json(cj)
|
||||
finally:
|
||||
if got_lock:
|
||||
self.mutex.release()
|
||||
|
||||
def _handle_json(self, cj: dict[str, Any]) -> dict[str, Any]:
|
||||
if self.fx_backlog:
|
||||
self.do_fx_backlog()
|
||||
|
||||
return ret
|
||||
|
||||
def _handle_json(self, cj: dict[str, Any], depth: int = 1) -> dict[str, Any]:
|
||||
if depth > 16:
|
||||
raise Pebkac(500, "too many xbu relocs, giving up")
|
||||
|
||||
ptop = cj["ptop"]
|
||||
if not self.register_vpath(ptop, cj["vcfg"]):
|
||||
if ptop not in self.registry:
|
||||
@@ -2616,11 +2697,19 @@ class Up2k(object):
|
||||
if stat.S_ISLNK(st.st_mode):
|
||||
# broken symlink
|
||||
raise Exception()
|
||||
except:
|
||||
if st.st_size != dsize:
|
||||
t = "candidate ignored (db/fs desync): {}, size fs={} db={}, mtime fs={} db={}, file: {}"
|
||||
t = t.format(
|
||||
wark, st.st_size, dsize, st.st_mtime, dtime, dp_abs
|
||||
)
|
||||
self.log(t)
|
||||
raise Exception("desync")
|
||||
except Exception as ex:
|
||||
if n4g:
|
||||
st = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
|
||||
else:
|
||||
lost.append((cur, dp_dir, dp_fn))
|
||||
if str(ex) != "desync":
|
||||
lost.append((cur, dp_dir, dp_fn))
|
||||
continue
|
||||
|
||||
j = {
|
||||
@@ -2678,13 +2767,16 @@ class Up2k(object):
|
||||
ptop = None # use cj or job as appropriate
|
||||
|
||||
if not job and wark in reg:
|
||||
# ensure the files haven't been deleted manually
|
||||
# ensure the files haven't been edited or deleted
|
||||
path = ""
|
||||
st = None
|
||||
rj = reg[wark]
|
||||
names = [rj[x] for x in ["name", "tnam"] if x in rj]
|
||||
for fn in names:
|
||||
path = djoin(rj["ptop"], rj["prel"], fn)
|
||||
try:
|
||||
if bos.path.getsize(path) > 0 or not rj["need"]:
|
||||
st = bos.stat(path)
|
||||
if st.st_size > 0 or not rj["need"]:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
@@ -2695,6 +2787,14 @@ class Up2k(object):
|
||||
del reg[wark]
|
||||
break
|
||||
|
||||
if st and not self.args.nw and not n4g and st.st_size != rj["size"]:
|
||||
t = "will not dedup (fs index desync): {}, size fs={} db={}, mtime fs={} db={}, file: {}"
|
||||
t = t.format(
|
||||
wark, st.st_size, rj["size"], st.st_mtime, rj["lmod"], path
|
||||
)
|
||||
self.log(t)
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
job = job or reg[wark]
|
||||
if (
|
||||
@@ -2750,7 +2850,8 @@ class Up2k(object):
|
||||
job = deepcopy(job)
|
||||
job["wark"] = wark
|
||||
job["at"] = cj.get("at") or time.time()
|
||||
for k in "lmod ptop vtop prel host user addr".split():
|
||||
zs = "lmod ptop vtop prel name host user addr poke"
|
||||
for k in zs.split():
|
||||
job[k] = cj.get(k) or ""
|
||||
|
||||
pdir = djoin(cj["ptop"], cj["prel"])
|
||||
@@ -2758,28 +2859,50 @@ class Up2k(object):
|
||||
job["name"] = rand_name(
|
||||
pdir, cj["name"], vfs.flags["nrand"]
|
||||
)
|
||||
else:
|
||||
job["name"] = self._untaken(pdir, cj, now)
|
||||
|
||||
dst = djoin(job["ptop"], job["prel"], job["name"])
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.log,
|
||||
xbu, # type: ignore
|
||||
dst,
|
||||
job["vtop"],
|
||||
job["host"],
|
||||
job["user"],
|
||||
self.asrv.vfs.get_perms(job["vtop"], job["user"]),
|
||||
job["lmod"],
|
||||
job["size"],
|
||||
job["addr"],
|
||||
job["at"],
|
||||
"",
|
||||
):
|
||||
t = "upload blocked by xbu server config: {}".format(dst)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
if xbu:
|
||||
vp = djoin(job["vtop"], job["prel"], job["name"])
|
||||
hr = runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xbu.up2k.dupe",
|
||||
xbu, # type: ignore
|
||||
dst,
|
||||
vp,
|
||||
job["host"],
|
||||
job["user"],
|
||||
self.asrv.vfs.get_perms(job["vtop"], job["user"]),
|
||||
job["lmod"],
|
||||
job["size"],
|
||||
job["addr"],
|
||||
job["at"],
|
||||
"",
|
||||
)
|
||||
if not hr:
|
||||
t = "upload blocked by xbu server config: %s" % (dst,)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
if hr.get("reloc"):
|
||||
x = pathmod(self.asrv.vfs, dst, vp, hr["reloc"])
|
||||
if x:
|
||||
zvfs = vfs
|
||||
pdir, _, job["name"], (vfs, rem) = x
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
job["vcfg"] = vfs.flags
|
||||
job["ptop"] = vfs.realpath
|
||||
job["vtop"] = vfs.vpath
|
||||
job["prel"] = rem
|
||||
if zvfs.vpath != vfs.vpath:
|
||||
# print(json.dumps(job, sort_keys=True, indent=4))
|
||||
job["hash"] = cj["hash"]
|
||||
self.log("xbu reloc1:%d..." % (depth,), 6)
|
||||
return self._handle_json(job, depth + 1)
|
||||
|
||||
job["name"] = self._untaken(pdir, job, now)
|
||||
dst = djoin(job["ptop"], job["prel"], job["name"])
|
||||
|
||||
if not self.args.nw:
|
||||
dvf: dict[str, Any] = vfs.flags
|
||||
@@ -2851,14 +2974,16 @@ class Up2k(object):
|
||||
# one chunk may occur multiple times in a file;
|
||||
# filter to unique values for the list of missing chunks
|
||||
# (preserve order to reduce disk thrashing)
|
||||
lut = {}
|
||||
lut = set()
|
||||
for k in cj["hash"]:
|
||||
if k not in lut:
|
||||
job["need"].append(k)
|
||||
lut[k] = 1
|
||||
lut.add(k)
|
||||
|
||||
try:
|
||||
self._new_upload(job)
|
||||
ret = self._new_upload(job, vfs, depth)
|
||||
if ret:
|
||||
return ret # xbu recursed
|
||||
except:
|
||||
self.registry[job["ptop"]].pop(job["wark"], None)
|
||||
raise
|
||||
@@ -3015,7 +3140,7 @@ class Up2k(object):
|
||||
|
||||
def handle_chunks(
|
||||
self, ptop: str, wark: str, chashes: list[str]
|
||||
) -> tuple[list[int], list[list[int]], str, float, bool]:
|
||||
) -> tuple[list[str], int, list[list[int]], str, float, bool]:
|
||||
with self.mutex, self.reg_mutex:
|
||||
self.db_act = self.vol_act[ptop] = time.time()
|
||||
job = self.registry[ptop].get(wark)
|
||||
@@ -3024,12 +3149,37 @@ class Up2k(object):
|
||||
self.log("unknown wark [{}], known: {}".format(wark, known))
|
||||
raise Pebkac(400, "unknown wark" + SSEELOG)
|
||||
|
||||
if len(chashes) > 1 and len(chashes[1]) < 44:
    # first hash is full-length; expand remaining ones
    uniq = []
    lut = set()
    for chash in job["hash"]:
        if chash not in lut:
            uniq.append(chash)
            lut.add(chash)
    try:
        nchunk = uniq.index(chashes[0])
    except:
        raise Pebkac(400, "unknown chunk0 [%s]" % (chashes[0]))
    expanded = [chashes[0]]
    for prefix in chashes[1:]:
        nchunk += 1
        chash = uniq[nchunk]
        if not chash.startswith(prefix):
            t = "next sibling chunk does not start with expected prefix [%s]: [%s]"
            raise Pebkac(400, t % (prefix, chash))
        expanded.append(chash)
    chashes = expanded

for chash in chashes:
    if chash not in job["need"]:
        msg = "chash = {} , need:\n".format(chash)
        msg += "\n".join(job["need"])
        self.log(msg)
        raise Pebkac(400, "already got that (%s) but thanks??" % (chash,))
        t = "already got that (%s) but thanks??"
        if chash not in job["hash"]:
            t = "unknown chunk wtf: %s"
        raise Pebkac(400, t % (chash,))

    if chash in job["busy"]:
        nh = len(job["hash"])
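The expansion above lets the client name only the first chunk-hash in full and the following contiguous siblings by short prefixes. A standalone sketch of that logic (not part of the diff), with toy hashes:

```python
def expand_chashes(all_hashes: list[str], chashes: list[str]) -> list[str]:
    uniq = list(dict.fromkeys(all_hashes))   # dedup, keep order
    n = uniq.index(chashes[0])               # full-length anchor hash
    out = [chashes[0]]
    for prefix in chashes[1:]:
        n += 1
        if not uniq[n].startswith(prefix):
            raise ValueError("prefix %r does not match chunk %d" % (prefix, n))
        out.append(uniq[n])
    return out

hashes = ["aaaa1111", "bbbb2222", "cccc3333", "dddd4444"]
print(expand_chashes(hashes, ["bbbb2222", "cc", "dd"]))
# ['bbbb2222', 'cccc3333', 'dddd4444']
```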
@@ -3037,9 +3187,11 @@ class Up2k(object):
|
||||
t = "that chunk is already being written to:\n {}\n {} {}/{}\n {}"
|
||||
raise Pebkac(400, t.format(wark, chash, idx, nh, job["name"]))
|
||||
|
||||
assert chash # type: ignore
|
||||
chunksize = up2k_chunksize(job["size"])
|
||||
|
||||
coffsets = []
|
||||
nchunks = []
|
||||
for chash in chashes:
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
if not nchunk:
|
||||
@@ -3047,6 +3199,7 @@ class Up2k(object):
|
||||
|
||||
ofs = [chunksize * x for x in nchunk]
|
||||
coffsets.append(ofs)
|
||||
nchunks.append(nchunk)
|
||||
|
||||
for ofs1, ofs2 in zip(coffsets, coffsets[1:]):
|
||||
gap = (ofs2[0] - ofs1[0]) - chunksize
|
||||
@@ -3058,16 +3211,16 @@ class Up2k(object):
|
||||
|
||||
if not job["sprs"]:
|
||||
cur_sz = bos.path.getsize(path)
|
||||
if ofs[0] > cur_sz:
|
||||
if coffsets[0][0] > cur_sz:
|
||||
t = "please upload sequentially using one thread;\nserver filesystem does not support sparse files.\n file: {}\n chunk: {}\n cofs: {}\n flen: {}"
|
||||
t = t.format(job["name"], nchunk[0], ofs[0], cur_sz)
|
||||
t = t.format(job["name"], nchunks[0][0], coffsets[0][0], cur_sz)
|
||||
raise Pebkac(400, t)
|
||||
|
||||
job["busy"][chash] = 1
|
||||
|
||||
job["poke"] = time.time()
|
||||
|
||||
return chunksize, coffsets, path, job["lmod"], job["sprs"]
|
||||
return chashes, chunksize, coffsets, path, job["lmod"], job["sprs"]
|
||||
|
||||
def release_chunks(self, ptop: str, wark: str, chashes: list[str]) -> bool:
|
||||
with self.reg_mutex:
|
||||
@@ -3114,6 +3267,9 @@ class Up2k(object):
|
||||
with self.mutex, self.reg_mutex:
|
||||
self._finish_upload(ptop, wark)
|
||||
|
||||
if self.fx_backlog:
|
||||
self.do_fx_backlog()
|
||||
|
||||
def _finish_upload(self, ptop: str, wark: str) -> None:
|
||||
"""mutex(main,reg) me"""
|
||||
try:
|
||||
@@ -3307,25 +3463,30 @@ class Up2k(object):
|
||||
|
||||
xau = False if skip_xau else vflags.get("xau")
|
||||
dst = djoin(ptop, rd, fn)
|
||||
if xau and not runhook(
|
||||
self.log,
|
||||
xau,
|
||||
dst,
|
||||
djoin(vtop, rd, fn),
|
||||
host,
|
||||
usr,
|
||||
self.asrv.vfs.get_perms(djoin(vtop, rd, fn), usr),
|
||||
int(ts),
|
||||
sz,
|
||||
ip,
|
||||
at or time.time(),
|
||||
"",
|
||||
):
|
||||
t = "upload blocked by xau server config"
|
||||
self.log(t, 1)
|
||||
wunlink(self.log, dst, vflags)
|
||||
self.registry[ptop].pop(wark, None)
|
||||
raise Pebkac(403, t)
|
||||
if xau:
|
||||
hr = runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xau.up2k",
|
||||
xau,
|
||||
dst,
|
||||
djoin(vtop, rd, fn),
|
||||
host,
|
||||
usr,
|
||||
self.asrv.vfs.get_perms(djoin(vtop, rd, fn), usr),
|
||||
ts,
|
||||
sz,
|
||||
ip,
|
||||
at or time.time(),
|
||||
"",
|
||||
)
|
||||
if not hr:
|
||||
t = "upload blocked by xau server config"
|
||||
self.log(t, 1)
|
||||
wunlink(self.log, dst, vflags)
|
||||
self.registry[ptop].pop(wark, None)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
xiu = vflags.get("xiu")
|
||||
if xiu:
|
||||
@@ -3509,6 +3670,9 @@ class Up2k(object):
|
||||
if xbd:
|
||||
if not runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xbd",
|
||||
xbd,
|
||||
abspath,
|
||||
vpath,
|
||||
@@ -3518,7 +3682,7 @@ class Up2k(object):
|
||||
stl.st_mtime,
|
||||
st.st_size,
|
||||
ip,
|
||||
0,
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
t = "delete blocked by xbd server config: {}"
|
||||
@@ -3543,6 +3707,9 @@ class Up2k(object):
|
||||
if xad:
|
||||
runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xad",
|
||||
xad,
|
||||
abspath,
|
||||
vpath,
|
||||
@@ -3552,7 +3719,7 @@ class Up2k(object):
|
||||
stl.st_mtime,
|
||||
st.st_size,
|
||||
ip,
|
||||
0,
|
||||
time.time(),
|
||||
"",
|
||||
)
|
||||
|
||||
@@ -3568,7 +3735,7 @@ class Up2k(object):
|
||||
|
||||
return n_files, ok + ok2, ng + ng2
|
||||
|
||||
def handle_mv(self, uname: str, svp: str, dvp: str) -> str:
|
||||
def handle_mv(self, uname: str, ip: str, svp: str, dvp: str) -> str:
|
||||
if svp == dvp or dvp.startswith(svp + "/"):
|
||||
raise Pebkac(400, "mv: cannot move parent into subfolder")
|
||||
|
||||
@@ -3585,7 +3752,7 @@ class Up2k(object):
|
||||
if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
|
||||
with self.mutex:
|
||||
try:
|
||||
ret = self._mv_file(uname, svp, dvp, curs)
|
||||
ret = self._mv_file(uname, ip, svp, dvp, curs)
|
||||
finally:
|
||||
for v in curs:
|
||||
v.connection.commit()
|
||||
@@ -3618,7 +3785,7 @@ class Up2k(object):
|
||||
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
|
||||
|
||||
dvpf = dvp + svpf[len(svp) :]
|
||||
self._mv_file(uname, svpf, dvpf, curs)
|
||||
self._mv_file(uname, ip, svpf, dvpf, curs)
|
||||
finally:
|
||||
for v in curs:
|
||||
v.connection.commit()
|
||||
@@ -3643,7 +3810,7 @@ class Up2k(object):
|
||||
return "k"
|
||||
|
||||
def _mv_file(
|
||||
self, uname: str, svp: str, dvp: str, curs: set["sqlite3.Cursor"]
|
||||
self, uname: str, ip: str, svp: str, dvp: str, curs: set["sqlite3.Cursor"]
|
||||
) -> str:
|
||||
"""mutex(main) me; will mutex(reg)"""
|
||||
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
|
||||
@@ -3677,21 +3844,27 @@ class Up2k(object):
|
||||
except:
|
||||
pass # broken symlink; keep as-is
|
||||
|
||||
ftime = stl.st_mtime
|
||||
fsize = st.st_size
|
||||
|
||||
xbr = svn.flags.get("xbr")
|
||||
xar = dvn.flags.get("xar")
|
||||
if xbr:
|
||||
if not runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xbr",
|
||||
xbr,
|
||||
sabs,
|
||||
svp,
|
||||
"",
|
||||
uname,
|
||||
self.asrv.vfs.get_perms(svp, uname),
|
||||
stl.st_mtime,
|
||||
st.st_size,
|
||||
"",
|
||||
0,
|
||||
ftime,
|
||||
fsize,
|
||||
ip,
|
||||
time.time(),
|
||||
"",
|
||||
):
|
||||
t = "move blocked by xbr server config: {}".format(svp)
|
||||
@@ -3719,16 +3892,19 @@ class Up2k(object):
|
||||
if xar:
|
||||
runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xar.ln",
|
||||
xar,
|
||||
dabs,
|
||||
dvp,
|
||||
"",
|
||||
uname,
|
||||
self.asrv.vfs.get_perms(dvp, uname),
|
||||
0,
|
||||
0,
|
||||
"",
|
||||
0,
|
||||
ftime,
|
||||
fsize,
|
||||
ip,
|
||||
time.time(),
|
||||
"",
|
||||
)
|
||||
|
||||
@@ -3737,13 +3913,6 @@ class Up2k(object):
|
||||
c1, w, ftime_, fsize_, ip, at = self._find_from_vpath(svn.realpath, srem)
|
||||
c2 = self.cur.get(dvn.realpath)
|
||||
|
||||
if ftime_ is None:
|
||||
ftime = stl.st_mtime
|
||||
fsize = st.st_size
|
||||
else:
|
||||
ftime = ftime_
|
||||
fsize = fsize_ or 0
|
||||
|
||||
has_dupes = False
|
||||
if w:
|
||||
assert c1
|
||||
@@ -3751,7 +3920,9 @@ class Up2k(object):
|
||||
self._copy_tags(c1, c2, w)
|
||||
|
||||
with self.reg_mutex:
|
||||
has_dupes = self._forget_file(svn.realpath, srem, c1, w, is_xvol, fsize)
|
||||
has_dupes = self._forget_file(
|
||||
svn.realpath, srem, c1, w, is_xvol, fsize_ or fsize
|
||||
)
|
||||
|
||||
if not is_xvol:
|
||||
has_dupes = self._relink(w, svn.realpath, srem, dabs)
|
||||
@@ -3821,7 +3992,7 @@ class Up2k(object):
|
||||
|
||||
if is_link:
|
||||
try:
|
||||
times = (int(time.time()), int(stl.st_mtime))
|
||||
times = (int(time.time()), int(ftime))
|
||||
bos.utime(dabs, times, False)
|
||||
except:
|
||||
pass
|
||||
@@ -3831,16 +4002,19 @@ class Up2k(object):
|
||||
if xar:
|
||||
runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xar.mv",
|
||||
xar,
|
||||
dabs,
|
||||
dvp,
|
||||
"",
|
||||
uname,
|
||||
self.asrv.vfs.get_perms(dvp, uname),
|
||||
0,
|
||||
0,
|
||||
"",
|
||||
0,
|
||||
ftime,
|
||||
fsize,
|
||||
ip,
|
||||
time.time(),
|
||||
"",
|
||||
)
|
||||
|
||||
@@ -4105,42 +4279,57 @@ class Up2k(object):
|
||||
|
||||
return ret
|
||||
|
||||
def _new_upload(self, job: dict[str, Any]) -> None:
|
||||
def _new_upload(self, job: dict[str, Any], vfs: VFS, depth: int) -> dict[str, str]:
|
||||
pdir = djoin(job["ptop"], job["prel"])
|
||||
if not job["size"]:
|
||||
try:
|
||||
inf = bos.stat(djoin(pdir, job["name"]))
|
||||
if stat.S_ISREG(inf.st_mode):
|
||||
job["lmod"] = inf.st_size
|
||||
return
|
||||
return {}
|
||||
except:
|
||||
pass
|
||||
|
||||
self.registry[job["ptop"]][job["wark"]] = job
|
||||
job["name"] = self._untaken(pdir, job, job["t0"])
|
||||
# if len(job["name"].split(".")) > 8:
|
||||
# raise Exception("aaa")
|
||||
|
||||
xbu = self.flags[job["ptop"]].get("xbu")
|
||||
ap_chk = djoin(pdir, job["name"])
|
||||
vp_chk = djoin(job["vtop"], job["prel"], job["name"])
|
||||
if xbu and not runhook(
|
||||
self.log,
|
||||
xbu,
|
||||
ap_chk,
|
||||
vp_chk,
|
||||
job["host"],
|
||||
job["user"],
|
||||
self.asrv.vfs.get_perms(vp_chk, job["user"]),
|
||||
int(job["lmod"]),
|
||||
job["size"],
|
||||
job["addr"],
|
||||
int(job["t0"]),
|
||||
"",
|
||||
):
|
||||
t = "upload blocked by xbu server config: {}".format(vp_chk)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
if xbu:
|
||||
hr = runhook(
|
||||
self.log,
|
||||
None,
|
||||
self,
|
||||
"xbu.up2k",
|
||||
xbu,
|
||||
ap_chk,
|
||||
vp_chk,
|
||||
job["host"],
|
||||
job["user"],
|
||||
self.asrv.vfs.get_perms(vp_chk, job["user"]),
|
||||
job["lmod"],
|
||||
job["size"],
|
||||
job["addr"],
|
||||
job["t0"],
|
||||
"",
|
||||
)
|
||||
if not hr:
|
||||
t = "upload blocked by xbu server config: {}".format(vp_chk)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
if hr.get("reloc"):
|
||||
x = pathmod(self.asrv.vfs, ap_chk, vp_chk, hr["reloc"])
|
||||
if x:
|
||||
zvfs = vfs
|
||||
pdir, _, job["name"], (vfs, rem) = x
|
||||
job["vcfg"] = vfs.flags
|
||||
job["ptop"] = vfs.realpath
|
||||
job["vtop"] = vfs.vpath
|
||||
job["prel"] = rem
|
||||
if zvfs.vpath != vfs.vpath:
|
||||
self.log("xbu reloc2:%d..." % (depth,), 6)
|
||||
return self._handle_json(job, depth + 1)
|
||||
|
||||
job["name"] = self._untaken(pdir, job, job["t0"])
|
||||
self.registry[job["ptop"]][job["wark"]] = job
|
||||
|
||||
tnam = job["name"] + ".PARTIAL"
|
||||
if self.args.dotpart:
|
||||
@@ -4150,7 +4339,7 @@ class Up2k(object):
|
||||
job["tnam"] = tnam
|
||||
if not job["hash"]:
|
||||
del self.registry[job["ptop"]][job["wark"]]
|
||||
return
|
||||
return {}
|
||||
|
||||
if self.args.plain_ip:
|
||||
dip = job["addr"].replace(":", ".")
|
||||
@@ -4210,6 +4399,8 @@ class Up2k(object):
|
||||
if not job["hash"]:
|
||||
self._finish_upload(job["ptop"], job["wark"])
|
||||
|
||||
return {}
|
||||
|
||||
def _snapshot(self) -> None:
|
||||
slp = self.args.snap_wri
|
||||
if not slp or self.args.no_snap:
|
||||
@@ -4414,6 +4605,9 @@ class Up2k(object):
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.notify_all()
|
||||
|
||||
if self.fx_backlog:
|
||||
self.do_fx_backlog()
|
||||
|
||||
return True
|
||||
|
||||
def hash_file(
|
||||
@@ -4445,6 +4639,48 @@ class Up2k(object):
|
||||
self.hashq.put(zt)
|
||||
self.n_hashq += 1
|
||||
|
||||
def do_fx_backlog(self):
|
||||
with self.mutex, self.reg_mutex:
|
||||
todo = self.fx_backlog
|
||||
self.fx_backlog = []
|
||||
for act, hr, req_vp in todo:
|
||||
self.hook_fx(act, hr, req_vp)
|
||||
|
||||
def hook_fx(self, act: str, hr: dict[str, str], req_vp: str) -> None:
|
||||
bad = [k for k in hr if k != "vp"]
|
||||
if bad:
|
||||
t = "got unsupported key in %s from hook: %s"
|
||||
raise Exception(t % (act, bad))
|
||||
|
||||
for fvp in hr.get("vp") or []:
|
||||
# expect vpath including filename; either absolute
|
||||
# or relative to the client's vpath (request url)
|
||||
if fvp.startswith("/"):
|
||||
fvp, fn = vsplit(fvp[1:])
|
||||
fvp = "/" + fvp
|
||||
else:
|
||||
fvp, fn = vsplit(fvp)
|
||||
|
||||
x = pathmod(self.asrv.vfs, "", req_vp, {"vp": fvp, "fn": fn})
|
||||
if not x:
|
||||
t = "hook_fx(%s): failed to resolve %s based on %s"
|
||||
self.log(t % (act, fvp, req_vp))
|
||||
continue
|
||||
|
||||
ap, rd, fn, (vn, rem) = x
|
||||
vp = vjoin(rd, fn)
|
||||
if not vp:
|
||||
raise Exception("hook_fx: blank vp from pathmod")
|
||||
|
||||
if act == "idx":
|
||||
rd = rd[len(vn.vpath) :].strip("/")
|
||||
self.hash_file(
|
||||
vn.realpath, vn.vpath, vn.flags, rd, fn, "", time.time(), "", True
|
||||
)
|
||||
|
||||
if act == "del":
|
||||
self._handle_rm(LEELOO_DALLAS, "", vp, [], False, False)
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stop = True
|
||||
|
||||
|
||||
@@ -26,7 +26,6 @@ import threading
|
||||
import time
|
||||
import traceback
|
||||
from collections import Counter
|
||||
from email.utils import formatdate
|
||||
|
||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||
from queue import Queue
|
||||
@@ -60,6 +59,10 @@ except:
|
||||
UTC = _UTC()
|
||||
|
||||
|
||||
if PY2:
|
||||
range = xrange # type: ignore
|
||||
|
||||
|
||||
if sys.version_info >= (3, 7) or (
|
||||
sys.version_info >= (3, 6) and platform.python_implementation() == "CPython"
|
||||
):
|
||||
@@ -99,6 +102,9 @@ except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_SQLITE"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3
|
||||
|
||||
@@ -107,6 +113,9 @@ except:
|
||||
HAVE_SQLITE3 = False
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_PSUTIL"):
|
||||
raise Exception()
|
||||
|
||||
HAVE_PSUTIL = True
|
||||
import psutil
|
||||
except:
|
||||
@@ -137,10 +146,15 @@ if TYPE_CHECKING:
|
||||
import magic
|
||||
|
||||
from .authsrv import VFS
|
||||
from .broker_util import BrokerCli
|
||||
from .up2k import Up2k
|
||||
|
||||
FAKE_MP = False
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_MP"):
|
||||
raise ImportError()
|
||||
|
||||
import multiprocessing as mp
|
||||
|
||||
# import multiprocessing.dummy as mp
|
||||
@@ -159,6 +173,9 @@ else:
|
||||
|
||||
|
||||
try:
|
||||
if os.environ.get("PRTY_NO_IPV6"):
|
||||
raise Exception()
|
||||
|
||||
socket.inet_pton(socket.AF_INET6, "::1")
|
||||
HAVE_IPV6 = True
|
||||
except:
|
||||
@@ -794,7 +811,7 @@ class CachedSet(object):
|
||||
|
||||
c = self.c = {k: v for k, v in self.c.items() if now - v < self.maxage}
|
||||
try:
|
||||
self.oldest = c[min(c, key=c.get)]
|
||||
self.oldest = c[min(c, key=c.get)] # type: ignore
|
||||
except:
|
||||
self.oldest = now
|
||||
|
||||
@@ -1743,7 +1760,7 @@ def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
||||
|
||||
ofs = ret.find(b"\r\n\r\n")
|
||||
if ofs < 0:
|
||||
if len(ret) > 1024 * 64:
|
||||
if len(ret) > 1024 * 32:
|
||||
raise Pebkac(400, "header 2big")
|
||||
else:
|
||||
continue
|
||||
@@ -1821,10 +1838,21 @@ def gen_filekey_dbg(
|
||||
return ret
|
||||
|
||||
|
||||
WKDAYS = "Mon Tue Wed Thu Fri Sat Sun".split()
MONTHS = "Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split()
RFC2822 = "%s, %02d %s %04d %02d:%02d:%02d GMT"


def formatdate(ts: Optional[float] = None) -> str:
    # gmtime ~= datetime.fromtimestamp(ts, UTC).timetuple()
    y, mo, d, h, mi, s, wd, _, _ = time.gmtime(ts)
    return RFC2822 % (WKDAYS[wd], d, MONTHS[mo - 1], y, h, mi, s)
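This hand-rolled formatter replaces the `email.utils.formatdate` import that the diff drops at the top of the file. A quick self-contained check (not part of the diff) that a formatter built the same way produces the same string as the stdlib helper:

```python
import time
from email.utils import formatdate as stdlib_formatdate

WKDAYS = "Mon Tue Wed Thu Fri Sat Sun".split()
MONTHS = "Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split()
RFC2822 = "%s, %02d %s %04d %02d:%02d:%02d GMT"

def rfc2822_gmt(ts: float) -> str:
    y, mo, d, h, mi, s, wd, _, _ = time.gmtime(ts)
    return RFC2822 % (WKDAYS[wd], d, MONTHS[mo - 1], y, h, mi, s)

ts = 1700000000.0
assert rfc2822_gmt(ts) == stdlib_formatdate(ts, usegmt=True)
print(rfc2822_gmt(ts))  # Tue, 14 Nov 2023 22:13:20 GMT
```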
def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
    v = v.replace("%", "%25").replace(";", "%3B")
    if dur:
        exp = formatdate(time.time() + dur, usegmt=True)
        exp = formatdate(time.time() + dur)
    else:
        exp = "Fri, 15 Aug 1997 01:00:00 GMT"

@@ -1839,12 +1867,10 @@ def humansize(sz: float, terse: bool = False) -> str:

        sz /= 1024.0

    ret = " ".join([str(sz)[:4].rstrip("."), unit])

    if not terse:
        return ret

    return ret.replace("iB", "").replace(" ", "")
    if terse:
        return "%s%s" % (str(sz)[:4].rstrip("."), unit[:1])
    else:
        return "%s %s" % (str(sz)[:4].rstrip("."), unit)


def unhumanize(sz: str) -> int:
@@ -1896,7 +1922,7 @@ def uncyg(path: str) -> str:
|
||||
def undot(path: str) -> str:
|
||||
ret: list[str] = []
|
||||
for node in path.split("/"):
|
||||
if node in ["", "."]:
|
||||
if node == "." or not node:
|
||||
continue
|
||||
|
||||
if node == "..":
|
||||
@@ -2049,7 +2075,7 @@ def _quotep2(txt: str) -> str:
|
||||
"""url quoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
quot = quote(btxt, safe=b"/")
|
||||
return w8dec(quot.replace(b" ", b"+"))
|
||||
return w8dec(quot.replace(b" ", b"+")) # type: ignore
|
||||
|
||||
|
||||
def _quotep3(txt: str) -> str:
|
||||
@@ -2093,6 +2119,72 @@ def ujoin(rd: str, fn: str) -> str:
|
||||
return rd or fn
|
||||
|
||||
|
||||
def log_reloc(
|
||||
log: "NamedLogger",
|
||||
re: dict[str, str],
|
||||
pm: tuple[str, str, str, tuple["VFS", str]],
|
||||
ap: str,
|
||||
vp: str,
|
||||
fn: str,
|
||||
vn: "VFS",
|
||||
rem: str,
|
||||
) -> None:
|
||||
nap, nvp, nfn, (nvn, nrem) = pm
|
||||
t = "reloc %s:\nold ap [%s]\nnew ap [%s\033[36m/%s\033[0m]\nold vp [%s]\nnew vp [%s\033[36m/%s\033[0m]\nold fn [%s]\nnew fn [%s]\nold vfs [%s]\nnew vfs [%s]\nold rem [%s]\nnew rem [%s]"
|
||||
log(t % (re, ap, nap, nfn, vp, nvp, nfn, fn, nfn, vn.vpath, nvn.vpath, rem, nrem))
|
||||
|
||||
|
||||
def pathmod(
|
||||
vfs: "VFS", ap: str, vp: str, mod: dict[str, str]
|
||||
) -> Optional[tuple[str, str, str, tuple["VFS", str]]]:
|
||||
# vfs: authsrv.vfs
|
||||
# ap: original abspath to a file
|
||||
# vp: original urlpath to a file
|
||||
# mod: modification (ap/vp/fn)
|
||||
|
||||
nvp = "\n" # new vpath
|
||||
ap = os.path.dirname(ap)
|
||||
vp, fn = vsplit(vp)
|
||||
if mod.get("fn"):
|
||||
fn = mod["fn"]
|
||||
nvp = vp
|
||||
|
||||
for ref, k in ((ap, "ap"), (vp, "vp")):
|
||||
if k not in mod:
|
||||
continue
|
||||
|
||||
ms = mod[k].replace(os.sep, "/")
|
||||
if ms.startswith("/"):
|
||||
np = ms
|
||||
elif k == "vp":
|
||||
np = undot(vjoin(ref, ms))
|
||||
else:
|
||||
np = os.path.abspath(os.path.join(ref, ms))
|
||||
|
||||
if k == "vp":
|
||||
nvp = np.lstrip("/")
|
||||
continue
|
||||
|
||||
# try to map abspath to vpath
|
||||
np = np.replace("/", os.sep)
|
||||
for vn_ap, vn in vfs.all_aps:
|
||||
if not np.startswith(vn_ap):
|
||||
continue
|
||||
zs = np[len(vn_ap) :].replace(os.sep, "/")
|
||||
nvp = vjoin(vn.vpath, zs)
|
||||
break
|
||||
|
||||
if nvp == "\n":
|
||||
return None
|
||||
|
||||
vn, rem = vfs.get(nvp, "*", False, False)
|
||||
if not vn.realpath:
|
||||
raise Exception("unmapped vfs")
|
||||
|
||||
ap = vn.canonical(rem)
|
||||
return ap, nvp, fn, (vn, rem)
|
||||
|
||||
|
||||
def _w8dec2(txt: bytes) -> str:
|
||||
"""decodes filesystem-bytes to wtf8"""
|
||||
return surrogateescape.decodefilename(txt)
|
||||
@@ -2709,30 +2801,30 @@ def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:

def unescape_cookie(orig: str) -> str:
    # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
    ret = ""
    ret = []
    esc = ""
    for ch in orig:
        if ch == "%":
            if len(esc) > 0:
                ret += esc
            if esc:
                ret.append(esc)
            esc = ch

        elif len(esc) > 0:
        elif esc:
            esc += ch
            if len(esc) == 3:
                try:
                    ret += chr(int(esc[1:], 16))
                    ret.append(chr(int(esc[1:], 16)))
                except:
                    ret += esc
                    ret.append(esc)
                esc = ""

        else:
            ret += ch
            ret.append(ch)

    if len(esc) > 0:
        ret += esc
    if esc:
        ret.append(esc)

    return ret
    return "".join(ret)

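The rewrite above only switches string concatenation to list-append plus a final join; behavior is unchanged. The new variant, extracted into a self-contained sketch and exercised; percent-escapes decode, anything malformed passes through untouched:

```python
def unescape_cookie(orig: str) -> str:
    ret, esc = [], ""
    for ch in orig:
        if ch == "%":
            if esc:
                ret.append(esc)
            esc = ch
        elif esc:
            esc += ch
            if len(esc) == 3:
                try:
                    ret.append(chr(int(esc[1:], 16)))
                except:
                    ret.append(esc)
                esc = ""
        else:
            ret.append(ch)
    if esc:
        ret.append(esc)
    return "".join(ret)

assert unescape_cookie("qwe%2Crty%3Bued") == "qwe,rty;ued"
assert unescape_cookie("50%25 off") == "50% off"
assert unescape_cookie("dangling %2") == "dangling %2"  # malformed tail kept
```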
def guess_mime(url: str, fallback: str = "application/octet-stream") -> str:
|
||||
@@ -3106,6 +3198,7 @@ def runihook(
|
||||
|
||||
def _runhook(
|
||||
log: Optional["NamedLogger"],
|
||||
src: str,
|
||||
cmd: str,
|
||||
ap: str,
|
||||
vp: str,
|
||||
@@ -3117,14 +3210,16 @@ def _runhook(
|
||||
ip: str,
|
||||
at: float,
|
||||
txt: str,
|
||||
) -> bool:
|
||||
) -> dict[str, Any]:
|
||||
ret = {"rc": 0}
|
||||
areq, chk, fork, jtxt, wait, sp_ka, acmd = _parsehook(log, cmd)
|
||||
if areq:
|
||||
for ch in areq:
|
||||
if ch not in perms:
|
||||
t = "user %s not allowed to run hook %s; need perms %s, have %s"
|
||||
log(t % (uname, cmd, areq, perms))
|
||||
return True # fallthrough to next hook
|
||||
if log:
|
||||
log(t % (uname, cmd, areq, perms))
|
||||
return ret # fallthrough to next hook
|
||||
if jtxt:
|
||||
ja = {
|
||||
"ap": ap,
|
||||
@@ -3136,6 +3231,7 @@ def _runhook(
|
||||
"host": host,
|
||||
"user": uname,
|
||||
"perms": perms,
|
||||
"src": src,
|
||||
"txt": txt,
|
||||
}
|
||||
arg = json.dumps(ja)
|
||||
@@ -3154,18 +3250,34 @@ def _runhook(
|
||||
else:
|
||||
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
|
||||
if chk and rc:
|
||||
ret["rc"] = rc
|
||||
retchk(rc, bcmd, err, log, 5)
|
||||
return False
|
||||
else:
|
||||
try:
|
||||
ret = json.loads(v)
|
||||
except:
|
||||
ret = {}
|
||||
|
||||
try:
|
||||
if "stdout" not in ret:
|
||||
ret["stdout"] = v
|
||||
if "rc" not in ret:
|
||||
ret["rc"] = rc
|
||||
except:
|
||||
ret = {"rc": rc, "stdout": v}
|
||||
|
||||
wait -= time.time() - t0
|
||||
if wait > 0:
|
||||
time.sleep(wait)
|
||||
|
||||
return True
|
||||
return ret
|
||||
|
||||
|
||||
def runhook(
|
||||
log: Optional["NamedLogger"],
|
||||
broker: Optional["BrokerCli"],
|
||||
up2k: Optional["Up2k"],
|
||||
src: str,
|
||||
cmds: list[str],
|
||||
ap: str,
|
||||
vp: str,
|
||||
@@ -3177,19 +3289,42 @@ def runhook(
|
||||
ip: str,
|
||||
at: float,
|
||||
txt: str,
|
||||
) -> bool:
|
||||
) -> dict[str, Any]:
|
||||
assert broker or up2k
|
||||
asrv = (broker or up2k).asrv
|
||||
args = (broker or up2k).args
|
||||
vp = vp.replace("\\", "/")
|
||||
ret = {"rc": 0}
|
||||
for cmd in cmds:
|
||||
try:
|
||||
if not _runhook(log, cmd, ap, vp, host, uname, perms, mt, sz, ip, at, txt):
|
||||
return False
|
||||
hr = _runhook(
|
||||
log, src, cmd, ap, vp, host, uname, perms, mt, sz, ip, at, txt
|
||||
)
|
||||
if log and args.hook_v:
|
||||
log("hook(%s) [%s] => \033[32m%s" % (src, cmd, hr), 6)
|
||||
if not hr:
|
||||
return {}
|
||||
for k, v in hr.items():
|
||||
if k in ("idx", "del") and v:
|
||||
if broker:
|
||||
broker.say("up2k.hook_fx", k, v, vp)
|
||||
else:
|
||||
up2k.fx_backlog.append((k, v, vp))
|
||||
elif k == "reloc" and v:
|
||||
# idk, just take the last one ig
|
||||
ret["reloc"] = v
|
||||
elif k in ret:
|
||||
if k == "rc" and v:
|
||||
ret[k] = v
|
||||
else:
|
||||
ret[k] = v
|
||||
except Exception as ex:
|
||||
(log or print)("hook: {}".format(ex))
|
||||
if ",c," in "," + cmd:
|
||||
return False
|
||||
return {}
|
||||
break
|
||||
|
||||
return True
|
||||
return ret
|
||||
|
||||
|
||||
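The hook plumbing above (stdout parsed as JSON in `_runhook`, then the `reloc`, `idx` and `del` keys dispatched in `runhook` and resolved via `pathmod`) lets a hook redirect an upload. A hypothetical hook script as a sketch; the registration style described in the comments is an assumption inferred from the `jtxt` parsing and the `",c,"` check in this diff, not a documented guarantee:

```python
#!/usr/bin/env python3
# hypothetical xbu hook: sort incoming uploads into per-user folders by
# replying with a "reloc" object on stdout; assumes a hook registration that
# passes the upload-info as a json argument and waits for the output
# (the "j" style seen in _parsehook) -- flag names are an assumption.
import json
import sys

def main() -> None:
    inf = json.loads(sys.argv[1])          # contains ap, vp, user, host, ...
    user = inf.get("user") or "anon"
    # a relative "vp" is joined onto the upload's directory by pathmod()
    print(json.dumps({"reloc": {"vp": user}}))

if __name__ == "__main__":
    main()
```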
def loadpy(ap: str, hot: bool) -> Any:
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #bbb;
|
||||
|
||||
--bg-u7: #555;
|
||||
--bg-u6: #4c4c4c;
|
||||
--bg-u5: #444;
|
||||
--bg-u4: #383838;
|
||||
@@ -43,8 +42,14 @@
|
||||
--btn-h-bg: #805;
|
||||
--btn-1-fg: #400;
|
||||
--btn-1-bg: var(--a);
|
||||
--btn-h-bs: var(--btn-bs);
|
||||
--btn-h-bb: var(--btn-bb);
|
||||
--btn-1-bs: var(--btn-bs);
|
||||
--btn-1-bb: var(--btn-bb);
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #fe8;
|
||||
--btn-1h-bs: var(--btn-1-bs);
|
||||
--btn-1h-bb: var(--btn-1-bb);
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
@@ -212,22 +217,19 @@ html.y {
|
||||
html.a {
|
||||
--op-aa-sh: 0 0 .2em var(--bg-d3) inset;
|
||||
|
||||
--u2-o-bg: #603;
|
||||
--u2-o-b1: #a16;
|
||||
--u2-o-sh: #a00;
|
||||
--u2-o-h-bg: var(--u2-o-bg);
|
||||
--u2-o-h-b1: #fb0;
|
||||
--u2-o-h-sh: #fb0;
|
||||
--u2-o-1-bg: #6a1;
|
||||
--u2-o-1-b1: #efa;
|
||||
--u2-o-1-sh: #0c0;
|
||||
--u2-o-1h-bg: var(--u2-o-1-bg);
|
||||
--btn-bs: 0 0 .2em var(--bg-d3);
|
||||
}
|
||||
html.az {
|
||||
--btn-1-bs: 0 0 .1em var(--fg) inset;
|
||||
}
|
||||
html.ay {
|
||||
--op-aa-sh: 0 .1em .2em #ccc;
|
||||
--op-aa-bg: var(--bg-max);
|
||||
}
|
||||
html.b {
|
||||
--btn-bs: 0 .05em 0 var(--bg-d3) inset;
|
||||
--btn-1-bs: 0 .05em 0 var(--btn-1h-bg) inset;
|
||||
|
||||
--tree-bg: var(--bg);
|
||||
|
||||
--g-bg: var(--bg);
|
||||
@@ -244,17 +246,13 @@ html.b {
|
||||
--u2-b1-bg: rgba(128,128,128,0.15);
|
||||
--u2-b2-bg: var(--u2-b1-bg);
|
||||
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-h-bg: var(--btn-h-bg);
|
||||
--u2-o-1-bg: var(--a);
|
||||
--u2-o-1h-bg: var(--a-hil);
|
||||
|
||||
--f-sh1: 0.1;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.bz {
|
||||
--fg: #cce;
|
||||
--fg-weak: #bbd;
|
||||
|
||||
--bg-u5: #3b3f58;
|
||||
--bg-u4: #1e2130;
|
||||
--bg-u3: #1e2130;
|
||||
@@ -266,12 +264,14 @@ html.bz {
|
||||
|
||||
--row-alt: #181a27;
|
||||
|
||||
--a-b: #fb4;
|
||||
|
||||
--btn-bg: #202231;
|
||||
--btn-h-bg: #2d2f45;
|
||||
--btn-1-bg: #ba2959;
|
||||
--btn-1-is: #f59;
|
||||
--btn-1-fg: #fff;
|
||||
--btn-1-bg: #eb6;
|
||||
--btn-1-fg: #000;
|
||||
--btn-1h-fg: #000;
|
||||
--btn-1h-bg: #ff9;
|
||||
--txt-sh: a;
|
||||
|
||||
--u2-tab-b1: var(--bg-u5);
|
||||
@@ -306,6 +306,7 @@ html.by {
|
||||
}
|
||||
html.c {
|
||||
font-weight: bold;
|
||||
|
||||
--fg: #fff;
|
||||
--fg-weak: #cef;
|
||||
--bg-u5: #409;
|
||||
@@ -326,16 +327,23 @@ html.c {
|
||||
--chk-fg: #d90;
|
||||
|
||||
--op-aa-bg: #f9dd22;
|
||||
--u2-o-1-bg: #4cf;
|
||||
|
||||
--srv-1: #ea0;
|
||||
--mp-b-bg: transparent;
|
||||
}
|
||||
html.cz {
|
||||
--bgg: var(--bg-u2);
|
||||
|
||||
--sel-bg: var(--bg-u5);
|
||||
--sel-fg: var(--fg);
|
||||
|
||||
--btn-bb: .2em solid #709;
|
||||
--btn-bs: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
--btn-1-bb: .2em solid #e90;
|
||||
--btn-1-bs: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
|
||||
--srv-3: #fff;
|
||||
|
||||
--u2-tab-b1: var(--bg-d3);
|
||||
}
|
||||
html.cy {
|
||||
@@ -363,6 +371,7 @@ html.cy {
|
||||
--btn-h-fg: #fff;
|
||||
--btn-1-bg: #ff0;
|
||||
--btn-1-fg: #000;
|
||||
--btn-bs: 0 .25em 0 #f00;
|
||||
--chk-fg: #fd0;
|
||||
|
||||
--srv-1: #f00;
|
||||
@@ -371,8 +380,6 @@ html.cy {
|
||||
|
||||
--u2-b1-bg: #f00;
|
||||
--u2-b2-bg: #f00;
|
||||
--u2-o-bg: #ff0;
|
||||
--u2-o-1-bg: #f00;
|
||||
}
|
||||
html.dz {
|
||||
--fg: #4d4;
|
||||
@@ -380,7 +387,6 @@ html.dz {
|
||||
--fg2-max: #fff;
|
||||
--fg-weak: #2a2;
|
||||
|
||||
--bg-u7: #020;
|
||||
--bg-u6: #020;
|
||||
--bg-u5: #050;
|
||||
--bg-u4: #020;
|
||||
@@ -413,6 +419,9 @@ html.dz {
|
||||
--btn-1-bg: #4f4;
|
||||
--btn-1h-fg: var(--btn-1-fg);
|
||||
--btn-1h-bg: #3f3;
|
||||
--btn-bs: 0 0 0 .1em #080 inset;
|
||||
--btn-1-bs: a;
|
||||
|
||||
--chk-fg: var(--tab-alt);
|
||||
--txt-sh: var(--bg-d2);
|
||||
--txt-bg: var(--btn-bg);
|
||||
@@ -434,12 +443,6 @@ html.dz {
|
||||
--u2-b-fg: #fff;
|
||||
--u2-b1-bg: #3a3;
|
||||
--u2-b2-bg: #3a3;
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-b1: var(--bg-u5);
|
||||
--u2-o-h-bg: var(--fg-weak);
|
||||
--u2-o-1-bg: var(--fg-weak);
|
||||
--u2-o-1-b1: var(--a);
|
||||
--u2-o-1h-bg: var(--a);
|
||||
--u2-inf-bg: #07a;
|
||||
--u2-inf-b1: #0be;
|
||||
--u2-ok-bg: #380;
|
||||
@@ -551,10 +554,6 @@ html.dy {
|
||||
--u2-tab-1-bg: a;
|
||||
--u2-b1-bg: #000;
|
||||
--u2-b2-bg: #000;
|
||||
--u2-o-h-bg: #999;
|
||||
--u2-o-1h-bg: #999;
|
||||
--u2-o-bg: #eee;
|
||||
--u2-o-1-bg: #000;
|
||||
|
||||
--ud-b1: a;
|
||||
|
||||
@@ -627,6 +626,7 @@ pre, code, tt, #doc, #doc>code {
|
||||
overflow: hidden;
|
||||
width: 0;
|
||||
height: 0;
|
||||
color: var(--bg);
|
||||
}
|
||||
html .ayjump:focus {
|
||||
z-index: 80386;
|
||||
@@ -963,6 +963,8 @@ html.y #path a:hover {
|
||||
#files tbody tr.play a:hover {
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#ggrid {
|
||||
margin: -.2em -.5em;
|
||||
@@ -971,6 +973,7 @@ html.y #path a:hover {
|
||||
overflow: hidden;
|
||||
display: block;
|
||||
display: -webkit-box;
|
||||
line-clamp: var(--grid-ln);
|
||||
-webkit-line-clamp: var(--grid-ln);
|
||||
-webkit-box-orient: vertical;
|
||||
padding-top: .3em;
|
||||
@@ -1017,9 +1020,6 @@ html.y #path a:hover {
|
||||
color: var(--g-dfg);
|
||||
}
|
||||
#ggrid>a.au:before {
|
||||
content: '💾';
|
||||
}
|
||||
html.np_open #ggrid>a.au:before {
|
||||
content: '▶';
|
||||
}
|
||||
#ggrid>a:before {
|
||||
@@ -1148,6 +1148,7 @@ html.y #widget.open {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#fshr,
|
||||
#wtgrid,
|
||||
#wtico {
|
||||
position: relative;
|
||||
@@ -1334,6 +1335,7 @@ html.y #widget.open {
|
||||
#widget.cmp #wtoggle {
|
||||
font-size: 1.2em;
|
||||
}
|
||||
#widget.cmp #fshr,
|
||||
#widget.cmp #wtgrid {
|
||||
display: none;
|
||||
}
|
||||
@@ -1434,7 +1436,11 @@ input[type="checkbox"]+label {
|
||||
input[type="radio"]:checked+label,
|
||||
input[type="checkbox"]:checked+label {
|
||||
color: #0e0;
|
||||
color: var(--a);
|
||||
color: var(--btn-1-bg);
|
||||
}
|
||||
input[type="checkbox"]:checked+label {
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
html.dz input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
@@ -1612,6 +1618,8 @@ html {
|
||||
color: var(--btn-fg);
|
||||
background: #eee;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
border-radius: .3em;
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
@@ -1625,20 +1633,14 @@ html.c .btn,
|
||||
html.a .btn {
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.cz .btn {
|
||||
box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
|
||||
border-bottom: .2em solid #709;
|
||||
}
|
||||
html.dz .btn {
|
||||
font-size: 1em;
|
||||
box-shadow: 0 0 0 .1em #080 inset;
|
||||
}
|
||||
html.dz .tgl.btn.on {
|
||||
box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
|
||||
}
|
||||
.btn:hover {
|
||||
color: var(--btn-h-fg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
.tgl.btn.on {
|
||||
background: #000;
|
||||
@@ -1646,14 +1648,14 @@ html.dz .tgl.btn.on {
|
||||
color: #fff;
|
||||
color: var(--btn-1-fg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html.cz .tgl.btn.on {
|
||||
box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
|
||||
border-bottom: .2em solid #e90;
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
.tgl.btn.on:hover {
|
||||
background: var(--btn-1h-bg);
|
||||
color: var(--btn-1h-fg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
@@ -1858,6 +1860,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
#unpost td:nth-child(4) {
|
||||
text-align: right;
|
||||
}
|
||||
#shui,
|
||||
#rui {
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
@@ -1873,13 +1876,25 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 1em;
|
||||
z-index: 765;
|
||||
}
|
||||
#shui div+div,
|
||||
#rui div+div {
|
||||
margin-top: 1em;
|
||||
}
|
||||
#shui table,
|
||||
#rui table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#shui button {
|
||||
margin: 0 1em 0 0;
|
||||
}
|
||||
#shui .btn {
|
||||
font-size: 1em;
|
||||
}
|
||||
#shui td {
|
||||
padding: .8em 0;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td {
|
||||
padding: .2em 0 .2em .5em;
|
||||
}
|
||||
@@ -1887,10 +1902,15 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
}
|
||||
#shui td+td,
|
||||
#rui td+td,
|
||||
#shui td input[type="text"],
|
||||
#rui td input[type="text"] {
|
||||
width: 100%;
|
||||
}
|
||||
#shui td.exs input[type="text"] {
|
||||
width: 3em;
|
||||
}
|
||||
#rn_f.m td:first-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
@@ -2685,23 +2705,25 @@ html.b #u2conf a.b:hover {
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
cursor: pointer;
|
||||
background: var(--u2-o-bg);
|
||||
border-bottom: .2em solid var(--u2-o-b1);
|
||||
box-shadow: 0 .1em .3em var(--u2-o-sh) inset;
|
||||
background: var(--btn-bg);
|
||||
box-shadow: var(--btn-bs);
|
||||
border-bottom: var(--btn-bb);
|
||||
text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: var(--u2-o-1-bg);
|
||||
border-bottom: .2em solid var(--u2-o-1-b1);
|
||||
box-shadow: 0 .1em .5em var(--u2-o-1-sh);
|
||||
background: var(--btn-1-bg);
|
||||
box-shadow: var(--btn-1-bs);
|
||||
border-bottom: var(--btn-1-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em var(--u2-o-h-sh);
|
||||
border-color: var(--u2-o-h-b1);
|
||||
background: var(--u2-o-h-bg);
|
||||
background: var(--btn-h-bg);
|
||||
box-shadow: var(--btn-h-bs);
|
||||
border-bottom: var(--btn-h-bb);
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||
background: var(--u2-o-1h-bg);
|
||||
background: var(--btn-1h-bg);
|
||||
box-shadow: var(--btn-1h-bs);
|
||||
border-bottom: var(--btn-1h-bb);
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
@@ -3061,14 +3083,6 @@ html.b #ggrid>a {
|
||||
html.b .btn {
|
||||
top: -.1em;
|
||||
}
|
||||
html.b .btn,
|
||||
html.b #u2conf a.b,
|
||||
html.b #u2conf input[type="checkbox"]:not(:checked)+label {
|
||||
box-shadow: 0 .05em 0 var(--bg-d3) inset;
|
||||
}
|
||||
html.b .tgl.btn.on {
|
||||
box-shadow: 0 .05em 0 var(--btn-1-is) inset;
|
||||
}
|
||||
html.b #op_up2k.srch sup {
|
||||
color: #fc0;
|
||||
}
|
||||
|
||||
@@ -67,14 +67,14 @@
|
||||
<div id="op_up2k" class="opview"></div>
|
||||
|
||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
|
||||
<div id="tree"></div>
|
||||
|
||||
<div id="wrap">
|
||||
@@ -118,11 +118,11 @@
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||
|
||||
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
</div>
|
||||
|
||||
@@ -309,6 +309,25 @@ var Ls = {
|
||||
"fd_emore": "select at least one item to delete",
|
||||
"fc_emore": "select at least one item to cut",
|
||||
|
||||
"fs_sc": "share the folder you're in",
|
||||
"fs_ss": "share the selected files",
|
||||
"fs_just1d": "you cannot select more than one folder,\nor mix flies and folders in one selection",
|
||||
"fs_abrt": "❌ abort",
|
||||
"fs_rand": "🎲 rand.name",
|
||||
"fs_go": "✅ create share",
|
||||
"fs_name": "name",
|
||||
"fs_src": "source",
|
||||
"fs_pwd": "passwd",
|
||||
"fs_exp": "expiry",
|
||||
"fs_tmin": "min",
|
||||
"fs_thrs": "hours",
|
||||
"fs_tdays": "days",
|
||||
"fs_never": "eternal",
|
||||
"fs_pname": "optional link name; will be random if blank",
|
||||
"fs_tsrc": "the file or folder to share",
|
||||
"fs_ppwd": "optional password",
|
||||
"fs_ok": "<h6>share-URL created</h6>\npress <code>Enter/OK</code> to Clipboard\npress <code>ESC/Cancel</code> to Close\n\n",
|
||||
|
||||
"frt_dec": "may fix some cases of broken filenames\">url-decode",
|
||||
"frt_rst": "reset modified filenames back to the original ones\">↺ reset",
|
||||
"frt_abrt": "abort and close this window\">❌ cancel",
|
||||
@@ -821,11 +840,30 @@ var Ls = {
|
||||
"fr_eperm": 'kan ikke endre navn:\ndu har ikke “move”-rettigheten i denne mappen',
|
||||
"fd_eperm": 'kan ikke slette:\ndu har ikke “delete”-rettigheten i denne mappen',
|
||||
"fc_eperm": 'kan ikke klippe ut:\ndu har ikke “move”-rettigheten i denne mappen',
|
||||
"fp_eperm": 'kan ikke lime inn:\ndu har ikke "write"-rettigheten i denne mappen',
|
||||
"fp_eperm": 'kan ikke lime inn:\ndu har ikke “write”-rettigheten i denne mappen',
|
||||
"fr_emore": "velg minst én fil som skal få nytt navn",
|
||||
"fd_emore": "velg minst én fil som skal slettes",
|
||||
"fc_emore": "velg minst én fil som skal klippes ut",
|
||||
|
||||
"fs_sc": "del mappen du er i nå",
|
||||
"fs_ss": "del de valgte filene",
|
||||
"fs_just1d": "du kan ikke markere flere mapper samtidig,\neller kombinere mapper og filer",
|
||||
"fs_abrt": "❌ avbryt",
|
||||
"fs_rand": "🎲 tilfeldig navn",
|
||||
"fs_go": "✅ opprett deling",
|
||||
"fs_name": "navn",
|
||||
"fs_src": "kilde",
|
||||
"fs_pwd": "passord",
|
||||
"fs_exp": "varighet",
|
||||
"fs_tmin": "min",
|
||||
"fs_thrs": "timer",
|
||||
"fs_tdays": "dager",
|
||||
"fs_never": "for evig",
|
||||
"fs_pname": "frivillig navn (blir noe tilfeldig ellers)",
|
||||
"fs_tsrc": "fil/mappe som skal deles",
|
||||
"fs_ppwd": "frivillig passord",
|
||||
"fs_ok": "<h6>URL opprettet</h6>\ntrykk <code>Enter/OK</code> for å kopiere linken (for CTRL-V)\ntrykk <code>ESC/Avbryt</code> for å bare bekrefte\n\n",
|
||||
|
||||
"frt_dec": "kan korrigere visse ødelagte filnavn\">url-decode",
|
||||
"frt_rst": "nullstiller endringer (tilbake til de originale filnavnene)\">↺ reset",
|
||||
"frt_abrt": "avbryt og lukk dette vinduet\">❌ avbryt",
|
||||
@@ -1037,14 +1075,555 @@ var Ls = {
|
||||
|
||||
"lang_set": "passer det å laste siden på nytt?",
|
||||
},
|
||||
|
||||
"chi": {
|
||||
// 以 //m 结尾的行是未经验证的机器翻译
|
||||
"tt": "中文",
|
||||
"cols": {
|
||||
"c": "操作按钮",
|
||||
"dur": "持续时间",
|
||||
"q": "质量 / 比特率",
|
||||
"Ac": "音频编码",
|
||||
"Vc": "视频编码",
|
||||
"Fmt": "格式 / 容器",
|
||||
"Ahash": "音频校验和",
|
||||
"Vhash": "视频校验和",
|
||||
"Res": "分辨率",
|
||||
"T": "文件类型",
|
||||
"aq": "音频质量 / 比特率",
|
||||
"vq": "视频质量 / 比特率",
|
||||
"pixfmt": "子采样 / 像素结构",
|
||||
"resw": "水平分辨率",
|
||||
"resh": "垂直分辨率",
|
||||
"chs": "音频频道",
|
||||
"hz": "采样率"
|
||||
},
|
||||
|
||||
"hks": [
|
||||
[
|
||||
"misc",
|
||||
["ESC", "关闭各种窗口"],
|
||||
|
||||
"file-manager",
|
||||
["G", "切换列表 / 网格视图"],
|
||||
["T", "切换缩略图 / 图标"],
|
||||
["🡅 A/D", "缩略图大小"],
|
||||
["ctrl-K", "删除选中项"],
|
||||
["ctrl-X", "剪切选中项"],
|
||||
["ctrl-V", "粘贴到文件夹"],
|
||||
["Y", "下载选中项"],
|
||||
["F2", "重命名选中项"],
|
||||
|
||||
"file-list-sel",
|
||||
["space", "切换文件选择"],
|
||||
["🡑/🡓", "移动选择光标"],
|
||||
["ctrl 🡑/🡓", "移动光标和视图"],
|
||||
["🡅 🡑/🡓", "选择上一个/下一个文件"],
|
||||
["ctrl-A", "选择所有文件 / 文件夹"]
|
||||
], [
|
||||
"navigation",
|
||||
["B", "切换面包屑导航 / 导航窗格"],
|
||||
["I/K", "前一个/下一个文件夹"],
|
||||
["M", "父文件夹(或折叠当前文件夹)"],
|
||||
["V", "切换导航窗格中的文件夹 / 文本文件"],
|
||||
["A/D", "导航窗格大小"]
|
||||
], [
|
||||
"audio-player",
|
||||
["J/L", "上一首/下一首歌曲"],
|
||||
["U/O", "跳过10秒向前/向后"],
|
||||
["0..9", "跳转到0%..90%"],
|
||||
["P", "播放/暂停(也可以启动)"],
|
||||
["Y", "下载歌曲"]
|
||||
], [
|
||||
"image-viewer",
|
||||
["J/L, ←/→", "上一张/下一张图片"],
|
||||
["Home/End", "第一张/最后一张图片"],
|
||||
["F", "全屏"],
|
||||
["R", "顺时针旋转"],
|
||||
["🡅 R", "逆时针旋转"],
|
||||
["Y", "下载图片"]
|
||||
], [
|
||||
"video-player",
|
||||
["U/O", "跳过10秒向前/向后"],
|
||||
["P/K/Space", "播放/暂停"],
|
||||
["C", "继续播放下一段"],
|
||||
["V", "循环"],
|
||||
["M", "静音"],
|
||||
["[ and ]", "设置循环区间"]
|
||||
], [
|
||||
"textfile-viewer",
|
||||
["I/K", "前一个/下一个文件"],
|
||||
["M", "关闭文本文件"],
|
||||
["E", "编辑文本文件"],
|
||||
["S", "选择文件(用于剪切/重命名)"]
|
||||
]
|
||||
],
|
||||
|
||||
"m_ok": "确定",
|
||||
"m_ng": "取消",
|
||||
|
||||
"ht_s": "秒",
|
||||
"ht_m": "分",
|
||||
"ht_h": "时",
|
||||
"ht_d": "天",
|
||||
"ht_and": " 和 ",
|
||||
|
||||
"goh": "控制面板",
|
||||
"gop": '前一项">pre',
|
||||
"gou": '顶部">up',
|
||||
"gon": '下一项">next',
|
||||
"logout": " 登出",
|
||||
"access": " 访问",
|
||||
"ot_close": "关闭子菜单",
|
||||
"ot_search": "按属性、路径/名称、音乐标签或上述内容的任意组合搜索文件$N$N<code>foo bar</code> = 必须包含 «foo» 和 «bar»,$N<code>foo -bar</code> = 包含 «foo» 而不包含 «bar»,$N<code>^yana .opus$</code> = 以 «yama» 为开头的 «opus» 文件$N<code>"try unite"</code> = 正好包含 «try unite»$N$N时间格式为 iso-8601, 比如:$N<code>2009-12-31</code> or <code>2020-09-12 23:30:00</code>",
|
||||
"ot_unpost": "取消发布:删除最近上传的内容,或中止未完成的内容",
|
||||
"ot_bup": "bup:基础上传器,甚至支持 Netscape 4.0",
|
||||
"ot_mkdir": "mkdir:创建新目录",
|
||||
"ot_md": "new-md:创建新 Markdown 文档",
|
||||
"ot_msg": "msg:向服务器日志发送消息",
|
||||
"ot_mp": "媒体播放器选项",
|
||||
"ot_cfg": "配置选项",
|
||||
"ot_u2i": 'up2k:上传文件(如果你有写入权限),或切换到搜索模式以查看文件是否存在于服务器上,$N$N上传是可恢复的,多线程的,保留文件时间戳,但比 [🎈] (基础上传器)占用 更多的CPU<br /><br />上传过程中,此图标会变成进度指示器!',
|
||||
"ot_u2w": 'up2k:带有恢复支持的文件上传(关闭浏览器后,重新上传相同文件)$N$N多线程的,文件时间戳得以保留,但比 [🎈] (基础上传器)使用更多CPU<br /><br />上传过程中,这个图标会变成进度指示器!',
|
||||
"ot_noie": '请使用 Chrome / Firefox / Edge',
|
||||
|
||||
"ab_mkdir": "创建目录",
|
||||
"ab_mkdoc": "新建 Markdown 文档",
|
||||
"ab_msg": "发送消息到服务器日志",
|
||||
|
||||
"ay_path": "跳转到文件夹",
|
||||
"ay_files": "跳转到文件",
|
||||
|
||||
"wt_ren": "重命名选中的项目$N快捷键: F2",
|
||||
"wt_del": "删除选中的项目$N快捷键: ctrl-K",
|
||||
"wt_cut": "剪切选中的项目<small>(然后粘贴到其他地方)</small>$N快捷键: ctrl-X",
|
||||
"wt_pst": "粘贴之前剪切/复制的选择$N快捷键: ctrl-V",
|
||||
"wt_selall": "选择所有文件$N快捷键: ctrl-A(当文件被聚焦时)",
|
||||
"wt_selinv": "反转选择",
|
||||
"wt_selzip": "将选择下载为归档文件",
|
||||
"wt_seldl": "将选择下载为单独的文件$N快捷键: Y",
|
||||
"wt_npirc": "复制 IRC 格式的曲目信息",
|
||||
"wt_nptxt": "复制纯文本格式的曲目信息",
|
||||
"wt_grid": "切换网格/列表视图$N快捷键: G",
|
||||
"wt_prev": "上一曲$N快捷键: J",
|
||||
"wt_play": "播放/暂停$N快捷键: P",
|
||||
"wt_next": "下一曲$N快捷键: L",
|
||||
|
||||
"ul_par": "并行上传:",
|
||||
"ut_rand": "随机化文件名",
|
||||
"ut_u2ts": "将最后修改的时间戳$N从你的文件系统复制到服务器",
|
||||
"ut_mt": "在上传时继续哈希其他文件$N$N如果你的 CPU 或硬盘是瓶颈,可能需要禁用",
|
||||
"ut_ask": '上传开始前询问确认">💭',
|
||||
"ut_pot": "通过简化 UI 来$N提高慢设备上的上传速度",
|
||||
"ut_srch": "实际不上传,而是检查文件是否$N已经存在于服务器上(将扫描你可以读取的所有文件夹)",
|
||||
"ut_par": "通过将其设置为 0 来暂停上传$N$N如果你的连接很慢/延迟高,$N$N请增加在局域网或服务器硬盘是瓶颈时保持为 1",
|
||||
"ul_btn": "将文件/文件夹拖放到这里(或点击我)",
|
||||
"ul_btnu": "上 传",
|
||||
"ul_btns": "搜 索",
|
||||
|
||||
"ul_hash": "哈希",
|
||||
"ul_send": "发送",
|
||||
"ul_done": "完成",
|
||||
"ul_idle1": "没有排队的上传任务",
|
||||
"ut_etah": "平均 <em>hashing</em> 速度和估计完成时间",
|
||||
"ut_etau": "平均 <em>上传</em> 速度和估计完成时间",
|
||||
"ut_etat": "平均 <em>总</em> 速度和估计完成时间",
|
||||
|
||||
"uct_ok": "成功完成",
|
||||
"uct_ng": "失败/拒绝/未找到",
|
||||
"uct_done": "成功和失败的组合",
|
||||
"uct_bz": "正在哈希或上传",
|
||||
"uct_q": "空闲,待处理",
|
||||
|
||||
"utl_name": "文件名",
|
||||
"utl_ulist": "列表",
|
||||
"utl_ucopy": "复制",
|
||||
"utl_links": "链接",
|
||||
"utl_stat": "状态",
|
||||
"utl_prog": "进度",
|
||||
|
||||
"ul_flagblk": "文件已添加到队列</b><br>但另一个浏览器标签中有一个繁忙的 up2k,<br>因此等待它完成",
|
||||
"ul_btnlk": "服务器配置已将此开关锁定到此状态",
|
||||
|
||||
"udt_up": "上传",
|
||||
"udt_srch": "搜索",
|
||||
"udt_drop": "将文件拖放到这里",
|
||||
|
||||
"u_nav_m": '<h6>好的,你有什么?</h6><code>Enter</code> = 文件(一个或多个)\n<code>ESC</code> = 一个文件夹(包括子文件夹)',
|
||||
"u_nav_b": '<a href="#" id="modal-ok">文件</a><a href="#" id="modal-ng">一个文件夹</a>',
|
||||
|
||||
"cl_opts": "开关选项",
|
||||
"cl_themes": "主题",
|
||||
"cl_langs": "语言",
|
||||
"cl_ziptype": "文件夹下载",
|
||||
"cl_uopts": "up2k 开关",
|
||||
"cl_favico": "网站图标",
|
||||
"cl_bigdir": "最大目录数",
|
||||
"cl_keytype": "键位符号",
|
||||
"cl_hiddenc": "隐藏列",
|
||||
"cl_hidec": "隐藏",
|
||||
"cl_reset": "重置",
|
||||
"cl_hpick": "点击列标题以在下表中隐藏",
|
||||
"cl_hcancel": "列隐藏已取消",
|
||||
|
||||
"ct_grid": '网格视图',
|
||||
"ct_thumb": '在网格视图中,切换图标或缩略图$N快捷键: T">🖼️ 缩略图',
|
||||
"ct_csel": '在网格视图中使用 CTRL 和 SHIFT 进行文件选择">CTRL',
|
||||
"ct_ihop": '当图像查看器关闭时,滚动到最后查看的文件">滚动',
|
||||
"ct_dots": '显示隐藏文件(如果服务器允许)">隐藏文件',
|
||||
"ct_dir1st": '在文件之前排序文件夹">📁 排序',
|
||||
"ct_readme": '在文件夹列表中显示 README.md">📜 readme',
|
||||
"ct_idxh": '显示 index.html 代替文件夹列表">htm',
|
||||
"ct_sbars": '显示滚动条">⟊',
|
||||
|
||||
"cut_umod": "如果文件已存在于服务器上,将服务器的最后修改时间戳更新为与你的本地文件匹配(需要写入和删除权限)\">re📅",
|
||||
|
||||
"cut_turbo": "YOLO 按钮,你可能不想启用这个:$N$N如果你上传了大量文件并且由于某些原因需要重新启动,$N并且想要尽快继续上传,使用此选项$N$N这会用简单的 <em>"服务器上的文件大小是否相同?"</em> 替代哈希检查,$N因此如果文件内容不同,它将不会被上传$N$N上传完成后,你应该关闭此选项,$N然后重新"上传"相同的文件以让客户端验证它们\">加速",
|
||||
|
||||
"cut_datechk": "除非启用「加速」按钮,否则没有效果$N$N略微减少 YOLO 因素;检查服务器上的文件时间戳是否与你的一致$N$N<em>理论上</em> 应该能捕捉到大多数未完成/损坏的上传,$N但不能替代之后禁用「加速」进行的验证\">日期检查",
|
||||
|
||||
"cut_u2sz": "每个上传块的大小(以 MiB 为单位);较大的值跨大西洋传输效果更好。在非常不可靠的连接上尝试较小的值",
|
||||
|
||||
"cut_flag": "确保一次只有一个标签页在上传$N -- 其他标签页也必须启用此选项$N -- 仅影响同一域名下的标签页",
|
||||
|
||||
"cut_az": "按字母顺序上传文件,而不是按最小文件优先$N$N按字母顺序可以更容易地查看服务器上是否出现了问题,但在光纤/局域网上传稍微慢一些",
|
||||
|
||||
"cut_nag": "上传完成时的操作系统通知$N(仅当浏览器或标签页不活跃时)",
|
||||
"cut_sfx": "上传完成时的声音警报$N(仅当浏览器或标签页不活跃时)",
|
||||
|
||||
"cut_mt": "使用多线程加速文件哈希$N$N这使用 Web Worker 并且需要更多内存(额外最多 512 MiB)$N$N比https快30%,http快4.5倍,比Android 手机快5.3倍\">mt",
|
||||
|
||||
"cft_text": "网站图标文本(为空并刷新以禁用)",
|
||||
"cft_fg": "前景色",
|
||||
"cft_bg": "背景色",
|
||||
|
||||
"cdt_lim": "文件夹中显示的最大文件数",
|
||||
"cdt_ask": "滚动到底部时,$N不会加载更多文件,$N而是询问你该怎么做",
|
||||
|
||||
"tt_entree": "显示导航面板(目录树侧边栏)$N快捷键: B",
|
||||
"tt_detree": "显示面包屑导航$N快捷键: B",
|
||||
"tt_visdir": "滚动到选定的文件夹",
|
||||
"tt_ftree": "切换文件夹树 / 文本文件$N快捷键: V",
|
||||
"tt_pdock": "在顶部的停靠窗格中显示父文件夹",
|
||||
"tt_dynt": "随着树的展开自动增长",
|
||||
"tt_wrap": "自动换行",
|
||||
"tt_hover": "悬停时显示溢出的行$N(当鼠标光标在左侧边栏中时,滚动可能会中断)",
|
||||
|
||||
"ml_pmode": "在文件夹末尾时...",
|
||||
"ml_btns": "命令",
|
||||
"ml_tcode": "转码",
|
||||
"ml_tint": "透明度",
|
||||
"ml_eq": "音频均衡器",
|
||||
"ml_drc": "动态范围压缩器",
|
||||
|
||||
"mt_shuf": "在每个文件夹中随机播放歌曲\">🔀",
|
||||
"mt_aplay": "如果链接中有歌曲 ID,则自动播放,禁用此选项将停止在播放音乐时更新页面 URL 中的歌曲 ID,以防止在设置丢失但 URL 保留时自动播放\">自动播放▶",
|
||||
"mt_preload": "在歌曲快结束时开始加载下一首歌,以实现无缝播放\">预加载",
|
||||
"mt_prescan": "在最后一首歌结束之前切换到下一个文件夹$N保持网页浏览器活跃$N以免停止播放\">自动切换",
|
||||
"mt_fullpre": "尝试预加载整首歌;$N✅ 在 <b>不可靠</b> 连接上启用,$N❌ 可能在慢速连接上禁用\">加载整首歌",
|
||||
"mt_fau": "在手机上,如果下一首歌未能快速预加载,防止音乐停止(可能导致标签显示异常)\">☕️",
|
||||
"mt_waves": "波形进度条:$N显示音频幅度\">进度条",
|
||||
"mt_npclip": "显示当前播放歌曲的剪贴板按钮\">♪剪切板",
|
||||
"mt_octl": "操作系统集成(媒体快捷键 / OSD)\">OSD",
|
||||
"mt_oseek": "允许通过操作系统集成进行跳转$N$N注意:在某些设备(如 iPhone)上,$N这将替代下一首歌按钮\">seek",
|
||||
"mt_oscv": "在 OSD 中显示专辑封面\">封面",
|
||||
"mt_follow": "保持正在播放的曲目滚动到视图中\">🎯",
|
||||
"mt_compact": "紧凑的控制按钮\">⟎",
|
||||
"mt_uncache": "清除缓存 $N(如果你的浏览器缓存了一个损坏的歌曲副本而拒绝播放,请尝试此操作)\">uncache",
|
||||
"mt_mloop": "循环打开的文件夹\">🔁 循环",
|
||||
"mt_mnext": "加载下一个文件夹并继续\">📂 下一首",
|
||||
"mt_cflac": "将 flac / wav 转换为 opus\">flac",
|
||||
"mt_caac": "将 aac / m4a 转换为 opus\">aac",
|
||||
"mt_coth": "将所有其他(不是 mp3)转换为 opus\">oth",
|
||||
"mt_tint": "在进度条上设置背景级别(0-100)",
|
||||
"mt_eq": "启用均衡器和增益控制;$N$Nboost <code>0</code> = 标准 100% 音量(默认)$N$Nwidth <code>1 </code> = 标准立体声(默认)$Nwidth <code>0.5</code> = 50% 左右交叉反馈$Nwidth <code>0 </code> = 单声道$N$Nboost <code>-0.8</code> & width <code>10</code> = 人声移除 )$N$N启用均衡器使无缝专辑完全无缝,所以如果你在乎这一点,请保持启用,所有值设为零(除了宽度 = 1)",
|
||||
"mt_drc": "启用动态范围压缩器(音量平滑器 / 限幅器);还会启用均衡器以平衡音频,因此如果你不想要它,请将均衡器字段除了 '宽度' 外的所有字段设置为 0$N$N降低 THRESHOLD dB 以上的音频的音量;每超过 THRESHOLD dB 的 RATIO 会有 1 dB 输出,所以默认值 tresh -24 和 ratio 12 意味着它的音量不应超过 -22 dB,可以安全地将均衡器增益提高到 0.8,甚至在 ATK 0 和 RLS 如 90 的情况下提高到 1.8(仅在 Firefox 中有效;其他浏览器中 RLS 最大为 1)$N$N(见维基百科,他们解释得更好)",
|
||||
|
||||
"mb_play": "播放",
|
||||
"mm_hashplay": "播放这个音频文件?",
|
||||
"mp_breq": "需要 Firefox 82+ 或 Chrome 73+ 或 iOS 15+",
|
||||
"mm_bload": "正在加载...",
|
||||
"mm_bconv": "正在转换为 {0},请稍等...",
|
||||
"mm_opusen": "你的浏览器无法播放 aac / m4a 文件;\n现在启用转码为 opus",
|
||||
"mm_playerr": "播放失败:",
|
||||
"mm_eabrt": "播放尝试已取消",
|
||||
"mm_enet": "你的互联网连接有问题",
|
||||
"mm_edec": "这个文件可能已损坏??",
|
||||
"mm_esupp": "你的浏览器不支持这个音频格式",
|
||||
"mm_eunk": "未知错误",
|
||||
"mm_e404": "无法播放音频;错误 404:文件未找到。",
|
||||
"mm_e403": "无法播放音频;错误 403:访问被拒绝。\n\n尝试按 F5 重新加载,也许你已被注销",
|
||||
"mm_e5xx": "无法播放音频;服务器错误",
|
||||
"mm_nof": "附近找不到更多音频文件",
|
||||
"mm_prescan": "正在寻找下一首音乐...",
|
||||
"mm_scank": "找到下一首歌:",
|
||||
"mm_uncache": "缓存已清除;所有歌曲将在下次播放时重新下载",
|
||||
"mm_hnf": "那首歌不再存在",
|
||||
|
||||
"im_hnf": "那张图片不再存在",
|
||||
|
||||
"f_empty": '该文件夹为空',
|
||||
"f_chide": '隐藏列 «{0}»\n\n你可以在设置选项卡中重新显示列',
|
||||
"f_bigtxt": "这个文件大小为 {0} MiB -- 真的以文本形式查看?",
|
||||
"fbd_more": '<div id="blazy">显示 <code>{0}</code> 个文件中的 <code>{1}</code> 个;<a href="#" id="bd_more">显示 {2}</a> 或 <a href="#" id="bd_all">显示全部</a></div>',
|
||||
"fbd_all": '<div id="blazy">显示 <code>{0}</code> 个文件中的 <code>{1}</code> 个;<a href="#" id="bd_all">显示全部</a></div>',
|
||||
|
||||
"f_dls": '当前文件夹中的文件链接已\n更改为下载链接',
|
||||
|
||||
"f_partial": "要安全下载正在上传的文件,请点击没有 <code>.PARTIAL</code> 文件扩展名的同名文件。请按取消或 Escape 执行此操作。\n\n按 OK / Enter 将忽略此警告并继续下载 <code>.PARTIAL</code> 临时文件,这几乎肯定会导致数据损坏。",
|
||||
|
||||
"ft_paste": "粘贴 {0} 项$N快捷键: ctrl-V",
|
||||
"fr_eperm": '无法重命名:\n你在此文件夹中没有 “移动” 权限',
|
||||
"fd_eperm": '无法删除:\n你在此文件夹中没有 “删除” 权限',
|
||||
"fc_eperm": '无法剪切:\n你在此文件夹中没有 “移动” 权限',
|
||||
"fp_eperm": '无法粘贴:\n你在此文件夹中没有 “写入” 权限',
|
||||
"fr_emore": "选择至少一个项目以重命名",
|
||||
"fd_emore": "选择至少一个项目以删除",
|
||||
"fc_emore": "选择至少一个项目以剪切",
|
||||
|
||||
"fs_sc": "分享你所在的文件夹",
|
||||
"fs_ss": "分享选定的文件",
|
||||
"fs_just1d": "你不能同时选择多个文件夹,也不能同时选择文件夹和文件",
|
||||
"fs_abrt": "❌ 取消",
|
||||
"fs_rand": "🎲 随机名称",
|
||||
"fs_go": "✅ 创建分享",
|
||||
"fs_name": "名称",
|
||||
"fs_src": "源",
|
||||
"fs_pwd": "密码",
|
||||
"fs_exp": "过期",
|
||||
"fs_tmin": "分",
|
||||
"fs_thrs": "时",
|
||||
"fs_tdays": "天",
|
||||
"fs_never": "永久",
|
||||
"fs_pname": "链接名称可选;如果为空则随机",
|
||||
"fs_tsrc": "共享的文件或文件夹",
|
||||
"fs_ppwd": "密码可选",
|
||||
"fs_ok": "<h6>分享链接已创建</h6>\n按 <code>Enter/OK</code> 复制到剪贴板\n按 <code>ESC/Cancel</code> 关闭\n\n",
|
||||
|
||||
"frt_dec": "可能修复一些损坏的文件名\">url-decode",
|
||||
"frt_rst": "将修改后的文件名重置为原始文件名\">↺ 重置",
|
||||
"frt_abrt": "中止并关闭此窗口\">❌ 取消",
|
||||
"frb_apply": "应用重命名",
|
||||
"fr_adv": "批量 / 元数据 / 模式重命名\">高级",
|
||||
"fr_case": "区分大小写的正则表达式\">case",
|
||||
"fr_win": "Windows 安全名称;将 <code><>:"\\|?*</code> 替换为日文全角字符\">win",
|
||||
"fr_slash": "将 <code>/</code> 替换为不会导致新文件夹创建的字符\">不使用 /",
|
||||
"fr_pdel": "删除",
|
||||
"fr_pnew": "另存为",
|
||||
"fr_pname": "为你的新预设提供一个名称",
|
||||
"fr_aborted": "已中止",
|
||||
"fr_lold": "旧名称",
|
||||
"fr_lnew": "新名称",
|
||||
"fr_tags": "选定文件的标签(只读,仅供参考):",
|
||||
"fr_busy": "正在重命名 {0} 项...\n\n{1}",
|
||||
"fr_efail": "重命名失败:\n",
|
||||
"fr_nchg": "{0} 个新名称由于 <code>win</code> 和/或 <code>ikke /</code> 被更改\n\n确定继续使用这些更改的新名称?",
|
||||
|
||||
"fd_ok": "删除成功",
|
||||
"fd_err": "删除失败:\n",
|
||||
"fd_none": "没有文件被删除;可能被服务器配置(xbd)阻止?",
|
||||
"fd_busy": "正在删除 {0} 项...\n\n{1}",
|
||||
"fd_warn1": "删除这 {0} 项?",
|
||||
"fd_warn2": "<b>最后机会!</b> 无法撤销。删除?",
|
||||
|
||||
"fc_ok": "剪切 {0} 项",
|
||||
"fc_warn": '剪切 {0} 项\n\n但:只有 <b>这个</b> 浏览器标签页可以粘贴它们\n(因为选择非常庞大)',
|
||||
|
||||
"fp_ecut": "首先剪切一些文件/文件夹以粘贴/移动\n\n注意:你可以在不同的浏览器标签页之间剪切/粘贴",
|
||||
"fp_ename": "这些 {0} 项不能移动到这里(名称已存在):",
|
||||
"fp_ok": "移动成功",
|
||||
"fp_busy": "正在移动 {0} 项...\n\n{1}",
|
||||
"fp_err": "移动失败:\n",
|
||||
"fp_confirm": "将这些 {0} 项移动到这里?",
|
||||
"fp_etab": '无法从其他浏览器标签页读取剪贴板',
|
||||
"fp_name": "从你的设备上传一个文件。给它一个名字:",
|
||||
"fp_both_m": '<h6>选择粘贴内容</h6><code>Enter</code> = 从 «{1}» 移动 {0} 个文件\n<code>ESC</code> = 从你的设备上传 {2} 个文件',
|
||||
"fp_both_b": '<a href="#" id="modal-ok">移动</a><a href="#" id="modal-ng">上传</a>',
|
||||
|
||||
"mk_noname": "在左侧文本框中输入名称,然后再执行此操作 :p",
|
||||
|
||||
"tv_load": "加载文本文件:\n\n{0}\n\n{1}% ({2} 的 {3} MiB 已加载)",
|
||||
"tv_xe1": "无法加载文本文件:\n\n错误 ",
|
||||
"tv_xe2": "404,文件未找到",
|
||||
"tv_lst": "文本文件列表",
|
||||
"tvt_close": "返回到文件夹视图$N快捷键: M(或 Esc)\">❌ 关闭",
|
||||
"tvt_dl": "下载此文件$N快捷键: Y\">💾 下载",
|
||||
"tvt_prev": "显示上一个文档$N快捷键: i\">⬆ 上一个",
|
||||
"tvt_next": "显示下一个文档$N快捷键: K\">⬇ 下一个",
|
||||
"tvt_sel": "选择文件 (用于剪切/删除/...)$N快捷键: S\">选择",
|
||||
"tvt_edit": "在文本编辑器中打开文件$N快捷键: E\">✏️ 编辑",
|
||||
|
||||
"gt_vau": "不显示视频,仅播放音频\">🎧",
|
||||
"gt_msel": "启用文件选择;按住 ctrl 键点击文件以覆盖$N$N<em>当启用时:双击文件/文件夹以打开它</em>$N$N快捷键:S\">多选",
|
||||
"gt_crop": "中心裁剪缩略图\">裁剪",
|
||||
"gt_3x": "高分辨率缩略图\">3x",
|
||||
"gt_zoom": "缩放",
|
||||
"gt_chop": "剪裁",
|
||||
"gt_sort": "排序依据",
|
||||
"gt_name": "名称",
|
||||
"gt_sz": "大小",
|
||||
"gt_ts": "日期",
|
||||
"gt_ext": "类型",
|
||||
"gt_c1": "截断文件名更多(显示更少)",
|
||||
"gt_c2": "截断文件名更少(显示更多)",
|
||||
|
||||
"sm_prev": "以下是来自先前查询的搜索结果:\n ",
|
||||
"sl_close": "关闭搜索结果",
|
||||
"sl_hits": "显示 {0} 个结果",
|
||||
"sl_moar": "加载更多",
|
||||
|
||||
"s_sz": "大小",
|
||||
"s_dt": "日期",
|
||||
"s_rd": "路径",
|
||||
"s_fn": "名称",
|
||||
"s_ta": "标签",
|
||||
"s_ua": "上传于",
|
||||
"s_ad": "高级",
|
||||
"s_s1": "最小 MiB",
|
||||
"s_s2": "最大 MiB",
|
||||
"s_d1": "最早 iso8601",
|
||||
"s_d2": "最晚 iso8601",
|
||||
"s_u1": "上传后",
|
||||
"s_u2": "和/或之前",
|
||||
"s_r1": "路径包含 (空格分隔)",
|
||||
"s_f1": "名称包含 (用 -nope 否定)",
|
||||
"s_t1": "标签包含 (^=开头,$=结尾)",
|
||||
"s_a1": "特定元数据属性",
|
||||
|
||||
"md_eshow": "无法渲染 ",
|
||||
"md_off": "[📜<em>readme</em>] 在 [⚙️] 中禁用 -- 文档隐藏",
|
||||
|
||||
"badreply": "解析服务器回复失败",
|
||||
|
||||
"xhr403": "403: 访问被拒绝\n\n尝试按 F5 可能会重新登录",
|
||||
"xhr0": "未知(可能丢失连接到服务器,或服务器离线)",
|
||||
"cf_ok": "抱歉 -- DD" + wah + "oS 保护启动\n\n事情应该在大约 30 秒后恢复\n\n如果没有任何变化,按 F5 重新加载页面",
|
||||
"tl_xe1": "无法列出子文件夹:\n\n错误 ",
|
||||
"tl_xe2": "404: 文件夹未找到",
|
||||
"fl_xe1": "无法列出文件夹中的文件:\n\n错误 ",
|
||||
"fl_xe2": "404: 文件夹未找到",
|
||||
"fd_xe1": "无法创建子文件夹:\n\n错误 ",
|
||||
"fd_xe2": "404: 父文件夹未找到",
|
||||
"fsm_xe1": "无法发送消息:\n\n错误 ",
|
||||
"fsm_xe2": "404: 父文件夹未找到",
|
||||
"fu_xe1": "无法从服务器加载未发布列表:\n\n错误 ",
|
||||
"fu_xe2": "404: 文件未找到??",
|
||||
|
||||
"fz_tar": "未压缩的 gnu-tar 文件(linux / mac)",
|
||||
"fz_pax": "未压缩的 pax 格式 tar(较慢)",
|
||||
"fz_targz": "gnu-tar 带 gzip 级别 3 压缩$N$N通常非常慢,所以$N建议使用未压缩的 tar",
|
||||
"fz_tarxz": "gnu-tar 带 xz 级别 1 压缩$N$N通常非常慢,所以$N建议使用未压缩的 tar",
|
||||
"fz_zip8": "zip 带 utf8 文件名(在 windows 7 及更早版本上可能会出现问题)",
|
||||
"fz_zipd": "zip 带传统 cp437 文件名,适用于非常旧的软件",
|
||||
"fz_zipc": "cp437 带 crc32 提前计算,$N适用于 MS-DOS PKZIP v2.04g(1993 年 10 月)$N(处理时间较长,在下载开始之前)",
|
||||
|
||||
"un_m1": "你可以删除下面的近期上传(或中止未完成的上传)",
|
||||
"un_upd": "刷新",
|
||||
"un_m4": "或分享下面可见的文件:",
|
||||
"un_ulist": "显示",
|
||||
"un_ucopy": "复制",
|
||||
"un_flt": "可选过滤器: URL 必须包含",
|
||||
"un_fclr": "清除过滤器",
|
||||
"un_derr": '未发布删除失败:\n',
|
||||
"un_f5": '出现问题,请尝试刷新或按 F5',
|
||||
"un_uf5": "抱歉,你必须刷新页面(例如,按 F5 或 CTRL-R),然后才能中止此上传",
|
||||
"un_nou": '<b>警告:</b> 服务器太忙,无法显示未完成的上传;稍后点击“刷新”链接',
|
||||
"un_noc": '<b>警告:</b> 服务器配置中未启用/允许完全上传文件的取消发布',
|
||||
"un_max": "显示前 2000 个文件(使用过滤器)",
|
||||
"un_avail": "{0} 个近期上传可以被删除<br />{1} 个未完成的上传可以被中止",
|
||||
"un_m2": "按上传时间排序;最新的在前:",
|
||||
"un_no1": "哎呀!没有足够新的上传",
|
||||
"un_no2": "哎呀!没有符合该过滤器的足够新的上传",
|
||||
"un_next": "删除下面的下一个 {0} 个文件",
|
||||
"un_abrt": "中止",
|
||||
"un_del": "删除",
|
||||
"un_m3": "正在加载你的近期上传...",
|
||||
"un_busy": "正在删除 {0} 个文件...",
|
||||
|
||||
"u_https1": "你应该",
|
||||
"u_https2": "切换到 https",
|
||||
"u_https3": "以获得更好的性能",
|
||||
"u_ancient": '你的浏览器非常古老 -- 也许你应该 <a href="#" onclick="goto(\'bup\')">改用 bup</a>',
|
||||
"u_nowork": "需要 Firefox 53+ 或 Chrome 57+ 或 iOS 11+",
|
||||
"u_uri": "要从其他浏览器窗口拖放图片,\n请将其拖放到大的上传按钮上",
|
||||
"u_enpot": '切换到 <a href="#">简约 UI</a>(可能提高上传速度)',
|
||||
"u_depot": '切换到 <a href="#">精美 UI</a>(可能降低上传速度)',
|
||||
"u_gotpot": '切换到土豆 UI 以提高上传速度,\n\n随时可以不同意并切换回去!',
|
||||
"u_ever": "这是基本的上传工具; up2k 需要至少<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
|
||||
"u_su2k": '这是基本的上传工具;<a href="#" id="u2yea">up2k</a> 更好',
|
||||
"u_ewrite": '你对这个文件夹没有写入权限',
|
||||
"u_eread": '你对这个文件夹没有读取权限',
|
||||
"u_enoi": '文件搜索在服务器配置中未启用',
|
||||
"u_badf": '这些 {0} 个文件(共 {1} 个)被跳过,可能是由于文件系统权限:\n\n',
|
||||
"u_blankf": '这些 {0} 个文件(共 {1} 个)是空白的;是否仍然上传?\n\n',
|
||||
"u_just1": '\n也许如果你只选择一个文件会更好',
|
||||
"u_ff_many": "如果你使用的是 <b>Linux / MacOS / Android,</b> 那么这个文件数量 <a href=\"https://bugzilla.mozilla.org/show_bug.cgi?id=1790500\" target=\"_blank\"><em>可能</em> 崩溃 Firefox!</a>\n如果发生这种情况,请再试一次(或使用 Chrome)。",
|
||||
"u_up_life": "此上传将在 {0} 后从服务器删除",
|
||||
"u_asku": '将这些 {0} 个文件上传到 <code>{1}</code>',
|
||||
"u_unpt": "你可以使用左上角的 🧯 撤销/删除此上传",
|
||||
"u_etadone": '完成 ({0}, {1} 个文件)',
|
||||
"u_etaprep": '(准备上传)',
|
||||
"u_hashdone": '哈希完成',
|
||||
"u_hashing": '哈希',
|
||||
"u_fixed": "好! 已修复 👍",
|
||||
"u_cuerr": "上传块 {0} 的 {1} 失败;\n可能无害,继续中\n\n文件:{2}",
|
||||
"u_cuerr2": "服务器拒绝上传(块 {0} 的 {1});\n稍后重试\n\n文件:{2}\n\n错误 ",
|
||||
"u_ehstmp": "将重试;见右下角",
|
||||
"u_ehsfin": "服务器拒绝了最终上传请求;正在重试...",
|
||||
"u_ehssrch": "服务器拒绝了搜索请求;正在重试...",
|
||||
"u_ehsinit": "服务器拒绝了启动上传请求;正在重试...",
|
||||
"u_eneths": "进行上传握手时的网络错误;正在重试...",
|
||||
"u_enethd": "测试目标存在时的网络错误;正在重试...",
|
||||
"u_cbusy": "等待服务器在网络故障后再次信任我们...",
|
||||
"u_ehsdf": "服务器磁盘空间不足!\n\n将继续重试,以防有人\n释放足够的空间以继续",
|
||||
"u_emtleak1": "看起来你的网页浏览器可能有内存泄漏;\n请",
|
||||
"u_emtleak2": ' <a href="{0}">切换到 https(推荐)</a> 或 ',
|
||||
"u_emtleak3": ' ',
|
||||
"u_emtleakc": '尝试以下操作:\n<ul><li>按 <code>F5</code> 刷新页面</li><li>然后在 <code>⚙️ 设置</code> 中禁用 <code>mt</code> 按钮</li><li>然后再次尝试上传</li></ul>上传会稍微慢一些,不过没关系。\n抱歉带来麻烦!\n\nPS:chrome v107 <a href="https://bugs.chromium.org/p/chromium/issues/detail?id=1354816" target="_blank">已修复</a>此问题',
|
||||
"u_emtleakf": '尝试以下操作:\n<ul><li>按 <code>F5</code> 刷新页面</li><li>然后在上传 UI 中启用 <code>🥔</code>(土豆)<li>然后再次尝试上传</li></ul>\nPS: firefox <a href="https://bugzilla.mozilla.org/show_bug.cgi?id=1790500" target="_blank">希望会在某个时点修复此问题</a>',
|
||||
"u_s404": "在服务器上未找到",
|
||||
"u_expl": "解释",
|
||||
"u_maxconn": "大多数浏览器限制为 6,但 Firefox 允许你通过 <code>connections-per-server</code> 在 <code>about:config</code> 中提高限制",
|
||||
"u_tu": '<p class="warn">警告:启用了 turbo,<span> 客户端可能无法检测和恢复不完整的上传;查看 turbo 按钮工具提示</span></p>',
|
||||
"u_ts": '<p class="warn">警告:启用了 turbo,<span> 搜索结果可能不正确;查看 turbo 按钮工具提示</span></p>',
|
||||
"u_turbo_c": "服务器配置中禁用了 turbo",
|
||||
"u_turbo_g": "禁用 turbo,因为你在此卷中没有\n目录列表权限",
|
||||
"u_life_cfg": '自动删除时间为 <input id="lifem" p="60" /> 分钟(或 <input id="lifeh" p="3600" /> 小时)',
|
||||
"u_life_est": '上传将在 <span id="lifew" tt="本地时间">---</span> 删除',
|
||||
"u_life_max": '此文件夹强制执行\n最大寿命为 {0}',
|
||||
"u_unp_ok": '允许取消发布 {0}',
|
||||
"u_unp_ng": '取消发布将不被允许',
|
||||
"ue_ro": '你对这个文件夹的访问是只读的\n\n',
|
||||
"ue_nl": '你当前未登录',
|
||||
"ue_la": '你当前以 "{0}" 登录',
|
||||
"ue_sr": '你当前处于文件搜索模式\n\n通过点击大搜索按钮旁边的放大镜 🔎 切换到上传模式,然后重试上传\n\n抱歉',
|
||||
"ue_ta": '尝试再次上传,现在应该能正常工作',
|
||||
"ur_1uo": "成功:文件上传成功",
|
||||
"ur_auo": "成功:所有 {0} 个文件上传成功",
|
||||
"ur_1so": "成功:文件在服务器上找到",
|
||||
"ur_aso": "成功:所有 {0} 个文件在服务器上找到",
|
||||
"ur_1un": "上传失败,抱歉",
|
||||
"ur_aun": "所有 {0} 个上传失败,抱歉",
|
||||
"ur_1sn": "文件未在服务器上找到",
|
||||
"ur_asn": "这些 {0} 个文件未在服务器上找到",
|
||||
"ur_um": "完成;\n{0} 个上传成功,\n{1} 个上传失败,抱歉",
|
||||
"ur_sm": "完成;\n{0} 个文件在服务器上找到,\n{1} 个文件未在服务器上找到",
|
||||
|
||||
"lang_set": "刷新以使更改生效?",
|
||||
},
|
||||
};
|
||||
|
||||
var LANGS = ["eng", "nor"];
|
||||
var LANGS = ["eng", "nor", "chi"];
|
||||
|
||||
if (window.langmod)
|
||||
langmod();
|
||||
|
||||
var L = Ls[sread("cpp_lang", LANGS) || lang] || Ls.eng || Ls.nor;
|
||||
for (var a = LANGS.length; a > 0;)
|
||||
if (!Ls[LANGS[--a]])
|
||||
LANGS.splice(a, 1);
|
||||
|
||||
var L = Ls[sread("cpp_lang", LANGS) || lang] ||
|
||||
Ls.eng || Ls.nor || Ls.chi;
|
||||
|
||||
for (var a = 0; a < LANGS.length; a++) {
|
||||
for (var b = a + 1; b < LANGS.length; b++) {
|
||||
@@ -1069,16 +1648,16 @@ modal.load();
|
||||
|
||||
// toolbar
|
||||
ebi('ops').innerHTML = (
|
||||
'<a href="#" data-dest="" tt="' + L.ot_close + '">--</a>' +
|
||||
'<a href="#" data-perm="read" data-dep="idx" data-dest="search" tt="' + L.ot_search + '">🔎</a>' +
|
||||
(have_del ? '<a href="#" data-dest="unpost" tt="' + L.ot_unpost + '">🧯</a>' : '') +
|
||||
'<a href="#" data-dest="up2k">🚀</a>' +
|
||||
'<a href="#" data-perm="write" data-dest="bup" tt="' + L.ot_bup + '">🎈</a>' +
|
||||
'<a href="#" data-perm="write" data-dest="mkdir" tt="' + L.ot_mkdir + '">📂</a>' +
|
||||
'<a href="#" data-perm="read write" data-dest="new_md" tt="' + L.ot_md + '">📝</a>' +
|
||||
'<a href="#" data-dest="msg" tt="' + L.ot_msg + '">📟</a>' +
|
||||
'<a href="#" data-dest="player" tt="' + L.ot_mp + '">🎺</a>' +
|
||||
'<a href="#" data-dest="cfg" tt="' + L.ot_cfg + '">⚙️</a>' +
|
||||
'<a href="#" id="opa_x" data-dest="" tt="' + L.ot_close + '">--</a>' +
|
||||
'<a href="#" id="opa_srch" data-perm="read" data-dep="idx" data-dest="search" tt="' + L.ot_search + '">🔎</a>' +
|
||||
(have_del ? '<a href="#" id="opa_del" data-dest="unpost" tt="' + L.ot_unpost + '">🧯</a>' : '') +
|
||||
'<a href="#" id="opa_up" data-dest="up2k">🚀</a>' +
|
||||
'<a href="#" id="opa_bup" data-perm="write" data-dest="bup" tt="' + L.ot_bup + '">🎈</a>' +
|
||||
'<a href="#" id="opa_mkd" data-perm="write" data-dest="mkdir" tt="' + L.ot_mkdir + '">📂</a>' +
|
||||
'<a href="#" id="opa_md" data-perm="read write" data-dest="new_md" tt="' + L.ot_md + '">📝</a>' +
|
||||
'<a href="#" id="opa_msg" data-dest="msg" tt="' + L.ot_msg + '">📟</a>' +
|
||||
'<a href="#" id="opa_auc" data-dest="player" tt="' + L.ot_mp + '">🎺</a>' +
|
||||
'<a href="#" id="opa_cfg" data-dest="cfg" tt="' + L.ot_cfg + '">⚙️</a>' +
|
||||
(IE ? '<span id="noie">' + L.ot_noie + '</span>' : '') +
|
||||
'<div id="opdesc"></div>'
|
||||
);
|
||||
@@ -1089,6 +1668,7 @@ ebi('widget').innerHTML = (
|
||||
'<div id="wtoggle">' +
|
||||
'<span id="wfs"></span>' +
|
||||
'<span id="wfm"><a' +
|
||||
' href="#" id="fshr" tt="' + L.wt_shr + '">📨<span>share</span></a><a' +
|
||||
' href="#" id="fren" tt="' + L.wt_ren + '">✎<span>name</span></a><a' +
|
||||
' href="#" id="fdel" tt="' + L.wt_del + '">⌫<span>del.</span></a><a' +
|
||||
' href="#" id="fcut" tt="' + L.wt_cut + '">✂<span>cut</span></a><a' +
|
||||
@@ -2543,6 +3123,7 @@ function mpause(e) {
|
||||
if (!dist || !mp.au)
|
||||
return true;
|
||||
|
||||
dist *= -1;
|
||||
mp.setvol(mp.vol + dist / 500);
|
||||
vbar.draw();
|
||||
ev(e);
|
||||
@@ -3690,6 +4271,7 @@ var fileman = (function () {
|
||||
bdel = ebi('fdel'),
|
||||
bcut = ebi('fcut'),
|
||||
bpst = ebi('fpst'),
|
||||
bshr = ebi('fshr'),
|
||||
t_paste,
|
||||
r = {};
|
||||
|
||||
@@ -3704,17 +4286,32 @@ var fileman = (function () {
|
||||
r.clip = jread('fman_clip', []).slice(1);
|
||||
|
||||
var sel = msel.getsel(),
|
||||
nsel = sel.length;
|
||||
nsel = sel.length,
|
||||
enren = nsel,
|
||||
endel = nsel,
|
||||
encut = nsel,
|
||||
enpst = r.clip && r.clip.length,
|
||||
hren = !(have_mv && has(perms, 'write') && has(perms, 'move')),
|
||||
hdel = !(have_del && has(perms, 'delete')),
|
||||
hcut = !(have_mv && has(perms, 'move')),
|
||||
hpst = !(have_mv && has(perms, 'write')),
|
||||
hshr = !(have_shr && acct != '*' && (has(perms, 'read') || has(perms, 'write')));
|
||||
|
||||
clmod(bren, 'en', nsel);
|
||||
clmod(bdel, 'en', nsel);
|
||||
clmod(bcut, 'en', nsel);
|
||||
clmod(bpst, 'en', r.clip && r.clip.length);
|
||||
if (!(enren || endel || encut || enpst))
|
||||
hren = hdel = hcut = hpst = true;
|
||||
|
||||
clmod(bren, 'en', enren);
|
||||
clmod(bdel, 'en', endel);
|
||||
clmod(bcut, 'en', encut);
|
||||
clmod(bpst, 'en', enpst);
|
||||
clmod(bshr, 'en', 1);
|
||||
|
||||
clmod(bren, 'hide', hren);
|
||||
clmod(bdel, 'hide', hdel);
|
||||
clmod(bcut, 'hide', hcut);
|
||||
clmod(bpst, 'hide', hpst);
|
||||
clmod(bshr, 'hide', hshr);
|
||||
|
||||
clmod(bren, 'hide', !(have_mv && has(perms, 'write') && has(perms, 'move')));
|
||||
clmod(bdel, 'hide', !(have_del && has(perms, 'delete')));
|
||||
clmod(bcut, 'hide', !(have_mv && has(perms, 'move')));
|
||||
clmod(bpst, 'hide', !(have_mv && has(perms, 'write')));
|
||||
clmod(ebi('wfm'), 'act', QS('#wfm a.en:not(.hide)'));
|
||||
|
||||
var wfs = ebi('wfs'), h = '';
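
The hunk above replaces the old per-button `clmod` calls with a small truth table: rename/delete/cut are enabled when something is selected, paste when the clipboard holds entries, each button is hidden outright when the volume permissions rule that action out, and if nothing at all is actionable the whole row is hidden. A minimal standalone sketch of that decision logic; the function name, the `have` object and the example inputs are made up for illustration, and the real code uses the page's `has()` helper and globals instead:

// illustrative sketch only; mirrors the en/hide rules in the hunk above
function toolbarState(perms, nsel, clipLen, acct, have) {
    var enren = nsel, endel = nsel, encut = nsel, enpst = clipLen,
        hren = !(have.mv && perms.includes('write') && perms.includes('move')),
        hdel = !(have.del && perms.includes('delete')),
        hcut = !(have.mv && perms.includes('move')),
        hpst = !(have.mv && perms.includes('write')),
        hshr = !(have.shr && acct != '*' && (perms.includes('read') || perms.includes('write')));

    if (!(enren || endel || encut || enpst))
        hren = hdel = hcut = hpst = true; // nothing selected, nothing to paste: hide the row

    return {enren: enren, endel: endel, encut: encut, enpst: enpst,
            hren: hren, hdel: hdel, hcut: hcut, hpst: hpst, hshr: hshr};
}

// example: two files selected, empty clipboard, read+write+move but no delete
// -> rename/cut shown and enabled, delete hidden, paste shown but disabled, share shown
toolbarState(['read', 'write', 'move'], 2, 0, 'ed', {mv: true, del: true, shr: true});
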
|
||||
@@ -3725,6 +4322,7 @@ var fileman = (function () {
|
||||
clmod(wfs, 'act', h);
|
||||
|
||||
bpst.setAttribute('tt', L.ft_paste.format(r.clip.length));
|
||||
bshr.setAttribute('tt', nsel ? L.fs_ss : L.fs_sc);
|
||||
};
|
||||
|
||||
r.fsi = function (sel) {
|
||||
@@ -3762,6 +4360,181 @@ var fileman = (function () {
|
||||
return ret;
|
||||
};
|
||||
|
||||
r.share = function (e) {
|
||||
ev(e);
|
||||
|
||||
var vp = uricom_dec(get_evpath()),
|
||||
sel = msel.getsel(),
|
||||
fns = [];
|
||||
|
||||
for (var a = 0; a < sel.length; a++)
|
||||
fns.push(uricom_dec(noq_href(ebi(sel[a].id))));
|
||||
|
||||
if (fns.length == 1 && fns[0].endsWith('/'))
|
||||
vp = fns.pop();
|
||||
|
||||
for (var a = 0; a < fns.length; a++)
|
||||
if (fns[a].endsWith('/'))
|
||||
return toast.err(10, L.fs_just1d);
|
||||
|
||||
var shui = ebi('shui');
|
||||
if (!shui) {
|
||||
shui = mknod('div', 'shui');
|
||||
document.body.appendChild(shui);
|
||||
}
|
||||
shui.style.display = 'block';
|
||||
|
||||
var html = [
|
||||
'<div>',
|
||||
'<table>',
|
||||
'<tr><td colspan="2">',
|
||||
'<button id="sh_abrt">' + L.fs_abrt + '</button>',
|
||||
'<button id="sh_rand">' + L.fs_rand + '</button>',
|
||||
'<button id="sh_apply">' + L.fs_go + '</button>',
|
||||
'</td></tr>',
|
||||
'<tr><td>' + L.fs_name + '</td><td><input type="text" id="sh_k" ' + NOAC + ' placeholder=" ' + L.fs_pname + '" /></td></tr>',
|
||||
'<tr><td>' + L.fs_src + '</td><td><input type="text" id="sh_vp" ' + NOAC + ' readonly tt="' + L.fs_tsrc + '" /></td></tr>',
|
||||
'<tr><td>' + L.fs_pwd + '</td><td><input type="text" id="sh_pw" ' + NOAC + ' placeholder=" ' + L.fs_ppwd + '" /></td></tr>',
|
||||
'<tr><td>' + L.fs_exp + '</td><td class="exs">',
|
||||
'<input type="text" id="sh_exm" ' + NOAC + ' /> ' + L.fs_tmin + ' / ',
|
||||
'<input type="text" id="sh_exh" ' + NOAC + ' /> ' + L.fs_thrs + ' / ',
|
||||
'<input type="text" id="sh_exd" ' + NOAC + ' /> ' + L.fs_tdays + ' / ',
|
||||
'<button id="sh_noex">' + L.fs_never + '</button>',
|
||||
'</td></tr>',
|
||||
'<tr><td>perms</td><td class="sh_axs">',
|
||||
];
|
||||
for (var a = 0; a < perms.length; a++)
|
||||
if (perms[a] != 'admin')
|
||||
html.push('<a href="#" class="tgl btn">' + perms[a] + '</a>');
|
||||
|
||||
html.push('</td></tr></div');
|
||||
shui.innerHTML = html.join('\n');
|
||||
|
||||
var sh_rand = ebi('sh_rand'),
|
||||
sh_abrt = ebi('sh_abrt'),
|
||||
sh_apply = ebi('sh_apply'),
|
||||
sh_noex = ebi('sh_noex'),
|
||||
exm = ebi('sh_exm'),
|
||||
exh = ebi('sh_exh'),
|
||||
exd = ebi('sh_exd'),
|
||||
sh_k = ebi('sh_k'),
|
||||
sh_vp = ebi('sh_vp'),
|
||||
sh_pw = ebi('sh_pw');
|
||||
|
||||
function setexp(a, b) {
|
||||
a = parseFloat(a);
|
||||
if (!isNum(a))
|
||||
return;
|
||||
|
||||
var v = a * b;
|
||||
swrite('fsh_exp', v);
|
||||
|
||||
if (exm.value != v) exm.value = Math.round(v * 10) / 10; v /= 60;
|
||||
if (exh.value != v) exh.value = Math.round(v * 10) / 10; v /= 24;
|
||||
if (exd.value != v) exd.value = Math.round(v * 10) / 10;
|
||||
}
|
||||
function setdef() {
|
||||
setexp(icfg_get('fsh_exp', 60 * 24), 1);
|
||||
}
|
||||
setdef();
|
||||
|
||||
exm.oninput = function () { setexp(this.value, 1); };
|
||||
exh.oninput = function () { setexp(this.value, 60); };
|
||||
exd.oninput = function () { setexp(this.value, 60 * 24); };
|
||||
exm.onfocus = exh.onfocus = exd.onfocus = function () {
|
||||
this.value = '';
|
||||
};
|
||||
sh_noex.onclick = function () {
|
||||
setexp(0, 1);
|
||||
};
|
||||
exm.onblur = exh.onblur = exd.onblur = setdef;
|
||||
|
||||
exm.onkeydown = exh.onkeydown = exd.onkeydown =
|
||||
sh_k.onkeydown = sh_pw.onkeydown = function (e) {
|
||||
var kc = (e.code || e.key) + '';
|
||||
if (kc.endsWith('Enter'))
|
||||
sh_apply.click();
|
||||
};
|
||||
|
||||
sh_abrt.onclick = function () {
|
||||
shui.parentNode.removeChild(shui);
|
||||
};
|
||||
sh_rand.onclick = function () {
|
||||
sh_k.value = randstr(12).replace(/l/g, 'n');
|
||||
};
|
||||
tt.att(shui);
|
||||
|
||||
var pbtns = QSA('#shui .sh_axs a');
|
||||
for (var a = 0; a < pbtns.length; a++)
|
||||
pbtns[a].onclick = shspf;
|
||||
|
||||
function shspf() {
|
||||
clmod(this, 'on', 't');
|
||||
}
|
||||
clmod(pbtns[0], 'on', 1);
|
||||
|
||||
var vpt = vp;
|
||||
if (fns.length) {
|
||||
vpt = fns.length + ' files in ' + vp + ' '
|
||||
for (var a = 0; a < fns.length; a++)
|
||||
vpt += '「' + fns[a].split('/').pop() + '」';
|
||||
}
|
||||
sh_vp.value = vpt;
|
||||
|
||||
sh_k.oninput = function (e) {
|
||||
var v = this.value,
|
||||
v2 = v.replace(/[^0-9a-zA-Z-]/g, '_');
|
||||
|
||||
if (v != v2)
|
||||
this.value = v2;
|
||||
};
|
||||
|
||||
function shr_cb() {
|
||||
toast.hide();
|
||||
var surl = this.responseText;
|
||||
if (this.status !== 201 || !/^created share:/.exec(surl)) {
|
||||
shui.style.display = 'block';
|
||||
var msg = unpre(surl);
|
||||
toast.err(9, msg);
|
||||
return;
|
||||
}
|
||||
surl = surl.slice(15);
|
||||
modal.confirm(L.fs_ok + esc(surl), function() {
|
||||
cliptxt(surl, function () {
|
||||
toast.ok(2, 'copied to clipboard');
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
sh_apply.onclick = function () {
|
||||
if (!sh_k.value)
|
||||
sh_rand.click();
|
||||
|
||||
var plist = [];
|
||||
for (var a = 0; a < pbtns.length; a++)
|
||||
if (clgot(pbtns[a], 'on'))
|
||||
plist.push(pbtns[a].textContent);
|
||||
|
||||
shui.style.display = 'none';
|
||||
toast.inf(30, "creating share...");
|
||||
|
||||
var body = {
|
||||
"k": sh_k.value,
|
||||
"vp": fns.length ? fns : [sh_vp.value],
|
||||
"pw": sh_pw.value,
|
||||
"exp": exm.value,
|
||||
"perms": plist,
|
||||
};
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', SR + '/?share', true);
|
||||
xhr.setRequestHeader('Content-Type', 'text/plain');
|
||||
xhr.onload = xhr.onerror = shr_cb;
|
||||
xhr.send(JSON.stringify(body));
|
||||
};
|
||||
|
||||
setTimeout(sh_pw.focus.bind(sh_pw), 1);
|
||||
};
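
Stripped of the dialog plumbing, the share that `sh_apply.onclick` builds above is a single JSON POST; a rough sketch of the same request on its own, where the `?share` endpoint, the field names and the `201` / `created share:` reply are taken from this diff and the concrete values are made-up examples:

// illustrative only; XHR and SR are the page globals already used above
var body = {
    "k": "weeklyphotos",       // link name (the UI picks a random one if blank)
    "vp": ["pics/2024-07/"],   // file(s) or folder to share
    "pw": "hunter2",           // optional password
    "exp": "1440",             // expiry in minutes; the "eternal" button sends 0
    "perms": ["read"]          // subset of your own permissions
};
var xhr = new XHR();
xhr.open('POST', SR + '/?share', true);
xhr.setRequestHeader('Content-Type', 'text/plain');
xhr.onload = function () {
    if (this.status == 201)
        console.log(this.responseText); // "created share: <share-URL>"
};
xhr.send(JSON.stringify(body));
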
|
||||
|
||||
r.rename = function (e) {
|
||||
ev(e);
|
||||
if (clgot(bren, 'hide'))
|
||||
@@ -4338,6 +5111,7 @@ var fileman = (function () {
|
||||
bdel.onclick = r.delete;
|
||||
bcut.onclick = r.cut;
|
||||
bpst.onclick = r.paste;
|
||||
bshr.onclick = r.share;
|
||||
|
||||
return r;
|
||||
})();
|
||||
@@ -5348,6 +6122,9 @@ var ahotkeys = function (e) {
|
||||
if (ebi('rn_cancel'))
|
||||
return ebi('rn_cancel').click();
|
||||
|
||||
if (ebi('sh_abrt'))
|
||||
return ebi('sh_abrt').click();
|
||||
|
||||
if (QS('.opview.act'))
|
||||
return QS('#ops>a').click();
|
||||
|
||||
@@ -7402,10 +8179,11 @@ var settheme = (function () {
|
||||
|
||||
function setlang(e) {
|
||||
ev(e);
|
||||
var t = L.lang_set;
|
||||
L = Ls[this.textContent];
|
||||
swrite("cpp_lang", this.textContent);
|
||||
freshen();
|
||||
modal.confirm(Ls.eng.lang_set + "\n\n" + Ls.nor.lang_set, location.reload.bind(location), null);
|
||||
modal.confirm(L.lang_set + "\n\n" + t, location.reload.bind(location), null);
|
||||
};
|
||||
|
||||
freshen();
|
||||
@@ -8030,7 +8808,7 @@ function sandbox(tgt, rules, cls, html) {
|
||||
}
|
||||
|
||||
html = '<html class="iframe ' + document.documentElement.className +
|
||||
'"><head><style>html{background:#eee;color:#000}\n' + globalcss() +
|
||||
'"><head><style>html{background:#eee;color:#000}</style><style>' + globalcss() +
|
||||
'</style><base target="_parent"></head><body id="b" class="logue ' + cls + '">' + html +
|
||||
'<script>' + env + '</script>' + sandboxjs() +
|
||||
'<script>var d=document.documentElement,TS="' + TS + '",' +
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
a{display:block}
|
||||
</style>
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -52,11 +51,11 @@
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
{%- if logues[1] %}
|
||||
<div>{{ logues[1] }}</div><br />
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||
|
||||
</body>
|
||||
|
||||
@@ -49,7 +49,7 @@
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea autocomplete="off">
|
||||
@@ -125,7 +125,7 @@ write markdown (most html is 🙆 too)
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
@@ -159,5 +159,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
||||
{%- if edit %}
|
||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -53,5 +53,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
|
||||
@@ -46,6 +46,9 @@
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
82
copyparty/web/shares.css
Normal file
@@ -0,0 +1,82 @@
|
||||
html {
|
||||
color: #333;
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
font-family: var(--font-main), sans-serif;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
margin: 2em auto;
|
||||
padding: 0 1em 3em 1em;
|
||||
line-height: 2.3em;
|
||||
}
|
||||
#wrap>span {
|
||||
margin: 0 0 0 1em;
|
||||
border-bottom: 1px solid #999;
|
||||
}
|
||||
li {
|
||||
margin: 1em 0;
|
||||
}
|
||||
a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .6em;
|
||||
margin: 0 .3em;
|
||||
}
|
||||
td a {
|
||||
margin: 0;
|
||||
}
|
||||
#w {
|
||||
color: #fff;
|
||||
background: #940;
|
||||
border-color: #b70;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
left: .2em;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
position: relative;
|
||||
}
|
||||
th {
|
||||
top: -1px;
|
||||
position: sticky;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
td, th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td+td+td+td+td+td+td+td {
|
||||
font-family: var(--font-mono), monospace, monospace;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.z {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.z a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.z th {
|
||||
background: #222;
|
||||
}
|
||||
html.bz {
|
||||
color: #bbd;
|
||||
background: #11121d;
|
||||
}
|
||||
76
copyparty/web/shares.html
Normal file
@@ -0,0 +1,76 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#{{ tcolor }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a id="a" href="{{ r }}/?shares" class="af">refresh</a>
|
||||
<a id="a" href="{{ r }}/?h" class="af">control-panel</a>
|
||||
|
||||
<span>axs = perms (read,write,move,delet)</span>
|
||||
<span>nf = numFiles (0=dir)</span>
|
||||
<span>min/hrs = time left</span>
|
||||
|
||||
<table id="tab"><thead><tr>
|
||||
<th>delete</th>
|
||||
<th>sharekey</th>
|
||||
<th>pw</th>
|
||||
<th>source</th>
|
||||
<th>axs</th>
|
||||
<th>nf</th>
|
||||
<th>user</th>
|
||||
<th>created</th>
|
||||
<th>expires</th>
|
||||
<th>min</th>
|
||||
<th>hrs</th>
|
||||
<th>add time</th>
|
||||
</tr></thead><tbody>
|
||||
{% for k, pw, vp, pr, st, un, t0, t1 in rows %}
|
||||
<tr>
|
||||
<td><a href="#" k="{{ k }}">delete</a></td>
|
||||
<td><a href="{{ r }}{{ shr }}{{ k }}">{{ k }}</a></td>
|
||||
<td>{{ pw }}</td>
|
||||
<td><a href="{{ r }}/{{ vp|e }}">{{ vp|e }}</a></td>
|
||||
<td>{{ pr }}</td>
|
||||
<td>{{ st }}</td>
|
||||
<td>{{ un|e }}</td>
|
||||
<td>{{ t0 }}</td>
|
||||
<td>{{ t1 }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 60) | round(1) }}</td>
|
||||
<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 3600) | round(1) }}</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody></table>
|
||||
{% if not rows %}
|
||||
(you don't have any active shares btw)
|
||||
{% endif %}
|
||||
<script>
|
||||
|
||||
var SR = {{ r|tojson }},
|
||||
shr="{{ shr }}",
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
var STG = window.localStorage;
|
||||
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/shares.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
56
copyparty/web/shares.js
Normal file
@@ -0,0 +1,56 @@
|
||||
var t = QSA('a[k]');
|
||||
for (var a = 0; a < t.length; a++)
|
||||
t[a].onclick = rm;
|
||||
|
||||
function rm() {
|
||||
var u = SR + shr + uricom_enc(this.getAttribute('k')) + '?eshare=rm',
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function bump() {
|
||||
var k = this.closest('tr').getElementsByTagName('a')[0].getAttribute('k'),
|
||||
u = SR + shr + uricom_enc(k) + '?eshare=' + this.value,
|
||||
xhr = new XHR();
|
||||
|
||||
xhr.open('POST', u, true);
|
||||
xhr.onload = xhr.onerror = cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function cb() {
|
||||
if (this.status !== 200)
|
||||
return modal.alert('<h6>server error</h6>' + esc(unpre(this.responseText)));
|
||||
|
||||
document.location = '?shares';
|
||||
}
|
||||
|
||||
(function() {
|
||||
var tab = ebi('tab').tBodies[0],
|
||||
tr = Array.prototype.slice.call(tab.rows, 0);
|
||||
|
||||
var buf = [];
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
for (var b = 7; b < 9; b++)
|
||||
buf.push(parseInt(tr[a].cells[b].innerHTML));
|
||||
|
||||
var ibuf = 0;
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
for (var b = 7; b < 9; b++) {
|
||||
var v = buf[ibuf++];
|
||||
tr[a].cells[b].innerHTML =
|
||||
v ? unix2iso(v).replace(' ', ', ') : 'never';
|
||||
}
|
||||
|
||||
for (var a = 0; a < tr.length; a++)
|
||||
tr[a].cells[11].innerHTML =
|
||||
'<button value="1">1min</button> ' +
|
||||
'<button value="60">1h</button>';
|
||||
|
||||
var btns = QSA('td button'), aa = btns.length;
|
||||
for (var a = 0; a < aa; a++)
|
||||
btns[a].onclick = bump;
|
||||
})();
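
Both handlers above hit the same endpoint: a POST to the share's own URL with an `eshare` query parameter, where `rm` deletes the share and a number extends its lifetime by that many minutes (the two buttons added per row send 1 and 60). A hedged sketch of the same calls outside the table, with a made-up share key:

// illustrative only; SR, shr, uricom_enc and cb are defined by the page and the code above
function eshare(key, action) {   // action: 'rm', or minutes to add
    var xhr = new XHR();
    xhr.open('POST', SR + shr + uricom_enc(key) + '?eshare=' + action, true);
    xhr.onload = xhr.onerror = cb;
    xhr.send();
}

eshare('weeklyphotos', 'rm');    // drop the share
eshare('weeklyphotos', 60);      // push its expiry one hour further out
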
|
||||
@@ -182,13 +182,18 @@ html.z a.g {
|
||||
border-color: #af4;
|
||||
box-shadow: 0 .3em 1em #7d0;
|
||||
}
|
||||
form {
|
||||
line-height: 2.5em;
|
||||
}
|
||||
#x,
|
||||
input {
|
||||
color: #a50;
|
||||
background: #fff;
|
||||
border: 1px solid #a50;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
border-radius: .3em;
|
||||
padding: .25em .6em;
|
||||
margin: 0 .3em 0 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
input::placeholder {
|
||||
font-size: 1.2em;
|
||||
@@ -197,6 +202,7 @@ input::placeholder {
|
||||
opacity: 0.64;
|
||||
color: #930;
|
||||
}
|
||||
#x,
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
{%- if not in_shr %}
|
||||
<a id="a" href="{{ r }}/?h" class="af">refresh</a>
|
||||
<a id="v" href="{{ r }}/?hc" class="af">connect</a>
|
||||
|
||||
@@ -21,7 +22,8 @@
|
||||
<p id="b">howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname|e }}</strong></p>
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
@@ -76,8 +78,43 @@
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">client config:</h1>
|
||||
{%- if in_shr %}
|
||||
<h1 id="z">unlock this share:</h1>
|
||||
<div>
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" id="ls" value="Unlock" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- else %}
|
||||
<h1 id="l">login for more:</h1>
|
||||
<div>
|
||||
<form id="lf" method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" id="la" name="act" value="login" />
|
||||
<input type="password" id="lp" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" id="ls" value="Login" />
|
||||
{% if chpw %}
|
||||
<a id="x" href="#">change password</a>
|
||||
{% endif %}
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">other stuff:</h1>
|
||||
<ul>
|
||||
{%- if this.uname != '*' and this.args.shr %}
|
||||
<li><a id="y" href="{{ r }}/?shares">edit shares</a></li>
|
||||
{% endif %}
|
||||
|
||||
{% if k304 or k304vis %}
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
|
||||
@@ -90,18 +127,6 @@
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
<div>
|
||||
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" placeholder=" password" />
|
||||
<input type="hidden" name="uhash" id="uhash" value="x" />
|
||||
<input type="submit" value="Login" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
{%- if not this.args.nb %}
|
||||
@@ -119,6 +144,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ var Ls = {
|
||||
"e2": "leser inn konfigurasjonsfiler på nytt$N(kontoer, volumer, volumbrytere)$Nog kartlegger alle e2ds-volumer$N$Nmerk: endringer i globale parametere$Nkrever en full restart for å ta gjenge",
|
||||
"f1": "du kan betrakte:",
|
||||
"g1": "du kan laste opp til:",
|
||||
"cc1": "klient-konfigurasjon",
|
||||
"cc1": "brytere og sånt",
|
||||
"h1": "skru av k304",
|
||||
"i1": "skru på k304",
|
||||
"j1": "k304 bryter tilkoplingen for hver HTTP 304. Dette hjelper mot visse mellomtjenere som kan sette seg fast / plutselig slutter å laste sider, men det reduserer også ytelsen betydelig",
|
||||
@@ -17,9 +17,9 @@ var Ls = {
|
||||
"l1": "logg inn:",
|
||||
"m1": "velkommen tilbake,",
|
||||
"n1": "404: filen finnes ikke ┐( ´ -`)┌",
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"p1": "403: tilgang nektet ~┻━┻",
|
||||
"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"q1": 'prøv et passord eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"r1": "gå hjem",
|
||||
".s1": "kartlegg",
|
||||
"t1": "handling",
|
||||
@@ -27,21 +27,65 @@ var Ls = {
|
||||
"v1": "koble til",
|
||||
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
|
||||
"w1": "bytt til https",
|
||||
"x1": "bytt passord",
|
||||
"y1": "dine delinger",
|
||||
"z1": "lås opp område",
|
||||
"ta1": "du må skrive et nytt passord først",
|
||||
"ta2": "gjenta for å bekrefte nytt passord:",
|
||||
"ta3": "fant en skrivefeil; vennligst prøv igjen",
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes$N$Nnote: any changes to global settings$Nrequire a full restart to take effect",
|
||||
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
|
||||
"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
|
||||
"ta1": "fill in your new password first",
|
||||
"ta2": "repeat to confirm new password:",
|
||||
"ta3": "found a typo; please try again",
|
||||
},
|
||||
|
||||
"chi": {
|
||||
"a1": "更新",
|
||||
"b1": "你好 <small>(你尚未登录)</small>",
|
||||
"c1": "登出",
|
||||
"d1": "状态",
|
||||
"d2": "显示所有活动线程的状态",
|
||||
"e1": "重新加载配置",
|
||||
"e2": "重新加载配置文件(账户/卷/卷标),$N并重新扫描所有 e2ds 卷$N$N注意:任何全局设置的更改$N都需要完全重启才能生效",
|
||||
"f1": "你可以查看:",
|
||||
"g1": "你可以上传到:",
|
||||
"cc1": "开关等",
|
||||
"h1": "关闭 k304",
|
||||
"i1": "开启 k304",
|
||||
"j1": "k304 会在每个 HTTP 304 时断开连接。这有助于避免某些代理服务器卡住或突然停止加载页面,但也会显著降低性能。",
|
||||
"k1": "重置设置",
|
||||
"l1": "登录:",
|
||||
"m1": "欢迎回来,",
|
||||
"n1": "404: 文件不存在 ┐( ´ -`)┌",
|
||||
"o1": '或者你可能没有权限?尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"p1": "403: 访问被拒绝 ~┻━┻",
|
||||
"q1": '尝试输入密码或 <a href="' + SR + '/?h">回家</a>',
|
||||
"r1": "回家",
|
||||
".s1": "映射",
|
||||
"t1": "操作",
|
||||
"u2": "自上次服务器写入的时间$N( 上传 / 重命名 / ... )$N$N17d = 17 天$N1h23 = 1 小时 23 分钟$N4m56 = 4 分钟 56 秒",
|
||||
"v1": "连接",
|
||||
"v2": "将此服务器用作本地硬盘$N$N警告:这将显示你的密码!",
|
||||
"w1": "切换到 https",
|
||||
"x1": "更改密码",
|
||||
"y1": "你的分享",
|
||||
"z1": "解锁区域",
|
||||
"ta1": "请先输入新密码",
|
||||
"ta2": "重复以确认新密码:",
|
||||
"ta3": "发现拼写错误;请重试",
|
||||
}
|
||||
};
|
||||
|
||||
var LANGS = ["eng", "nor"];
|
||||
|
||||
if (window.langmod)
|
||||
langmod();
|
||||
|
||||
var d = Ls[sread("cpp_lang", LANGS) || lang] || Ls.eng || Ls.nor;
|
||||
var d = Ls[sread("cpp_lang", Object.keys(Ls)) || lang] ||
|
||||
Ls.eng || Ls.nor || Ls.chi;
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
@@ -74,3 +118,42 @@ if (o && /[0-9]+$/.exec(o.innerHTML))
|
||||
o.innerHTML = shumantime(o.innerHTML);
|
||||
|
||||
ebi('uhash').value = '' + location.hash;
|
||||
|
||||
(function() {
|
||||
if (!ebi('x'))
|
||||
return;
|
||||
|
||||
var pwi = ebi('lp');
|
||||
|
||||
function redo(msg) {
|
||||
modal.alert(msg, function() {
|
||||
pwi.value = '';
|
||||
pwi.focus();
|
||||
});
|
||||
}
|
||||
function mok(v) {
|
||||
if (v !== pwi.value)
|
||||
return redo(d.ta3);
|
||||
|
||||
pwi.setAttribute('name', 'pw');
|
||||
ebi('la').value = 'chpw';
|
||||
ebi('lf').submit();
|
||||
}
|
||||
function stars() {
|
||||
var m = ebi('modali');
|
||||
function enstars(n) {
|
||||
setTimeout(function() { m.value = ''; }, n);
|
||||
}
|
||||
m.setAttribute('type', 'password');
|
||||
enstars(17);
|
||||
enstars(32);
|
||||
enstars(69);
|
||||
}
|
||||
ebi('x').onclick = function (e) {
|
||||
ev(e);
|
||||
if (!pwi.value)
|
||||
return redo(d.ta1);
|
||||
|
||||
modal.prompt(d.ta2, "y", mok, null, stars);
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -245,6 +245,9 @@ document.documentElement.className = (STG && STG.cpp_thm) || "{{ args.theme }}";
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -385,6 +385,7 @@ html.y textarea:focus {
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo code[class*="language-"],
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||
|
||||
@@ -2383,8 +2383,23 @@ function up2k_init(subtle) {
|
||||
var arr = st.todo.upload,
|
||||
sort = arr.length && arr[arr.length - 1].nfile > t.n;
|
||||
|
||||
if (!t.stitch_sz) {
|
||||
// keep all connections busy
|
||||
var bpc = (st.bytes.total - st.bytes.finished) / (parallel_uploads || 1),
|
||||
ocs = 1024 * 1024,
|
||||
stp = 1024 * 512,
|
||||
ccs = ocs;
|
||||
while (ccs < bpc) {
|
||||
ocs = ccs;
|
||||
ccs += stp; if (ccs < bpc) ocs = ccs;
|
||||
ccs += stp; stp *= 2;
|
||||
}
|
||||
ocs = Math.floor(ocs / 1024 / 1024);
|
||||
t.stitch_sz = Math.min(ocs, stitch_tgt);
|
||||
}
|
||||
|
||||
for (var a = 0; a < t.postlist.length; a++) {
|
||||
var nparts = [], tbytes = 0, stitch = stitch_tgt;
|
||||
var nparts = [], tbytes = 0, stitch = t.stitch_sz;
|
||||
if (t.nojoin && t.nojoin - t.postlist.length < 6)
|
||||
stitch = 1;
|
||||
|
||||
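(aside: the stitch-size search in the hunk above, transcribed into Python so it is easier to read outside the diff; `pick_stitch_sz` and the MiB-valued `stitch_tgt_mib` parameter are illustration names, not actual copyparty identifiers)

```python
def pick_stitch_sz(bytes_left, parallel_uploads, stitch_tgt_mib):
    """rough python transcription of the up2k.js stitch-size search above;
    returns the per-request stitch size in MiB"""
    bpc = bytes_left / (parallel_uploads or 1)  # bytes per connection
    ocs = 1024 * 1024   # best candidate so far (bytes)
    stp = 1024 * 512    # step size, doubled every iteration
    ccs = ocs           # current candidate
    while ccs < bpc:
        ocs = ccs
        ccs += stp
        if ccs < bpc:
            ocs = ccs
        ccs += stp
        stp *= 2
    ocs = ocs // (1024 * 1024)
    return min(ocs, stitch_tgt_mib)

# example: 1 GiB left, 3 connections, stitch target of 64 MiB
print(pick_stitch_sz(1024 * 1024 * 1024, 3, 64))  # -> 64
```

the doubling step means only a handful of iterations are needed even for very large uploads, and the final `min()` keeps the result within the configured stitch target.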
@@ -2400,6 +2415,7 @@ function up2k_init(subtle) {
|
||||
'nparts': nparts
|
||||
});
|
||||
}
|
||||
t.nojoin = 0;
|
||||
|
||||
msg = null;
|
||||
done = false;
|
||||
@@ -2606,7 +2622,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
else if (txt.indexOf('already got that') + 1 ||
|
||||
txt.indexOf('already being written') + 1) {
|
||||
t.nojoin = t.postlist.length;
|
||||
t.nojoin = t.nojoin || t.postlist.length;
|
||||
console.log("ignoring dupe-segment with backoff", t.nojoin, t.name, t);
|
||||
if (!toast.visible && st.todo.upload.length < 4)
|
||||
toast.msg(10, L.u_cbusy);
|
||||
@@ -2653,19 +2669,29 @@ function up2k_init(subtle) {
|
||||
return;
|
||||
|
||||
st.bytes.inflight -= (xhr.bsent || 0);
|
||||
xhr.bsent = 0;
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);
|
||||
|
||||
t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
|
||||
console.log('chunkpit onerror,', ++tries, t.name, t);
|
||||
orz2(xhr);
|
||||
};
|
||||
var chashes = [];
|
||||
for (var a = pcar; a <= pcdr; a++)
|
||||
chashes.push(t.hash[a]);
|
||||
|
||||
var chashes = [],
|
||||
ctxt = t.hash[pcar],
|
||||
plen = Math.floor(192 / nparts.length);
|
||||
|
||||
plen = plen > 9 ? 9 : plen < 2 ? 2 : plen;
|
||||
for (var a = pcar + 1; a <= pcdr; a++)
|
||||
chashes.push(t.hash[a].slice(0, plen));
|
||||
|
||||
if (chashes.length)
|
||||
ctxt += ',' + plen + ',' + chashes.join('');
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.setRequestHeader("X-Up2k-Hash", chashes.join(","));
|
||||
xhr.setRequestHeader("X-Up2k-Hash", ctxt);
|
||||
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
|
||||
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
|
||||
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
|
||||
|
||||
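(aside: the header-shrinking above as a standalone Python sketch; the first chunk-hash travels in full, the remaining hashes are cut to a shared prefix length between 2 and 9 characters, and the prefix length is derived from the number of stitched parts, assumed here to equal the number of hashes. `compact_hash_header` is a made-up name and the server-side decoding is not shown)

```python
def compact_hash_header(chunk_hashes):
    # first hash in full; the rest as fixed-width prefixes, so the
    # X-Up2k-Hash header stays small even when many chunks are stitched
    ctxt = chunk_hashes[0]
    plen = 192 // len(chunk_hashes)  # assumes nparts covers the same chunks
    plen = max(2, min(9, plen))
    tail = "".join(h[:plen] for h in chunk_hashes[1:])
    if tail:
        ctxt += ",%d,%s" % (plen, tail)
    return ctxt

# example: three 44-char chunk hashes collapse into one short header value
print(compact_hash_header(["a" * 44, "b" * 44, "c" * 44]))
```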
@@ -473,6 +473,24 @@ function crc32(str) {
|
||||
}
|
||||
|
||||
|
||||
function randstr(len) {
|
||||
var ret = '';
|
||||
try {
|
||||
var ar = new Uint32Array(Math.floor((len + 3) / 4));
|
||||
crypto.getRandomValues(ar);
|
||||
for (var a = 0; a < ar.length; a++)
|
||||
ret += ('000' + ar[a].toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log('using unsafe randstr because ' + ex);
|
||||
while (ret.length < len)
|
||||
ret += ('000' + Math.floor(Math.random() * 1679616).toString(36)).slice(-4);
|
||||
return ret.slice(0, len);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clmod(el, cls, add) {
|
||||
if (!el)
|
||||
return false;
|
||||
|
||||
@@ -1,3 +1,150 @@
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-0823-2307 `v1.14.2` bing chilling

## new features

* #94 @ultwcz translated the UI to Chinese (thx!) 92edea1d
* #84 improvements to [shares](https://github.com/9001/copyparty#shares): 8122dded
  * if one or more files are selected for sharing, they are placed into a virtual folder
  * more appropriate password UI for accessing protected shares
  * human-readable timestamps in the shares listing
* u2c (commandline uploader): support multiple exclusion patterns f356faa2

## bugfixes

* remove a confusing log message when downloading a zero-byte file 9f034d9c
* shares: 7ff46966
  * fix crash if the root volume is unmapped
  * log-spam on config reload
  * password coalescing
  * add chrome support

## other changes

* #93 add html IDs to the tabstrip 461f3158
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-0819-0014 `v1.14.1` one step forward
|
||||
|
||||
[if i turn back now, then this will always follow... one step forward, forward](https://youtu.be/xe3Wkzc0O3k?t=27)
|
||||
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||
|
||||
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates, such as [vulnerabilities](https://github.com/9001/copyparty/security) (most recently 2023-07-23)
|
||||
|
||||
## new features
|
||||
|
||||
* #92 users can change their own passwords 83fb569d 00da7440
|
||||
* this feature is default-disabled; see [readme](https://github.com/9001/copyparty#user-changeable-passwords)
|
||||
* #84 share files/folders by creating a temporary url 7c2beba5
|
||||
* inspired by other file servers; click the share-button to create a link like `example.com/share/enkz8g374o8g`
|
||||
* primary use case is to sneak past authentication services (see issue description)
|
||||
* the create-share UI has options to accept uploads into the share, and/or set expiration time
|
||||
* this feature is default-disabled; see [readme](https://github.com/9001/copyparty#shares)
|
||||
|
||||
## bugfixes
|
||||
|
||||
* #93 fixes for vproxy / location-based / not-vhost-based reverse-proxying 0b46b1a6
|
||||
* using `--rp-loc` to reverse-proxy from a subfolder made some UI stuff break
|
||||
* listening on unix-sockets: 687df2fa
|
||||
* fix `x-forwarded-for` support, and avoid a possible container-specific collision
|
||||
* new syntax which allows setting unix-permissions and unix-group
|
||||
* `-i unix:770:www:/tmp/party.sock` (see `--help-bind` for more examples)
|
||||
* using relocation hooks (introduced in previous ver) could cause dedup issues c8f4aeae b0af4b37
|
||||
* custom fonts using `@import` css statements 5a62cb48
|
||||
* invert volume scrollwheel 7d8d9438
|
||||
|
||||
## other changes
|
||||
|
||||
* changed the button colors in theme 2 (pm-monokai) from red to yellow 5153db6b
|
||||
* the red buttons look better, but are too confusing because usually red means off
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-0813-0008 `v1.13.8` hook into place
|
||||
|
||||
## new features
|
||||
|
||||
* #86 intentional side-effects from hooks 6c94a63f
|
||||
* use hooks (plugins) to conditionally move uploads into another folder depending on filename, extension, uploader ip/name, file contents, ...
|
||||
* hooks can create additional files and tell copyparty to index them immediately, or delete an existing file based on some condition
|
||||
* only one example so far though, [reloc-by-ext](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#before-upload) which was a feature-request to dodge [sharex#3992](https://github.com/ShareX/ShareX/issues/3992)
|
||||
* listen on unix-sockets ee9aad82
|
||||
* `-i unix:/tmp/party.sock` stops listening on TCP ports entirely, and only listens on that unix-socket
|
||||
* can be combined with regular sockets, `-i 127.0.0.1,unix:/tmp/a.sock`
|
||||
* kinda buggy for now (need to `--xff-src=any` and doesn't let you set socket-perms yet), will be fixed in next ver
|
||||
* makes it 10% faster, but more importantly offers tighter access control behind reverse-proxies
|
||||
* inspired by https://www.oligo.security/blog/0-0-0-0-day-exploiting-localhost-apis-from-the-browser
|
||||
* up2k stitching:
|
||||
* more optimal stitch sizes for max throughput across connections c862ec1b
|
||||
* improve fat32 compatibility 373194c3
|
||||
* new option `--js-other` to load custom javascript dbd42bc6
|
||||
* `--js-browser` affects the filebrowser page, `--js-other` does all the others
|
||||
* endless possibilities, such as [adding a login-banner](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/banner.js) which [looks like this](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)
|
||||
* list detected optional dependencies on startup 3db117d8
|
||||
* hopefully reduces the guesswork / jank factor by a tiny bit
|
||||
|
||||
## bugfixes
|
||||
|
||||
* up2k stitching:
|
||||
* put the request headers on a diet so they fit through more reverse-proxies 0da719f4
|
||||
* fix deadlock on s390x (IBM mainframes) 250c8c56
|
||||
|
||||
## other changes
|
||||
|
||||
* add flags to disengage [features](https://github.com/9001/copyparty/tree/hovudstraum#feature-chickenbits) and [dependencies](https://github.com/9001/copyparty/tree/hovudstraum#dependency-chickenbits) in case they cause trouble 72361c99
|
||||
* optimizations
|
||||
* 6% faster on average d5c9c8eb
|
||||
* docker: reduce ram usage 98ffaadf
|
||||
* python2: reduce ram usage ebb19818
|
||||
* docker: add [portainer howto](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/portainer.md) e136231c
|
||||
* update deps ca001c85
|
||||
* pyftpdlib 1.5.10
|
||||
* copyparty.exe: python 3.12.5
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-0729-2028 `v1.13.6` not that big
|
||||
|
||||
## new features
|
||||
|
||||
* up2k.js: set clientside timeouts on http connections during upload 85e54980
|
||||
* some reverse-proxy setups could cause uploads to hang indefinitely by eating requests; should recover nicely now
|
||||
* audio-player shows statustext while loading 662541c6
|
||||
* [bsod theme](https://github.com/9001/copyparty/tree/hovudstraum/contrib/themes) [(live demo)](https://cd.ocv.me/c/) 15ddcf53
|
||||
|
||||
## bugfixes
|
||||
|
||||
* fix bugs in the [long-distance upload optimizations](https://github.com/9001/copyparty/releases/tag/v1.13.5) in the previous version:
|
||||
* up2k.js didn't necessarily use the expected chunksize when stitching 225bd80e
|
||||
* u2c (commandline uploader): 8916bce3
|
||||
* use the correct chunksize instead of overshooting like crazy
|
||||
* could crash on exit if `-z` was enabled (so basically harmless)
|
||||
* the "time spent uploading" statustext that was printed on exit could multiply by `-j` and exceed walltime
|
||||
* misc ux 9bb6e0dc
|
||||
* don't accept hotkeys until it's safe to do so
|
||||
* improve messages regarding the [firefox crash](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500)
|
||||
* keep more console logs in memory (easier to debug)
|
||||
* fix wordwrap in messageboxes on firefox a19a0fa9
|
||||
|
||||
## other changes
|
||||
|
||||
* changed the `xm` / "on message" [hook examples](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#on-message) to reject users without write-access 99edba4f
|
||||
* docker images were rebuilt on 2024-08-02, 23:30 UTC with new optimizations: 98ffaadf
|
||||
* 😃 RAM usage decreased by `5-6 MiB` for most flavors; `10 MiB` for dj/iv
|
||||
* 😕 image size grew by `4 MiB` (min), `6 MiB` (ac/im/iv), `9 MiB` (dj)
|
||||
* 😃 startup time reduced to about half
|
||||
* and avoids a deadlock on IBM mainframes
|
||||
* updated comparison to other software 6b54972e
|
||||
* `hfs2` is dead, `hfs3` and `filebrowser` improved
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-0722-2323 `v1.13.5` american sized
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
## devnotes toc
|
||||
|
||||
* top
|
||||
* [future plans](#future-plans) - some improvement ideas
|
||||
* [future ideas](#future-ideas) - list of dreams which will probably never happen
|
||||
* [design](#design)
|
||||
* [up2k](#up2k) - quick outline of the up2k protocol
|
||||
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
|
||||
@@ -12,6 +12,8 @@
|
||||
* [write](#write)
|
||||
* [admin](#admin)
|
||||
* [general](#general)
|
||||
* [event hooks](#event-hooks) - on writing your own [hooks](../README.md#event-hooks)
|
||||
* [hook effects](#hook-effects) - hooks can cause intentional side-effects
|
||||
* [assumptions](#assumptions)
|
||||
* [mdns](#mdns)
|
||||
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
|
||||
@@ -25,9 +27,9 @@
|
||||
* [discarded ideas](#discarded-ideas)
|
||||
|
||||
|
||||
# future plans
|
||||
# future ideas
|
||||
|
||||
some improvement ideas
|
||||
list of dreams which will probably never happen
|
||||
|
||||
* the JS is a mess -- a ~~preact~~ rewrite would be nice
|
||||
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
|
||||
@@ -137,6 +139,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
| GET | `?tar&w` | pregenerate webp thumbnails |
|
||||
| GET | `?tar&j` | pregenerate jpg thumbnails |
|
||||
| GET | `?tar&p` | pregenerate audio waveforms |
|
||||
| GET | `?shares` | list your shared files/folders |
|
||||
| GET | `?ups` | show recent uploads from your IP |
|
||||
| GET | `?ups&filter=f` | ...where URL contains `f` |
|
||||
| GET | `?mime=foo` | specify return mimetype `foo` |
|
||||
@@ -173,6 +176,9 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
|
||||
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
|
||||
| POST | `?delete` | | delete URL recursively |
|
||||
| POST | `?eshare=rm` | | stop sharing a file/folder |
|
||||
| POST | `?eshare=3` | | set expiration to 3 minutes |
|
||||
| jPOST | `?share` | (complicated) | create temp URL for file/folder |
|
||||
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
|
||||
| uPOST | | `msg=foo` | send message `foo` into server log |
|
||||
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
|
||||
@@ -204,6 +210,32 @@ upload modifiers:
|
||||
| GET | `?pw=x` | logout |
|
||||
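a minimal stdlib-only sketch of calling two of the GET endpoints above; the host, port and password are placeholders, and since the response formats are not pinned down here, the bodies are simply printed as-is:

```python
# stdlib-only sketch; authentication uses the &pw= url param described above
# (the Cookie: cppwd=... header works the same way)
import urllib.parse
import urllib.request

BASE = "http://127.0.0.1:3923"  # placeholder server
PW = "hunter2"                  # placeholder password


def get(path_and_query):
    url = "%s%s&pw=%s" % (BASE, path_and_query, urllib.parse.quote(PW))
    with urllib.request.urlopen(url) as r:
        return r.read().decode("utf-8", "replace")


print(get("/?shares"))           # list your shared files/folders
print(get("/?ups&filter=flac"))  # recent uploads from your IP where the URL contains "flac"
```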
|
||||
|
||||
# event hooks

on writing your own [hooks](../README.md#event-hooks)

## hook effects

hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout

* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
* `idx` informs copyparty about a new file to index as a consequence of this upload
* `del` tells copyparty to delete an unrelated file by vpath

for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)

a subset of effect types are available for a subset of hook types:

* most hook types (xbu/xau/xbr/xar/xbd/xad/xm) support `idx` and `del` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* most hook types will abort/reject the action if the hook returns nonzero, assuming flag `c` is given; see examples [reject-extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) and [reject-mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `xbu` supports `reloc` for all http protocols (up2k / basic-uploader / webdav), but not ftp/tftp/smb
* `xau` supports `reloc` for basic-uploader / webdav only, not up2k or ftp/tftp/smb
  * so clients like sharex are supported, but not dragdrop into browser

to trigger indexing of files `/foo/1.txt` and `/foo/bar/2.txt`, a hook can `print(json.dumps({"idx":{"vp":["/foo/1.txt","/foo/bar/2.txt"]}}))` (and replace "idx" with "del" to delete instead); a minimal sketch of such a hook follows below
* note: paths starting with `/` are absolute URLs, but you can also do `../3.txt` relative to the destination folder of each uploaded file
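as referenced above, here is a minimal sketch of an after-upload (`xau`) hook that creates a sidecar file and asks copyparty to index it. Assumptions are marked in the comments: in particular the calling convention (uploaded file's filesystem path as `argv[1]`) is assumed, so check the bundled hooks in `bin/hooks/` for the real interface, and remember the hook must be registered with the `c1` flag for the effect to apply.

```python
#!/usr/bin/env python3
# hypothetical xau hook: writes a sidecar next to the upload and emits
# an "idx" effect (json on stdout) so copyparty indexes it immediately
import json
import os
import sys


def main():
    # ASSUMPTION: the uploaded file's absolute filesystem path is argv[1];
    # verify against the real hooks in bin/hooks/ before relying on this
    fs_path = sys.argv[1]

    sidecar = fs_path + ".meta.txt"
    with open(sidecar, "w") as f:
        f.write("received: %s\n" % (os.path.basename(fs_path),))

    # relative vpaths are resolved against the upload's destination folder,
    # so the sidecar can be referenced by its filename alone
    print(json.dumps({"idx": {"vp": [os.path.basename(sidecar)]}}))


if __name__ == "__main__":
    main()
```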
|
||||
|
||||
# assumptions
|
||||
|
||||
## mdns
|
||||
|
||||
docs/examples/docker/portainer.md (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
the following setup appears to work (copyparty starts, accepts uploads, is able to persist config)
|
||||
|
||||
tested on debian 12 using [portainer-ce](https://docs.portainer.io/start/install-ce/server/docker/linux) with [docker-ce](https://docs.docker.com/engine/install/debian/) as root (not rootless)
|
||||
|
||||
before making the container, first `mkdir /etc/copyparty /srv/pub`; these will be bind-mounted into the container
|
||||
|
||||
> both `/etc/copyparty` and `/srv/pub` are examples; you can change them if you'd like
|
||||
|
||||
put your copyparty config files directly into `/etc/copyparty` and the files to share inside `/srv/pub`
|
||||
|
||||
on first startup, copyparty will create a subfolder inside `/etc/copyparty` called `copyparty` where it puts some runtime state; for example replacing `/etc/copyparty/copyparty/cert.pem` with another TLS certificate is a quick and dirty way to get valid HTTPS (if you really want copyparty to handle that and not a reverse-proxy)
|
||||
|
||||
|
||||
## in portainer:
|
||||
|
||||
```
|
||||
environments -> local -> containers -> add container:
|
||||
|
||||
name = copyparty-ac
|
||||
registry = docker hub
|
||||
image = copyparty/ac
|
||||
always pull = no
|
||||
|
||||
manual network port publishing:
|
||||
3923 to 3923 [TCP]
|
||||
|
||||
advanced -> command & logging:
|
||||
console = interactive & tty
|
||||
|
||||
advanced -> volumes -> map additional volume:
|
||||
container = /cfg [Bind]
|
||||
host = /etc/copyparty [Writable]
|
||||
|
||||
advanced -> volumes -> map additional volume:
|
||||
container = /w [Bind]
|
||||
host = /srv/pub [Writable]
|
||||
```
|
||||
|
||||
notes:
|
||||
|
||||
* `/cfg` is where copyparty expects to find its config files; `/etc/copyparty` is just an example mapping to that
|
||||
|
||||
* `/w` is where copyparty expects to find the folder to share; `/srv/pub` is just an example mapping to that
|
||||
|
||||
* the volumes must be bind-mounts to avoid permission issues (or so the theory goes)
|
||||
docs/logo.svg (new file, 220 lines)
@@ -0,0 +1,220 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="300mm"
|
||||
height="207mm"
|
||||
viewBox="0 0 300 207"
|
||||
version="1.1"
|
||||
id="svg1"
|
||||
inkscape:version="1.3.2 (091e20e, 2023-11-25, custom)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
<title
|
||||
id="title1">copyparty_logo</title>
|
||||
<defs
|
||||
id="defs1">
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient1">
|
||||
<stop
|
||||
style="stop-color:#ffcc55;stop-opacity:1"
|
||||
offset="0"
|
||||
id="stop1" />
|
||||
<stop
|
||||
style="stop-color:#ffcc00;stop-opacity:1"
|
||||
offset="0.2"
|
||||
id="stop2" />
|
||||
<stop
|
||||
style="stop-color:#ff8800;stop-opacity:1"
|
||||
offset="1"
|
||||
id="stop3" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient1"
|
||||
id="linearGradient2"
|
||||
x1="15"
|
||||
y1="15"
|
||||
x2="15"
|
||||
y2="143"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
</defs>
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="the logo of https://github.com/9001/copyparty">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title>copyparty_logo</dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
inkscape:label="kassett">
|
||||
<rect
|
||||
style="fill:#333333"
|
||||
id="rect1"
|
||||
width="300"
|
||||
height="205"
|
||||
x="0"
|
||||
y="0"
|
||||
rx="12"
|
||||
ry="12" />
|
||||
<rect
|
||||
style="fill:url(#linearGradient2)"
|
||||
id="rect2"
|
||||
width="270"
|
||||
height="128"
|
||||
x="15"
|
||||
y="15"
|
||||
rx="8"
|
||||
ry="8" />
|
||||
<rect
|
||||
style="fill:#333333"
|
||||
id="rect3"
|
||||
width="172"
|
||||
height="52"
|
||||
x="64"
|
||||
y="72"
|
||||
rx="26"
|
||||
ry="26" />
|
||||
<circle
|
||||
style="fill:#cccccc"
|
||||
id="circle1"
|
||||
cx="91"
|
||||
cy="98"
|
||||
r="18" />
|
||||
<circle
|
||||
style="fill:#cccccc"
|
||||
id="circle2"
|
||||
cx="209"
|
||||
cy="98"
|
||||
r="18" />
|
||||
<path
|
||||
style="fill:#737373;stroke-width:1px"
|
||||
d="m 49,207 6.64,-39.33 c 1.6,-6.3 6.1,-7.7 11.55,-8 58.8,-1.1 106.3,-0.76 165,0 7.4,0.11 10.2,3.25 11.5,8.7 L 251,207 Z"
|
||||
id="path1"
|
||||
sodipodi:nodetypes="ccccccc" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer2"
|
||||
inkscape:label="skeu"
|
||||
style="display:none">
|
||||
<path
|
||||
style="fill:#555555;stroke-width:1px"
|
||||
d="m 48.7,207 6.66,-36.87 c 1.6,-5.9 6.1,-7.2 11.6,-7.5 58.9,-1.06 106.6,-0.7 165.5,0 7.4,0.11 10.2,3.05 11.5,8.12 L 251.3,207 Z"
|
||||
id="path2"
|
||||
sodipodi:nodetypes="ccccccc" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer3"
|
||||
inkscape:label="tekst"
|
||||
style="display:none">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:38.8056px;line-height:1.25;font-family:Akbar;-inkscape-font-specification:Akbar;letter-spacing:3.70417px;word-spacing:0px;fill:#333333"
|
||||
x="47.153069"
|
||||
y="55.548954"
|
||||
id="text1"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1"
|
||||
x="47.153069"
|
||||
y="55.548954"
|
||||
style="-inkscape-font-specification:Akbar"
|
||||
rotate="0 0">copyparty</tspan></text>
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer4"
|
||||
inkscape:label="stensatt">
|
||||
<path
|
||||
d="m 64.54,50.93 q -0.85,0.93 -4.73,2.3 -3.6,1.3 -4.4,1.3 -3.3,0 -5.1,-2.1 -1.75,-2 -1.75,-5.36 0,-4.6 3.76,-7.64 3.3,-2.7 7.3,-2.7 0.4,0 0.93,0.74 0.54,0.7 0.54,1.16 0,2.06 -2.2,2.7 -1.36,0.4 -4.04,1.16 -2.2,1.16 -2.2,4.4 0,3.2 2.9,3.2 0.85,0 0.85,0 0.54,0 1.44,-0.16 1.1,-0.23 2.9,-0.74 1.8,-0.54 2.13,-0.54 0.4,0 1.75,0.6 z"
|
||||
style="fill:#333333"
|
||||
id="path11" />
|
||||
<path
|
||||
d="m 89.2,45.03 q 0,4.2 -3.7,6.95 -3.2,2.3 -6.87,2.3 -3.4,0 -6,-2.6 -2.5,-2.6 -2.5,-6 0,-3.6 3.14,-6.64 3.2,-3 6.8,-3 3.5,0 6.3,2.76 2.83,2.76 2.83,6.25 z m -3.4,0.16 q 0,-2.25 -1.75,-3.7 -1.7,-1.5 -4,-1.5 -0.1,0 -1.6,1.6 -1.44,1.55 -2.44,1.55 -0.6,0 -0.8,-0.3 -1.16,2.3 -1.16,3 0,2.25 2.13,3.4 1.6,0.9 3.6,0.9 2,0 3.76,-1.1 2.25,-1.4 2.25,-3.84 z"
|
||||
style="fill:#333333"
|
||||
id="path12" />
|
||||
<path
|
||||
d="m 114.3,47.94 q 0,2.8 -1.9,4.4 -1.8,1.5 -4.66,1.5 -0.74,0 -2.7,-0.4 -1.9,-0.4 -2.64,-0.4 -2,0 -2,2.64 0,0.85 0.2,2.6 0.2,1.75 0.2,2.6 0,1.9 -0.74,2.83 -1.44,0 -3,-0.85 Q 95.6,53.34 95.6,50.9 q 0,-3.65 1.75,-8.1 2.37,-6.05 6.4,-6.05 3.7,0 7.26,4.1 3.3,3.84 3.3,7.14 z m -3.76,0.2 q -0.66,-2.2 -2.64,-4.4 -2.25,-2.5 -4.3,-2.5 -1.3,0 -2.3,2.2 -0.85,1.8 -0.85,3.26 0,0.47 0.4,1.24 0.4,0.8 0.8,0.8 1.05,0 3.14,0.3 2.13,0.3 3.2,0.3 0.35,0 1.3,-0.4 1,-0.47 1.3,-0.74 z"
|
||||
style="fill:#333333"
|
||||
id="path13" />
|
||||
<path
|
||||
d="m 133.6,40.2 q -2.06,4.1 -3.2,7 -0.1,0.3 -1.55,4.5 -0.47,1.36 -1,4.2 -0.5,2.83 -1,4.2 -1,2.83 -2.37,2.64 -1.4,-0.2 -1.55,-1.6 -0.04,-0.2 -0.04,-0.5 0,-0.16 0.3,-1.5 1.05,-5.04 1.05,-6.44 0,-0.54 -0.1,-0.74 -1.4,-2.44 -4.1,-7.4 -2.7,-4.97 -2.4,-7.7 1.44,-1.36 2.1,-1.36 0.4,0 1.05,0.6 0.6,0.6 0.7,1.1 0.85,6.2 4.9,11.1 1,-1.8 1.86,-4.04 0.5,-1.4 1.6,-4.15 1.86,-4.46 3.4,-4.46 0.2,0 0.4,0.1 0.85,0.3 1.24,2.8 z"
|
||||
style="fill:#333333"
|
||||
id="path14" />
|
||||
<path
|
||||
d="m 156.97,47.94 q 0,2.8 -1.9,4.4 -1.8,1.5 -4.66,1.5 -0.74,0 -2.7,-0.4 -1.9,-0.4 -2.64,-0.4 -2,0 -2,2.64 0,0.85 0.2,2.6 0.2,1.75 0.2,2.6 0,1.9 -0.74,2.83 -1.44,0 -3,-0.85 -1.44,-9.5 -1.44,-11.95 0,-3.65 1.75,-8.1 2.37,-6.05 6.4,-6.05 3.7,0 7.26,4.1 3.3,3.84 3.3,7.14 z m -3.76,0.2 q -0.66,-2.2 -2.64,-4.4 -2.25,-2.5 -4.3,-2.5 -1.3,0 -2.3,2.2 -0.85,1.8 -0.85,3.26 0,0.47 0.4,1.24 0.4,0.8 0.8,0.8 1.05,0 3.14,0.3 2.13,0.3 3.2,0.3 0.35,0 1.3,-0.4 1,-0.47 1.3,-0.74 z"
|
||||
style="fill:#333333"
|
||||
id="path15" />
|
||||
<path
|
||||
d="m 182.4,53.3 q 0,0.9 -0.6,1.5 -0.54,0.6 -1.4,0.6 -1.55,0 -2.95,-0.9 -1.4,-0.93 -2.13,-2.3 -0.74,-0.1 -1.5,0.85 -0.9,1.16 -1.05,1.24 -1.24,0.54 -3.9,0.54 -2.2,0 -3.9,-2.44 -1.5,-2.13 -1.5,-4 0,-3.4 3.34,-6.4 3.2,-2.9 6.7,-2.9 0.9,0 1.67,0.6 0.8,0.6 0.8,1.44 0,0.54 -0.35,1.1 2.37,0.9 2.37,2.83 0,0.35 -0.1,1.05 -0.1,0.7 -0.1,1.05 0,0.4 0.1,0.6 0.5,1.3 2.56,3.4 1.9,1.9 1.9,2.2 z m -8.1,-10.1 q -0.4,0 -1.1,-0.1 -0.74,-0.16 -1.1,-0.16 -1.3,0 -3.2,1.94 -1.9,1.94 -1.9,3.3 0,0.8 0.7,1.8 0.9,1.3 2.2,1.3 2.64,0 3.53,-2.9 0.5,-2.6 1,-5.16 z"
|
||||
style="fill:#333333"
|
||||
id="path16" />
|
||||
<path
|
||||
d="m 204.8,41.35 q -0.4,0.4 -1.5,0.4 -0.85,0 -2.5,-0.3 -1.67,-0.3 -2.5,-0.3 -4.66,0 -5.43,6.9 -0.35,3.1 -0.4,3.3 -0.4,1 -1.7,2.3 h -1.16 q -0.66,-1.2 -1.3,-4.1 -0.6,-2.76 -0.6,-4.27 0,-1.16 0.1,-1.5 0.2,-0.54 1,-0.54 0.3,0 0.66,0.3 0.4,0.3 0.4,0.3 1.9,-3.53 3.07,-4.6 1.8,-1.7 5.04,-1.7 1.4,0 3.6,0.9 2.83,1.16 3.3,2.8 z"
|
||||
style="fill:#333333"
|
||||
id="path17" />
|
||||
<path
|
||||
d="m 228.46,37.16 q 0.3,0.8 0.3,1.44 0,1.86 -2.4,1.86 -1,0 -3.45,-0.5 -2.5,-0.54 -3.45,-0.54 -1.24,0 -1.44,0.1 -0.47,0.2 -0.47,1.2 0,2.2 0.6,6.9 0.74,5.86 1.6,6.13 -0.35,0.35 -0.35,1.1 -1.24,0.7 -2.64,0.7 -1.4,0 -1.94,-3.9 -0.2,-1.36 -0.5,-7.76 -0.23,-4.6 -0.8,-5.5 -0.3,-0.47 -4.35,-0.35 -1,0.04 -1.55,0.1 -0.54,0 -0.35,0 -0.8,0 -1.2,-0.7 -0.54,-1.3 -0.54,-1.4 0,-1.44 4.15,-2 1.6,-0.16 4.73,-0.5 0,-0.85 -0.1,-2.56 -0.04,-1.75 -0.04,-2.6 0,-4.35 2.1,-4.35 0.54,0 1.1,0.6 0.6,0.6 0.6,1.1 v 7.9 q 1.1,1.2 5,1.7 3.9,0.5 5.3,1.86 z"
|
||||
style="fill:#333333"
|
||||
id="path18" />
|
||||
<path
|
||||
d="m 250.2,40.2 q -2.06,4.1 -3.2,7 -0.1,0.3 -1.55,4.5 -0.47,1.36 -1,4.2 -0.5,2.83 -1,4.2 -1,2.83 -2.37,2.64 -1.4,-0.2 -1.55,-1.6 -0.04,-0.2 -0.04,-0.5 0,-0.16 0.3,-1.5 1.05,-5.04 1.05,-6.44 0,-0.54 -0.1,-0.74 -1.4,-2.44 -4.1,-7.4 -2.7,-4.97 -2.4,-7.7 1.44,-1.36 2.1,-1.36 0.4,0 1.05,0.6 0.6,0.6 0.7,1.1 0.85,6.2 4.9,11.1 1,-1.8 1.86,-4.04 0.5,-1.4 1.6,-4.15 1.86,-4.46 3.4,-4.46 0.2,0 0.4,0.1 0.85,0.3 1.24,2.8 z"
|
||||
style="fill:#333333"
|
||||
id="path19" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer5"
|
||||
inkscape:label="tagger">
|
||||
<g
|
||||
id="g1">
|
||||
<path
|
||||
id="path4"
|
||||
style="fill:#333333"
|
||||
d="m 111.4,83.335 -9.526,5.5 2.5,4.33 9.526,-5.5 z m -33.775,19.5 -9.526,5.5 2.5,4.33 9.526,-5.5 z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
<path
|
||||
id="path5"
|
||||
style="fill:#333333"
|
||||
d="M 88.5,73 V 84 h 5 V 73 Z m 0,39 v 11 h 5 V 112 Z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
<path
|
||||
id="path6"
|
||||
style="fill:#333333"
|
||||
d="m 68.1,87.665 9.526,5.5 2.5,-4.33 -9.526,-5.5 z m 33.775,19.5 9.527,5.5 2.5,-4.33 -9.527,-5.5 z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
</g>
|
||||
<g
|
||||
id="g2"
|
||||
transform="rotate(30,150,318.19)">
|
||||
<path
|
||||
id="path7"
|
||||
style="fill:#333333"
|
||||
d="m 111.4,83.335 -9.526,5.5 2.5,4.33 9.526,-5.5 z m -33.775,19.5 -9.526,5.5 2.5,4.33 9.526,-5.5 z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
<path
|
||||
id="path8"
|
||||
style="fill:#333333"
|
||||
d="M 88.5,73 V 84 h 5 V 73 Z m 0,39 v 11 h 5 V 112 Z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
<path
|
||||
id="path9"
|
||||
style="fill:#333333"
|
||||
d="m 68.1,87.665 9.526,5.5 2.5,-4.33 -9.526,-5.5 z m 33.775,19.5 9.527,5.5 2.5,-4.33 -9.527,-5.5 z"
|
||||
sodipodi:nodetypes="cccccccccc" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
@@ -141,6 +141,9 @@ find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv
|
||||
# unique stacks in a stackdump
|
||||
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24
|
||||
|
||||
# find unused css variables
|
||||
cat browser.css | sed -r 's/(var\()/\n\1/g' | awk '{sub(/:/," ")} $1~/^--/{d[$1]=1} /var\(/{sub(/.*var\(/,"");sub(/\).*/,"");u[$1]=1} END{for (x in u) delete d[x]; for (x in d) print x}' | tr '\n' '|'
|
||||
|
||||
|
||||
##
|
||||
## sqlite3 stuff
|
||||
|
||||
@@ -175,6 +175,7 @@ symbol legend,
|
||||
| ┗ randomize filename | █ | | | | | | | █ | █ | | | | |
|
||||
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | | ╱ | • |
|
||||
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | | ╱ | • |
|
||||
| ┗ upload routing | █ | | | | | | | | | | | | |
|
||||
| checksums provided | | | | █ | █ | | | | █ | ╱ | | | |
|
||||
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ | █ | ╱ |
|
||||
|
||||
@@ -188,6 +189,9 @@ symbol legend,
|
||||
|
||||
* `race the beam` = files can be downloaded while they're still uploading; downloaders are slowed down such that the uploader is always ahead
|
||||
|
||||
* `upload routing` = depending on filetype / contents / uploader etc., the file can be redirected to another location or otherwise transformed; mitigates limitations such as [sharex#3992](https://github.com/ShareX/ShareX/issues/3992)
|
||||
* copyparty example: [reloc-by-ext](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#before-upload)
|
||||
|
||||
* `checksums provided` = when downloading a file from the server, the file's checksum is provided for verification client-side
|
||||
|
||||
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge
|
||||
@@ -217,7 +221,7 @@ symbol legend,
|
||||
| serve sftp (ssh) | | | | | | █ | | | | | | █ | █ |
|
||||
| serve smb/cifs | ╱ | | | | | █ | | | | | | | |
|
||||
| serve dlna | | | | | | █ | | | | | | | |
|
||||
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | █ | █ | █ | |
|
||||
| listen on unix-socket | █ | | | █ | █ | | █ | █ | █ | █ | █ | █ | |
|
||||
| zeroconf | █ | | | | | | | | | | | | █ |
|
||||
| supports netscape 4 | ╱ | | | | | █ | | | | | • | | ╱ |
|
||||
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • | | ╱ |
|
||||
|
||||
@@ -5,19 +5,16 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-ac" \
|
||||
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
ENV XDG_CONFIG_HOME=/cfg
|
||||
|
||||
RUN apk --no-cache add !pyc \
|
||||
wget \
|
||||
py3-argon2-cffi py3-pillow \
|
||||
ffmpeg \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
py3-jinja2 py3-argon2-cffi py3-pillow \
|
||||
ffmpeg
|
||||
|
||||
COPY i/dist/copyparty-sfx.py innvikler.sh ./
|
||||
RUN ash innvikler.sh && rm innvikler.sh
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -5,15 +5,14 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-dj" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
ENV XDG_CONFIG_HOME=/cfg
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
RUN apk add -U !pyc \
|
||||
wget \
|
||||
py3-argon2-cffi py3-pillow py3-pip py3-cffi \
|
||||
py3-jinja2 py3-argon2-cffi py3-pillow py3-pip py3-cffi \
|
||||
ffmpeg \
|
||||
vips-jxl vips-heif vips-poppler vips-magick \
|
||||
py3-numpy fftw libsndfile \
|
||||
@@ -27,18 +26,12 @@ RUN apk add -U !pyc \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& apk del py3-pip .bd \
|
||||
&& rm -rf /var/cache/apk/* /tmp/pyc \
|
||||
&& chmod 777 /root \
|
||||
&& ln -s /root/vamp /root/.local / \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
&& ln -s /root/vamp /root/.local /
|
||||
|
||||
COPY i/dist/copyparty-sfx.py innvikler.sh ./
|
||||
RUN ash innvikler.sh && rm innvikler.sh
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
# size: 286 MB
|
||||
# bpm/key: 529 sec
|
||||
# idx-bench: 2352 MB/s
|
||||
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -5,18 +5,15 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-im" \
|
||||
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
ENV XDG_CONFIG_HOME=/cfg
|
||||
|
||||
RUN apk --no-cache add !pyc \
|
||||
wget \
|
||||
py3-argon2-cffi py3-pillow py3-mutagen \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
py3-jinja2 py3-argon2-cffi py3-pillow py3-mutagen
|
||||
|
||||
COPY i/dist/copyparty-sfx.py innvikler.sh ./
|
||||
RUN ash innvikler.sh && rm innvikler.sh
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -5,12 +5,11 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-iv" \
|
||||
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
ENV XDG_CONFIG_HOME=/cfg
|
||||
|
||||
RUN apk add -U !pyc \
|
||||
wget \
|
||||
py3-argon2-cffi py3-pillow py3-pip py3-cffi \
|
||||
py3-jinja2 py3-argon2-cffi py3-pillow py3-pip py3-cffi \
|
||||
ffmpeg \
|
||||
vips-jxl vips-heif vips-poppler vips-magick \
|
||||
&& apk add -t .bd \
|
||||
@@ -18,13 +17,11 @@ RUN apk add -U !pyc \
|
||||
python3-dev py3-wheel \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& apk del py3-pip .bd \
|
||||
&& rm -rf /var/cache/apk/* /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
&& apk del py3-pip .bd
|
||||
|
||||
COPY i/dist/copyparty-sfx.py innvikler.sh ./
|
||||
RUN ash innvikler.sh && rm innvikler.sh
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -5,17 +5,14 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-min" \
|
||||
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
ENV XDG_CONFIG_HOME=/cfg
|
||||
|
||||
RUN apk --no-cache add !pyc \
|
||||
python3 \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
py3-jinja2
|
||||
|
||||
COPY i/dist/copyparty-sfx.py innvikler.sh ./
|
||||
RUN ash innvikler.sh && rm innvikler.sh
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -22,6 +22,11 @@ this example is also available as a podman-compatible [docker-compose yaml](http
|
||||
i'm not very familiar with containers, so let me know if this section could be better 🙏
|
||||
|
||||
|
||||
## portainer
|
||||
|
||||
* there is a [portainer howto](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/portainer.md) which is mostly untested
|
||||
|
||||
|
||||
## configuration
|
||||
|
||||
> this section basically explains how the [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose) works, so you may look there instead
|
||||
|
||||
scripts/docker/innvikler.sh (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
#!/bin/ash
|
||||
set -ex
|
||||
|
||||
# cleanup for flavors with python build steps (dj/iv)
|
||||
rm -rf /var/cache/apk/* /root/.cache
|
||||
|
||||
# initial config; common for all flavors
|
||||
mkdir /cfg /w
|
||||
chmod 777 /cfg /w
|
||||
echo % /cfg > initcfg
|
||||
|
||||
# unpack sfx and dive in
|
||||
python3 copyparty-sfx.py --version
|
||||
cd /tmp/pe-copyparty.0
|
||||
|
||||
# steal the stuff we need
|
||||
mv copyparty partftpy ftp/* /usr/lib/python3.*/site-packages/
|
||||
|
||||
# golf
|
||||
cd /usr/lib/python3.*/
|
||||
rm -rf \
|
||||
/tmp/pe-* /z/copyparty-sfx.py \
|
||||
ensurepip pydoc_data turtle.py turtledemo lib2to3
|
||||
|
||||
# drop bytecode
|
||||
find / -xdev -name __pycache__ -print0 | xargs -0 rm -rf
|
||||
|
||||
# build the stuff we want
|
||||
python3 -m compileall -qj4 site-packages sqlite3 xml
|
||||
|
||||
# drop the stuff we dont
|
||||
find -name __pycache__ |
|
||||
grep -E 'ty/web/|/pycpar' |
|
||||
tr '\n' '\0' | xargs -0 rm -rf
|
||||
|
||||
# two-for-one:
|
||||
# 1) smoketest copyparty even starts
|
||||
# 2) build any bytecode we missed
|
||||
# this tends to race other builders (alle gode ting er tre)
|
||||
cd /z
|
||||
python3 -m copyparty \
|
||||
--ign-ebind -p$((1024+RANDOM)),$((1024+RANDOM)),$((1024+RANDOM)) \
|
||||
--no-crt -qi127.1 --exit=idx -e2dsa -e2ts
|
||||
|
||||
# output from -e2d
|
||||
rm -rf .hist
|
||||
@@ -6,6 +6,8 @@ set -e
|
||||
exit 1
|
||||
}
|
||||
|
||||
suf=-b1
|
||||
suf=
|
||||
sarchs="386 amd64 arm/v7 arm64/v8 ppc64le s390x"
|
||||
archs="amd64 arm s390x 386 arm64 ppc64le"
|
||||
imgs="dj iv min im ac"
|
||||
@@ -103,11 +105,12 @@ filt=
|
||||
# --pull=never does nothing at all btw
|
||||
(set -x
|
||||
$nice podman build \
|
||||
--squash \
|
||||
--pull=never \
|
||||
--from localhost/alpine-$a \
|
||||
-t copyparty-$i-$a \
|
||||
-t copyparty-$i-$a$suf \
|
||||
-f Dockerfile.$i . ||
|
||||
(echo $? $i-$a >> err)
|
||||
(echo $? $i-$a >> err; printf '%096d\n' $(seq 1 42))
|
||||
rm -f .blk
|
||||
) 2> >(tee $a.err | sed "s/^/$aa:/" >&2) > >(tee $a.out | sed "s/^/$aa:/") &
|
||||
done
|
||||
@@ -134,9 +137,10 @@ filt=
|
||||
variants=
|
||||
for a in $archs; do
|
||||
[[ " ${ngs[*]} " =~ " $i-$a " ]] && continue
|
||||
variants="$variants containers-storage:localhost/copyparty-$i-$a"
|
||||
variants="$variants containers-storage:localhost/copyparty-$i-$a$suf"
|
||||
done
|
||||
podman manifest create copyparty-$i $variants
|
||||
podman manifest rm copyparty-$i$suf || echo "(that's fine btw)"
|
||||
podman manifest create copyparty-$i$suf $variants
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ set -e
|
||||
echo
|
||||
|
||||
berr() { p=$(head -c 72 </dev/zero | tr '\0' =); printf '\n%s\n\n' $p; cat; printf '\n%s\n\n' $p; }
|
||||
aerr() { printf '%s\n' "$*" | berr; }
|
||||
|
||||
help() { exec cat <<'EOF'
|
||||
|
||||
@@ -28,9 +29,11 @@ help() { exec cat <<'EOF'
|
||||
#
|
||||
# `no-tfp` saves ~10k by removing the tftp server, disabling --tftp
|
||||
#
|
||||
# `no-zm` saves ~7k by removing the zeroconf mDNS server
|
||||
#
|
||||
# `no-smb` saves ~3.5k by removing the smb / cifs server
|
||||
#
|
||||
# `no-zm` saves ~k by removing the zeroconf mDNS server
|
||||
# `no-pf` saves ~2.8k by removing the option to download partyfuse
|
||||
#
|
||||
# _____________________________________________________________________
|
||||
# web features:
|
||||
@@ -52,10 +55,15 @@ help() { exec cat <<'EOF'
|
||||
#
|
||||
# `ign-wd` allows building an sfx without webdeps
|
||||
#
|
||||
# ---------------------------------------------------------------------
|
||||
#
|
||||
# _____________________________________________________________________
|
||||
# if you are on windows, you can use msys2:
|
||||
# PATH=/c/Users/$USER/AppData/Local/Programs/Python/Python310:"$PATH" ./make-sfx.sh fast
|
||||
#
|
||||
# _____________________________________________________________________
|
||||
# some usage examples:
|
||||
# ./scripts/make-sfx.sh lang eng no-cm no-hl no-dd no-fnt no-smb no-pf
|
||||
# ./scripts/rls.sh sfx lang eng no-cm no-hl no-dd no-fnt no-smb no-pf
|
||||
# (reduces v1.14.2 from 700k to 495k)
|
||||
|
||||
EOF
|
||||
}
|
||||
@@ -112,6 +120,7 @@ while [ ! -z "$1" ]; do
|
||||
no-tfp) no_tfp=1 ; ;;
|
||||
no-smb) no_smb=1 ; ;;
|
||||
no-zm) no_zm=1 ; ;;
|
||||
no-pf) no_pf=1 ; ;;
|
||||
no-fnt) no_fnt=1 ; ;;
|
||||
no-hl) no_hl=1 ; ;;
|
||||
no-dd) no_dd=1 ; ;;
|
||||
@@ -119,7 +128,6 @@ while [ ! -z "$1" ]; do
|
||||
dl-wd) dl_wd=1 ; ;;
|
||||
ign-wd) ign_wd=1 ; ;;
|
||||
fast) zopf= ; ;;
|
||||
ultra) ultra=1 ; ;;
|
||||
lang) shift;langs="$1"; ;;
|
||||
*) help ; ;;
|
||||
esac
|
||||
@@ -202,14 +210,14 @@ necho() {
|
||||
mv {markupsafe,jinja2} j2/
|
||||
|
||||
necho collecting pyftpdlib
|
||||
f="../build/pyftpdlib-1.5.9.tar.gz"
|
||||
f="../build/pyftpdlib-1.5.10.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.9.tar.gz;
|
||||
(url=https://files.pythonhosted.org/packages/cf/31/8d910cf40317dd0db74ba0b8558d0dee23c8b002468c14d3a5dec0e6e9fd/pyftpdlib-1.5.10.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv pyftpdlib-release-*/pyftpdlib .
|
||||
rm -rf pyftpdlib-release-* pyftpdlib/test
|
||||
mv pyftpdlib-*/pyftpdlib .
|
||||
rm -rf pyftpdlib-* pyftpdlib/test
|
||||
for f in pyftpdlib/_async{hat,ore}.py; do
|
||||
[ -e "$f" ] || continue;
|
||||
iawk 'NR<4||NR>27||!/^#/;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' $f
|
||||
@@ -413,7 +421,7 @@ rm have
|
||||
|
||||
ised /fork_process/d ftp/pyftpdlib/servers.py
|
||||
iawk '/^class _Base/{s=1}!s' ftp/pyftpdlib/authorizers.py
|
||||
iawk '/^ {0,4}[^ ]/{s=0}/^ {4}def (serve_forever|_loop)/{s=1}!s' ftp/pyftpdlib/servers.py
|
||||
iawk '/^ {0,4}[a-zA-Z]/{s=0}/^ {4}def (serve_forever|_loop)/{s=1}!s' ftp/pyftpdlib/servers.py
|
||||
rm -f ftp/pyftpdlib/{__main__,prefork}.py
|
||||
|
||||
[ $no_ftp ] &&
|
||||
@@ -428,6 +436,9 @@ rm -f ftp/pyftpdlib/{__main__,prefork}.py
|
||||
[ $no_zm ] &&
|
||||
rm -rf copyparty/mdns.py copyparty/stolen/dnslib
|
||||
|
||||
[ $no_pf ] &&
|
||||
rm -rf copyparty/web/a/partyfuse.py
|
||||
|
||||
[ $no_cm ] && {
|
||||
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
|
||||
echo h > copyparty/web/mde.html
|
||||
@@ -451,11 +462,16 @@ rm -f ftp/pyftpdlib/{__main__,prefork}.py
|
||||
ised 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' $f
|
||||
}
|
||||
|
||||
[ $langs ] &&
|
||||
[ $langs ] && {
|
||||
echo $langs | grep -q eng || {
|
||||
langs="eng|$langs"
|
||||
aerr "ERROR: removing english is not supported; will do this instead: $langs"
|
||||
}
|
||||
for f in copyparty/web/{browser.js,splash.js}; do
|
||||
gzip -d "$f.gz" || true
|
||||
iawk '/^\}/{l=0} !l; /^var Ls =/{l=1;next} o; /^\t["}]/{o=0} /^\t"'"$langs"'"/{o=1;print}' $f
|
||||
iawk '/^\}/{l=0} !l; /^var Ls =/{l=1;next} !l{next} o; /^\t["}]/{o=0} /^\t"'"$langs"'"/{o=1;print}' $f
|
||||
done
|
||||
}
|
||||
|
||||
[ ! $repack ] && {
|
||||
# uncomment
|
||||
|
||||
@@ -34,6 +34,6 @@ d1420c8417fad7888766dd26b9706a87c63e8f33dceeb8e26d0056d5127b0b3ed9272e44b4b76113
|
||||
8a6e2b13a2ec4ef914a5d62aad3db6464d45e525a82e07f6051ed10474eae959069e165dba011aefb8207cdfd55391d73d6f06362c7eb247b08763106709526e mutagen-1.47.0-py3-none-any.whl
|
||||
0203ec2551c4836696cfab0b2c9fff603352f03fa36e7476e2e1ca7ec57a3a0c24bd791fcd92f342bf817f0887854d9f072e0271c643de4b313d8c9569ba8813 packaging-24.1-py3-none-any.whl
|
||||
2be320b4191f208cdd6af183c77ba2cf460ea52164ee45ac3ff17d6dfa57acd9deff016636c2dd42a21f4f6af977d5f72df7dacf599bebcf41757272354d14c1 pillow-10.4.0-cp312-cp312-win_amd64.whl
|
||||
776378f5414efd26ec8a1cb3228a7b5fdf6afca3fa335a0e9b071266d55d9d9e66ee157c25a468a05bfa70ccd33c48b101998523fc6ff6bcf5e82a1d81ed0af8 pyinstaller-6.9.0-py3-none-win_amd64.whl
|
||||
c0af77d2a57cb063ab038dc986ed3582bc5acc8c8bd91d726101935d6388f50854ddbca26bc846ed5d1022cdee4d96242938c66f0ddc4565c36b60d691064db8 pyinstaller_hooks_contrib-2024.7-py2.py3-none-any.whl
|
||||
2f9a11ffae6d9f1ed76bf816f28812fcba71f87080b0c92e52bfccb46243118c5803a7e25dd78003ca7d66501bfcdce8ff7c691c63c0038b0d409ca3842dcc89 python-3.12.4-amd64.exe
|
||||
896ddddbd4b85e86e0600cb65eb4c07fbc7f3802d47e7f660411e20b5500831469b97ed4770f25820f4e75cbfac40308da624fd86d4f62e578149d5c276a9cde pyinstaller-6.10.0-py3-none-win_amd64.whl
|
||||
873781decaeef07f6a79b0ed8b9f35f3fa534a1ea0d866991e40278a10818fa5b60c70b0d5828971b045364f1099694cd1e5d5d60d480acb93fcfbfbced4a09e pyinstaller_hooks_contrib-2024.8-py3-none-any.whl
|
||||
0572c6345f6a4f7f3e5c2ff858e3ca7ca54ae4478f3d59d8e18cb0f596e61dcf12aef579db229e83d63b30f15d6684ee6bb3feaea9413e5e636a503933057678 python-3.12.5-amd64.exe
|
||||
|
||||
@@ -39,9 +39,9 @@ fns=(
|
||||
mutagen-1.47.0-py3-none-any.whl
|
||||
packaging-24.1-py3-none-any.whl
|
||||
pillow-10.4.0-cp312-cp312-win_amd64.whl
|
||||
pyinstaller-6.9.0-py3-none-win_amd64.whl
|
||||
pyinstaller_hooks_contrib-2024.7-py2.py3-none-any.whl
|
||||
python-3.12.4-amd64.exe
|
||||
pyinstaller-6.10.0-py3-none-win_amd64.whl
|
||||
pyinstaller_hooks_contrib-2024.8-py3-none-any.whl
|
||||
python-3.12.5-amd64.exe
|
||||
)
|
||||
[ $w7 ] && fns+=( # u2c stuff
|
||||
certifi-2024.2.2-py3-none-any.whl
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# if specified, keep the following sfx flags last: gz gzz fast
|
||||
|
||||
parallel=1
|
||||
|
||||
[ -e make-sfx.sh ] || cd scripts
|
||||
@@ -35,6 +37,14 @@ f=../dist/copyparty-sfx
|
||||
|
||||
$f$s.py --version >/dev/null
|
||||
|
||||
while [ "$1" ]; do
|
||||
case "$1" in
|
||||
gz*) break;;
|
||||
fast) break;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
[ $parallel -gt 1 ] && {
|
||||
printf '\033[%s' s 2r H "0;1;37;44mbruteforcing sfx size -- press enter to terminate" K u "7m $* " K $'27m\n'
|
||||
trap "rm -f .sfx-run; printf '\033[%s' s r u" INT TERM EXIT
|
||||
|
||||
@@ -103,6 +103,9 @@ copyparty/web/mde.html,
|
||||
copyparty/web/mde.js,
|
||||
copyparty/web/msg.css,
|
||||
copyparty/web/msg.html,
|
||||
copyparty/web/shares.css,
|
||||
copyparty/web/shares.html,
|
||||
copyparty/web/shares.js,
|
||||
copyparty/web/splash.css,
|
||||
copyparty/web/splash.html,
|
||||
copyparty/web/splash.js,
|
||||
|
||||
@@ -3,34 +3,39 @@ set -ex
|
||||
|
||||
# PYTHONPATH=.:~/dev/partftpy/ taskset -c 0 python3 -m copyparty -v srv::r -v srv/junk:junk:A --tftp 3969
|
||||
|
||||
get_src=~/dev/copyparty/srv/palette.flac
|
||||
get_fn=${get_src##*/}
|
||||
get_src=~/dev/copyparty/srv/ro/palette.flac
|
||||
get_fp=ro/${get_src##*/} # server url
|
||||
get_fn=${get_fp##*/} # just filename
|
||||
|
||||
put_src=~/Downloads/102.zip
|
||||
put_dst=~/dev/copyparty/srv/junk/102.zip
|
||||
|
||||
export PATH="$PATH:$HOME/src/atftp-0.8.0"
|
||||
|
||||
cd /dev/shm
|
||||
|
||||
echo curl get 1428 v4; curl --tftp-blksize 1428 tftp://127.0.0.1:3969/$get_fn | cmp $get_src || exit 1
|
||||
echo curl get 1428 v6; curl --tftp-blksize 1428 tftp://[::1]:3969/$get_fn | cmp $get_src || exit 1
|
||||
echo curl get 1428 v4; curl --tftp-blksize 1428 tftp://127.0.0.1:3969/$get_fp | cmp $get_src || exit 1
|
||||
echo curl get 1428 v6; curl --tftp-blksize 1428 tftp://[::1]:3969/$get_fp | cmp $get_src || exit 1
|
||||
|
||||
echo curl put 1428 v4; rm -f $put_dst && curl --tftp-blksize 1428 -T $put_src tftp://127.0.0.1:3969/junk/ && cmp $put_src $put_dst || exit 1
|
||||
echo curl put 1428 v6; rm -f $put_dst && curl --tftp-blksize 1428 -T $put_src tftp://[::1]:3969/junk/ && cmp $put_src $put_dst || exit 1
|
||||
|
||||
echo atftp get 1428; rm -f $get_fn && ~/src/atftp/atftp --option "blksize 1428" -g -r $get_fn 127.0.0.1 3969 && cmp $get_fn $get_src || exit 1
|
||||
echo atftp get 1428; rm -f $get_fn && atftp --option "blksize 1428" -g -r $get_fp -l $get_fn 127.0.0.1 3969 && cmp $get_fn $get_src || exit 1
|
||||
|
||||
echo atftp put 1428; rm -f $put_dst && ~/src/atftp/atftp --option "blksize 1428" 127.0.0.1 3969 -p -l $put_src -r junk/102.zip && cmp $put_src $put_dst || exit 1
|
||||
echo atftp put 1428; rm -f $put_dst && atftp --option "blksize 1428" 127.0.0.1 3969 -p -l $put_src -r junk/102.zip && cmp $put_src $put_dst || exit 1
|
||||
|
||||
echo tftp-hpa get; rm -f $put_dst && tftp -v -m binary 127.0.0.1 3969 -c get $get_fn && cmp $get_src $get_fn || exit 1
|
||||
echo tftp-hpa get; rm -f $get_fn && tftp -v -m binary 127.0.0.1 3969 -c get $get_fp && cmp $get_src $get_fn || exit 1
|
||||
|
||||
echo tftp-hpa put; rm -f $put_dst && tftp -v -m binary 127.0.0.1 3969 -c put $put_src junk/102.zip && cmp $put_src $put_dst || exit 1
|
||||
|
||||
echo curl get 512; curl tftp://127.0.0.1:3969/$get_fn | cmp $get_src || exit 1
|
||||
echo curl get 512; curl tftp://127.0.0.1:3969/$get_fp | cmp $get_src || exit 1
|
||||
|
||||
echo curl put 512; rm -f $put_dst && curl -T $put_src tftp://127.0.0.1:3969/junk/ && cmp $put_src $put_dst || exit 1
|
||||
|
||||
echo atftp get 512; rm -f $get_fn && ~/src/atftp/atftp -g -r $get_fn 127.0.0.1 3969 && cmp $get_fn $get_src || exit 1
|
||||
echo atftp get 512; rm -f $get_fn && atftp -g -r $get_fp -l $get_fn 127.0.0.1 3969 && cmp $get_fn $get_src || exit 1
|
||||
|
||||
echo atftp put 512; rm -f $put_dst && ~/src/atftp/atftp 127.0.0.1 3969 -p -l $put_src -r junk/102.zip && cmp $put_src $put_dst || exit 1
|
||||
echo atftp put 512; rm -f $put_dst && atftp 127.0.0.1 3969 -p -l $put_src -r junk/102.zip && cmp $put_src $put_dst || exit 1
|
||||
|
||||
echo nice
|
||||
|
||||
rm -f $get_fn
|
||||
|
||||
scripts/tlcheck.sh (new executable file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# usage: ./scripts/tlcheck.sh eng chi copyparty/web/browser.js
|
||||
|
||||
awk <"$3" -v lang1=\"$1\": -v lang2=\"$2\": '
|
||||
/^\t\}/{fa=0;fb=0}
|
||||
!/":/{next}
|
||||
$0~lang1{fa=1}
|
||||
$0~lang2{fb=1}
|
||||
fa{a[ia++]=$0}
|
||||
fb{b[ib++]=$0}
|
||||
END{for (i=0;i<ia;i++) printf "%s\n%s\n\n",a[i],b[i]}
|
||||
' |
|
||||
awk -v apos=\' -v quot=\" '
|
||||
# count special chars and prefix to line
|
||||
function c(ch) {
|
||||
m=$0;
|
||||
gsub(ch,"",m);
|
||||
t=t sprintf("%s%d ", ch, length($0)-length(m))
|
||||
}
|
||||
!$0 && t!=tp {
|
||||
print "\n\033[1;37;41m====DIFF===="
|
||||
}
|
||||
!$0 { print; next; }
|
||||
{
|
||||
tp=t; t="";
|
||||
c(quot);
|
||||
c(apos);
|
||||
c("<");
|
||||
c(">");
|
||||
c("{");
|
||||
c("}");
|
||||
c("&");
|
||||
c("\\\$");
|
||||
c("\\\\");
|
||||
print t $0;
|
||||
}
|
||||
' |
|
||||
sed -r $'
|
||||
s/\\\\/\033[1;37;41m\\\\\033[0m/g;
|
||||
s/\$N/\033[1;37;45m$N\033[0m/g;
|
||||
s/([{}])/\033[34m\\1\033[0m/g;
|
||||
s/"/\033[44m"\033[0m/g;
|
||||
s/\'/\033[45m\'\033[0m/g;
|
||||
s/&/\033[1;43;30m&\033[0m/g;
|
||||
s/([<>])/\033[30;47m\\1\033[0m/g
|
||||
' |
|
||||
sed -r 's/\t+//' |
|
||||
less -R
|
||||
@@ -8,6 +8,12 @@ import sys
|
||||
import tokenize
|
||||
|
||||
|
||||
try:
|
||||
FSTRING_MIDDLE = tokenize.FSTRING_MIDDLE
|
||||
except:
|
||||
FSTRING_MIDDLE = -9001
|
||||
|
||||
|
||||
def uncomment(fpath):
|
||||
"""modified https://stackoverflow.com/a/62074206"""
|
||||
|
||||
@@ -31,7 +37,7 @@ def uncomment(fpath):
|
||||
if start_line > last_lineno:
|
||||
last_col = 0
|
||||
|
||||
if start_col > last_col:
|
||||
if start_col > last_col and prev_toktype != FSTRING_MIDDLE:
|
||||
out += " " * (start_col - last_col)
|
||||
|
||||
is_legalese = (
|
||||
@@ -48,6 +54,10 @@ def uncomment(fpath):
|
||||
out += token_string
|
||||
else:
|
||||
out += '"a"'
|
||||
elif token_type == FSTRING_MIDDLE:
|
||||
out += token_string.replace(r"{", r"{{").replace(r"}", r"}}")
|
||||
if not code and token_string.strip():
|
||||
code = True
|
||||
elif token_type != tokenize.COMMENT:
|
||||
out += token_string
|
||||
if not code and token_string.strip():
|
||||
|
||||
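the FSTRING_MIDDLE handling above exists because, on Python 3.12+, the body of an f-string is tokenized as its own token type, so any literal braces in it have to be re-doubled when the minified source is reassembled. A quick stdlib-only way to inspect the token stream (also runs on older Pythons, where the whole f-string comes through as a single STRING token):

```python
import io
import tokenize

src = 'x = f"sum={1 + 1} and a {{literal}} brace"\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    # on py3.12+ this prints FSTRING_START / FSTRING_MIDDLE / FSTRING_END
    # tokens; on older versions the whole f-string is one STRING token
    print(tokenize.tok_name[tok.type], repr(tok.string))
```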
tests/test_dedup.py (new file, 138 lines)
@@ -0,0 +1,138 @@
|
||||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
from itertools import product
|
||||
|
||||
from copyparty.authsrv import AuthSrv
|
||||
from copyparty.httpcli import HttpCli
|
||||
from tests import util as tu
|
||||
from tests.util import Cfg
|
||||
|
||||
|
||||
class TestDedup(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.td = tu.get_ramdisk()
|
||||
|
||||
def tearDown(self):
|
||||
os.chdir(tempfile.gettempdir())
|
||||
shutil.rmtree(self.td)
|
||||
|
||||
def reset(self):
|
||||
td = os.path.join(self.td, "vfs")
|
||||
if os.path.exists(td):
|
||||
shutil.rmtree(td)
|
||||
os.mkdir(td)
|
||||
os.chdir(td)
|
||||
return td
|
||||
|
||||
def test(self):
|
||||
quick = True # sufficient for regular smoketests
|
||||
# quick = False
|
||||
|
||||
dirnames = ["d1", "d2"]
|
||||
filenames = ["f1", "f2"]
|
||||
files = [
|
||||
(
|
||||
"one",
|
||||
"BfcDQQeKz2oG1CPSFyD5ZD1flTYm2IoCY23DqeeVgq6w",
|
||||
"XMbpLRqVdtGmgggqjUI6uSoNMTqZVX4K6zr74XA1BRKc",
|
||||
),
|
||||
(
|
||||
"two",
|
||||
"ko1Q0eJNq3zKYs_oT83Pn8aVFgonj5G1wK8itwnYL4qj",
|
||||
"fxvihWlnQIbVbUPr--TxyV41913kPLhXPD1ngXYxDfou",
|
||||
),
|
||||
]
|
||||
# (data, chash, wark)
|
||||
|
||||
# 3072 uploads in total
|
||||
self.ctr = 3072
|
||||
self.conn = None
|
||||
for e2d in [True, False]:
|
||||
for dn1, fn1, f1 in product(dirnames, filenames, files):
|
||||
for dn2, fn2, f2 in product(dirnames, filenames, files):
|
||||
for dn3, fn3, f3 in product(dirnames, filenames, files):
|
||||
self.reset()
|
||||
if self.conn:
|
||||
self.conn.hsrv.hub.up2k.shutdown()
|
||||
self.args = Cfg(v=[".::A"], a=[], e2d=e2d)
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
self.conn = tu.VHttpConn(
|
||||
self.args, self.asrv, self.log, b"", True
|
||||
)
|
||||
self.do_post(dn1, fn1, f1, True)
|
||||
self.do_post(dn2, fn2, f2, False)
|
||||
self.do_post(dn3, fn3, f3, False)
|
||||
if quick:
|
||||
break
|
||||
|
||||
def do_post(self, dn, fn, fi, first):
|
||||
print("\n\n# do_post", self.ctr, repr((dn, fn, fi, first)))
|
||||
self.ctr -= 1
|
||||
|
||||
data, chash, wark = fi
|
||||
hs = self.handshake(dn, fn, fi)
|
||||
self.assertEqual(hs["wark"], wark)
|
||||
|
||||
sfn = hs["name"]
|
||||
if sfn == fn:
|
||||
print("using original name " + fn)
|
||||
else:
|
||||
print(fn + " got renamed to " + sfn)
|
||||
if first:
|
||||
raise Exception("wait what")
|
||||
|
||||
if hs["hash"]:
|
||||
self.assertEqual(hs["hash"][0], chash)
|
||||
self.put_chunk(dn, wark, chash, data)
|
||||
elif first:
|
||||
raise Exception("found first; %r, %r" % ((dn, fn, fi), hs))
|
||||
|
||||
h, b = self.curl("%s/%s" % (dn, sfn))
|
||||
self.assertEqual(b, data)
|
||||
|
||||
def handshake(self, dn, fn, fi):
|
||||
hdr = "POST /%s/ HTTP/1.1\r\nConnection: close\r\nContent-Type: text/plain\r\nContent-Length: %d\r\n\r\n"
|
||||
msg = {"name": fn, "size": 3, "lmod": 1234567890, "life": 0, "hash": [fi[1]]}
|
||||
buf = json.dumps(msg).encode("utf-8")
|
||||
buf = (hdr % (dn, len(buf))).encode("utf-8") + buf
|
||||
print("HS -->", buf)
|
||||
HttpCli(self.conn.setbuf(buf)).run()
|
||||
ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
|
||||
print("HS <--", ret)
|
||||
return json.loads(ret[1])
|
||||
|
||||
def put_chunk(self, dn, wark, chash, data):
|
||||
msg = [
|
||||
"POST /%s/ HTTP/1.1" % (dn,),
|
||||
"Connection: close",
|
||||
"Content-Type: application/octet-stream",
|
||||
"Content-Length: 3",
|
||||
"X-Up2k-Hash: " + chash,
|
||||
"X-Up2k-Wark: " + wark,
|
||||
"",
|
||||
data,
|
||||
]
|
||||
buf = "\r\n".join(msg).encode("utf-8")
|
||||
print("PUT -->", buf)
|
||||
HttpCli(self.conn.setbuf(buf)).run()
|
||||
ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
|
||||
self.assertEqual(ret[1], "thank")
|
||||
|
||||
def curl(self, url, binary=False):
|
||||
h = "GET /%s HTTP/1.1\r\nConnection: close\r\n\r\n"
|
||||
HttpCli(self.conn.setbuf((h % (url,)).encode("utf-8"))).run()
|
||||
if binary:
|
||||
h, b = self.conn.s._reply.split(b"\r\n\r\n", 1)
|
||||
return [h.decode("utf-8"), b]
|
||||
|
||||
return self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
|
||||
|
||||
def log(self, src, msg, c=0):
|
||||
print(msg)
|
||||
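The test above talks to copyparty's up2k endpoint directly: a handshake POST with a JSON body announces the filename, size, lmod and chunk hashes, the reply carries the wark (whole-file id) plus whichever chunk hashes the server still needs, and each missing chunk is then POSTed with X-Up2k-Hash / X-Up2k-Wark headers. A condensed sketch of the same exchange against a running instance (host, port and volume are assumptions, and the hash would have to be a real up2k chunk hash for the upload to be accepted):

```python
# sketch of the up2k wire format exercised by TestDedup.handshake()/put_chunk();
# 127.0.0.1:3923 and the "d1" folder are assumptions, not taken from the test
import json
import urllib.request

base = "http://127.0.0.1:3923/d1/"
hs = {"name": "f1", "size": 3, "lmod": 1234567890, "life": 0, "hash": ["<chunk-hash>"]}
req = urllib.request.Request(base, json.dumps(hs).encode("utf-8"), {"Content-Type": "text/plain"})
reply = json.load(urllib.request.urlopen(req))  # has "wark", "name", and the chunks still needed

for chash in reply["hash"]:  # empty if the file was deduplicated server-side
    h = {
        "Content-Type": "application/octet-stream",
        "X-Up2k-Hash": chash,
        "X-Up2k-Wark": reply["wark"],
    }
    urllib.request.urlopen(urllib.request.Request(base, b"one", h))
```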
@@ -24,6 +24,10 @@ def hdr(query, uname):


class TestDots(unittest.TestCase):
    def __init__(self, *a, **ka):
        super(TestDots, self).__init__(*a, **ka)
        self.is_dut = True

    def setUp(self):
        self.td = tu.get_ramdisk()
111
tests/test_hooks.py
Normal file
@@ -0,0 +1,111 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

import os
import shutil
import tempfile
import unittest

from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
from tests.util import Cfg


def hdr(query):
    h = "GET /{} HTTP/1.1\r\nPW: o\r\nConnection: close\r\n\r\n"
    return h.format(query).encode("utf-8")


class TestHooks(unittest.TestCase):
    def setUp(self):
        self.td = tu.get_ramdisk()

    def tearDown(self):
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def reset(self):
        td = os.path.join(self.td, "vfs")
        if os.path.exists(td):
            shutil.rmtree(td)
        os.mkdir(td)
        os.chdir(td)
        return td

    def test(self):
        vcfg = ["a/b/c/d:c/d:A", "a:a:r"]

        scenarios = (
            ('{"vp":"x/y"}', "c/d/a.png", "c/d/x/y/a.png"),
            ('{"vp":"x/y"}', "c/d/e/a.png", "c/d/e/x/y/a.png"),
            ('{"vp":"../x/y"}', "c/d/e/a.png", "c/d/x/y/a.png"),
            ('{"ap":"x/y"}', "c/d/a.png", "c/d/x/y/a.png"),
            ('{"ap":"x/y"}', "c/d/e/a.png", "c/d/e/x/y/a.png"),
            ('{"ap":"../x/y"}', "c/d/e/a.png", "c/d/x/y/a.png"),
            ('{"ap":"../x/y"}', "c/d/a.png", "a/b/c/x/y/a.png"),
            ('{"fn":"b.png"}', "c/d/a.png", "c/d/b.png"),
            ('{"vp":"x","fn":"b.png"}', "c/d/a.png", "c/d/x/b.png"),
        )

        for x in scenarios:
            print("\n\n\n", x)
            hooktxt, url_up, url_dl = x
            for hooktype in ("xbu", "xau"):
                for upfun in (self.put, self.bup):
                    self.reset()
                    self.makehook("""print('{"reloc":%s}')""" % (hooktxt,))
                    ka = {hooktype: ["j,c1,h.py"]}
                    self.args = Cfg(v=vcfg, a=["o:o"], e2d=True, **ka)
                    self.asrv = AuthSrv(self.args, self.log)

                    h, b = upfun(url_up)
                    self.assertIn("201 Created", h)
                    h, b = self.curl(url_dl)
                    self.assertEqual(b, "ok %s\n" % (url_up))

    def makehook(self, hs):
        with open("h.py", "wb") as f:
            f.write(hs.encode("utf-8"))

    def put(self, url):
        buf = "PUT /{0} HTTP/1.1\r\nPW: o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
        buf = buf.format(url, len(url) + 4).encode("utf-8")
        print("PUT -->", buf)
        conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
        HttpCli(conn).run()
        ret = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
        print("PUT <--", ret)
        return ret

    def bup(self, url):
        hdr = "POST /%s HTTP/1.1\r\nPW: o\r\nConnection: close\r\nContent-Type: multipart/form-data; boundary=XD\r\nContent-Length: %d\r\n\r\n"
        bdy = '--XD\r\nContent-Disposition: form-data; name="act"\r\n\r\nbput\r\n--XD\r\nContent-Disposition: form-data; name="f"; filename="%s"\r\n\r\n'
        ftr = "\r\n--XD--\r\n"
        try:
            url, fn = url.rsplit("/", 1)
        except:
            fn = url
            url = ""

        buf = (bdy % (fn,) + "ok %s/%s\n" % (url, fn) + ftr).encode("utf-8")
        buf = (hdr % (url, len(buf))).encode("utf-8") + buf
        print("PoST -->", buf)
        conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
        HttpCli(conn).run()
        ret = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
        print("POST <--", ret)
        return ret

    def curl(self, url, binary=False):
        conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
        HttpCli(conn).run()
        if binary:
            h, b = conn.s._reply.split(b"\r\n\r\n", 1)
            return [h.decode("utf-8"), b]

        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def log(self, src, msg, c=0):
        print(msg)
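The hooks this test generates are ordinary scripts registered as xbu (before upload) or xau (after upload) with the j,c1 flags, and whatever they print as a {"reloc": ...} JSON object relocates the upload by virtual path, absolute path, or filename. A standalone hook in the same spirit (illustrative only, not one of the test's generated hooks):

```python
#!/usr/bin/env python3
# example upload hook along the lines of the h.py files TestHooks writes;
# printing a "reloc" object moves the upload, here into a per-month subfolder
import json
import time

print(json.dumps({"reloc": {"vp": time.strftime("%Y-%m")}}))
```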
@@ -10,6 +10,7 @@ import tarfile
import tempfile
import time
import unittest
import zipfile

from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
@@ -31,6 +32,9 @@ class TestHttpCli(unittest.TestCase):
        shutil.rmtree(self.td)

    def test(self):
        test_tar = True
        test_zip = True

        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)
@@ -40,6 +44,7 @@ class TestHttpCli(unittest.TestCase):
        self.can_write = ["wa", "wo", "aa", "ao"]
        self.fn = "g{:x}g".format(int(time.time() * 3))

        tctr = 0
        allfiles = []
        allvols = []
        for top in self.dtypes:
@@ -83,6 +88,7 @@ class TestHttpCli(unittest.TestCase):
            self.asrv = AuthSrv(self.args, self.log)
            vfiles = [x for x in allfiles if x.startswith(top)]
            for fp in vfiles:
                tctr += 1
                rok, wok = self.can_rw(fp)
                furl = fp.split("/", 1)[1]
                durl = furl.rsplit("/", 1)[0] if "/" in furl else ""
@@ -112,39 +118,61 @@ class TestHttpCli(unittest.TestCase):
                    eprint("\033[33m{}\n# {}\033[0m".format(ret, url))
                    self.fail()

                # tar
                url = durl + "?tar"
                h, b = self.curl(url, True)
                # with open(os.path.join(td, "tar"), "wb") as f:
                #     f.write(b)
                try:
                    tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames()
                except:
                    if "HTTP/1.1 403 Forbidden" not in h and b != b"\nJ2EOT":
                        eprint("bad tar?", url, h, b)
                        raise
                    tar = []
                tar = [x.split("/", 1)[1] for x in tar]
                tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
                tar = [[x] + self.can_rw(x) for x in tar]
                tar_ok = [x[0] for x in tar if x[1]]
                tar_ng = [x[0] for x in tar if not x[1]]
                self.assertEqual([], tar_ng)

                if durl.split("/")[-1] in self.can_read:
                    # expected files in archives
                    if rok:
                        ref = [x for x in vfiles if self.in_dive(top + "/" + durl, x)]
                        for f in ref:
                            ok = f in tar_ok
                            pr = print if ok else eprint
                            pr("{}: {}".format("ok" if ok else "NG", f))
                        ref.sort()
                    else:
                        ref = []

                    if test_tar:
                        url = durl + "?tar"
                        h, b = self.curl(url, True)
                        try:
                            tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames()
                        except:
                            if "HTTP/1.1 403 Forbidden" not in h and b != b"\nJ2EOT":
                                eprint("bad tar?", url, h, b)
                                raise
                            tar = []
                        tar = [x.split("/", 1)[1] for x in tar]
                        tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
                        tar = [[x] + self.can_rw(x) for x in tar]
                        tar_ok = [x[0] for x in tar if x[1]]
                        tar_ng = [x[0] for x in tar if not x[1]]
                        tar_ok.sort()
                        self.assertEqual(ref, tar_ok)
                        self.assertEqual([], tar_ng)

                    if test_zip:
                        url = durl + "?zip"
                        h, b = self.curl(url, True)
                        try:
                            with zipfile.ZipFile(io.BytesIO(b), "r") as zf:
                                zfi = zf.infolist()
                        except:
                            if "HTTP/1.1 403 Forbidden" not in h and b != b"\nJ2EOT":
                                eprint("bad zip?", url, h, b)
                                raise
                            zfi = []
                        zfn = [x.filename.split("/", 1)[1] for x in zfi]
                        zfn = ["/".join([y for y in [top, durl, x] if y]) for x in zfn]
                        zfn = [[x] + self.can_rw(x) for x in zfn]
                        zf_ok = [x[0] for x in zfn if x[1]]
                        zf_ng = [x[0] for x in zfn if not x[1]]
                        zf_ok.sort()
                        self.assertEqual(ref, zf_ok)
                        self.assertEqual([], zf_ng)

                # stash
                h, ret = self.put(url)
                res = h.startswith("HTTP/1.1 201 ")
                self.assertEqual(res, wok)
                if wok:
                    vp = h.split("\nLocation: http://a:1/")[1].split("\r")[0]
                    vn, rem = self.asrv.vfs.get(vp, "*", False, False)
                    ap = os.path.join(vn.realpath, rem)
                    os.unlink(ap)

    def can_rw(self, fp):
        # lowest non-neutral folder declares permissions
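The ?tar and ?zip checks above exercise copyparty's on-the-fly folder archives and assert that the archive contains exactly the files the account may read. The same request from an ordinary client would look roughly like this (local address and folder name are assumptions):

```python
# download a folder as a streaming tar and list its members,
# mirroring what the test does with self.curl(durl + "?tar", True)
import io
import tarfile
import urllib.request

buf = urllib.request.urlopen("http://127.0.0.1:3923/somefolder/?tar").read()
print(tarfile.open(fileobj=io.BytesIO(buf), mode="r|").getnames())
```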
@@ -3,9 +3,9 @@
from __future__ import print_function, unicode_literals

import os
import platform
import re
import shutil
import socket
import subprocess as sp
import sys
import tempfile
@@ -15,9 +15,7 @@ from argparse import Namespace

import jinja2

WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
from copyparty.__init__ import MACOS, WINDOWS, E

J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)  # type: ignore
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}\nJ2EOT")
@@ -41,10 +39,11 @@ if MACOS:
    # 25% faster; until any tests do symlink stuff


from copyparty.__init__ import E
from copyparty.__main__ import init_E
from copyparty.broker_thr import BrokerThr
from copyparty.ico import Ico
from copyparty.u2idx import U2idx
from copyparty.up2k import Up2k
from copyparty.util import FHC, CachedDict, Garda, Unrecv

init_E(E)
@@ -68,6 +67,13 @@ def chkcmd(argv):

def get_ramdisk():
    def subdir(top):
        for d in os.listdir(top):
            if not d.startswith("cptd-"):
                continue
            p = os.path.join(top, d)
            st = os.stat(p)
            if time.time() - st.st_mtime > 300:
                shutil.rmtree(p)
        ret = os.path.join(top, "cptd-{}".format(os.getpid()))
        shutil.rmtree(ret, True)
        os.mkdir(ret)
@@ -111,13 +117,13 @@ class Cfg(Namespace):
    def __init__(self, a=None, v=None, c=None, **ka0):
        ka = {}

        ex = "daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw og og_no_head og_s_title q rand smb srch_dbg stats uqe vague_403 vc ver xdev xlink xvol"
        ex = "chpw daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_db_ip no_dedup no_del no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw og og_no_head og_s_title q rand smb srch_dbg stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
        ka.update(**{k: False for k in ex.split()})

        ex = "dotpart dotsrch no_dhash no_fastboot no_rescan no_sendfile no_snap no_voldump re_dhash plain_ip"
        ex = "dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_snap no_voldump re_dhash plain_ip"
        ka.update(**{k: True for k in ex.split()})

        ex = "ah_cli ah_gen css_browser hist js_browser mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua"
        ex = "ah_cli ah_gen css_browser hist js_browser js_other mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua"
        ka.update(**{k: None for k in ex.split()})

        ex = "hash_mt srch_time u2abort u2j u2sz"
@@ -129,9 +135,12 @@ class Cfg(Namespace):
        ex = "db_act k304 loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo"
        ka.update(**{k: 0 for k in ex.split()})

        ex = "ah_alg bname doctitle df exit favico idp_h_usr html_head lg_sbf log_fk md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i tcolor textfiles unlist vname R RS SR"
        ex = "ah_alg bname chpw_db doctitle df exit favico idp_h_usr ipa html_head lg_sbf log_fk md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i shr tcolor textfiles unlist vname xff_src R RS SR"
        ka.update(**{k: "" for k in ex.split()})

        ex = "ban_403 ban_404 ban_422 ban_pw ban_url"
        ka.update(**{k: "no" for k in ex.split()})

        ex = "grp on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
        ka.update(**{k: [] for k in ex.split()})

@@ -162,6 +171,7 @@ class Cfg(Namespace):
            s_wr_sz=256 * 1024,
            sort="href",
            srch_hits=99999,
            SRS="/",
            th_covers=["folder.png"],
            th_coversd=["folder.png"],
            th_covers_set=set(["folder.png"]),
@@ -178,6 +188,10 @@ class Cfg(Namespace):


class NullBroker(object):
    def __init__(self, args, asrv):
        self.args = args
        self.asrv = asrv

    def say(self, *args):
        pass

@@ -189,6 +203,7 @@ class VSock(object):
    def __init__(self, buf):
        self._query = buf
        self._reply = b""
        self.family = socket.AF_INET
        self.sendall = self.send

    def recv(self, sz):
@@ -207,19 +222,37 @@ class VSock(object):
        pass


class VHub(object):
    def __init__(self, args, asrv, log):
        self.args = args
        self.asrv = asrv
        self.log = log
        self.is_dut = True
        self.up2k = Up2k(self)


class VBrokerThr(BrokerThr):
    def __init__(self, hub):
        self.hub = hub
        self.log = hub.log
        self.args = hub.args
        self.asrv = hub.asrv


class VHttpSrv(object):
    def __init__(self, args, asrv, log):
        self.args = args
        self.asrv = asrv
        self.log = log
        self.hub = None

        self.broker = NullBroker()
        self.broker = NullBroker(args, asrv)
        self.prism = None
        self.bans = {}
        self.nreq = 0
        self.nsus = 0

        aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
        aliases = ["splash", "shares", "browser", "browser2", "msg", "md", "mde"]
        self.j2 = {x: J2_FILES for x in aliases}

        self.gpwd = Garda("")
@@ -238,18 +271,25 @@ class VHttpSrv(object):
        return self.u2idx


class VHttpSrvUp2k(VHttpSrv):
    def __init__(self, args, asrv, log):
        super(VHttpSrvUp2k, self).__init__(args, asrv, log)
        self.hub = VHub(args, asrv, log)
        self.broker = VBrokerThr(self.hub)


class VHttpConn(object):
    def __init__(self, args, asrv, log, buf):
    def __init__(self, args, asrv, log, buf, use_up2k=False):
        self.t0 = time.time()
        self.s = VSock(buf)
        self.sr = Unrecv(self.s, None)  # type: ignore
        self.aclose = {}
        self.addr = ("127.0.0.1", "42069")
        self.args = args
        self.asrv = asrv
        self.bans = {}
        self.freshen_pwd = 0.0
        self.hsrv = VHttpSrv(args, asrv, log)

        Ctor = VHttpSrvUp2k if use_up2k else VHttpSrv
        self.hsrv = Ctor(args, asrv, log)
        self.ico = Ico(args)
        self.ipa_nm = None
        self.lf_url = None
@@ -265,6 +305,12 @@ class VHttpConn(object):
        self.u2fh = FHC()

        self.get_u2idx = self.hsrv.get_u2idx
        self.setbuf(buf)

    def setbuf(self, buf):
        self.s = VSock(buf)
        self.sr = Unrecv(self.s, None)  # type: ignore
        return self

if WINDOWS:
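Taken together, the new VHub / VBrokerThr / VHttpSrvUp2k plumbing lets a test push requests through a real Up2k instance just by passing use_up2k=True to VHttpConn; the pattern used by the new tests looks roughly like this (a sketch assuming it is run from a checkout of the repo):

```python
# minimal use of the extended test harness, mirroring TestDedup.test()
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests.util import Cfg, VHttpConn

def log(src, msg, c=0):
    print(msg)

args = Cfg(v=[".::A"], a=[], e2d=True)        # one volume, everyone gets All perms
asrv = AuthSrv(args, log)
conn = VHttpConn(args, asrv, log, b"", True)  # True selects VHttpSrvUp2k (VHub + VBrokerThr)
HttpCli(conn.setbuf(b"GET / HTTP/1.1\r\nConnection: close\r\n\r\n")).run()
print(conn.s._reply.split(b"\r\n", 1)[0])     # expected: b'HTTP/1.1 200 OK'
```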