Compare commits

...

84 Commits

Author SHA1 Message Date
ed
a0c1239246 v1.8.0 2023-06-26 00:05:12 +00:00
ed
b8e851c332 cloudflare update + cosmetics:
* toastb padding fixes scrollbar on norwegian 403 in firefox
* fix text aspect ratio in seekbar on compact toggle
* crashpage had link overlaps on homepage
2023-06-25 23:09:29 +00:00
ed
baaf2eb24d include mdns names in tls cert 2023-06-25 22:06:35 +00:00
ed
e197895c10 support hashed passwords; closes #39 2023-06-25 21:50:33 +00:00
ed
cb75efa05d md-editor: index file and trigger upload hooks 2023-06-20 18:11:35 +00:00
ed
8b0cf2c982 volflags to limit volume size / num files; closes #40 2023-06-19 00:42:45 +00:00
ed
fc7d9e1f9c update pkgs to 1.7.6 2023-06-11 09:13:58 +00:00
ed
10caafa34c v1.7.6 2023-06-11 08:14:45 +00:00
ed
22cc22225a v1.7.5 2023-06-11 01:32:56 +00:00
ed
22dff4b0e5 update pkgs to 1.7.4 2023-06-11 01:26:25 +00:00
ed
a00ff2b086 v1.7.4 2023-06-11 00:07:38 +00:00
ed
e4acddc23b v1.7.3 2023-06-11 00:03:03 +00:00
ed
2b2d8e4e02 tls / gencert fixes 2023-06-10 23:34:34 +00:00
ed
5501d49032 prefer urandom for fk-salt unless cert.pem exists 2023-06-10 22:47:39 +00:00
ed
fa54b2eec4 generate tls certs 2023-06-10 22:46:24 +00:00
ed
cb0160021f upgrade pyinstaller env/deps 2023-06-10 11:58:58 +00:00
ed
93a723d588 add --ansi to systemd, fix grid controls bg,
mention folder thumbs dependency on -e2d,
improve make-sfx warnings,
update changelog
2023-06-06 22:04:39 +00:00
ed
8ebe1fb5e8 mention cfssl.sh in the default-certificate warning,
and improve documentation inside cfssl.sh
2023-06-06 21:41:19 +00:00
clach04
2acdf685b1 Fix issue #33 - no color output expected when redirecting stdout 2023-06-05 01:58:49 +02:00
ed
9f122ccd16 make-sfx: option to auto-obtain webdeps 2023-06-04 23:46:38 +00:00
ed
03be26fafc improve check for type-hint support 2023-06-04 22:59:25 +00:00
ed
df5d309d6e document the make-sfx.sh fast option 2023-06-04 14:13:35 +00:00
ed
c355f9bd91 catch common environment issues (#32):
* error-message which explains how to run on py2 / older py3
   when trying to run from source
* check compatibility between jinja2 and cpython on startup
* verify that webdeps are present on startup
* verify that webdeps are present when building sfx
* make-sfx.sh grabs the strip-hints dependency
2023-06-04 13:13:36 +00:00
ed
9c28ba417e option to regex-exclude files in browser listings 2023-06-02 21:54:25 +00:00
ed
705b58c741 support the NO_COLOR environment variable
https://no-color.org/ and more importantly
https://youtu.be/biW5UVGkPMA?t=150
2023-06-02 20:22:57 +00:00
ed
510302d667 support ftps-only; closes #30 2023-06-02 19:02:50 +00:00
ed
025a537413 add option to show thumbs by default; closes #31 2023-06-02 18:41:21 +00:00
ed
60a1ff0fc0 macos: mute select() noise on wake from suspend 2023-05-19 16:37:52 +02:00
ed
f94a0b1bff update pkgs to 1.7.2 2023-05-13 00:49:46 +00:00
ed
4ccfeeb2cd v1.7.2 2023-05-13 00:00:07 +00:00
ed
2646f6a4f2 oh nice, looks like 3.18 fixed whatever broke in 3.17 2023-05-12 23:38:10 +00:00
ed
b286ab539e readme: add more examples 2023-05-12 22:41:06 +00:00
ed
2cca6e0922 warn when sharing certain system locations 2023-05-12 21:38:16 +00:00
ed
db51f1b063 cfg: allow trailing colon on category headers 2023-05-12 21:01:34 +00:00
ed
d979c47f50 optimize clearTimeout + always shrink upload panes after completion + fix GET alignment 2023-05-12 20:46:45 +00:00
ed
e64b87b99b dont hardlink symlinks (they could be relative) 2023-05-12 20:41:09 +00:00
ed
b985011a00 upgrade docker to alpine 3.18:
* enables chiptune player
* smaller containers (generate pycache at runtime)
2023-05-11 06:56:21 +00:00
ed
c2ed2314c8 pkg/arch: add setuptools 2023-05-08 22:24:46 +00:00
ed
cd496658c3 update pkgs to 1.7.1 2023-05-07 19:51:59 +00:00
ed
deca082623 v1.7.1 2023-05-07 18:34:39 +00:00
ed
0ea8bb7c83 forgot the u2c symlink + sfx listing 2023-05-07 15:45:20 +00:00
ed
1fb251a4c2 was moved to pyproject 2023-05-07 15:41:00 +00:00
ed
4295923b76 rename up2k.py (client) to u2c.py 2023-05-07 15:37:52 +00:00
ed
572aa4b26c rename up2k.py (client) to u2c.py 2023-05-07 15:35:56 +00:00
ed
b1359f039f linter cleanup 2023-05-07 14:38:30 +00:00
ed
867d8ee49e replace setup.py with pyproject.toml + misc cleanup 2023-05-07 14:37:57 +00:00
ed
04c86e8a89 webdav: support write-only folders + force auth option 2023-05-06 20:33:29 +00:00
ed
bc0cb43ef9 include usernames in request logs 2023-05-06 20:17:56 +00:00
ed
769454fdce ftpd: only log invalid passwords 2023-05-06 19:16:52 +00:00
ed
4ee81af8f6 support ';' in passwords 2023-05-06 18:54:55 +00:00
ed
8b0e66122f smoother playback cursor on short songs + optimize 2023-05-06 16:31:04 +00:00
ed
8a98efb929 adapt to new archpkg layout 2023-05-05 20:51:18 +00:00
ed
b6fd555038 panic if two accounts have the same password 2023-05-05 20:24:24 +00:00
ed
7eb413ad51 doc tweaks 2023-05-05 19:39:10 +00:00
ixces
4421d509eb update PKGBUILD 2023-05-02 17:21:12 +02:00
ed
793ffd7b01 update pkgs to 1.7.0 2023-04-29 22:50:36 +00:00
ed
1e22222c60 v1.7.0 2023-04-29 21:14:38 +00:00
ed
544e0549bc make xvol and xdev apply at runtime (closes #24):
* when accessing files inside an xdev volume, verify that the file
   exists on the same device/filesystem as the volume root

* when accessing files inside an xvol volume, verify that the file
   exists within any volume where the user has read access
2023-04-29 21:10:02 +00:00
ed
83178d0836 preserve empty folders (closes #23):
* when deleting files, do not cascade upwards through empty folders
* when moving folders, also move any empty folders inside

the only remaining action which autoremoves empty folders is
files getting deleted as they expire volume lifetimes

also prevents accidentally moving parent folders into subfolders
(even though that actually worked surprisingly well)
2023-04-29 11:30:43 +00:00
ed
c44f5f5701 nit 2023-04-29 09:44:46 +00:00
ed
138f5bc989 warn about android powersave settings on music interruption + fix eq on folder change 2023-04-29 09:31:53 +00:00
ed
e4759f86ef ftpd correctness:
* winscp mkdir failed because the folder-not-found error got repeated
* rmdir fails after all files in the folder have poofed; that's OK
* add --ftp4 as a precaution
2023-04-28 20:50:45 +00:00
ed
d71416437a show file selection summary 2023-04-27 19:33:52 +00:00
ed
a84c583b2c ok that wasn't enough 2023-04-27 19:06:35 +00:00
ed
cdacdccdb8 update pkgs to 1.6.15 2023-04-27 00:36:56 +00:00
ed
d3ccd3f174 v1.6.15 2023-04-26 23:00:55 +00:00
ed
cb6de0387d a bit faster 2023-04-26 19:56:27 +00:00
ed
abff40519d eyecandy: restore playback indicator on folder hop 2023-04-26 19:09:16 +00:00
ed
55c74ad164 30% faster folder listings (wtf...) 2023-04-26 18:55:53 +00:00
ed
673b4f7e23 option to show symlink's lastmod instead of deref;
mainly motivated by u2cli's folder syncing in turbo mode
which would un-turbo on most dupes due to wrong lastmod

disabled by default for regular http listings
(to avoid confusion in most regular usecases),
enable per-request with urlparam lt

enabled by default for single-level webdav listings
(because rclone hits the same issue as u2cli),
can be disabled with arg --dav-rt or volflag davrt

impossible to enable for recursive webdav listings
2023-04-26 18:54:21 +00:00
ed
d11e02da49 u2cli: avoid dns lookups while uploading 2023-04-26 18:46:42 +00:00
ed
8790f89e08 fix installing from source tarball 2023-04-26 18:40:47 +00:00
ed
33442026b8 try to discourage android from stopping playback...
...when continuing into the next folder

accidentally introduces a neat bonus feature where the music
no longer stops while you go looking for stuff to play next
2023-04-26 18:33:30 +00:00
ed
03193de6d0 socket read/write timeout 2023-04-24 20:04:22 +00:00
ed
8675ff40f3 update pkgs to 1.6.14 2023-04-24 07:52:12 +00:00
ed
d88889d3fc v1.6.14 2023-04-24 06:09:44 +00:00
ed
6f244d4335 update pkgs to 1.6.13 2023-04-24 00:46:47 +00:00
ed
cacca663b3 v1.6.13 2023-04-23 23:05:31 +00:00
ed
d5109be559 ftp: track login state isolated from pyftpdlib;
for convenience, the password can be provided as the username
but that confuses pyftpd a little so let's do this
2023-04-23 21:06:19 +00:00
ed
d999f06bb9 volflags can be -unset 2023-04-23 21:05:29 +00:00
ed
a1a8a8c7b5 configurable tls-certificate location 2023-04-23 20:56:55 +00:00
ed
fdd6f3b4a6 tar/zip: use volume name as toplevel fallback 2023-04-23 20:55:34 +00:00
ed
f5191973df docs cleanup:
* mostly deprecate --http-only and --https-only since there is zero
   performance gain in recent python versions, however could still be
   useful for avoiding limitations in alternative python interpreters
   (and forcing http/https with mdns/ssdp/qr)

* mention antivirus being useless as usual
2023-04-23 20:25:44 +00:00
ed
ddbaebe779 update pkgs to 1.6.12 2023-04-20 22:47:37 +00:00
81 changed files with 2295 additions and 548 deletions

View File

@@ -1,2 +1,2 @@
Please include the following text somewhere in this PR description: To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/ This PR complies with the DCO; https://developercertificate.org/

1
.gitignore vendored
View File

@@ -37,6 +37,7 @@ up.*.txt
.hist/ .hist/
scripts/docker/*.out scripts/docker/*.out
scripts/docker/*.err scripts/docker/*.err
/perf.*
# nix build output link # nix build output link
result result

10
.vscode/launch.py vendored
View File

@@ -30,9 +30,17 @@ except:
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv] argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
sfx = ""
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
sfx = sys.argv[1]
sys.argv = [sys.argv[0]] + sys.argv[2:]
argv += sys.argv[1:] argv += sys.argv[1:]
if re.search(" -j ?[0-9]", " ".join(argv)): if sfx:
argv = [sys.executable, sfx] + argv
sp.check_call(argv)
elif re.search(" -j ?[0-9]", " ".join(argv)):
argv = [sys.executable, "-m", "copyparty"] + argv argv = [sys.executable, "-m", "copyparty"] + argv
sp.check_call(argv) sp.check_call(argv)
else: else:

32
.vscode/settings.json vendored
View File

@@ -35,34 +35,18 @@
"python.linting.flake8Enabled": true, "python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true, "python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true, "python.linting.mypyEnabled": true,
"python.linting.mypyArgs": [
"--ignore-missing-imports",
"--follow-imports=silent",
"--show-column-numbers",
"--strict"
],
"python.linting.flake8Args": [ "python.linting.flake8Args": [
"--max-line-length=120", "--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128", "--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
], ],
"python.linting.banditArgs": [ "python.linting.banditArgs": [
"--ignore=B104" "--ignore=B104,B110,B112"
],
"python.linting.pylintArgs": [
"--disable=missing-module-docstring",
"--disable=missing-class-docstring",
"--disable=missing-function-docstring",
"--disable=import-outside-toplevel",
"--disable=wrong-import-position",
"--disable=raise-missing-from",
"--disable=bare-except",
"--disable=broad-except",
"--disable=invalid-name",
"--disable=line-too-long",
"--disable=consider-using-f-string"
], ],
// python3 -m isort --py=27 --profile=black copyparty/ // python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "black", "python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"editor.formatOnSave": true, "editor.formatOnSave": true,
"[html]": { "[html]": {
"editor.formatOnSave": false, "editor.formatOnSave": false,
@@ -74,10 +58,6 @@
"files.associations": { "files.associations": {
"*.makefile": "makefile" "*.makefile": "makefile"
}, },
"python.formatting.blackArgs": [
"-t",
"py27"
],
"python.linting.enabled": true, "python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3" "python.pythonPath": "/usr/bin/python3"
} }

111
README.md
View File

@@ -41,6 +41,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [media player](#media-player) - plays almost every audio format there is * [media player](#media-player) - plays almost every audio format there is
* [audio equalizer](#audio-equalizer) - bass boosted * [audio equalizer](#audio-equalizer) - bass boosted
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
* [markdown viewer](#markdown-viewer) - and there are *two* editors * [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks) * [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ... * [searching](#searching) - search by size, date, path/name, mp3-tags, ...
@@ -69,6 +70,8 @@ turn almost any device into a file server with resumable uploads/downloads using
* [themes](#themes) * [themes](#themes)
* [complete examples](#complete-examples) * [complete examples](#complete-examples)
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites * [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
* [packages](#packages) - the party might be closer than you think
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
* [nix package](#nix-package) - `nix profile install github:9001/copyparty` * [nix package](#nix-package) - `nix profile install github:9001/copyparty`
* [nixos module](#nixos-module) * [nixos module](#nixos-module)
* [browser support](#browser-support) - TLDR: yes * [browser support](#browser-support) - TLDR: yes
@@ -82,6 +85,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [security](#security) - some notes on hardening * [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected * [gotchas](#gotchas) - behavior that might be unexpected
* [cors](#cors) - cross-site request config * [cors](#cors) - cross-site request config
* [password hashing](#password-hashing) - you can hash passwords
* [https](#https) - both HTTP and HTTPS are accepted * [https](#https) - both HTTP and HTTPS are accepted
* [recovering from crashes](#recovering-from-crashes) * [recovering from crashes](#recovering-from-crashes)
* [client crashes](#client-crashes) * [client crashes](#client-crashes)
@@ -101,9 +105,9 @@ turn almost any device into a file server with resumable uploads/downloads using
just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉 just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
* or install through pypi (python3 only): `python3 -m pip install --user -U copyparty` * or install through pypi: `python3 -m pip install --user -U copyparty`
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead * or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
* or [install through nix](#nix-package), or [on NixOS](#nixos-module) * or install [on arch](#arch-package) [on NixOS](#nixos-module) [through nix](#nix-package)
* or if you are on android, [install copyparty in termux](#install-on-android) * or if you are on android, [install copyparty in termux](#install-on-android)
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too * or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
* docker has all deps built-in, so skip this step: * docker has all deps built-in, so skip this step:
@@ -123,7 +127,7 @@ enable thumbnails (images/audio/video), media indexing, and audio transcoding by
running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes) running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
or see [complete windows example](./docs/examples/windows.md) or see [some usage examples](#complete-examples) for inspiration, or the [complete windows example](./docs/examples/windows.md)
some recommended options: some recommended options:
* `-e2dsa` enables general [file indexing](#file-indexing) * `-e2dsa` enables general [file indexing](#file-indexing)
@@ -275,6 +279,8 @@ server notes:
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files * [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11) * iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day... * "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
@@ -300,7 +306,7 @@ upgrade notes
* http-api: delete/move is now `POST` instead of `GET` * http-api: delete/move is now `POST` instead of `GET`
* everything other than `GET` and `HEAD` must pass [cors validation](#cors) * everything other than `GET` and `HEAD` must pass [cors validation](#cors)
* `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB * `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) if you use that * **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) if you use that
* **devs:** update third-party up2k clients (if those even exist) * **devs:** update third-party up2k clients (if those even exist)
@@ -466,6 +472,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (
## thumbnails ## thumbnails
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
* can be made default globally with `--grid` or per-volume with volflag `grid`
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png) ![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)
@@ -476,6 +483,7 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`) audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg` images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾 * indicated by the audio files having the ▶ icon instead of 💾
@@ -507,7 +515,7 @@ you can also zip a selection of files or folders by clicking them in the browser
## uploading ## uploading
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy)) drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy))
this initiates an upload using `up2k`; there are two uploaders available: this initiates an upload using `up2k`; there are two uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0 * `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
@@ -701,6 +709,11 @@ can also boost the volume in general, or increase/decrease stereo width (like [c
has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat) has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)
### fix unreliable playback on android
due to phone / app settings, android phones may randomly stop playing music when the power saver kicks in, especially at the end of an album -- you can fix it by [disabling power saving](https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png) in the [app settings](https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png) of the browser you use for music streaming (preferably a dedicated one)
## markdown viewer ## markdown viewer
and there are *two* editors and there are *two* editors
@@ -818,6 +831,13 @@ an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit T
* some older software (filezilla on debian-stable) cannot passive-mode with TLS * some older software (filezilla on debian-stable) cannot passive-mode with TLS
* login with any username + your password, or put your password in the username field * login with any username + your password, or put your password in the username field
some recommended FTP / FTPS clients; `wark` = example password:
* https://winscp.net/eng/download.php
* https://filezilla-project.org/ struggles a bit with ftps in active-mode, but is fine otherwise
* https://rclone.org/ does FTPS with `tls=false explicit_tls=true`
* `lftp -u k,wark -p 3921 127.0.0.1 -e ls`
* `lftp -u k,wark -p 3990 127.0.0.1 -e 'set ssl:verify-certificate no; ls'`
## webdav server ## webdav server
@@ -912,14 +932,13 @@ through arguments:
* `--xlink` enables deduplication across volumes * `--xlink` enables deduplication across volumes
the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling: the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup * `-v ~/music::r:c,e2ds,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on * `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*` * `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads * `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
* `-v ~/music::r:c,d2ts` same except only affecting tags * `-v ~/music::r:c,d2ts` same except only affecting tags
note: note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!) * deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
@@ -941,7 +960,11 @@ avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, ski
and/or you can `--xvol` / `:c,xvol` to ignore all symlinks leaving the volume's top directory, but still allow bind-mounts pointing elsewhere and/or you can `--xvol` / `:c,xvol` to ignore all symlinks leaving the volume's top directory, but still allow bind-mounts pointing elsewhere
**NB: only affects the indexer** -- users can still access anything inside a volume, unless shadowed by another volume * symlinks are permitted with `xvol` if they point into another volume where the user has the same level of access
these options will reduce performance; unlikely worst-case estimates are 14% reduction for directory listings, 35% for download-as-tar
as of copyparty v1.7.0 these options also prevent file access at runtime -- in previous versions it was just hints for the indexer
### periodic rescan ### periodic rescan
@@ -958,6 +981,8 @@ set upload rules using volflags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`) * `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards * `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
* `:c,vmaxb=1g` block uploads if total volume size would exceed 1 GiB afterwards
* `:c,vmaxn=4k` block uploads if volume would contain more than 4096 files afterwards
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`: * `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1) * `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format * `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -1137,9 +1162,33 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
## complete examples ## complete examples
* [running on windows](./docs/examples/windows.md) * see [running on windows](./docs/examples/windows.md) for a fancy windows setup
* read-only music server * or use any of the examples below, just replace `python copyparty-sfx.py` with `copyparty.exe` if you're using the exe edition
* allow anyone to download or upload files into the current folder:
`python copyparty-sfx.py`
* enable searching and music indexing with `-e2dsa -e2ts`
* start an FTP server on port 3921 with `--ftp 3921`
* announce it on your LAN with `-z` so it appears in windows/Linux file managers
* anyone can upload, but nobody can see any files (even the uploader):
`python copyparty-sfx.py -e2dsa -v .::w`
* block uploads if there's less than 4 GiB free disk space with `--df 4`
* show a popup on new uploads with `--xau bin/hooks/notify.py`
* anyone can upload, and receive "secret" links for each upload they do:
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
* read-only music server:
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2` `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
* ...with bpm and key scanning * ...with bpm and key scanning
@@ -1169,6 +1218,16 @@ example webserver configs:
* [apache2 config](contrib/apache/copyparty.conf) -- location-based * [apache2 config](contrib/apache/copyparty.conf) -- location-based
# packages
the party might be closer than you think
## arch package
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
## nix package ## nix package
`nix profile install github:9001/copyparty` `nix profile install github:9001/copyparty`
@@ -1343,10 +1402,10 @@ interact with copyparty using non-browser clients
* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923` * `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923` * `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) * python: [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, [folder sync](#folder-sync), autoresume of aborted/broken uploads * file uploads, file-search, [folder sync](#folder-sync), autoresume of aborted/broken uploads
* can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py) * can be downloaded from copyparty: controlpanel -> connect -> [u2c.py](http://127.0.0.1:3923/.cpr/a/u2c.py)
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy) * see [./bin/README.md#u2cpy](bin/README.md#u2cpy)
* FUSE: mount a copyparty server as a local filesystem * FUSE: mount a copyparty server as a local filesystem
* cross-platform python client available in [./bin/](bin/) * cross-platform python client available in [./bin/](bin/)
@@ -1369,11 +1428,11 @@ NOTE: curl will not send the original filename if you use `-T` combined with url
sync folders to/from copyparty sync folders to/from copyparty
the commandline uploader [up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy) with `--dr` is the best way to sync a folder to copyparty; verifies checksums and does files in parallel, and deletes unexpected files on the server after upload has finished which makes file-renames really cheap (it'll rename serverside and skip uploading) the commandline uploader [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) with `--dr` is the best way to sync a folder to copyparty; verifies checksums and does files in parallel, and deletes unexpected files on the server after upload has finished which makes file-renames really cheap (it'll rename serverside and skip uploading)
alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare
* starting from rclone v1.63 (currently [in beta](https://beta.rclone.org/?filter=latest)), rclone will also be faster than up2k.py * starting from rclone v1.63 (currently [in beta](https://beta.rclone.org/?filter=latest)), rclone will also be faster than u2c.py
## mount as drive ## mount as drive
@@ -1421,7 +1480,6 @@ defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
below are some tweaks roughly ordered by usefulness: below are some tweaks roughly ordered by usefulness:
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file * `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set * `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable * `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger) * `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
@@ -1441,7 +1499,7 @@ when uploading files,
* chrome is recommended, at least compared to firefox: * chrome is recommended, at least compared to firefox:
* up to 90% faster when hashing, especially on SSDs * up to 90% faster when hashing, especially on SSDs
* up to 40% faster when uploading over extremely fast internets * up to 40% faster when uploading over extremely fast internets
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again * but [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) can be 40% faster than chrome again
* if you're cpu-bottlenecked, or the browser is maxing a cpu core: * if you're cpu-bottlenecked, or the browser is maxing a cpu core:
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it) * up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
@@ -1511,12 +1569,28 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf` cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
## password hashing
you can hash passwords before putting them into config files / providing them as arguments; see `--help-pwhash` for all the details
`--ah-alg argon2` enables it, and if you have any plaintext passwords then it'll print the hashed versions on startup so you can replace them
optionally also specify `--ah-cli` to enter an interactive mode where it will hash passwords without ever writing the plaintext ones to disk
the default configs take about 0.4 sec and 256 MiB RAM to process a new password on a decent laptop
## https ## https
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default) both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well
if [cfssl](https://github.com/cloudflare/cfssl/releases/latest) is installed, copyparty will automatically create a CA and server-cert on startup
* the certs are written to `--crt-dir` for distribution, see `--help` for the other `--crt` options
* this will be a self-signed certificate so you must install your `ca.pem` into all your browsers/devices
* if you want to avoid the hassle of distributing certs manually, please consider using a reverse proxy
# recovering from crashes # recovering from crashes
@@ -1553,6 +1627,8 @@ mandatory deps:
install these to enable bonus features install these to enable bonus features
enable hashed passwords in config: `argon2-cffi`
enable ftp-server: enable ftp-server:
* for just plaintext FTP, `pyftpdlib` (is built into the SFX) * for just plaintext FTP, `pyftpdlib` (is built into the SFX)
* with TLS encryption, `pyftpdlib pyopenssl` * with TLS encryption, `pyftpdlib pyopenssl`
@@ -1599,6 +1675,7 @@ can be convenient on machines where installing python is problematic, however is
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe * [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe
* on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works * on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works
* some antivirus may freak out (false-positive), possibly [Avast, AVG, and McAfee](https://www.virustotal.com/gui/file/52391a1e9842cf70ad243ef83844d46d29c0044d101ee0138fcdd3c8de2237d6/detection)
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine) * dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)

View File

@@ -1,4 +1,4 @@
# [`up2k.py`](up2k.py) # [`u2c.py`](u2c.py)
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads * file uploads, file-search, autoresume of aborted/broken uploads
* sync local folder to server * sync local folder to server

View File

@@ -1,13 +1,13 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
S_VERSION = "1.6" S_VERSION = "1.9"
S_BUILD_DT = "2023-04-20" S_BUILD_DT = "2023-05-07"
""" """
up2k.py: upload to copyparty u2c.py: upload to copyparty
2021, ed <irc.rizon.net>, MIT-Licensed 2021, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
- dependencies: requests - dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.12 - supports python 2.6, 2.7, and 3.3 through 3.12
@@ -21,6 +21,7 @@ import math
import time import time
import atexit import atexit
import signal import signal
import socket
import base64 import base64
import hashlib import hashlib
import platform import platform
@@ -58,6 +59,7 @@ PY2 = sys.version_info < (3,)
if PY2: if PY2:
from Queue import Queue from Queue import Queue
from urllib import quote, unquote from urllib import quote, unquote
from urlparse import urlsplit, urlunsplit
sys.dont_write_bytecode = True sys.dont_write_bytecode = True
bytes = str bytes = str
@@ -65,6 +67,7 @@ else:
from queue import Queue from queue import Queue
from urllib.parse import unquote_to_bytes as unquote from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import urlsplit, urlunsplit
unicode = str unicode = str
@@ -337,6 +340,32 @@ class CTermsize(object):
ss = CTermsize() ss = CTermsize()
def undns(url):
usp = urlsplit(url)
hn = usp.hostname
gai = None
eprint("resolving host [{0}] ...".format(hn), end="")
try:
gai = socket.getaddrinfo(hn, None)
hn = gai[0][4][0]
except KeyboardInterrupt:
raise
except:
t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
eprint(t.format(repr(gai)))
raise
if usp.port:
hn = "{0}:{1}".format(hn, usp.port)
if usp.username or usp.password:
hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)
usp = usp._replace(netloc=hn)
url = urlunsplit(usp)
eprint(" {0}".format(url))
return url
def _scd(err, top): def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info""" """non-recursive listing of directory contents, along with stat() info"""
with os.scandir(top) as dh: with os.scandir(top) as dh:
@@ -853,7 +882,7 @@ class Ctl(object):
print(" ls ~{0}".format(srd)) print(" ls ~{0}".format(srd))
zb = self.ar.url.encode("utf-8") zb = self.ar.url.encode("utf-8")
zb += quotep(rd.replace(b"\\", b"/")) zb += quotep(rd.replace(b"\\", b"/"))
r = req_ses.get(zb + b"?ls&dots", headers=headers) r = req_ses.get(zb + b"?ls&lt&dots", headers=headers)
if not r: if not r:
raise Exception("HTTP {0}".format(r.status_code)) raise Exception("HTTP {0}".format(r.status_code))
@@ -931,7 +960,7 @@ class Ctl(object):
upath = file.abs.decode("utf-8", "replace") upath = file.abs.decode("utf-8", "replace")
if not VT100: if not VT100:
upath = upath[4:] upath = upath.lstrip("\\?")
hs, sprs = handshake(self.ar, file, search) hs, sprs = handshake(self.ar, file, search)
if search: if search:
@@ -1073,6 +1102,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap = app.add_argument_group("compatibility") ap = app.add_argument_group("compatibility")
ap.add_argument("--cls", action="store_true", help="clear screen before start") ap.add_argument("--cls", action="store_true", help="clear screen before start")
ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
ap = app.add_argument_group("folder sync") ap = app.add_argument_group("folder sync")
ap.add_argument("--dl", action="store_true", help="delete local files after uploading") ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
@@ -1096,7 +1126,7 @@ source file/folder selection uses rsync syntax, meaning that:
ar = app.parse_args() ar = app.parse_args()
finally: finally:
if EXE and not sys.argv[1:]: if EXE and not sys.argv[1:]:
print("*** hit enter to exit ***") eprint("*** hit enter to exit ***")
try: try:
input() input()
except: except:
@@ -1129,8 +1159,18 @@ source file/folder selection uses rsync syntax, meaning that:
with open(fn, "rb") as f: with open(fn, "rb") as f:
ar.a = f.read().decode("utf-8").strip() ar.a = f.read().decode("utf-8").strip()
for n in range(ar.rh):
try:
ar.url = undns(ar.url)
break
except KeyboardInterrupt:
raise
except:
if n > ar.rh - 2:
raise
if ar.cls: if ar.cls:
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="") eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
ctl = Ctl(ar) ctl = Ctl(ar)

View File

@@ -1,7 +1,6 @@
# when running copyparty behind a reverse proxy, # when running copyparty behind a reverse proxy,
# the following arguments are recommended: # the following arguments are recommended:
# #
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx # -i 127.0.0.1 only accept connections from nginx
# #
# if you are doing location-based proxying (such as `/stuff` below) # if you are doing location-based proxying (such as `/stuff` below)

View File

@@ -1,14 +1,44 @@
#!/bin/bash #!/bin/bash
set -e set -e
cat >/dev/null <<'EOF'
NOTE: copyparty is now able to do this automatically;
however you may wish to use this script instead if
you have specific needs (or if copyparty breaks)
this script generates a new self-signed TLS certificate and
replaces the default insecure one that comes with copyparty
as it is trivial to impersonate a copyparty server using the
default certificate, it is highly recommended to do this
this will create a self-signed CA, and a Server certificate
which gets signed by that CA -- you can run it multiple times
with different server-FQDNs / IPs to create additional certs
for all your different servers / (non-)copyparty services
EOF
# ca-name and server-fqdn # ca-name and server-fqdn
ca_name="$1" ca_name="$1"
srv_fqdn="$2" srv_fqdn="$2"
[ -z "$srv_fqdn" ] && { [ -z "$srv_fqdn" ] && { cat <<'EOF'
echo "need arg 1: ca name" need arg 1: ca name
echo "need arg 2: server fqdn and/or IPs, comma-separated" need arg 2: server fqdn and/or IPs, comma-separated
echo "optional arg 3: if set, write cert into copyparty cfg" optional arg 3: if set, write cert into copyparty cfg
example:
./cfssl.sh PartyCo partybox.local y
EOF
exit 1
}
command -v cfssljson 2>/dev/null || {
echo please install cfssl and try again
exit 1 exit 1
} }
@@ -59,12 +89,14 @@ show() {
} }
show ca.pem show ca.pem
show "$srv_fqdn.pem" show "$srv_fqdn.pem"
echo
echo "successfully generated new certificates"
# write cert into copyparty config # write cert into copyparty config
[ -z "$3" ] || { [ -z "$3" ] || {
mkdir -p ~/.config/copyparty mkdir -p ~/.config/copyparty
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
echo "successfully replaced copyparty certificate"
} }

View File

@@ -1,7 +1,6 @@
# when running copyparty behind a reverse proxy, # when running copyparty behind a reverse proxy,
# the following arguments are recommended: # the following arguments are recommended:
# #
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx # -i 127.0.0.1 only accept connections from nginx
# #
# -nc must match or exceed the webserver's max number of concurrent clients; # -nc must match or exceed the webserver's max number of concurrent clients;
@@ -9,7 +8,7 @@
# nginx default is 512 (worker_processes 1, worker_connections 512) # nginx default is 512 (worker_processes 1, worker_connections 512)
# #
# you may also consider adding -j0 for CPU-intensive configurations # you may also consider adding -j0 for CPU-intensive configurations
# (not that i can really think of any good examples) # (5'000 requests per second, or 20gbps upload/download in parallel)
# #
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1

View File

@@ -1,50 +1,48 @@
# Maintainer: icxes <dev.null@need.moe> # Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty pkgname=copyparty
pkgver="1.6.11" pkgver="1.7.6"
pkgrel=1 pkgrel=1
pkgdesc="Portable file sharing hub" pkgdesc="Portable file sharing hub"
arch=("any") arch=("any")
url="https://github.com/9001/${pkgname}" url="https://github.com/9001/${pkgname}"
license=('MIT') license=('MIT')
depends=("python" "lsof") depends=("python" "lsof" "python-jinja")
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags" optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
"python-jinja: faster html generator"
"python-mutagen: music tags (alternative)" "python-mutagen: music tags (alternative)"
"python-pillow: thumbnails for images" "python-pillow: thumbnails for images"
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)" "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
"libkeyfinder-git: detection of musical keys" "libkeyfinder-git: detection of musical keys"
"qm-vamp-plugins: BPM detection" "qm-vamp-plugins: BPM detection"
"python-pyopenssl: ftps functionality" "python-pyopenssl: ftps functionality"
"python-argon2_cffi: hashed passwords in config"
"python-impacket-git: smb support (bad idea)" "python-impacket-git: smb support (bad idea)"
) )
source=("${url}/releases/download/v${pkgver}/${pkgname}-sfx.py" source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
"${pkgname}.conf"
"${pkgname}.service"
"prisonparty.service"
"index.md"
"https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/bin/prisonparty.sh"
"https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE"
)
backup=("etc/${pkgname}.d/init" ) backup=("etc/${pkgname}.d/init" )
sha256sums=("d096e33ab666ef45213899dd3a10735f62b5441339cb7374f93b232d0b6c8d34" sha256sums=("e44bfb2e998677a160343ed4aa87741e653dbc27db594e6a00935e89b90cd3f4")
"b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2"
"f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc" build() {
"c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff" cd "${srcdir}/${pkgname}-${pkgver}"
"dba701de9fd584405917e923ea1e59dbb249b96ef23bad479cf4e42740b774c8"
"8e89d281483e22d11d111bed540652af35b66af6f14f49faae7b959f6cdc6475" pushd copyparty/web
"cb2ce3d6277bf2f5a82ecf336cc44963bc6490bcf496ffbd75fc9e21abaa75f3" make -j$(nproc)
) rm Makefile
popd
python3 -m build -wn
}
package() { package() {
cd "${srcdir}/" cd "${srcdir}/${pkgname}-${pkgver}"
python3 -m installer -d "$pkgdir" dist/*.whl
install -dm755 "${pkgdir}/etc/${pkgname}.d" install -dm755 "${pkgdir}/etc/${pkgname}.d"
install -Dm755 "${pkgname}-sfx.py" "${pkgdir}/usr/bin/${pkgname}" install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
install -Dm755 "prisonparty.sh" "${pkgdir}/usr/bin/prisonparty" install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
install -Dm644 "${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init" install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
install -Dm644 "${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service" install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
install -Dm644 "prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service" install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
install -Dm644 "index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE" install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return

View File

@@ -1,4 +1,7 @@
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, pillow, pyvips, ffmpeg, mutagen, { lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
# use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true,
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing # create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
withThumbnails ? true, withThumbnails ? true,
@@ -35,6 +38,7 @@ let
++ lib.optional withFastThumbnails pyvips ++ lib.optional withFastThumbnails pyvips
++ lib.optional withMediaProcessing ffmpeg ++ lib.optional withMediaProcessing ffmpeg
++ lib.optional withBasicAudioMetadata mutagen ++ lib.optional withBasicAudioMetadata mutagen
++ lib.optional withHashedPasswords argon2-cffi
); );
in stdenv.mkDerivation { in stdenv.mkDerivation {
pname = "copyparty"; pname = "copyparty";

View File

@@ -1,5 +1,5 @@
{ {
"url": "https://github.com/9001/copyparty/releases/download/v1.6.11/copyparty-sfx.py", "url": "https://github.com/9001/copyparty/releases/download/v1.7.6/copyparty-sfx.py",
"version": "1.6.11", "version": "1.7.6",
"hash": "sha256-0JbjOrZm70UhOJndOhBzX2K1RBM5y3N0+TsjLQtsjTQ=" "hash": "sha256-jwvQfG36lsp/oWN9DfR03kHyodo2kANoY3V930/ALds="
} }

View File

@@ -1,3 +1,6 @@
# NOTE: this is now a built-in feature in copyparty
# but you may still want this if you have specific needs
#
# systemd service which generates a new TLS certificate on each boot, # systemd service which generates a new TLS certificate on each boot,
# that way the one-year expiry time won't cause any issues -- # that way the one-year expiry time won't cause any issues --
# just have everyone trust the ca.pem once every 10 years # just have everyone trust the ca.pem once every 10 years

View File

@@ -22,6 +22,7 @@
# add '-i 127.0.0.1' to only allow local connections # add '-i 127.0.0.1' to only allow local connections
# add '-e2dsa' to enable filesystem scanning + indexing # add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing # add '-e2ts' to enable metadata indexing
# remove '--ansi' to disable colored logs
# #
# with `Type=notify`, copyparty will signal systemd when it is ready to # with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty. # accept connections; correctly delaying units depending on copyparty.
@@ -59,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# copyparty settings # copyparty settings
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

View File

@@ -6,6 +6,10 @@ import platform
import sys import sys
import time import time
# fmt: off
_:tuple[int,int]=(0,0) # _____________________________________________________________________ hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-) ************************************************************************************************************************************************
# fmt: on
try: try:
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
except: except:
@@ -27,7 +31,12 @@ WINDOWS: Any = (
else False else False
) )
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393] VT100 = "--ansi" in sys.argv or (
os.environ.get("NO_COLOR", "").lower() in ("", "0", "false")
and sys.stdout.isatty()
and "--no-ansi" not in sys.argv
and (not WINDOWS or WINDOWS >= [10, 0, 14393])
)
# introduced in anniversary update # introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"] ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]

View File

@@ -10,11 +10,9 @@ __url__ = "https://github.com/9001/copyparty/"
import argparse import argparse
import base64 import base64
import filecmp
import locale import locale
import os import os
import re import re
import shutil
import socket import socket
import sys import sys
import threading import threading
@@ -186,7 +184,7 @@ def init_E(E: EnvParams) -> None:
with open_binary("copyparty", "z.tar") as tgz: with open_binary("copyparty", "z.tar") as tgz:
with tarfile.open(fileobj=tgz) as tf: with tarfile.open(fileobj=tgz) as tf:
tf.extractall(tdn) tf.extractall(tdn) # nosec (archive is safe)
return tdn return tdn
@@ -201,7 +199,7 @@ def init_E(E: EnvParams) -> None:
E.mod = _unpack() E.mod = _unpack()
if sys.platform == "win32": if sys.platform == "win32":
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
E.cfg = os.path.normpath(bdir + "/copyparty") E.cfg = os.path.normpath(bdir + "/copyparty")
elif sys.platform == "darwin": elif sys.platform == "darwin":
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty") E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
@@ -242,6 +240,37 @@ def get_srvname() -> str:
return ret return ret
def get_fk_salt(cert_path) -> str:
fp = os.path.join(E.cfg, "fk-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
if os.path.exists(cert_path):
zi = os.path.getmtime(cert_path)
ret = "{}".format(zi).encode("utf-8")
else:
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")
return ret.decode("utf-8")
def get_ah_salt() -> str:
fp = os.path.join(E.cfg, "ah-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")
return ret.decode("utf-8")
def ensure_locale() -> None: def ensure_locale() -> None:
safe = "en_US.UTF-8" safe = "en_US.UTF-8"
for x in [ for x in [
@@ -261,30 +290,22 @@ def ensure_locale() -> None:
warn(t.format(safe)) warn(t.format(safe))
def ensure_cert() -> None: def ensure_webdeps() -> None:
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
if os.path.exists(ap):
return
warn(
"""could not find webdeps;
if you are running the sfx, or exe, or pypi package, or docker image,
then this is a bug! Please let me know so I can fix it, thanks :-)
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
however, if you are a dev, or running copyparty from source, and you want
full client functionality, you will need to build or obtain the webdeps:
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
""" """
the default cert (and the entire TLS support) is only here to enable the )
crypto.subtle javascript API, which is necessary due to the webkit guys
being massive memers (https://www.chromium.org/blink/webcrypto)
i feel awful about this and so should they
"""
cert_insec = os.path.join(E.mod, "res/insecure.pem")
cert_cfg = os.path.join(E.cfg, "cert.pem")
if not os.path.exists(cert_cfg):
shutil.copy(cert_insec, cert_cfg)
try:
if filecmp.cmp(cert_cfg, cert_insec):
lprint(
"\033[33musing default TLS certificate; https will be insecure."
+ "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
)
except:
pass
# speaking of the default cert,
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def configure_ssl_ver(al: argparse.Namespace) -> None: def configure_ssl_ver(al: argparse.Namespace) -> None:
@@ -499,8 +520,12 @@ def get_sects():
""" """
volflags are appended to volume definitions, for example, volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags: to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub""" \033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub\033[0m
)
if global config defines a volflag for all volumes,
you can unset it for a specific volume with -flag
"""
).rstrip()
+ build_flags_desc(), + build_flags_desc(),
], ],
[ [
@@ -600,9 +625,9 @@ def get_sects():
\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what \033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes \033[32mswal\033[0m = 2.4x faster uploads yet 99.9% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely \033[32mwal\033[0m = another 21x faster on HDDs yet 90% as safe; same pitfall as \033[33mswal\033[0m except more likely
\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash \033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash
@@ -610,6 +635,38 @@ def get_sects():
""" """
), ),
], ],
[
"pwhash",
"password hashing",
dedent(
"""
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (password which do not start with '+') and then terminate afterwards
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
\033[36m--ah-alg argon2\033[0m # which is the same as:
\033[36m--ah-alg argon2,3,256,4,19\033[0m
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
\033[36m--ah-alg scrypt\033[0m # which is the same as:
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
\033[36m--ah-alg sha2\033[0m # which is the same as:
\033[36m--ah-alg sha2,424242\033[0m
use sha2-512 with 424242 iterations
recommended: \033[32m--ah-alg argon2\033[0m
(takes about 0.4 sec and 256M RAM to process a new password)
argon2 needs python-package argon2-cffi,
scrypt needs openssl,
sha2 is always available
"""
),
],
] ]
@@ -691,22 +748,44 @@ def add_network(ap):
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances") ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
else: else:
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)") ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes") ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds") ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds") ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC") ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
def add_tls(ap): def add_tls(ap, cert_path):
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext") ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls") ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe") ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers") ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info") ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark") ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
def add_cert(ap, cert_path):
cert_dir = os.path.dirname(cert_path)
ap2 = ap.add_argument_group('TLS certificate generator options')
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
def add_zeroconf(ap): def add_zeroconf(ap):
ap2 = ap.add_argument_group("Zeroconf options") ap2 = ap.add_argument_group("Zeroconf options")
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)") ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
@@ -751,6 +830,7 @@ def add_ftp(ap):
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921") ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990") ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
ap2.add_argument("--ftpv", action="store_true", help="verbose") ap2.add_argument("--ftpv", action="store_true", help="verbose")
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)") ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections") ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000") ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
@@ -761,6 +841,8 @@ def add_webdav(ap):
ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)") ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this") ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)") ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
def add_smb(ap): def add_smb(ap):
@@ -808,14 +890,14 @@ def add_optouts(ap):
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)") ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
def add_safety(ap, fk_salt): def add_safety(ap):
ap2 = ap.add_argument_group('safety options') ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js") ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih") ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r") ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]") ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)") ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter") ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles") ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile") ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings") ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
@@ -832,6 +914,16 @@ def add_safety(ap, fk_salt):
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)") ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
def add_salt(ap, fk_salt, ah_salt):
ap2 = ap.add_argument_group('salting options')
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
def add_shutdown(ap): def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options') ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints") ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -843,6 +935,8 @@ def add_logging(ap):
ap2 = ap.add_argument_group('logging options') ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet") ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz") ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup") ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs") ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling") ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
@@ -874,7 +968,7 @@ def add_thumbnail(ap):
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; case-insensitive if -e2d") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips # https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:' # ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
@@ -907,8 +1001,6 @@ def add_db_general(ap, hcores):
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)") ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)") ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)") ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing") ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)") ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)") ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
@@ -938,9 +1030,11 @@ def add_db_metadata(ap):
def add_ui(ap, retry): def add_ui(ap, retry):
ap2 = ap.add_argument_group('ui options') ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language") ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use") ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed") ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable") ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])") ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
@@ -987,10 +1081,10 @@ def run_argparse(
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT), description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
) )
try: cert_path = os.path.join(E.cfg, "cert.pem")
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
except: fk_salt = get_fk_salt(cert_path)
fk_salt = "hunter2" ah_salt = get_ah_salt()
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
@@ -1000,7 +1094,8 @@ def run_argparse(
add_general(ap, nc, srvname) add_general(ap, nc, srvname)
add_network(ap) add_network(ap)
add_tls(ap) add_tls(ap, cert_path)
add_cert(ap, cert_path)
add_qr(ap, tty) add_qr(ap, tty)
add_zeroconf(ap) add_zeroconf(ap)
add_zc_mdns(ap) add_zc_mdns(ap)
@@ -1013,7 +1108,8 @@ def run_argparse(
add_ftp(ap) add_ftp(ap)
add_webdav(ap) add_webdav(ap)
add_smb(ap) add_smb(ap)
add_safety(ap, fk_salt) add_safety(ap)
add_salt(ap, fk_salt, ah_salt)
add_optouts(ap) add_optouts(ap)
add_shutdown(ap) add_shutdown(ap)
add_yolo(ap) add_yolo(ap)
@@ -1084,8 +1180,8 @@ def main(argv: Optional[list[str]] = None) -> None:
print("pybin: {}\n".format(pybin), end="") print("pybin: {}\n".format(pybin), end="")
ensure_locale() ensure_locale()
if HAVE_SSL:
ensure_cert() ensure_webdeps()
for k, v in zip(argv[1:], argv[2:]): for k, v in zip(argv[1:], argv[2:]):
if k == "-c" and os.path.isfile(v): if k == "-c" and os.path.isfile(v):
@@ -1098,16 +1194,22 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v) supp = args_from_cfg(v)
argv.extend(supp) argv.extend(supp)
deprecated: list[tuple[str, str]] = [] deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
for dk, nk in deprecated: for dk, nk in deprecated:
try: idx = -1
idx = argv.index(dk) ov = ""
except: for n, k in enumerate(argv):
if k == dk or k.startswith(dk + "="):
idx = n
if "=" in k:
ov = "=" + k.split("=", 1)[1]
if idx < 0:
continue continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m" msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
lprint(msg.format(dk, nk)) lprint(msg.format(dk, nk))
argv[idx] = nk argv[idx] = nk + ov
time.sleep(2) time.sleep(2)
da = len(argv) == 1 da = len(argv) == 1
@@ -1153,13 +1255,18 @@ def main(argv: Optional[list[str]] = None) -> None:
except: except:
sys.exit(1) sys.exit(1)
if al.ansi:
al.no_ansi = False
elif not al.no_ansi:
al.ansi = VT100
if WINDOWS and not al.keep_qem: if WINDOWS and not al.keep_qem:
try: try:
disable_quickedit() disable_quickedit()
except: except:
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n") lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100: if al.ansi:
al.wintitle = "" al.wintitle = ""
nstrs: list[str] = [] nstrs: list[str] = []
@@ -1238,6 +1345,7 @@ def main(argv: Optional[list[str]] = None) -> None:
configure_ssl_ciphers(al) configure_ssl_ciphers(al)
else: else:
warn("ssl module does not exist; cannot enable https") warn("ssl module does not exist; cannot enable https")
al.http_only = True
if PY2 and WINDOWS and al.e2d: if PY2 and WINDOWS and al.e2d:
warn( warn(

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (1, 6, 12) VERSION = (1, 8, 0)
CODENAME = "cors k" CODENAME = "argon"
BUILD_DT = (2023, 4, 20) BUILD_DT = (2023, 6, 26)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -15,6 +15,7 @@ from datetime import datetime
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
from .bos import bos from .bos import bos
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
from .pwhash import PWHash
from .util import ( from .util import (
IMPLICATIONS, IMPLICATIONS,
META_NOBOTS, META_NOBOTS,
@@ -40,7 +41,10 @@ if True: # pylint: disable=using-constant-test
from .util import NamedLogger, RootLogger from .util import NamedLogger, RootLogger
if TYPE_CHECKING: if TYPE_CHECKING:
pass from .broker_mp import BrokerMp
from .broker_thr import BrokerThr
from .broker_util import BrokerCli
# Vflags: TypeAlias = dict[str, str | bool | float | list[str]] # Vflags: TypeAlias = dict[str, str | bool | float | list[str]]
# Vflags: TypeAlias = dict[str, Any] # Vflags: TypeAlias = dict[str, Any]
# Mflags: TypeAlias = dict[str, Vflags] # Mflags: TypeAlias = dict[str, Vflags]
@@ -67,9 +71,9 @@ class AXS(object):
self.upget: set[str] = set(upget or []) self.upget: set[str] = set(upget or [])
def __repr__(self) -> str: def __repr__(self) -> str:
return "AXS({})".format( return "AXS(%s)" % (
", ".join( ", ".join(
"{}={!r}".format(k, self.__dict__[k]) "%s=%r" % (k, self.__dict__[k])
for k in "uread uwrite umove udel uget upget".split() for k in "uread uwrite umove udel uget upget".split()
) )
) )
@@ -90,6 +94,8 @@ class Lim(object):
self.dfl = 0 # free disk space limit self.dfl = 0 # free disk space limit
self.dft = 0 # last-measured time self.dft = 0 # last-measured time
self.dfv = 0 # currently free self.dfv = 0 # currently free
self.vbmax = 0 # volume bytes max
self.vnmax = 0 # volume max num files
self.smin = 0 # filesize min self.smin = 0 # filesize min
self.smax = 0 # filesize max self.smax = 0 # filesize max
@@ -119,8 +125,11 @@ class Lim(object):
ip: str, ip: str,
rem: str, rem: str,
sz: int, sz: int,
ptop: str,
abspath: str, abspath: str,
broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]] = None,
reg: Optional[dict[str, dict[str, Any]]] = None, reg: Optional[dict[str, dict[str, Any]]] = None,
volgetter: str = "up2k.get_volsize",
) -> tuple[str, str]: ) -> tuple[str, str]:
if reg is not None and self.reg is None: if reg is not None and self.reg is None:
self.reg = reg self.reg = reg
@@ -131,6 +140,7 @@ class Lim(object):
self.chk_rem(rem) self.chk_rem(rem)
if sz != -1: if sz != -1:
self.chk_sz(sz) self.chk_sz(sz)
self.chk_vsz(broker, ptop, sz, volgetter)
self.chk_df(abspath, sz) # side effects; keep last-ish self.chk_df(abspath, sz) # side effects; keep last-ish
ap2, vp2 = self.rot(abspath) ap2, vp2 = self.rot(abspath)
@@ -146,6 +156,25 @@ class Lim(object):
if self.smax and sz > self.smax: if self.smax and sz > self.smax:
raise Pebkac(400, "file too big") raise Pebkac(400, "file too big")
def chk_vsz(
self,
broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]],
ptop: str,
sz: int,
volgetter: str = "up2k.get_volsize",
) -> None:
if not broker or not self.vbmax + self.vnmax:
return
x = broker.ask(volgetter, ptop)
nbytes, nfiles = x.get()
if self.vbmax and self.vbmax < nbytes + sz:
raise Pebkac(400, "volume has exceeded max size")
if self.vnmax and self.vnmax < nfiles + 1:
raise Pebkac(400, "volume has exceeded max num.files")
def chk_df(self, abspath: str, sz: int, already_written: bool = False) -> None: def chk_df(self, abspath: str, sz: int, already_written: bool = False) -> None:
if not self.dfl: if not self.dfl:
return return
@@ -266,7 +295,7 @@ class Lim(object):
self.bupc[ip] = mark self.bupc[ip] = mark
if mark >= self.bmax: if mark >= self.bmax:
raise Pebkac(429, "ingress saturated") raise Pebkac(429, "upload size limit exceeded")
class VFS(object): class VFS(object):
@@ -285,6 +314,8 @@ class VFS(object):
self.vpath = vpath # absolute path in the virtual filesystem self.vpath = vpath # absolute path in the virtual filesystem
self.axs = axs self.axs = axs
self.flags = flags # config options self.flags = flags # config options
self.root = self
self.dev = 0 # st_dev
self.nodes: dict[str, VFS] = {} # child nodes self.nodes: dict[str, VFS] = {} # child nodes
self.histtab: dict[str, str] = {} # all realpath->histpath self.histtab: dict[str, str] = {} # all realpath->histpath
self.dbv: Optional[VFS] = None # closest full/non-jump parent self.dbv: Optional[VFS] = None # closest full/non-jump parent
@@ -297,26 +328,42 @@ class VFS(object):
self.apget: dict[str, list[str]] = {} self.apget: dict[str, list[str]] = {}
if realpath: if realpath:
rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
vp = vpath + ("/" if vpath else "")
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
self.all_vols = {vpath: self} # flattened recursive self.all_vols = {vpath: self} # flattened recursive
self.all_aps = [(rp, self)]
self.all_vps = [(vp, self)]
else: else:
self.histpath = "" self.histpath = ""
self.all_vols = {} self.all_vols = {}
self.all_aps = []
self.all_vps = []
def __repr__(self) -> str: def __repr__(self) -> str:
return "VFS({})".format( return "VFS(%s)" % (
", ".join( ", ".join(
"{}={!r}".format(k, self.__dict__[k]) "%s=%r" % (k, self.__dict__[k])
for k in "realpath vpath axs flags".split() for k in "realpath vpath axs flags".split()
) )
) )
def get_all_vols(self, outdict: dict[str, "VFS"]) -> None: def get_all_vols(
self,
vols: dict[str, "VFS"],
aps: list[tuple[str, "VFS"]],
vps: list[tuple[str, "VFS"]],
) -> None:
if self.realpath: if self.realpath:
outdict[self.vpath] = self vols[self.vpath] = self
rp = self.realpath
rp += "" if rp.endswith(os.sep) else os.sep
vp = self.vpath + ("/" if self.vpath else "")
aps.append((rp, self))
vps.append((vp, self))
for v in self.nodes.values(): for v in self.nodes.values():
v.get_all_vols(outdict) v.get_all_vols(vols, aps, vps)
def add(self, src: str, dst: str) -> "VFS": def add(self, src: str, dst: str) -> "VFS":
"""get existing, or add new path to the vfs""" """get existing, or add new path to the vfs"""
@@ -390,7 +437,11 @@ class VFS(object):
self, vpath: str, uname: str self, vpath: str, uname: str
) -> tuple[bool, bool, bool, bool, bool, bool]: ) -> tuple[bool, bool, bool, bool, bool, bool]:
"""can Read,Write,Move,Delete,Get,Upget""" """can Read,Write,Move,Delete,Get,Upget"""
vn, _ = self._find(undot(vpath)) if vpath:
vn, _ = self._find(undot(vpath))
else:
vn = self
c = vn.axs c = vn.axs
return ( return (
uname in c.uread or "*" in c.uread, uname in c.uread or "*" in c.uread,
@@ -545,9 +596,20 @@ class VFS(object):
self.log("vfs.walk", t.format(seen[-1], fsroot, self.vpath, rem), 3) self.log("vfs.walk", t.format(seen[-1], fsroot, self.vpath, rem), 3)
return return
if "xdev" in self.flags or "xvol" in self.flags:
rm1 = []
for le in vfs_ls:
ap = absreal(os.path.join(fsroot, le[0]))
vn2 = self.chk_ap(ap)
if not vn2 or not vn2.get("", uname, True, False):
rm1.append(le)
_ = [vfs_ls.remove(x) for x in rm1] # type: ignore
seen = seen[:] + [fsroot] seen = seen[:] + [fsroot]
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)] rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
# if lstat: ignore folder symlinks since copyparty will never make those
# (and we definitely don't want to descend into them)
rfiles.sort() rfiles.sort()
rdirs.sort() rdirs.sort()
@@ -578,6 +640,7 @@ class VFS(object):
def zipgen( def zipgen(
self, self,
vpath: str,
vrem: str, vrem: str,
flt: set[str], flt: set[str],
uname: str, uname: str,
@@ -589,7 +652,7 @@ class VFS(object):
# if multiselect: add all items to archive root # if multiselect: add all items to archive root
# if single folder: the folder itself is the top-level item # if single folder: the folder itself is the top-level item
folder = "" if flt or not wrap else (vrem.split("/")[-1].lstrip(".") or "top") folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False) g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
for _, _, vpath, apath, files, rd, vd in g: for _, _, vpath, apath, files, rd, vd in g:
@@ -640,6 +703,44 @@ class VFS(object):
for d in [{"vp": v, "ap": a, "st": n} for v, a, n in ret2]: for d in [{"vp": v, "ap": a, "st": n} for v, a, n in ret2]:
yield d yield d
def chk_ap(self, ap: str, st: Optional[os.stat_result] = None) -> Optional["VFS"]:
aps = ap + os.sep
if "xdev" in self.flags and not ANYWIN:
if not st:
ap2 = ap.replace("\\", "/") if ANYWIN else ap
while ap2:
try:
st = bos.stat(ap2)
break
except:
if "/" not in ap2:
raise
ap2 = ap2.rsplit("/", 1)[0]
assert st
vdev = self.dev
if not vdev:
vdev = self.dev = bos.stat(self.realpath).st_dev
if vdev != st.st_dev:
if self.log:
t = "xdev: {}[{}] => {}[{}]"
self.log("vfs", t.format(vdev, self.realpath, st.st_dev, ap), 3)
return None
if "xvol" in self.flags:
for vap, vn in self.root.all_aps:
if aps.startswith(vap):
return vn
if self.log:
self.log("vfs", "xvol: [{}]".format(ap), 3)
return None
return self
if WINDOWS: if WINDOWS:
re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$") re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
@@ -657,6 +758,7 @@ class AuthSrv(object):
warn_anonwrite: bool = True, warn_anonwrite: bool = True,
dargs: Optional[argparse.Namespace] = None, dargs: Optional[argparse.Namespace] = None,
) -> None: ) -> None:
self.ah = PWHash(args)
self.args = args self.args = args
self.dargs = dargs or args self.dargs = dargs or args
self.log_func = log_func self.log_func = log_func
@@ -769,6 +871,9 @@ class AuthSrv(object):
if not ln.split("#")[0].strip(): if not ln.split("#")[0].strip():
continue continue
if re.match(r"^\[.*\]:$", ln):
ln = ln[:-1]
subsection = ln in (catx, catf) subsection = ln in (catx, catf)
if ln.startswith("[") or subsection: if ln.startswith("[") or subsection:
self._e() self._e()
@@ -858,7 +963,7 @@ class AuthSrv(object):
zd = split_cfg_ln(ln) zd = split_cfg_ln(ln)
fstr = "" fstr = ""
for sk, sv in zd.items(): for sk, sv in zd.items():
bad = re.sub(r"[a-z0-9_]", "", sk) bad = re.sub(r"[a-z0-9_-]", "", sk).lstrip("-")
if bad: if bad:
err = "bad characters [{}] in volflag name [{}]; " err = "bad characters [{}] in volflag name [{}]; "
err = err.format(bad, sk) err = err.format(bad, sk)
@@ -934,7 +1039,14 @@ class AuthSrv(object):
value: Union[str, bool, list[str]], value: Union[str, bool, list[str]],
is_list: bool, is_list: bool,
) -> None: ) -> None:
desc = flagdescs.get(name, "?").replace("\n", " ") desc = flagdescs.get(name.lstrip("-"), "?").replace("\n", " ")
if re.match("^-[^-]+$", name):
t = "└─unset volflag [{}] ({})"
self._e(t.format(name[1:], desc))
flags[name] = True
return
if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split(): if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split():
if value is True: if value is True:
t = "└─add volflag [{}] = {} ({})" t = "└─add volflag [{}] = {} ({})"
@@ -1024,6 +1136,8 @@ class AuthSrv(object):
self.log("\n{0}\n{1}{0}".format(t, "\n".join(slns))) self.log("\n{0}\n{1}{0}".format(t, "\n".join(slns)))
raise raise
self.setup_pwhash(acct)
# case-insensitive; normalize # case-insensitive; normalize
if WINDOWS: if WINDOWS:
cased = {} cased = {}
@@ -1059,7 +1173,13 @@ class AuthSrv(object):
assert vfs assert vfs
vfs.all_vols = {} vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols) vfs.all_aps = []
vfs.all_vps = []
vfs.get_all_vols(vfs.all_vols, vfs.all_aps, vfs.all_vps)
for vol in vfs.all_vols.values():
vol.all_aps.sort(key=lambda x: len(x[0]), reverse=True)
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
vol.root = vfs
for perm in "read write move del get pget".split(): for perm in "read write move del get pget".split():
axs_key = "u" + perm axs_key = "u" + perm
@@ -1093,6 +1213,14 @@ class AuthSrv(object):
if LEELOO_DALLAS in all_users: if LEELOO_DALLAS in all_users:
raise Exception("sorry, reserved username: " + LEELOO_DALLAS) raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
seenpwds = {}
for usr, pwd in acct.items():
if pwd in seenpwds:
t = "accounts [{}] and [{}] have the same password; this is not supported"
self.log(t.format(seenpwds[pwd], usr), 1)
raise Exception("invalid config")
seenpwds[pwd] = usr
promote = [] promote = []
demote = [] demote = []
for vol in vfs.all_vols.values(): for vol in vfs.all_vols.values():
@@ -1194,6 +1322,16 @@ class AuthSrv(object):
use = True use = True
lim.bmax, lim.bwin = [unhumanize(x) for x in zs.split(",")] lim.bmax, lim.bwin = [unhumanize(x) for x in zs.split(",")]
zs = vol.flags.get("vmaxb")
if zs:
use = True
lim.vbmax = unhumanize(zs)
zs = vol.flags.get("vmaxn")
if zs:
use = True
lim.vnmax = unhumanize(zs)
if use: if use:
vol.lim = lim vol.lim = lim
@@ -1440,6 +1578,16 @@ class AuthSrv(object):
self.log(t, 1) self.log(t, 1)
errors = True errors = True
if self.args.smb and self.ah.on and acct:
self.log("--smb can only be used when --ah-alg is none", 1)
errors = True
for vol in vfs.all_vols.values():
for k in list(vol.flags.keys()):
if re.match("^-[^-]+$", k):
vol.flags.pop(k[1:], None)
vol.flags.pop(k)
if errors: if errors:
sys.exit(1) sys.exit(1)
@@ -1479,6 +1627,12 @@ class AuthSrv(object):
if t: if t:
self.log("\n\033[{}\033[0m\n".format(t)) self.log("\n\033[{}\033[0m\n".format(t))
zv, _ = vfs.get("/", "*", False, False)
zs = zv.realpath.lower()
if zs in ("/", "c:\\") or zs.startswith(r"c:\windows"):
t = "you are sharing a system directory: {}\n"
self.log(t.format(zv.realpath), c=1)
try: try:
zv, _ = vfs.get("/", "*", False, True) zv, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == zv.realpath: if self.warn_anonwrite and os.getcwd() == zv.realpath:
@@ -1497,7 +1651,51 @@ class AuthSrv(object):
self.re_pwd = None self.re_pwd = None
pwds = [re.escape(x) for x in self.iacct.keys()] pwds = [re.escape(x) for x in self.iacct.keys()]
if pwds: if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)") if self.ah.on:
zs = r"(\[H\] pw:.*|[?&]pw=)([^&]+)"
else:
zs = r"(\[H\] pw:.*|=)(" + "|".join(pwds) + r")([]&; ]|$)"
self.re_pwd = re.compile(zs)
def setup_pwhash(self, acct: dict[str, str]) -> None:
self.ah = PWHash(self.args)
if not self.ah.on:
return
if self.args.ah_cli:
self.ah.cli()
sys.exit()
elif self.args.ah_gen == "-":
self.ah.stdin()
sys.exit()
elif self.args.ah_gen:
print(self.ah.hash(self.args.ah_gen))
sys.exit()
if not acct:
return
changed = False
for uname, pw in list(acct.items())[:]:
if pw.startswith("+") and len(pw) == 33:
continue
changed = True
hpw = self.ah.hash(pw)
acct[uname] = hpw
t = "hashed password for account {}: {}"
self.log(t.format(uname, hpw), 3)
if not changed:
return
lns = []
for uname, pw in acct.items():
lns.append(" {}: {}".format(uname, pw))
t = "please use the following hashed passwords in your config:\n{}"
self.log(t.format("\n".join(lns)), 3)
def chk_sqlite_threadsafe(self) -> str: def chk_sqlite_threadsafe(self) -> str:
v = SQLITE_VER[-1:] v = SQLITE_VER[-1:]

View File

@@ -9,7 +9,7 @@ import queue
from .__init__ import CORES, TYPE_CHECKING from .__init__ import CORES, TYPE_CHECKING
from .broker_mpw import MpWorker from .broker_mpw import MpWorker
from .broker_util import try_exec from .broker_util import ExceptionalQueue, try_exec
from .util import Daemon, mp from .util import Daemon, mp
if TYPE_CHECKING: if TYPE_CHECKING:
@@ -107,6 +107,19 @@ class BrokerMp(object):
if retq_id: if retq_id:
proc.q_pend.put((retq_id, "retq", rv)) proc.q_pend.put((retq_id, "retq", rv))
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
# new non-ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)
rv = try_exec(True, obj, *args)
retq = ExceptionalQueue(1)
retq.put(rv)
return retq
def say(self, dest: str, *args: Any) -> None: def say(self, dest: str, *args: Any) -> None:
""" """
send message to non-hub component in other process, send message to non-hub component in other process,

222
copyparty/cert.py Normal file
View File

@@ -0,0 +1,222 @@
import calendar
import errno
import filecmp
import json
import os
import shutil
import time
from .util import Netdev, runcmd
HAVE_CFSSL = True
if True: # pylint: disable=using-constant-test
from .util import RootLogger
def ensure_cert(log: "RootLogger", args) -> None:
"""
the default cert (and the entire TLS support) is only here to enable the
crypto.subtle javascript API, which is necessary due to the webkit guys
being massive memers (https://www.chromium.org/blink/webcrypto)
i feel awful about this and so should they
"""
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
if not os.path.isfile(args.cert):
if cert_appdata != args.cert:
raise Exception("certificate file does not exist: " + args.cert)
shutil.copy(cert_insec, args.cert)
with open(args.cert, "rb") as f:
buf = f.read()
o1 = buf.find(b" PRIVATE KEY-")
o2 = buf.find(b" CERTIFICATE-")
m = "unsupported certificate format: "
if o1 < 0:
raise Exception(m + "no private key inside pem")
if o2 < 0:
raise Exception(m + "no server certificate inside pem")
if o1 > o2:
raise Exception(m + "private key must appear before server certificate")
try:
if filecmp.cmp(args.cert, cert_insec):
t = "using default TLS certificate; https will be insecure:\033[36m {}"
log("cert", t.format(args.cert), 3)
except:
pass
# speaking of the default cert,
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def _read_crt(args, fn):
try:
if not os.path.exists(os.path.join(args.crt_dir, fn)):
return 0, {}
acmd = ["cfssl-certinfo", "-cert", fn]
rc, so, se = runcmd(acmd, cwd=args.crt_dir)
if rc:
return 0, {}
inf = json.loads(so)
zs = inf["not_after"]
expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
return expiry, inf
except OSError as ex:
if ex.errno == errno.ENOENT:
raise
return 0, {}
except:
return 0, {}
def _gen_ca(log: "RootLogger", args):
expiry = _read_crt(args, "ca.pem")[0]
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
return
backdate = "{}m".format(int(args.crt_back * 60))
expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
algo, ksz = args.crt_alg.split("-")
req = {
"CN": cn,
"CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
"key": {"algo": algo, "size": int(ksz)},
"names": [{"O": cn}],
}
sin = json.dumps(req).encode("utf-8")
log("cert", "creating new ca ...", 6)
cmd = "cfssl gencert -initca -"
rc, so, se = runcmd(cmd.split(), 30, sin=sin)
if rc:
raise Exception("failed to create ca-cert: {}, {}".format(rc, se), 3)
cmd = "cfssljson -bare ca"
sin = so.encode("utf-8")
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
if rc:
raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)
bname = os.path.join(args.crt_dir, "ca")
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
log("cert", "new ca OK", 2)
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
names = args.crt_ns.split(",") if args.crt_ns else []
if not args.crt_exact:
for n in names[:]:
names.append("*.{}".format(n))
if not args.crt_noip:
for ip in netdevs.keys():
names.append(ip.split("/")[0])
if args.crt_nolo:
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
if not args.crt_nohn:
names.append(args.name)
names.append(args.name + ".local")
if not names:
names = ["127.0.0.1"]
if "127.0.0.1" in names or "::1" in names:
names.append("localhost")
names = list({x: 1 for x in names}.keys())
try:
expiry, inf = _read_crt(args, "srv.pem")
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
for n in names:
if n not in inf["sans"]:
raise Exception("does not have {}".format(n))
if expired:
raise Exception("old server-cert has expired")
if not filecmp.cmp(args.cert, cert_insec):
return
except Exception as ex:
log("cert", "will create new server-cert; {}".format(ex))
log("cert", "creating server-cert ...", 6)
backdate = "{}m".format(int(args.crt_back * 60))
expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
cfg = {
"signing": {
"default": {
"backdate": backdate,
"expiry": expiry,
"usages": ["signing", "key encipherment", "server auth"],
}
}
}
with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
f.write(json.dumps(cfg).encode("utf-8"))
cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
algo, ksz = args.crt_alg.split("-")
req = {
"key": {"algo": algo, "size": int(ksz)},
"names": [{"O": cn}],
}
sin = json.dumps(req).encode("utf-8")
cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
if rc:
raise Exception("failed to create cert: {}, {}".format(rc, se))
cmd = "cfssljson -bare srv"
sin = so.encode("utf-8")
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
if rc:
raise Exception("failed to translate cert: {}, {}".format(rc, se))
bname = os.path.join(args.crt_dir, "srv")
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
ca = f.read()
with open(bname + ".key", "rb") as f:
skey = f.read()
with open(bname + ".pem", "rb") as f:
scrt = f.read()
with open(args.cert, "wb") as f:
f.write(skey + scrt + ca)
log("cert", "new server-cert OK", 2)
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
global HAVE_CFSSL
if args.http_only:
return
if args.no_crt or not HAVE_CFSSL:
ensure_cert(log, args)
return
try:
_gen_ca(log, args)
_gen_srv(log, args, netdevs)
except Exception as ex:
HAVE_CFSSL = False
log("cert", "could not create TLS certificates: {}".format(ex), 3)
if getattr(ex, "errno", 0) == errno.ENOENT:
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
log("cert", t, 6)
ensure_cert(log, args)

View File

@@ -13,6 +13,8 @@ def vf_bmap() -> dict[str, str]:
"no_dedup": "copydupes", "no_dedup": "copydupes",
"no_dupe": "nodupe", "no_dupe": "nodupe",
"no_forget": "noforget", "no_forget": "noforget",
"dav_auth": "davauth",
"dav_rt": "davrt",
} }
for k in ( for k in (
"dotsrch", "dotsrch",
@@ -22,6 +24,7 @@ def vf_bmap() -> dict[str, str]:
"e2v", "e2v",
"e2vu", "e2vu",
"e2vp", "e2vp",
"grid",
"hardlink", "hardlink",
"magic", "magic",
"no_sb_md", "no_sb_md",
@@ -38,7 +41,7 @@ def vf_bmap() -> dict[str, str]:
def vf_vmap() -> dict[str, str]: def vf_vmap() -> dict[str, str]:
"""argv-to-volflag: simple values""" """argv-to-volflag: simple values"""
ret = {} ret = {}
for k in ("lg_sbf", "md_sbf"): for k in ("lg_sbf", "md_sbf", "unlist"):
ret[k] = k ret[k] = k
return ret return ret
@@ -75,7 +78,9 @@ flagcats = {
}, },
"upload rules": { "upload rules": {
"maxn=250,600": "max 250 uploads over 15min", "maxn=250,600": "max 250 uploads over 15min",
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)", "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
"rand": "force randomized filenames, 9 chars long by default", "rand": "force randomized filenames, 9 chars long by default",
"nrand=N": "randomized filenames are N chars long", "nrand=N": "randomized filenames are N chars long",
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB", "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
@@ -106,7 +111,7 @@ flagcats = {
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff", "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
"xlink": "cross-volume dupe detection / linking", "xlink": "cross-volume dupe detection / linking",
"xdev": "do not descend into other filesystems", "xdev": "do not descend into other filesystems",
"xvol": "skip symlinks leaving the volume root", "xvol": "do not follow symlinks leaving the volume root",
"dotsrch": "show dotfiles in search results", "dotsrch": "show dotfiles in search results",
"nodotsrch": "hide dotfiles in search results (default)", "nodotsrch": "hide dotfiles in search results (default)",
}, },
@@ -131,6 +136,8 @@ flagcats = {
"xm=CMD": "execute CMD on message", "xm=CMD": "execute CMD on message",
}, },
"client and ux": { "client and ux": {
"grid": "show grid/thumbnails by default",
"unlist": "dont list files matching REGEX",
"html_head=TXT": "includes TXT in the <head>", "html_head=TXT": "includes TXT in the <head>",
"robots": "allows indexing by search engines (default)", "robots": "allows indexing by search engines (default)",
"norobots": "kindly asks search engines to leave", "norobots": "kindly asks search engines to leave",
@@ -142,7 +149,9 @@ flagcats = {
"lg_sbf": "list of *logue-sandbox safeguards to disable", "lg_sbf": "list of *logue-sandbox safeguards to disable",
}, },
"others": { "others": {
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission' "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
"davauth": "ask webdav clients to login for all folders",
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
}, },
} }

View File

@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import argparse import argparse
import errno
import logging import logging
import os import os
import stat import stat
@@ -46,6 +47,12 @@ if True: # pylint: disable=using-constant-test
from typing import Any, Optional from typing import Any, Optional
class FSE(FilesystemError):
def __init__(self, msg: str, severity: int = 0) -> None:
super(FilesystemError, self).__init__(msg)
self.severity = severity
class FtpAuth(DummyAuthorizer): class FtpAuth(DummyAuthorizer):
def __init__(self, hub: "SvcHub") -> None: def __init__(self, hub: "SvcHub") -> None:
super(FtpAuth, self).__init__() super(FtpAuth, self).__init__()
@@ -55,6 +62,7 @@ class FtpAuth(DummyAuthorizer):
self, username: str, password: str, handler: Any self, username: str, password: str, handler: Any
) -> None: ) -> None:
handler.username = "{}:{}".format(username, password) handler.username = "{}:{}".format(username, password)
handler.uname = "*"
ip = handler.addr[0] ip = handler.addr[0]
if ip.startswith("::ffff:"): if ip.startswith("::ffff:"):
@@ -71,10 +79,13 @@ class FtpAuth(DummyAuthorizer):
raise AuthenticationFailed("banned") raise AuthenticationFailed("banned")
asrv = self.hub.asrv asrv = self.hub.asrv
if username == "anonymous": uname = "*"
uname = "*" if username != "anonymous":
else: for zs in (password, username):
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*" zs = asrv.iacct.get(asrv.ah.hash(zs), "")
if zs:
uname = zs
break
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)): if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
g = self.hub.gpwd g = self.hub.gpwd
@@ -86,14 +97,14 @@ class FtpAuth(DummyAuthorizer):
raise AuthenticationFailed("Authentication failed.") raise AuthenticationFailed("Authentication failed.")
handler.username = uname handler.uname = handler.username = uname
def get_home_dir(self, username: str) -> str: def get_home_dir(self, username: str) -> str:
return "/" return "/"
def has_user(self, username: str) -> bool: def has_user(self, username: str) -> bool:
asrv = self.hub.asrv asrv = self.hub.asrv
return username in asrv.acct return username in asrv.acct or username in asrv.iacct
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool: def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
return True # handled at filesystem layer return True # handled at filesystem layer
@@ -112,11 +123,11 @@ class FtpFs(AbstractedFS):
def __init__( def __init__(
self, root: str, cmd_channel: Any self, root: str, cmd_channel: Any
) -> None: # pylint: disable=super-init-not-called ) -> None: # pylint: disable=super-init-not-called
self.h = self.cmd_channel = cmd_channel # type: FTPHandler self.h = cmd_channel # type: FTPHandler
self.cmd_channel = cmd_channel # type: FTPHandler
self.hub: "SvcHub" = cmd_channel.hub self.hub: "SvcHub" = cmd_channel.hub
self.args = cmd_channel.args self.args = cmd_channel.args
self.uname = cmd_channel.uname
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
self.cwd = "/" # pyftpdlib convention of leading slash self.cwd = "/" # pyftpdlib convention of leading slash
self.root = "/var/lib/empty" self.root = "/var/lib/empty"
@@ -127,10 +138,6 @@ class FtpFs(AbstractedFS):
self.listdirinfo = self.listdir self.listdirinfo = self.listdir
self.chdir(".") self.chdir(".")
def die(self, msg):
self.h.die(msg)
raise Exception()
def v2a( def v2a(
self, self,
vpath: str, vpath: str,
@@ -140,21 +147,34 @@ class FtpFs(AbstractedFS):
d: bool = False, d: bool = False,
) -> tuple[str, VFS, str]: ) -> tuple[str, VFS, str]:
try: try:
vpath = vpath.replace("\\", "/").lstrip("/") vpath = vpath.replace("\\", "/").strip("/")
rd, fn = os.path.split(vpath) rd, fn = os.path.split(vpath)
if ANYWIN and relchk(rd): if ANYWIN and relchk(rd):
logging.warning("malicious vpath: %s", vpath) logging.warning("malicious vpath: %s", vpath)
self.die("Unsupported characters in filepath") t = "Unsupported characters in [{}]"
raise FSE(t.format(vpath), 1)
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"]) fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
vpath = vjoin(rd, fn) vpath = vjoin(rd, fn)
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d) vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
if not vfs.realpath: if not vfs.realpath:
self.die("No filesystem mounted at this path") t = "No filesystem mounted at [{}]"
raise FSE(t.format(vpath))
if "xdev" in vfs.flags or "xvol" in vfs.flags:
ap = vfs.canonical(rem)
avfs = vfs.chk_ap(ap)
t = "Permission denied in [{}]"
if not avfs:
raise FSE(t.format(vpath), 1)
cr, cw, cm, cd, _, _ = avfs.can_access("", self.h.uname)
if r and not cr or w and not cw or m and not cm or d and not cd:
raise FSE(t.format(vpath), 1)
return os.path.join(vfs.realpath, rem), vfs, rem return os.path.join(vfs.realpath, rem), vfs, rem
except Pebkac as ex: except Pebkac as ex:
self.die(str(ex)) raise FSE(str(ex))
def rv2a( def rv2a(
self, self,
@@ -177,7 +197,7 @@ class FtpFs(AbstractedFS):
def validpath(self, path: str) -> bool: def validpath(self, path: str) -> bool:
if "/.hist/" in path: if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"): if "/up2k." in path or path.endswith("/dir.txt"):
self.die("Access to this file is forbidden") raise FSE("Access to this file is forbidden", 1)
return True return True
@@ -194,7 +214,7 @@ class FtpFs(AbstractedFS):
td = 0 td = 0
if td < -1 or td > self.args.ftp_wt: if td < -1 or td > self.args.ftp_wt:
self.die("Cannot open existing file for writing") raise FSE("Cannot open existing file for writing")
self.validpath(ap) self.validpath(ap)
return open(fsenc(ap), mode) return open(fsenc(ap), mode)
@@ -203,9 +223,17 @@ class FtpFs(AbstractedFS):
nwd = join(self.cwd, path) nwd = join(self.cwd, path)
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False) vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
ap = vfs.canonical(rem) ap = vfs.canonical(rem)
if not bos.path.isdir(ap): try:
st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode):
raise Exception()
except:
# returning 550 is library-default and suitable # returning 550 is library-default and suitable
self.die("Failed to change directory") raise FSE("No such file or directory")
avfs = vfs.chk_ap(ap, st)
if not avfs:
raise FSE("Permission denied", 1)
self.cwd = nwd self.cwd = nwd
( (
@@ -215,16 +243,18 @@ class FtpFs(AbstractedFS):
self.can_delete, self.can_delete,
self.can_get, self.can_get,
self.can_upget, self.can_upget,
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username) ) = avfs.can_access("", self.h.uname)
def mkdir(self, path: str) -> None: def mkdir(self, path: str) -> None:
ap = self.rv2a(path, w=True)[0] ap = self.rv2a(path, w=True)[0]
bos.makedirs(ap) # filezilla expects this bos.makedirs(ap) # filezilla expects this
def listdir(self, path: str) -> list[str]: def listdir(self, path: str) -> list[str]:
vpath = join(self.cwd, path).lstrip("/") vpath = join(self.cwd, path)
try: try:
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False) ap, vfs, rem = self.v2a(vpath, True, False)
if not bos.path.isdir(ap):
raise FSE("No such file or directory", 1)
fsroot, vfs_ls1, vfs_virt = vfs.ls( fsroot, vfs_ls1, vfs_virt = vfs.ls(
rem, rem,
@@ -240,8 +270,12 @@ class FtpFs(AbstractedFS):
vfs_ls.sort() vfs_ls.sort()
return vfs_ls return vfs_ls
except: except Exception as ex:
if vpath: # panic on malicious names
if getattr(ex, "severity", 0):
raise
if vpath.strip("/"):
# display write-only folders as empty # display write-only folders as empty
return [] return []
@@ -251,31 +285,35 @@ class FtpFs(AbstractedFS):
def rmdir(self, path: str) -> None: def rmdir(self, path: str) -> None:
ap = self.rv2a(path, d=True)[0] ap = self.rv2a(path, d=True)[0]
bos.rmdir(ap) try:
bos.rmdir(ap)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def remove(self, path: str) -> None: def remove(self, path: str) -> None:
if self.args.no_del: if self.args.no_del:
self.die("The delete feature is disabled in server config") raise FSE("The delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/") vp = join(self.cwd, path).lstrip("/")
try: try:
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], []) self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
except Exception as ex: except Exception as ex:
self.die(str(ex)) raise FSE(str(ex))
def rename(self, src: str, dst: str) -> None: def rename(self, src: str, dst: str) -> None:
if not self.can_move: if not self.can_move:
self.die("Not allowed for user " + self.h.username) raise FSE("Not allowed for user " + self.h.uname)
if self.args.no_mv: if self.args.no_mv:
self.die("The rename/move feature is disabled in server config") raise FSE("The rename/move feature is disabled in server config")
svp = join(self.cwd, src).lstrip("/") svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/") dvp = join(self.cwd, dst).lstrip("/")
try: try:
self.hub.up2k.handle_mv(self.uname, svp, dvp) self.hub.up2k.handle_mv(self.uname, svp, dvp)
except Exception as ex: except Exception as ex:
self.die(str(ex)) raise FSE(str(ex))
def chmod(self, path: str, mode: str) -> None: def chmod(self, path: str, mode: str) -> None:
pass pass
@@ -284,7 +322,10 @@ class FtpFs(AbstractedFS):
try: try:
ap = self.rv2a(path, r=True)[0] ap = self.rv2a(path, r=True)[0]
return bos.stat(ap) return bos.stat(ap)
except: except FSE as ex:
if ex.severity:
raise
ap = self.rv2a(path)[0] ap = self.rv2a(path)[0]
st = bos.stat(ap) st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode): if not stat.S_ISDIR(st.st_mode):
@@ -304,7 +345,10 @@ class FtpFs(AbstractedFS):
try: try:
st = self.stat(path) st = self.stat(path)
return stat.S_ISREG(st.st_mode) return stat.S_ISREG(st.st_mode)
except: except Exception as ex:
if getattr(ex, "severity", 0):
raise
return False # expected for mojibake in ftp_SIZE() return False # expected for mojibake in ftp_SIZE()
def islink(self, path: str) -> bool: def islink(self, path: str) -> bool:
@@ -315,7 +359,10 @@ class FtpFs(AbstractedFS):
try: try:
st = self.stat(path) st = self.stat(path)
return stat.S_ISDIR(st.st_mode) return stat.S_ISDIR(st.st_mode)
except: except Exception as ex:
if getattr(ex, "severity", 0):
raise
return True return True
def getsize(self, path: str) -> int: def getsize(self, path: str) -> int:
@@ -344,10 +391,12 @@ class FtpHandler(FTPHandler):
abstracted_fs = FtpFs abstracted_fs = FtpFs
hub: "SvcHub" hub: "SvcHub"
args: argparse.Namespace args: argparse.Namespace
uname: str
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None: def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
self.hub: "SvcHub" = FtpHandler.hub self.hub: "SvcHub" = FtpHandler.hub
self.args: argparse.Namespace = FtpHandler.args self.args: argparse.Namespace = FtpHandler.args
self.uname = "*"
if PY2: if PY2:
FTPHandler.__init__(self, conn, server, ioloop) FTPHandler.__init__(self, conn, server, ioloop)
@@ -363,14 +412,10 @@ class FtpHandler(FTPHandler):
# reduce non-debug logging # reduce non-debug logging
self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")] self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")]
def die(self, msg):
self.respond("550 {}".format(msg))
raise FilesystemError(msg)
def ftp_STOR(self, file: str, mode: str = "w") -> Any: def ftp_STOR(self, file: str, mode: str = "w") -> Any:
# Optional[str] # Optional[str]
vp = join(self.fs.cwd, file).lstrip("/") vp = join(self.fs.cwd, file).lstrip("/")
ap, vfs, rem = self.fs.v2a(vp) ap, vfs, rem = self.fs.v2a(vp, w=True)
self.vfs_map[ap] = vp self.vfs_map[ap] = vp
xbu = vfs.flags.get("xbu") xbu = vfs.flags.get("xbu")
if xbu and not runhook( if xbu and not runhook(
@@ -379,14 +424,14 @@ class FtpHandler(FTPHandler):
ap, ap,
vfs.canonical(rem), vfs.canonical(rem),
"", "",
self.username, self.uname,
0, 0,
0, 0,
self.cli_ip, self.cli_ip,
0, 0,
"", "",
): ):
self.die("Upload blocked by xbu server config") raise FSE("Upload blocked by xbu server config")
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap)) # print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
ret = FTPHandler.ftp_STOR(self, file, mode) ret = FTPHandler.ftp_STOR(self, file, mode)
@@ -408,7 +453,7 @@ class FtpHandler(FTPHandler):
# print("xfer_end: {} => {}".format(ap, vp)) # print("xfer_end: {} => {}".format(ap, vp))
if vp: if vp:
vp, fn = os.path.split(vp) vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True) vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True)
vfs, rem = vfs.get_dbv(rem) vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file( self.hub.up2k.hash_file(
vfs.realpath, vfs.realpath,
@@ -418,7 +463,7 @@ class FtpHandler(FTPHandler):
fn, fn,
self.cli_ip, self.cli_ip,
time.time(), time.time(),
self.username, self.uname,
) )
return FTPHandler.log_transfer( return FTPHandler.log_transfer(
@@ -452,7 +497,7 @@ class Ftpd(object):
print(t.format(pybin)) print(t.format(pybin))
sys.exit(1) sys.exit(1)
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem") h1.certfile = self.args.cert
h1.tls_control_required = True h1.tls_control_required = True
h1.tls_data_required = True h1.tls_data_required = True
@@ -460,9 +505,9 @@ class Ftpd(object):
for h_lp in hs: for h_lp in hs:
h2, lp = h_lp h2, lp = h_lp
h2.hub = hub FtpHandler.hub = h2.hub = hub
h2.args = hub.args FtpHandler.args = h2.args = hub.args
h2.authorizer = FtpAuth(hub) FtpHandler.authorizer = h2.authorizer = FtpAuth(hub)
if self.args.ftp_pr: if self.args.ftp_pr:
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")] p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
@@ -486,6 +531,9 @@ class Ftpd(object):
if "::" in ips: if "::" in ips:
ips.append("0.0.0.0") ips.append("0.0.0.0")
if self.args.ftp4:
ips = [x for x in ips if ":" not in x]
ioloop = IOLoop() ioloop = IOLoop()
for ip in ips: for ip in ips:
for h, lp in hs: for h, lp in hs:

View File

@@ -135,6 +135,7 @@ class HttpCli(object):
self.ouparam: dict[str, str] = {} self.ouparam: dict[str, str] = {}
self.uparam: dict[str, str] = {} self.uparam: dict[str, str] = {}
self.cookies: dict[str, str] = {} self.cookies: dict[str, str] = {}
self.avn: Optional[VFS] = None
self.vpath = " " self.vpath = " "
self.uname = " " self.uname = " "
self.pw = " " self.pw = " "
@@ -172,13 +173,16 @@ class HttpCli(object):
def log(self, msg: str, c: Union[int, str] = 0) -> None: def log(self, msg: str, c: Union[int, str] = 0) -> None:
ptn = self.asrv.re_pwd ptn = self.asrv.re_pwd
if ptn and ptn.search(msg): if ptn and ptn.search(msg):
msg = ptn.sub(self.unpwd, msg) if self.asrv.ah.on:
msg = ptn.sub("\033[7m pw \033[27m", msg)
else:
msg = ptn.sub(self.unpwd, msg)
self.log_func(self.log_src, msg, c) self.log_func(self.log_src, msg, c)
def unpwd(self, m: Match[str]) -> str: def unpwd(self, m: Match[str]) -> str:
a, b = m.groups() a, b, c = m.groups()
return "=\033[7m {} \033[27m{}".format(self.asrv.iacct[a], b) return "{}\033[7m {} \033[27m{}".format(a, self.asrv.iacct[b], c)
def _check_nonfatal(self, ex: Pebkac, post: bool) -> bool: def _check_nonfatal(self, ex: Pebkac, post: bool) -> bool:
if post: if post:
@@ -219,7 +223,7 @@ class HttpCli(object):
try: try:
self.s.settimeout(2) self.s.settimeout(2)
headerlines = read_header(self.sr) headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
self.in_hdr_recv = False self.in_hdr_recv = False
if not headerlines: if not headerlines:
return False return False
@@ -266,7 +270,7 @@ class HttpCli(object):
) )
self.host = self.headers.get("host") or "" self.host = self.headers.get("host") or ""
if not self.host: if not self.host:
zs = "{}:{}".format(*list(self.s.getsockname()[:2])) zs = "%s:%s" % self.s.getsockname()[:2]
self.host = zs[7:] if zs.startswith("::ffff:") else zs self.host = zs[7:] if zs.startswith("::ffff:") else zs
n = self.args.rproxy n = self.args.rproxy
@@ -330,7 +334,7 @@ class HttpCli(object):
for k in arglist.split("&"): for k in arglist.split("&"):
if "=" in k: if "=" in k:
k, zs = k.split("=", 1) k, zs = k.split("=", 1)
uparam[k.lower()] = zs.strip() uparam[k.lower()] = unquotep(zs.strip().replace("+", " "))
else: else:
uparam[k.lower()] = "" uparam[k.lower()] = ""
@@ -382,13 +386,14 @@ class HttpCli(object):
zs = base64.b64decode(zb).decode("utf-8") zs = base64.b64decode(zb).decode("utf-8")
# try "pwd", "x:pwd", "pwd:x" # try "pwd", "x:pwd", "pwd:x"
for bauth in [zs] + zs.split(":", 1)[::-1]: for bauth in [zs] + zs.split(":", 1)[::-1]:
if self.asrv.iacct.get(bauth): hpw = self.asrv.ah.hash(bauth)
if self.asrv.iacct.get(hpw):
break break
except: except:
pass pass
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
self.uname = self.asrv.iacct.get(self.pw) or "*" self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*"
self.rvol = self.asrv.vfs.aread[self.uname] self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname] self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname] self.mvol = self.asrv.vfs.amove[self.uname]
@@ -403,10 +408,21 @@ class HttpCli(object):
self.get_pwd_cookie(self.pw) self.get_pwd_cookie(self.pw)
if self.is_rclone: if self.is_rclone:
# dots: always include dotfiles if permitted
# lt: probably more important showing the correct timestamps of any dupes it just uploaded rather than the lastmod time of any non-copyparty-managed symlinks
# b: basic-browser if it tries to parse the html listing
uparam["dots"] = "" uparam["dots"] = ""
uparam["lt"] = ""
uparam["b"] = "" uparam["b"] = ""
cookies["b"] = "" cookies["b"] = ""
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
if "xdev" in vn.flags or "xvol" in vn.flags:
ap = vn.canonical(rem)
avn = vn.chk_ap(ap)
else:
avn = vn
( (
self.can_read, self.can_read,
self.can_write, self.can_write,
@@ -414,7 +430,12 @@ class HttpCli(object):
self.can_delete, self.can_delete,
self.can_get, self.can_get,
self.can_upget, self.can_upget,
) = self.asrv.vfs.can_access(self.vpath, self.uname) ) = (
avn.can_access("", self.uname) if avn else [False] * 6
)
self.avn = avn
self.s.settimeout(self.args.s_tbody or None)
try: try:
cors_k = self._cors() cors_k = self._cors()
@@ -530,7 +551,7 @@ class HttpCli(object):
mime: Optional[str] = None, mime: Optional[str] = None,
headers: Optional[dict[str, str]] = None, headers: Optional[dict[str, str]] = None,
) -> None: ) -> None:
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])] response = ["%s %s %s" % (self.http_ver, status, HTTPCODE[status])]
if length is not None: if length is not None:
response.append("Content-Length: " + unicode(length)) response.append("Content-Length: " + unicode(length))
@@ -554,11 +575,10 @@ class HttpCli(object):
self.out_headers["Content-Type"] = mime self.out_headers["Content-Type"] = mime
for k, zs in list(self.out_headers.items()) + self.out_headerlist: for k, zs in list(self.out_headers.items()) + self.out_headerlist:
response.append("{}: {}".format(k, zs)) response.append("%s: %s" % (k, zs))
try: try:
# best practice to separate headers and body into different packets # best practice to separate headers and body into different packets
self.s.settimeout(None)
self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n") self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
except: except:
raise Pebkac(400, "client d/c while replying headers") raise Pebkac(400, "client d/c while replying headers")
@@ -621,7 +641,7 @@ class HttpCli(object):
if not kv: if not kv:
return "" return ""
r = ["{}={}".format(k, quotep(zs)) if zs else k for k, zs in kv.items()] r = ["%s=%s" % (k, quotep(zs)) if zs else k for k, zs in kv.items()]
return "?" + "&amp;".join(r) return "?" + "&amp;".join(r)
def redirect( def redirect(
@@ -710,7 +730,7 @@ class HttpCli(object):
def handle_get(self) -> bool: def handle_get(self) -> bool:
if self.do_log: if self.do_log:
logmsg = "{:4} {}".format(self.mode, self.req) logmsg = "%-4s %s @%s" % (self.mode, self.req, self.uname)
if "range" in self.headers: if "range" in self.headers:
try: try:
@@ -809,17 +829,20 @@ class HttpCli(object):
def handle_propfind(self) -> bool: def handle_propfind(self) -> bool:
if self.do_log: if self.do_log:
self.log("PFIND " + self.req) self.log("PFIND %s @%s" % (self.req, self.uname))
if self.args.no_dav: if self.args.no_dav:
raise Pebkac(405, "WebDAV is disabled in server config") raise Pebkac(405, "WebDAV is disabled in server config")
if not self.can_read and not self.can_write and not self.can_get: vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False, err=401)
if self.vpath: tap = vn.canonical(rem)
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(401, "authenticate")
self.uparam["h"] = "" if "davauth" in vn.flags and self.uname == "*":
self.can_read = self.can_write = self.can_get = False
if not self.can_read and not self.can_write and not self.can_get:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(401, "authenticate")
from .dxml import parse_xml from .dxml import parse_xml
@@ -863,17 +886,16 @@ class HttpCli(object):
] ]
props = set(props_lst) props = set(props_lst)
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False, err=401)
depth = self.headers.get("depth", "infinity").lower() depth = self.headers.get("depth", "infinity").lower()
try: try:
topdir = {"vp": "", "st": bos.stat(vn.canonical(rem))} topdir = {"vp": "", "st": bos.stat(tap)}
except OSError as ex: except OSError as ex:
if ex.errno != errno.ENOENT: if ex.errno not in (errno.ENOENT, errno.ENOTDIR):
raise raise
raise Pebkac(404) raise Pebkac(404)
if not stat.S_ISDIR(topdir["st"].st_mode): if depth == "0" or not self.can_read or not stat.S_ISDIR(topdir["st"].st_mode):
fgen = [] fgen = []
elif depth == "infinity": elif depth == "infinity":
@@ -883,7 +905,11 @@ class HttpCli(object):
self.reply(zb, 403, "application/xml; charset=utf-8") self.reply(zb, 403, "application/xml; charset=utf-8")
return True return True
# this will return symlink-target timestamps
# because lstat=true would not recurse into subfolders
# and this is a rare case where we actually want that
fgen = vn.zipgen( fgen = vn.zipgen(
rem,
rem, rem,
set(), set(),
self.uname, self.uname,
@@ -895,7 +921,11 @@ class HttpCli(object):
elif depth == "1": elif depth == "1":
_, vfs_ls, vfs_virt = vn.ls( _, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, [[True, False]] rem,
self.uname,
not self.args.no_scandir,
[[True, False]],
lstat="davrt" not in vn.flags,
) )
if not self.args.ed: if not self.args.ed:
names = set(exclude_dotfiles([x[0] for x in vfs_ls])) names = set(exclude_dotfiles([x[0] for x in vfs_ls]))
@@ -907,9 +937,6 @@ class HttpCli(object):
ls += [{"vp": v, "st": zsr} for v in vfs_virt] ls += [{"vp": v, "st": zsr} for v in vfs_virt]
fgen = ls # type: ignore fgen = ls # type: ignore
elif depth == "0":
fgen = [] # type: ignore
else: else:
t = "invalid depth value '{}' (must be either '0' or '1'{})" t = "invalid depth value '{}' (must be either '0' or '1'{})"
t2 = " or 'infinity'" if self.args.dav_inf else "" t2 = " or 'infinity'" if self.args.dav_inf else ""
@@ -929,14 +956,23 @@ class HttpCli(object):
for x in fgen: for x in fgen:
rp = vjoin(vtop, x["vp"]) rp = vjoin(vtop, x["vp"])
st: os.stat_result = x["st"] st: os.stat_result = x["st"]
mtime = st.st_mtime
if stat.S_ISLNK(st.st_mode):
try:
st = bos.stat(os.path.join(tap, x["vp"]))
except:
continue
isdir = stat.S_ISDIR(st.st_mode) isdir = stat.S_ISDIR(st.st_mode)
t = "<D:response><D:href>/{}{}</D:href><D:propstat><D:prop>" ret += "<D:response><D:href>/%s%s</D:href><D:propstat><D:prop>" % (
ret += t.format(quotep(rp), "/" if isdir and rp else "") quotep(rp),
"/" if isdir and rp else "",
)
pvs: dict[str, str] = { pvs: dict[str, str] = {
"displayname": html_escape(rp.split("/")[-1]), "displayname": html_escape(rp.split("/")[-1]),
"getlastmodified": formatdate(st.st_mtime, usegmt=True), "getlastmodified": formatdate(mtime, usegmt=True),
"resourcetype": '<D:collection xmlns:D="DAV:"/>' if isdir else "", "resourcetype": '<D:collection xmlns:D="DAV:"/>' if isdir else "",
"supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>', "supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>',
} }
@@ -948,13 +984,13 @@ class HttpCli(object):
if k not in props: if k not in props:
continue continue
elif v: elif v:
ret += "<D:{0}>{1}</D:{0}>".format(k, v) ret += "<D:%s>%s</D:%s>" % (k, v, k)
else: else:
ret += "<D:{}/>".format(k) ret += "<D:%s/>" % (k,)
ret += "</D:prop><D:status>HTTP/1.1 200 OK</D:status></D:propstat>" ret += "</D:prop><D:status>HTTP/1.1 200 OK</D:status></D:propstat>"
missing = ["<D:{}/>".format(x) for x in props if x not in pvs] missing = ["<D:%s/>" % (x,) for x in props if x not in pvs]
if missing and clen: if missing and clen:
t = "<D:propstat><D:prop>{}</D:prop><D:status>HTTP/1.1 404 Not Found</D:status></D:propstat>" t = "<D:propstat><D:prop>{}</D:prop><D:status>HTTP/1.1 404 Not Found</D:status></D:propstat>"
ret += t.format("".join(missing)) ret += t.format("".join(missing))
@@ -973,7 +1009,7 @@ class HttpCli(object):
def handle_proppatch(self) -> bool: def handle_proppatch(self) -> bool:
if self.do_log: if self.do_log:
self.log("PPATCH " + self.req) self.log("PPATCH %s @%s" % (self.req, self.uname))
if self.args.no_dav: if self.args.no_dav:
raise Pebkac(405, "WebDAV is disabled in server config") raise Pebkac(405, "WebDAV is disabled in server config")
@@ -1031,7 +1067,7 @@ class HttpCli(object):
def handle_lock(self) -> bool: def handle_lock(self) -> bool:
if self.do_log: if self.do_log:
self.log("LOCK " + self.req) self.log("LOCK %s @%s" % (self.req, self.uname))
if self.args.no_dav: if self.args.no_dav:
raise Pebkac(405, "WebDAV is disabled in server config") raise Pebkac(405, "WebDAV is disabled in server config")
@@ -1097,7 +1133,7 @@ class HttpCli(object):
def handle_unlock(self) -> bool: def handle_unlock(self) -> bool:
if self.do_log: if self.do_log:
self.log("UNLOCK " + self.req) self.log("UNLOCK %s @%s" % (self.req, self.uname))
if self.args.no_dav: if self.args.no_dav:
raise Pebkac(405, "WebDAV is disabled in server config") raise Pebkac(405, "WebDAV is disabled in server config")
@@ -1114,7 +1150,7 @@ class HttpCli(object):
return True return True
if self.do_log: if self.do_log:
self.log("MKCOL " + self.req) self.log("MKCOL %s @%s" % (self.req, self.uname))
try: try:
return self._mkdir(self.vpath, True) return self._mkdir(self.vpath, True)
@@ -1129,18 +1165,9 @@ class HttpCli(object):
dst = self.headers["destination"] dst = self.headers["destination"]
dst = re.sub("^https?://[^/]+", "", dst).lstrip() dst = re.sub("^https?://[^/]+", "", dst).lstrip()
dst = unquotep(dst) dst = unquotep(dst)
if not self._mv(self.vpath, dst): if not self._mv(self.vpath, dst.lstrip("/")):
return False return False
# up2k only cares about files and removes all empty folders;
# clients naturally expect empty folders to survive a rename
vn, rem = self.asrv.vfs.get(dst, self.uname, False, False)
dabs = vn.canonical(rem)
try:
bos.makedirs(dabs)
except:
pass
return True return True
def _applesan(self) -> bool: def _applesan(self) -> bool:
@@ -1175,7 +1202,7 @@ class HttpCli(object):
def handle_options(self) -> bool: def handle_options(self) -> bool:
if self.do_log: if self.do_log:
self.log("OPTIONS " + self.req) self.log("OPTIONS %s @%s" % (self.req, self.uname))
oh = self.out_headers oh = self.out_headers
oh["Allow"] = ", ".join(self.conn.hsrv.mallow) oh["Allow"] = ", ".join(self.conn.hsrv.mallow)
@@ -1190,11 +1217,11 @@ class HttpCli(object):
return True return True
def handle_delete(self) -> bool: def handle_delete(self) -> bool:
self.log("DELETE " + self.req) self.log("DELETE %s @%s" % (self.req, self.uname))
return self.handle_rm([]) return self.handle_rm([])
def handle_put(self) -> bool: def handle_put(self) -> bool:
self.log("PUT " + self.req) self.log("PUT %s @%s" % (self.req, self.uname))
if not self.can_write: if not self.can_write:
t = "user {} does not have write-access here" t = "user {} does not have write-access here"
@@ -1205,7 +1232,6 @@ class HttpCli(object):
if self.headers.get("expect", "").lower() == "100-continue": if self.headers.get("expect", "").lower() == "100-continue":
try: try:
self.s.settimeout(None)
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n") self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
except: except:
raise Pebkac(400, "client d/c before 100 continue") raise Pebkac(400, "client d/c before 100 continue")
@@ -1213,11 +1239,10 @@ class HttpCli(object):
return self.handle_stash(True) return self.handle_stash(True)
def handle_post(self) -> bool: def handle_post(self) -> bool:
self.log("POST " + self.req) self.log("POST %s @%s" % (self.req, self.uname))
if self.headers.get("expect", "").lower() == "100-continue": if self.headers.get("expect", "").lower() == "100-continue":
try: try:
self.s.settimeout(None)
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n") self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
except: except:
raise Pebkac(400, "client d/c before 100 continue") raise Pebkac(400, "client d/c before 100 continue")
@@ -1337,7 +1362,9 @@ class HttpCli(object):
lim = vfs.get_dbv(rem)[0].lim lim = vfs.get_dbv(rem)[0].lim
fdir = vfs.canonical(rem) fdir = vfs.canonical(rem)
if lim: if lim:
fdir, rem = lim.all(self.ip, rem, remains, fdir) fdir, rem = lim.all(
self.ip, rem, remains, vfs.realpath, fdir, self.conn.hsrv.broker
)
fn = None fn = None
if rem and not self.trailing_slash and not bos.path.isdir(fdir): if rem and not self.trailing_slash and not bos.path.isdir(fdir):
@@ -1470,6 +1497,7 @@ class HttpCli(object):
lim.bup(self.ip, post_sz) lim.bup(self.ip, post_sz)
try: try:
lim.chk_sz(post_sz) lim.chk_sz(post_sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, post_sz)
except: except:
bos.unlink(path) bos.unlink(path)
raise raise
@@ -1622,7 +1650,7 @@ class HttpCli(object):
spd1 = get_spd(nbytes, self.t0) spd1 = get_spd(nbytes, self.t0)
spd2 = get_spd(self.conn.nbyte, self.conn.t0) spd2 = get_spd(self.conn.nbyte, self.conn.t0)
return "{} {} n{}".format(spd1, spd2, self.conn.nreq) return "%s %s n%s" % (spd1, spd2, self.conn.nreq)
def handle_post_multipart(self) -> bool: def handle_post_multipart(self) -> bool:
self.parser = MultipartParser(self.log, self.sr, self.headers) self.parser = MultipartParser(self.log, self.sr, self.headers)
@@ -1669,7 +1697,7 @@ class HttpCli(object):
items = [unquotep(x) for x in items if items] items = [unquotep(x) for x in items if items]
self.parser.drop() self.parser.drop()
return self.tx_zip(k, v, vn, rem, items, self.args.ed) return self.tx_zip(k, v, "", vn, rem, items, self.args.ed)
def handle_post_json(self) -> bool: def handle_post_json(self) -> bool:
try: try:
@@ -1944,7 +1972,7 @@ class HttpCli(object):
return True return True
def get_pwd_cookie(self, pwd: str) -> str: def get_pwd_cookie(self, pwd: str) -> str:
if pwd in self.asrv.iacct: if self.asrv.ah.hash(pwd) in self.asrv.iacct:
msg = "login ok" msg = "login ok"
dur = int(60 * 60 * self.args.logout) dur = int(60 * 60 * self.args.logout)
else: else:
@@ -2080,7 +2108,9 @@ class HttpCli(object):
lim = vfs.get_dbv(rem)[0].lim lim = vfs.get_dbv(rem)[0].lim
fdir_base = vfs.canonical(rem) fdir_base = vfs.canonical(rem)
if lim: if lim:
fdir_base, rem = lim.all(self.ip, rem, -1, fdir_base) fdir_base, rem = lim.all(
self.ip, rem, -1, vfs.realpath, fdir_base, self.conn.hsrv.broker
)
upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/") upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/")
if not nullwrite: if not nullwrite:
bos.makedirs(fdir_base) bos.makedirs(fdir_base)
@@ -2173,6 +2203,7 @@ class HttpCli(object):
try: try:
lim.chk_df(tabspath, sz, True) lim.chk_df(tabspath, sz, True)
lim.chk_sz(sz) lim.chk_sz(sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, sz)
lim.chk_bup(self.ip) lim.chk_bup(self.ip)
lim.chk_nup(self.ip) lim.chk_nup(self.ip)
except: except:
@@ -2348,7 +2379,7 @@ class HttpCli(object):
fp = vfs.canonical(rp) fp = vfs.canonical(rp)
lim = vfs.get_dbv(rem)[0].lim lim = vfs.get_dbv(rem)[0].lim
if lim: if lim:
fp, rp = lim.all(self.ip, rp, clen, fp) fp, rp = lim.all(self.ip, rp, clen, vfs.realpath, fp, self.conn.hsrv.broker)
bos.makedirs(fp) bos.makedirs(fp)
fp = os.path.join(fp, fn) fp = os.path.join(fp, fn)
@@ -2419,6 +2450,25 @@ class HttpCli(object):
if p_field != "body": if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field)) raise Pebkac(400, "expected body, got {}".format(p_field))
xbu = vfs.flags.get("xbu")
if xbu:
if not runhook(
self.log,
xbu,
fp,
self.vpath,
self.host,
self.uname,
time.time(),
0,
self.ip,
time.time(),
"",
):
t = "save blocked by xbu server config"
self.log(t, 1)
raise Pebkac(403, t)
if bos.path.exists(fp): if bos.path.exists(fp):
bos.unlink(fp) bos.unlink(fp)
@@ -2430,6 +2480,7 @@ class HttpCli(object):
lim.bup(self.ip, sz) lim.bup(self.ip, sz)
try: try:
lim.chk_sz(sz) lim.chk_sz(sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, sz)
except: except:
bos.unlink(fp) bos.unlink(fp)
raise raise
@@ -2438,6 +2489,39 @@ class HttpCli(object):
new_lastmod3 = int(new_lastmod * 1000) new_lastmod3 = int(new_lastmod * 1000)
sha512 = sha512[:56] sha512 = sha512[:56]
xau = vfs.flags.get("xau")
if xau and not runhook(
self.log,
xau,
fp,
self.vpath,
self.host,
self.uname,
new_lastmod,
sz,
self.ip,
new_lastmod,
"",
):
t = "save blocked by xau server config"
self.log(t, 1)
os.unlink(fp)
raise Pebkac(403, t)
vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.say(
"up2k.hash_file",
vfs.realpath,
vfs.vpath,
vfs.flags,
vsplit(rem)[0],
fn,
self.ip,
new_lastmod,
self.uname,
True,
)
response = json.dumps( response = json.dumps(
{"ok": True, "lastmod": new_lastmod3, "size": sz, "sha512": sha512} {"ok": True, "lastmod": new_lastmod3, "size": sz, "sha512": sha512}
) )
@@ -2656,7 +2740,14 @@ class HttpCli(object):
return ret return ret
def tx_zip( def tx_zip(
self, fmt: str, uarg: str, vn: VFS, rem: str, items: list[str], dots: bool self,
fmt: str,
uarg: str,
vpath: str,
vn: VFS,
rem: str,
items: list[str],
dots: bool,
) -> bool: ) -> bool:
if self.args.no_zip: if self.args.no_zip:
raise Pebkac(400, "not enabled") raise Pebkac(400, "not enabled")
@@ -2699,7 +2790,7 @@ class HttpCli(object):
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis}) self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
fgen = vn.zipgen( fgen = vn.zipgen(
rem, set(items), self.uname, False, dots, not self.args.no_scandir vpath, rem, set(items), self.uname, dots, False, not self.args.no_scandir
) )
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]})) # for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg) bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
@@ -2750,7 +2841,7 @@ class HttpCli(object):
return True return True
def tx_md(self, fs_path: str) -> bool: def tx_md(self, fs_path: str) -> bool:
logmsg = "{:4} {} ".format("", self.req) logmsg = " %s @%s " % (self.req, self.uname)
if not self.can_write: if not self.can_write:
if "edit" in self.uparam or "edit2" in self.uparam: if "edit" in self.uparam or "edit2" in self.uparam:
@@ -3026,8 +3117,8 @@ class HttpCli(object):
ret = self.gen_tree(top, dst) ret = self.gen_tree(top, dst)
if self.is_vproxied: if self.is_vproxied:
parents = self.args.R.split("/") parents = self.args.R.split("/")
for parent in parents[::-1]: for parent in reversed(parents):
ret = {"k{}".format(parent): ret, "a": []} ret = {"k%s" % (parent,): ret, "a": []}
zs = json.dumps(ret) zs = json.dumps(ret)
self.reply(zs.encode("utf-8"), mime="application/json") self.reply(zs.encode("utf-8"), mime="application/json")
@@ -3165,7 +3256,9 @@ class HttpCli(object):
nlim = int(self.uparam.get("lim") or 0) nlim = int(self.uparam.get("lim") or 0)
lim = [nlim, nlim] if nlim else [] lim = [nlim, nlim] if nlim else []
x = self.conn.hsrv.broker.ask("up2k.handle_rm", self.uname, self.ip, req, lim) x = self.conn.hsrv.broker.ask(
"up2k.handle_rm", self.uname, self.ip, req, lim, False
)
self.loud_reply(x.get()) self.loud_reply(x.get())
return True return True
@@ -3182,7 +3275,7 @@ class HttpCli(object):
# x-www-form-urlencoded (url query part) uses # x-www-form-urlencoded (url query part) uses
# either + or %20 for 0x20 so handle both # either + or %20 for 0x20 so handle both
dst = unquotep(dst.replace("+", " ")) dst = unquotep(dst.replace("+", " "))
return self._mv(self.vpath, dst) return self._mv(self.vpath, dst.lstrip("/"))
def _mv(self, vsrc: str, vdst: str) -> bool: def _mv(self, vsrc: str, vdst: str) -> bool:
if not self.can_move: if not self.can_move:
@@ -3429,6 +3522,7 @@ class HttpCli(object):
break break
vf = vn.flags vf = vn.flags
unlist = vf.get("unlist", "")
ls_ret = { ls_ret = {
"dirs": [], "dirs": [],
"files": [], "files": [],
@@ -3439,6 +3533,7 @@ class HttpCli(object):
"itag": e2t, "itag": e2t,
"lifetime": vn.flags.get("lifetime") or 0, "lifetime": vn.flags.get("lifetime") or 0,
"frand": bool(vn.flags.get("rand")), "frand": bool(vn.flags.get("rand")),
"unlist": unlist,
"perms": perms, "perms": perms,
"logues": logues, "logues": logues,
"readme": readme, "readme": readme,
@@ -3470,6 +3565,8 @@ class HttpCli(object):
"readme": readme, "readme": readme,
"title": html_escape(self.vpath, crlf=True) or "💾🎉", "title": html_escape(self.vpath, crlf=True) or "💾🎉",
"srv_info": srv_infot, "srv_info": srv_infot,
"dgrid": "grid" in vf,
"unlist": unlist,
"dtheme": self.args.theme, "dtheme": self.args.theme,
"themes": self.args.themes, "themes": self.args.themes,
"turbolvl": self.args.turbo, "turbolvl": self.args.turbo,
@@ -3503,10 +3600,14 @@ class HttpCli(object):
for k in ["zip", "tar"]: for k in ["zip", "tar"]:
v = self.uparam.get(k) v = self.uparam.get(k)
if v is not None: if v is not None:
return self.tx_zip(k, v, vn, rem, [], self.args.ed) return self.tx_zip(k, v, self.vpath, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls( fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, [[True, False], [False, True]] rem,
self.uname,
not self.args.no_scandir,
[[True, False], [False, True]],
lstat="lt" in self.uparam,
) )
stats = {k: v for k, v in vfs_ls} stats = {k: v for k, v in vfs_ls}
ls_names = [x[0] for x in vfs_ls] ls_names = [x[0] for x in vfs_ls]
@@ -3550,7 +3651,8 @@ class HttpCli(object):
fspath = fsroot + "/" + fn fspath = fsroot + "/" + fn
try: try:
inf = stats.get(fn) or bos.stat(fspath) linf = stats.get(fn) or bos.lstat(fspath)
inf = bos.stat(fspath) if stat.S_ISLNK(linf.st_mode) else linf
except: except:
self.log("broken symlink: {}".format(repr(fspath))) self.log("broken symlink: {}".format(repr(fspath)))
continue continue
@@ -3561,19 +3663,26 @@ class HttpCli(object):
if self.args.no_zip: if self.args.no_zip:
margin = "DIR" margin = "DIR"
else: else:
margin = '<a href="{}?zip" rel="nofollow">zip</a>'.format( margin = '<a href="%s?zip" rel="nofollow">zip</a>' % (quotep(href),)
quotep(href)
)
elif fn in hist: elif fn in hist:
margin = '<a href="{}.hist/{}">#{}</a>'.format( margin = '<a href="%s.hist/%s">#%s</a>' % (
base, html_escape(hist[fn][2], quot=True, crlf=True), hist[fn][0] base,
html_escape(hist[fn][2], quot=True, crlf=True),
hist[fn][0],
) )
else: else:
margin = "-" margin = "-"
sz = inf.st_size sz = inf.st_size
zd = datetime.utcfromtimestamp(inf.st_mtime) zd = datetime.utcfromtimestamp(linf.st_mtime)
dt = zd.strftime("%Y-%m-%d %H:%M:%S") dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
zd.year,
zd.month,
zd.day,
zd.hour,
zd.minute,
zd.second,
)
try: try:
ext = "---" if is_dir else fn.rsplit(".", 1)[1] ext = "---" if is_dir else fn.rsplit(".", 1)[1]
@@ -3583,7 +3692,7 @@ class HttpCli(object):
ext = "%" ext = "%"
if add_fk: if add_fk:
href = "{}?k={}".format( href = "%s?k=%s" % (
quotep(href), quotep(href),
self.gen_fk( self.gen_fk(
self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
@@ -3599,7 +3708,7 @@ class HttpCli(object):
"sz": sz, "sz": sz,
"ext": ext, "ext": ext,
"dt": dt, "dt": dt,
"ts": int(inf.st_mtime), "ts": int(linf.st_mtime),
} }
if is_dir: if is_dir:
dirs.append(item) dirs.append(item)
@@ -3687,7 +3796,12 @@ class HttpCli(object):
dirs.sort(key=itemgetter("name")) dirs.sort(key=itemgetter("name"))
if is_js: if is_js:
j2a["ls0"] = {"dirs": dirs, "files": files, "taglist": taglist} j2a["ls0"] = {
"dirs": dirs,
"files": files,
"taglist": taglist,
"unlist": unlist,
}
j2a["files"] = [] j2a["files"] = []
else: else:
j2a["files"] = dirs + files j2a["files"] = dirs + files

View File

@@ -54,7 +54,6 @@ class HttpConn(object):
self.args: argparse.Namespace = hsrv.args # mypy404 self.args: argparse.Namespace = hsrv.args # mypy404
self.E: EnvParams = self.args.E self.E: EnvParams = self.args.E
self.asrv: AuthSrv = hsrv.asrv # mypy404 self.asrv: AuthSrv = hsrv.asrv # mypy404
self.cert_path = hsrv.cert_path
self.u2fh: Util.FHC = hsrv.u2fh # mypy404 self.u2fh: Util.FHC = hsrv.u2fh # mypy404
self.iphash: HMaccas = hsrv.broker.iphash self.iphash: HMaccas = hsrv.broker.iphash
self.bans: dict[str, int] = hsrv.bans self.bans: dict[str, int] = hsrv.bans
@@ -114,7 +113,7 @@ class HttpConn(object):
def _detect_https(self) -> bool: def _detect_https(self) -> bool:
method = None method = None
if self.cert_path: if True:
try: try:
method = self.s.recv(4, socket.MSG_PEEK) method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout: except socket.timeout:
@@ -148,7 +147,7 @@ class HttpConn(object):
self.sr = None self.sr = None
if self.args.https_only: if self.args.https_only:
is_https = True is_https = True
elif self.args.http_only or not HAVE_SSL: elif self.args.http_only:
is_https = False is_https = False
else: else:
# raise Exception("asdf") # raise Exception("asdf")
@@ -162,7 +161,7 @@ class HttpConn(object):
self.log_src = self.log_src.replace("[36m", "[35m") self.log_src = self.log_src.replace("[36m", "[35m")
try: try:
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(self.cert_path) ctx.load_cert_chain(self.args.cert)
if self.args.ssl_ver: if self.args.ssl_ver:
ctx.options &= ~self.args.ssl_flags_en ctx.options &= ~self.args.ssl_flags_en
ctx.options |= self.args.ssl_flags_de ctx.options |= self.args.ssl_flags_de

View File

@@ -33,7 +33,23 @@ except MNFE:
* (try another python version, if you have one) * (try another python version, if you have one)
* (try copyparty.sfx instead) * (try copyparty.sfx instead)
""".format( """.format(
os.path.basename(sys.executable) sys.executable
)
)
sys.exit(1)
except SyntaxError:
if EXE:
raise
print(
"""\033[1;31m
your jinja2 version is incompatible with your python version;\033[33m
please try to replace it with an older version:\033[0m
* {} -m pip install --user jinja2==2.11.3
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
sys.executable
) )
) )
sys.exit(1) sys.exit(1)
@@ -132,12 +148,6 @@ class HttpSrv(object):
self.ssdp = SSDPr(broker) self.ssdp = SSDPr(broker)
cert_path = os.path.join(self.E.cfg, "cert.pem")
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
self.cert_path = ""
if self.tp_q: if self.tp_q:
self.start_threads(4) self.start_threads(4)

View File

@@ -17,7 +17,9 @@ class Ico(object):
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]: def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break""" """placeholder to make thumbnails not break"""
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4] bext = ext.encode("ascii", "replace")
ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4]
if PY2: if PY2:
zb = [ord(x) for x in zb] zb = [ord(x) for x in zb]

View File

@@ -1,6 +1,7 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import errno
import random import random
import select import select
import socket import socket
@@ -277,6 +278,18 @@ class MDNS(MCast):
zf = time.time() + 2 zf = time.time() + 2
self.probing = zf # cant unicast so give everyone an extra sec self.probing = zf # cant unicast so give everyone an extra sec
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3 self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3
try:
self.run2()
except OSError as ex:
if ex.errno != errno.EBADF:
raise
self.log("stopping due to {}".format(ex), "90")
self.log("stopped", 2)
def run2(self) -> None:
last_hop = time.time() last_hop = time.time()
ihop = self.args.mc_hop ihop = self.args.mc_hop
while self.running: while self.running:
@@ -314,8 +327,6 @@ class MDNS(MCast):
self.log(t.format(self.hn[:-1]), 2) self.log(t.format(self.hn[:-1]), 2)
self.probing = 0 self.probing = 0
self.log("stopped", 2)
def stop(self, panic=False) -> None: def stop(self, panic=False) -> None:
self.running = False self.running = False
for srv in self.srv.values(): for srv in self.srv.values():

145
copyparty/pwhash.py Normal file
View File

@@ -0,0 +1,145 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import sys
import threading
from .__init__ import unicode
class PWHash(object):
    """
    Hashes account passwords with a configurable KDF.

    The algorithm and its parameters come from ``--ah-alg`` as a
    comma-separated spec, e.g. ``sha2``, ``sha2,424242``,
    ``scrypt,13,2,8,4``, ``argon2,3,256,4,19`` — the first token is the
    algorithm name, the rest are optional numeric tuning parameters.
    ``none`` disables hashing entirely (plaintext passthrough).
    """

    def __init__(self, args: argparse.Namespace):
        self.args = args

        # parse "alg[,p1[,p2...]]"; keep the parameters as a list of
        # strings so any number of them (including zero) is accepted
        # (the old `alg, ac = spec.split(",")` only worked for exactly
        #  one parameter, and then indexed single characters of it)
        zsl = args.ah_alg.split(",")
        alg = zsl[0]
        ac = zsl[1:]
        if alg == "none":
            alg = ""

        self.alg = alg
        self.ac = ac  # algorithm parameters (list of strings, may be empty)
        if not alg:
            # hashing disabled; identity function keeps the call-sites uniform
            self.on = False
            self.hash = unicode
            return

        self.on = True
        self.salt = args.ah_salt.encode("utf-8")
        self.cache: dict[str, str] = {}  # plaintext -> hashed (KDFs are slow)
        self.mutex = threading.Lock()
        self.hash = self._cache_hash

        if alg == "sha2":
            self._hash = self._gen_sha2
        elif alg == "scrypt":
            self._hash = self._gen_scrypt
        elif alg == "argon2":
            self._hash = self._gen_argon2
        else:
            t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
            raise Exception(t.format(alg))

    def _cache_hash(self, plain: str) -> str:
        """hash `plain`, returning a memoized result when available"""
        with self.mutex:
            try:
                return self.cache[plain]
            except KeyError:
                pass

            if not plain:
                return ""

            if len(plain) > 255:
                raise Exception("password too long")

            if len(self.cache) > 9000:
                # crude cap; drop everything rather than tracking LRU order
                self.cache = {}

            ret = self._hash(plain)
            self.cache[plain] = ret
            return ret

    def _gen_sha2(self, plain: str) -> str:
        """iterated salted sha512; param: iteration count (default 424242)"""
        its = int(self.ac[0]) if self.ac else 424242
        bplain = plain.encode("utf-8")
        ret = b"\n"
        for _ in range(its):
            ret = hashlib.sha512(self.salt + bplain + ret).digest()
        return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")

    def _gen_scrypt(self, plain: str) -> str:
        """scrypt; params: log2(cost)-1, iterations, blocksize, parallelism"""
        cost = 2 << 13
        its = 2
        blksz = 8
        para = 4
        try:
            cost = 2 << int(self.ac[0])
            its = int(self.ac[1])
            blksz = int(self.ac[2])
            para = int(self.ac[3])
        except (IndexError, ValueError):
            pass  # best-effort: missing/invalid params keep their defaults

        ret = plain.encode("utf-8")
        for _ in range(its):
            ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)

        return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

    def _gen_argon2(self, plain: str) -> str:
        """argon2id; params: time-cost, mem-cost (MiB), parallelism, version"""
        from argon2.low_level import Type as ArgonType
        from argon2.low_level import hash_secret

        time_cost = 3
        mem_cost = 256
        parallelism = 4
        version = 19
        try:
            time_cost = int(self.ac[0])
            mem_cost = int(self.ac[1])
            parallelism = int(self.ac[2])
            version = int(self.ac[3])
        except (IndexError, ValueError):
            pass  # best-effort: missing/invalid params keep their defaults

        bplain = plain.encode("utf-8")

        bret = hash_secret(
            secret=bplain,
            salt=self.salt,
            time_cost=time_cost,
            memory_cost=mem_cost * 1024,
            parallelism=parallelism,
            hash_len=24,
            type=ArgonType.ID,
            version=version,
        )
        # keep only the raw hash part of the phc-string, base64url-style
        ret = bret.split(b"$")[-1].decode("utf-8")
        return "+" + ret.replace("/", "_").replace("+", "-")

    def stdin(self) -> None:
        """hash passwords from stdin, one per line, until an empty line"""
        while True:
            ln = sys.stdin.readline().strip()
            if not ln:
                break

            print(self.hash(ln))

    def cli(self) -> None:
        """interactively prompt for passwords and print their hashes"""
        import getpass

        while True:
            p1 = getpass.getpass("password> ")
            p2 = getpass.getpass("again or just hit ENTER> ")
            if p2 and p1 != p2:
                print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
                continue

            print(self.hash(p1))
            print()

View File

View File

@@ -261,7 +261,7 @@ class SMB(object):
yeet("blocked delete (no-del-acc): " + vpath) yeet("blocked delete (no-del-acc): " + vpath)
vpath = vpath.replace("\\", "/").lstrip("/") vpath = vpath.replace("\\", "/").lstrip("/")
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], []) self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
def _utime(self, vpath: str, times: tuple[float, float]) -> None: def _utime(self, vpath: str, times: tuple[float, float]) -> None:
if not self.args.smbw: if not self.args.smbw:

View File

@@ -1,6 +1,7 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import errno
import re import re
import select import select
import socket import socket
@@ -129,6 +130,17 @@ class SSDPd(MCast):
srv.hport = hp srv.hport = hp
self.log("listening") self.log("listening")
try:
self.run2()
except OSError as ex:
if ex.errno != errno.EBADF:
raise
self.log("stopping due to {}".format(ex), "90")
self.log("stopped", 2)
def run2(self) -> None:
while self.running: while self.running:
rdy = select.select(self.srv, [], [], self.args.z_chk or 180) rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
rx: list[socket.socket] = rdy[0] # type: ignore rx: list[socket.socket] = rdy[0] # type: ignore
@@ -148,8 +160,6 @@ class SSDPd(MCast):
) )
self.log(t, 6) self.log(t, 6)
self.log("stopped", 2)
def stop(self) -> None: def stop(self) -> None:
self.running = False self.running = False
for srv in self.srv.values(): for srv in self.srv.values():

View File

@@ -28,8 +28,9 @@ if True: # pylint: disable=using-constant-test
import typing import typing
from typing import Any, Optional, Union from typing import Any, Optional, Union
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .cert import ensure_cert
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
@@ -80,6 +81,7 @@ class SvcHub(object):
self.dargs = dargs self.dargs = dargs
self.argv = argv self.argv = argv
self.E: EnvParams = args.E self.E: EnvParams = args.E
self.no_ansi = args.no_ansi
self.logf: Optional[typing.TextIO] = None self.logf: Optional[typing.TextIO] = None
self.logf_base_fn = "" self.logf_base_fn = ""
self.stop_req = False self.stop_req = False
@@ -238,7 +240,8 @@ class SvcHub(object):
if args.ftp or args.ftps: if args.ftp or args.ftps:
from .ftpd import Ftpd from .ftpd import Ftpd
self.ftpd = Ftpd(self) self.ftpd: Optional[Ftpd] = None
Daemon(self.start_ftpd, "start_ftpd")
zms += "f" if args.ftp else "F" zms += "f" if args.ftp else "F"
if args.smb: if args.smb:
@@ -268,6 +271,28 @@ class SvcHub(object):
self.broker = Broker(self) self.broker = Broker(self)
def start_ftpd(self) -> None:
time.sleep(30)
if self.ftpd:
return
self.restart_ftpd()
def restart_ftpd(self) -> None:
if not hasattr(self, "ftpd"):
return
from .ftpd import Ftpd
if self.ftpd:
return # todo
if not os.path.exists(self.args.cert):
ensure_cert(self.log, self.args)
self.ftpd = Ftpd(self)
self.log("root", "started FTPd")
def thr_httpsrv_up(self) -> None: def thr_httpsrv_up(self) -> None:
time.sleep(1 if self.args.ign_ebind_all else 5) time.sleep(1 if self.args.ign_ebind_all else 5)
expected = self.broker.num_workers * self.tcpsrv.nsrv expected = self.broker.num_workers * self.tcpsrv.nsrv
@@ -647,8 +672,14 @@ class SvcHub(object):
return return
with self.log_mutex: with self.log_mutex:
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3] zd = datetime.utcnow()
self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg)) ts = "%04d-%04d-%06d.%03d" % (
zd.year,
zd.month * 100 + zd.day,
(zd.hour * 100 + zd.minute) * 100 + zd.second,
zd.microsecond // 1000,
)
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
now = time.time() now = time.time()
if now >= self.next_day: if now >= self.next_day:
@@ -675,26 +706,36 @@ class SvcHub(object):
now = time.time() now = time.time()
if now >= self.next_day: if now >= self.next_day:
dt = datetime.utcfromtimestamp(now) dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="") zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
zs = zs.format(dt.strftime("%Y-%m-%d"))
print(zs, end="")
self._set_next_day() self._set_next_day()
if self.logf:
self.logf.write(zs)
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n" fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
if not VT100: if self.no_ansi:
fmt = "{} {:21} {}\n" fmt = "%s %-21s %s\n"
if "\033" in msg: if "\033" in msg:
msg = ansi_re.sub("", msg) msg = ansi_re.sub("", msg)
if "\033" in src: if "\033" in src:
src = ansi_re.sub("", src) src = ansi_re.sub("", src)
elif c: elif c:
if isinstance(c, int): if isinstance(c, int):
msg = "\033[3{}m{}\033[0m".format(c, msg) msg = "\033[3%sm%s\033[0m" % (c, msg)
elif "\033" not in c: elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg) msg = "\033[%sm%s\033[0m" % (c, msg)
else: else:
msg = "{}{}\033[0m".format(c, msg) msg = "%s%s\033[0m" % (c, msg)
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3] zd = datetime.utcfromtimestamp(now)
msg = fmt.format(ts, src, msg) ts = "%02d:%02d:%02d.%03d" % (
zd.hour,
zd.minute,
zd.second,
zd.microsecond // 1000,
)
msg = fmt % (ts, src, msg)
try: try:
print(msg, end="") print(msg, end="")
except UnicodeEncodeError: except UnicodeEncodeError:

View File

@@ -7,7 +7,8 @@ import socket
import sys import sys
import time import time
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
from .cert import gencert
from .stolen.qrcodegen import QrCode from .stolen.qrcodegen import QrCode
from .util import ( from .util import (
E_ACCESS, E_ACCESS,
@@ -295,6 +296,8 @@ class TcpSrv(object):
def _distribute_netdevs(self): def _distribute_netdevs(self):
self.hub.broker.say("set_netdevs", self.netdevs) self.hub.broker.say("set_netdevs", self.netdevs)
self.hub.start_zeroconf() self.hub.start_zeroconf()
gencert(self.log, self.args, self.netdevs)
self.hub.restart_ftpd()
def shutdown(self) -> None: def shutdown(self) -> None:
self.stopping = True self.stopping = True
@@ -501,7 +504,7 @@ class TcpSrv(object):
zoom = 1 zoom = 1
qr = qrc.render(zoom, pad) qr = qrc.render(zoom, pad)
if not VT100: if self.args.no_ansi:
return "{}\n{}".format(txt, qr) return "{}\n{}".format(txt, qr)
halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m" halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m"

View File

@@ -274,6 +274,10 @@ class ThumbSrv(object):
tdir, tfn = os.path.split(tpath) tdir, tfn = os.path.split(tpath)
ttpath = os.path.join(tdir, "w", tfn) ttpath = os.path.join(tdir, "w", tfn)
try:
bos.unlink(ttpath)
except:
pass
for fun in funs: for fun in funs:
try: try:
@@ -570,11 +574,15 @@ class ThumbSrv(object):
want_caf = tpath.endswith(".caf") want_caf = tpath.endswith(".caf")
tmp_opus = tpath tmp_opus = tpath
if want_caf: if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus" tmp_opus = tpath + ".opus"
try:
bos.unlink(tmp_opus)
except:
pass
caf_src = abspath if src_opus else tmp_opus caf_src = abspath if src_opus else tmp_opus
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)): if not want_caf or not src_opus:
# fmt: off # fmt: off
cmd = [ cmd = [
b"ffmpeg", b"ffmpeg",
@@ -633,6 +641,12 @@ class ThumbSrv(object):
# fmt: on # fmt: on
self._run_ff(cmd) self._run_ff(cmd)
if tmp_opus != tpath:
try:
bos.unlink(tmp_opus)
except:
pass
def poke(self, tdir: str) -> None: def poke(self, tdir: str) -> None:
if not self.poke_cd.poke(tdir): if not self.poke_cd.poke(tdir):
return return

View File

@@ -69,7 +69,7 @@ class U2idx(object):
fsize = body["size"] fsize = body["size"]
fhash = body["hash"] fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash) wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?" uq = "substr(w,1,16) = ? and w = ?"
uv: list[Union[str, int]] = [wark[:16], wark] uv: list[Union[str, int]] = [wark[:16], wark]

View File

@@ -41,6 +41,7 @@ from .util import (
gen_filekey, gen_filekey,
gen_filekey_dbg, gen_filekey_dbg,
hidedir, hidedir,
humansize,
min_ex, min_ex,
quotep, quotep,
rand_name, rand_name,
@@ -56,6 +57,7 @@ from .util import (
sfsenc, sfsenc,
spack, spack,
statdir, statdir,
unhumanize,
vjoin, vjoin,
vsplit, vsplit,
w8b64dec, w8b64dec,
@@ -73,8 +75,8 @@ if True: # pylint: disable=using-constant-test
if TYPE_CHECKING: if TYPE_CHECKING:
from .svchub import SvcHub from .svchub import SvcHub
zs = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp" zsg = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
CV_EXTS = set(zs.split(",")) CV_EXTS = set(zsg.split(","))
class Dbw(object): class Dbw(object):
@@ -110,7 +112,7 @@ class Up2k(object):
self.args = hub.args self.args = hub.args
self.log_func = hub.log self.log_func = hub.log
self.salt = self.args.salt self.salt = self.args.warksalt
self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$") self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")
self.gid = 0 self.gid = 0
@@ -125,6 +127,8 @@ class Up2k(object):
self.registry: dict[str, dict[str, dict[str, Any]]] = {} self.registry: dict[str, dict[str, dict[str, Any]]] = {}
self.flags: dict[str, dict[str, Any]] = {} self.flags: dict[str, dict[str, Any]] = {}
self.droppable: dict[str, list[str]] = {} self.droppable: dict[str, list[str]] = {}
self.volnfiles: dict["sqlite3.Cursor", int] = {}
self.volsize: dict["sqlite3.Cursor", int] = {}
self.volstate: dict[str, str] = {} self.volstate: dict[str, str] = {}
self.vol_act: dict[str, float] = {} self.vol_act: dict[str, float] = {}
self.busy_aps: set[str] = set() self.busy_aps: set[str] = set()
@@ -261,6 +265,20 @@ class Up2k(object):
} }
return json.dumps(ret, indent=4) return json.dumps(ret, indent=4)
def get_volsize(self, ptop: str) -> tuple[int, int]:
with self.mutex:
return self._get_volsize(ptop)
def _get_volsize(self, ptop: str) -> tuple[int, int]:
cur = self.cur[ptop]
nbytes = self.volsize[cur]
nfiles = self.volnfiles[cur]
for j in list(self.registry.get(ptop, {}).values()):
nbytes += j["size"]
nfiles += 1
return (nbytes, nfiles)
def rescan( def rescan(
self, all_vols: dict[str, VFS], scan_vols: list[str], wait: bool, fscan: bool self, all_vols: dict[str, VFS], scan_vols: list[str], wait: bool, fscan: bool
) -> str: ) -> str:
@@ -380,11 +398,11 @@ class Up2k(object):
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn) rd, fn = s3dec(rd, fn)
fvp = "{}/{}".format(rd, fn).strip("/") fvp = ("%s/%s" % (rd, fn)).strip("/")
if vp: if vp:
fvp = "{}/{}".format(vp, fvp) fvp = "%s/%s" % (vp, fvp)
self._handle_rm(LEELOO_DALLAS, "", fvp, []) self._handle_rm(LEELOO_DALLAS, "", fvp, [], True)
nrm += 1 nrm += 1
if nrm: if nrm:
@@ -810,6 +828,8 @@ class Up2k(object):
try: try:
cur = self._open_db(db_path) cur = self._open_db(db_path)
self.cur[ptop] = cur self.cur[ptop] = cur
self.volsize[cur] = 0
self.volnfiles[cur] = 0
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb) # speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
dbd = flags["dbd"] dbd = flags["dbd"]
@@ -917,6 +937,24 @@ class Up2k(object):
db.c.connection.commit() db.c.connection.commit()
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
zs = "select count(sz), sum(sz) from up"
vn, vb = db.c.execute(zs).fetchone()
vb = vb or 0
vb += vn * 2048
self.volsize[db.c] = vb
self.volnfiles[db.c] = vn
vmaxb = unhumanize(vol.flags.get("vmaxb") or "0")
vmaxn = unhumanize(vol.flags.get("vmaxn") or "0")
t = "{} / {} ( {} / {} files) in {}".format(
humansize(vb, True),
humansize(vmaxb, True),
humansize(vn, True).rstrip("B"),
humansize(vmaxn, True).rstrip("B"),
vol.realpath,
)
self.log(t)
return True, bool(n_add or n_rm or do_vac) return True, bool(n_add or n_rm or do_vac)
def _build_dir( def _build_dir(
@@ -1092,7 +1130,7 @@ class Up2k(object):
top, rp, dts, lmod, dsz, sz top, rp, dts, lmod, dsz, sz
) )
self.log(t) self.log(t)
self.db_rm(db.c, rd, fn) self.db_rm(db.c, rd, fn, 0)
ret += 1 ret += 1
db.n += 1 db.n += 1
in_db = [] in_db = []
@@ -1175,7 +1213,7 @@ class Up2k(object):
rm_files = [x for x in hits if x not in seen_files] rm_files = [x for x in hits if x not in seen_files]
n_rm = len(rm_files) n_rm = len(rm_files)
for fn in rm_files: for fn in rm_files:
self.db_rm(db.c, rd, fn) self.db_rm(db.c, rd, fn, 0)
if n_rm: if n_rm:
self.log("forgot {} deleted files".format(n_rm)) self.log("forgot {} deleted files".format(n_rm))
@@ -2284,7 +2322,9 @@ class Up2k(object):
if lost: if lost:
c2 = None c2 = None
for cur, dp_dir, dp_fn in lost: for cur, dp_dir, dp_fn in lost:
self.db_rm(cur, dp_dir, dp_fn) t = "forgetting deleted file: /{}"
self.log(t.format(vjoin(vjoin(vfs.vpath, dp_dir), dp_fn)))
self.db_rm(cur, dp_dir, dp_fn, cj["size"])
if c2 and c2 != cur: if c2 and c2 != cur:
c2.connection.commit() c2.connection.commit()
@@ -2418,7 +2458,14 @@ class Up2k(object):
if vfs.lim: if vfs.lim:
ap2, cj["prel"] = vfs.lim.all( ap2, cj["prel"] = vfs.lim.all(
cj["addr"], cj["prel"], cj["size"], ap1, reg cj["addr"],
cj["prel"],
cj["size"],
cj["ptop"],
ap1,
self.hub.broker,
reg,
"up2k._get_volsize",
) )
bos.makedirs(ap2) bos.makedirs(ap2)
vfs.lim.nup(cj["addr"]) vfs.lim.nup(cj["addr"])
@@ -2564,7 +2611,7 @@ class Up2k(object):
try: try:
if "hardlink" in flags: if "hardlink" in flags:
os.link(fsenc(src), fsenc(dst)) os.link(fsenc(absreal(src)), fsenc(dst))
linked = True linked = True
except Exception as ex: except Exception as ex:
self.log("cannot hardlink: " + repr(ex)) self.log("cannot hardlink: " + repr(ex))
@@ -2736,7 +2783,7 @@ class Up2k(object):
self._symlink(dst, d2, self.flags[ptop], lmod=lmod) self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
if cur: if cur:
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn, job["size"])
self.db_add(cur, vflags, rd, fn, lmod, *z2[3:]) self.db_add(cur, vflags, rd, fn, lmod, *z2[3:])
if cur: if cur:
@@ -2779,7 +2826,7 @@ class Up2k(object):
self.db_act = self.vol_act[ptop] = time.time() self.db_act = self.vol_act[ptop] = time.time()
try: try:
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn, sz)
self.db_add( self.db_add(
cur, cur,
vflags, vflags,
@@ -2809,13 +2856,17 @@ class Up2k(object):
return True return True
def db_rm(self, db: "sqlite3.Cursor", rd: str, fn: str) -> None: def db_rm(self, db: "sqlite3.Cursor", rd: str, fn: str, sz: int) -> None:
sql = "delete from up where rd = ? and fn = ?" sql = "delete from up where rd = ? and fn = ?"
try: try:
db.execute(sql, (rd, fn)) r = db.execute(sql, (rd, fn))
except: except:
assert self.mem_cur assert self.mem_cur
db.execute(sql, s3enc(self.mem_cur, rd, fn)) r = db.execute(sql, s3enc(self.mem_cur, rd, fn))
if r.rowcount:
self.volsize[db] -= sz
self.volnfiles[db] -= 1
def db_add( def db_add(
self, self,
@@ -2844,6 +2895,9 @@ class Up2k(object):
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0)) v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
db.execute(sql, v) db.execute(sql, v)
self.volsize[db] += sz
self.volnfiles[db] += 1
xau = False if skip_xau else vflags.get("xau") xau = False if skip_xau else vflags.get("xau")
dst = djoin(ptop, rd, fn) dst = djoin(ptop, rd, fn)
if xau and not runhook( if xau and not runhook(
@@ -2897,7 +2951,9 @@ class Up2k(object):
except: except:
pass pass
def handle_rm(self, uname: str, ip: str, vpaths: list[str], lim: list[int]) -> str: def handle_rm(
self, uname: str, ip: str, vpaths: list[str], lim: list[int], rm_up: bool
) -> str:
n_files = 0 n_files = 0
ok = {} ok = {}
ng = {} ng = {}
@@ -2906,7 +2962,7 @@ class Up2k(object):
self.log("hit delete limit of {} files".format(lim[1]), 3) self.log("hit delete limit of {} files".format(lim[1]), 3)
break break
a, b, c = self._handle_rm(uname, ip, vp, lim) a, b, c = self._handle_rm(uname, ip, vp, lim, rm_up)
n_files += a n_files += a
for k in b: for k in b:
ok[k] = 1 ok[k] = 1
@@ -2920,7 +2976,7 @@ class Up2k(object):
return "deleted {} files (and {}/{} folders)".format(n_files, iok, iok + ing) return "deleted {} files (and {}/{} folders)".format(n_files, iok, iok + ing)
def _handle_rm( def _handle_rm(
self, uname: str, ip: str, vpath: str, lim: list[int] self, uname: str, ip: str, vpath: str, lim: list[int], rm_up: bool
) -> tuple[int, list[str], list[str]]: ) -> tuple[int, list[str], list[str]]:
self.db_act = time.time() self.db_act = time.time()
try: try:
@@ -2989,12 +3045,12 @@ class Up2k(object):
break break
abspath = djoin(adir, fn) abspath = djoin(adir, fn)
st = bos.stat(abspath)
volpath = "{}/{}".format(vrem, fn).strip("/") volpath = "{}/{}".format(vrem, fn).strip("/")
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/") vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
self.log("rm {}\n {}".format(vpath, abspath)) self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(volpath, uname, *permsets[0]) _ = dbv.get(volpath, uname, *permsets[0])
if xbd: if xbd:
st = bos.stat(abspath)
if not runhook( if not runhook(
self.log, self.log,
xbd, xbd,
@@ -3018,25 +3074,43 @@ class Up2k(object):
try: try:
ptop = dbv.realpath ptop = dbv.realpath
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath) cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
self._forget_file(ptop, volpath, cur, wark, True) self._forget_file(ptop, volpath, cur, wark, True, st.st_size)
finally: finally:
if cur: if cur:
cur.connection.commit() cur.connection.commit()
bos.unlink(abspath) bos.unlink(abspath)
if xad: if xad:
runhook(self.log, xad, abspath, vpath, "", uname, 0, 0, ip, 0, "") runhook(
self.log,
xad,
abspath,
vpath,
"",
uname,
st.st_mtime,
st.st_size,
ip,
0,
"",
)
ok: list[str] = []
ng: list[str] = []
if is_dir: if is_dir:
ok, ng = rmdirs(self.log_func, scandir, True, atop, 1) ok, ng = rmdirs(self.log_func, scandir, True, atop, 1)
else:
ok = ng = []
ok2, ng2 = rmdirs_up(os.path.dirname(atop), ptop) if rm_up:
ok2, ng2 = rmdirs_up(os.path.dirname(atop), ptop)
else:
ok2 = ng2 = []
return n_files, ok + ok2, ng + ng2 return n_files, ok + ok2, ng + ng2
def handle_mv(self, uname: str, svp: str, dvp: str) -> str: def handle_mv(self, uname: str, svp: str, dvp: str) -> str:
if svp == dvp or dvp.startswith(svp + "/"):
raise Pebkac(400, "mv: cannot move parent into subfolder")
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True) svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem) svn, srem = svn.get_dbv(srem)
sabs = svn.canonical(srem, False) sabs = svn.canonical(srem, False)
@@ -3090,8 +3164,21 @@ class Up2k(object):
curs.clear() curs.clear()
rmdirs(self.log_func, scandir, True, sabs, 1) rm_ok, rm_ng = rmdirs(self.log_func, scandir, True, sabs, 1)
rmdirs_up(os.path.dirname(sabs), svn.realpath)
for zsl in (rm_ok, rm_ng):
for ap in reversed(zsl):
if not ap.startswith(sabs):
raise Pebkac(500, "mv_d: bug at {}, top {}".format(ap, sabs))
rem = ap[len(sabs) :].replace(os.sep, "/").lstrip("/")
vp = vjoin(dvp, rem)
try:
dvn, drem = self.asrv.vfs.get(vp, uname, False, True)
bos.mkdir(dvn.canonical(drem))
except:
pass
return "k" return "k"
def _mv_file( def _mv_file(
@@ -3182,7 +3269,7 @@ class Up2k(object):
if c2 and c2 != c1: if c2 and c2 != c1:
self._copy_tags(c1, c2, w) self._copy_tags(c1, c2, w)
self._forget_file(svn.realpath, srem, c1, w, c1 != c2) self._forget_file(svn.realpath, srem, c1, w, c1 != c2, fsize)
self._relink(w, svn.realpath, srem, dabs) self._relink(w, svn.realpath, srem, dabs)
curs.add(c1) curs.add(c1)
@@ -3258,6 +3345,7 @@ class Up2k(object):
cur: Optional["sqlite3.Cursor"], cur: Optional["sqlite3.Cursor"],
wark: Optional[str], wark: Optional[str],
drop_tags: bool, drop_tags: bool,
sz: int,
) -> None: ) -> None:
"""forgets file in db, fixes symlinks, does not delete""" """forgets file in db, fixes symlinks, does not delete"""
srd, sfn = vsplit(vrem) srd, sfn = vsplit(vrem)
@@ -3272,7 +3360,7 @@ class Up2k(object):
q = "delete from mt where w=?" q = "delete from mt where w=?"
cur.execute(q, (wark[:16],)) cur.execute(q, (wark[:16],))
self.db_rm(cur, srd, sfn) self.db_rm(cur, srd, sfn, sz)
reg = self.registry.get(ptop) reg = self.registry.get(ptop)
if reg: if reg:

View File

@@ -296,11 +296,11 @@ REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
pybin = sys.executable or "" pybin = sys.executable or ""
if EXE: if EXE:
pybin = "" pybin = ""
for p in "python3 python".split(): for zsg in "python3 python".split():
try: try:
p = shutil.which(p) zsg = shutil.which(zsg)
if p: if zsg:
pybin = p pybin = zsg
break break
except: except:
pass pass
@@ -537,7 +537,7 @@ class _Unrecv(object):
self.log = log self.log = log
self.buf: bytes = b"" self.buf: bytes = b""
def recv(self, nbytes: int) -> bytes: def recv(self, nbytes: int, spins: int = 1) -> bytes:
if self.buf: if self.buf:
ret = self.buf[:nbytes] ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:] self.buf = self.buf[nbytes:]
@@ -548,6 +548,10 @@ class _Unrecv(object):
ret = self.s.recv(nbytes) ret = self.s.recv(nbytes)
break break
except socket.timeout: except socket.timeout:
spins -= 1
if spins <= 0:
ret = b""
break
continue continue
except: except:
ret = b"" ret = b""
@@ -590,7 +594,7 @@ class _LUnrecv(object):
self.log = log self.log = log
self.buf = b"" self.buf = b""
def recv(self, nbytes: int) -> bytes: def recv(self, nbytes: int, spins: int) -> bytes:
if self.buf: if self.buf:
ret = self.buf[:nbytes] ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:] self.buf = self.buf[nbytes:]
@@ -609,7 +613,7 @@ class _LUnrecv(object):
def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes: def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
"""read an exact number of bytes""" """read an exact number of bytes"""
try: try:
ret = self.recv(nbytes) ret = self.recv(nbytes, 1)
err = False err = False
except: except:
ret = b"" ret = b""
@@ -617,7 +621,7 @@ class _LUnrecv(object):
while not err and len(ret) < nbytes: while not err and len(ret) < nbytes:
try: try:
ret += self.recv(nbytes - len(ret)) ret += self.recv(nbytes - len(ret), 1)
except OSError: except OSError:
err = True err = True
@@ -1292,7 +1296,7 @@ class MultipartParser(object):
rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
(only the fallback non-js uploader relies on these filenames) (only the fallback non-js uploader relies on these filenames)
""" """
for ln in read_header(self.sr): for ln in read_header(self.sr, 2, 2592000):
self.log(ln) self.log(ln)
m = self.re_ctype.match(ln) m = self.re_ctype.match(ln)
@@ -1492,15 +1496,15 @@ def get_boundary(headers: dict[str, str]) -> str:
return m.group(2) return m.group(2)
def read_header(sr: Unrecv) -> list[str]: def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
t0 = time.time() t0 = time.time()
ret = b"" ret = b""
while True: while True:
if time.time() - t0 > 120: if time.time() - t0 >= t_tot:
return [] return []
try: try:
ret += sr.recv(1024) ret += sr.recv(1024, t_idle // 2)
except: except:
if not ret: if not ret:
return [] return []
@@ -1549,7 +1553,7 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str: def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
return base64.urlsafe_b64encode( return base64.urlsafe_b64encode(
hashlib.sha512( hashlib.sha512(
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace") ("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
).digest() ).digest()
).decode("ascii") ).decode("ascii")
@@ -1589,7 +1593,7 @@ def gen_filekey_dbg(
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str: def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
v = v.replace(";", "") v = v.replace("%", "%25").replace(";", "%3B")
if dur: if dur:
exp = formatdate(time.time() + dur, usegmt=True) exp = formatdate(time.time() + dur, usegmt=True)
else: else:
@@ -1622,7 +1626,12 @@ def unhumanize(sz: str) -> int:
pass pass
mc = sz[-1:].lower() mc = sz[-1:].lower()
mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1) mi = {
"k": 1024,
"m": 1024 * 1024,
"g": 1024 * 1024 * 1024,
"t": 1024 * 1024 * 1024 * 1024,
}.get(mc, 1)
return int(float(sz[:-1]) * mi) return int(float(sz[:-1]) * mi)
@@ -1658,7 +1667,7 @@ def uncyg(path: str) -> str:
if len(path) > 2 and path[2] != "/": if len(path) > 2 and path[2] != "/":
return path return path
return "{}:\\{}".format(path[1], path[3:]) return "%s:\\%s" % (path[1], path[3:])
def undot(path: str) -> str: def undot(path: str) -> str:
@@ -1701,7 +1710,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
bad = ["con", "prn", "aux", "nul"] bad = ["con", "prn", "aux", "nul"]
for n in range(1, 10): for n in range(1, 10):
bad += "com{0} lpt{0}".format(n).split(" ") bad += ("com%s lpt%s" % (n, n)).split(" ")
if fn.lower().split(".")[0] in bad: if fn.lower().split(".")[0] in bad:
fn = "_" + fn fn = "_" + fn
@@ -2015,6 +2024,8 @@ def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> Non
sck.shutdown(socket.SHUT_RDWR) sck.shutdown(socket.SHUT_RDWR)
except: except:
pass pass
except Exception as ex:
log("shut({}): {}".format(fd, ex), "90")
finally: finally:
td = time.time() - t0 td = time.time() - t0
if td >= 1: if td >= 1:
@@ -2266,7 +2277,7 @@ def rmdirs(
dirs = [os.path.join(top, x) for x in dirs] dirs = [os.path.join(top, x) for x in dirs]
ok = [] ok = []
ng = [] ng = []
for d in dirs[::-1]: for d in reversed(dirs):
a, b = rmdirs(logger, scandir, lstat, d, depth + 1) a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
ok += a ok += a
ng += b ng += b
@@ -2316,7 +2327,7 @@ def unescape_cookie(orig: str) -> str:
ret += chr(int(esc[1:], 16)) ret += chr(int(esc[1:], 16))
except: except:
ret += esc ret += esc
esc = "" esc = ""
else: else:
ret += ch ret += ch

View File

@@ -6,7 +6,7 @@ pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz)) un: $(addsuffix .un, $(wildcard *.gz))
%.gz: % %.gz: %
pigz -11 -J 34 -I 5730 $< pigz -11 -J 34 -I 573 $<
%.un: % %.un: %
pigz -d $< pigz -d $<

1
copyparty/web/a/u2c.py Symbolic link
View File

@@ -0,0 +1 @@
../../../bin/u2c.py

View File

@@ -1 +0,0 @@
../../../bin/up2k.py

View File

@@ -1159,10 +1159,10 @@ html.y #widget.open {
background: #fff; background: #fff;
background: var(--bg-u3); background: var(--bg-u3);
} }
#wfm, #wzip, #wnp { #wfs, #wfm, #wzip, #wnp {
display: none; display: none;
} }
#wzip, #wnp { #wfs, #wzip, #wnp {
margin-right: .2em; margin-right: .2em;
padding-right: .2em; padding-right: .2em;
border: 1px solid var(--bg-u5); border: 1px solid var(--bg-u5);
@@ -1174,6 +1174,7 @@ html.y #widget.open {
padding-left: .2em; padding-left: .2em;
border-left-width: .1em; border-left-width: .1em;
} }
#wfs.act,
#wfm.act { #wfm.act {
display: inline-block; display: inline-block;
} }
@@ -1197,6 +1198,13 @@ html.y #widget.open {
position: relative; position: relative;
display: inline-block; display: inline-block;
} }
#wfs {
font-size: .36em;
text-align: right;
line-height: 1.3em;
padding: 0 .3em 0 0;
border-width: 0 .25em 0 0;
}
#wfm span, #wfm span,
#wnp span { #wnp span {
font-size: .6em; font-size: .6em;
@@ -1743,6 +1751,7 @@ html.y #tree.nowrap .ntree a+a:hover {
display: none; display: none;
} }
.ghead { .ghead {
background: var(--bg-u2);
border-radius: .3em; border-radius: .3em;
padding: .2em .5em; padding: .2em .5em;
line-height: 2.3em; line-height: 2.3em;
@@ -2939,6 +2948,7 @@ html.b #treepar {
html.b #wrap { html.b #wrap {
margin-top: 2em; margin-top: 2em;
} }
html.by .ghead,
html.bz .ghead { html.bz .ghead {
background: var(--bg); background: var(--bg);
padding: .2em 0; padding: .2em 0;

View File

@@ -138,6 +138,7 @@
TS = "{{ ts }}", TS = "{{ ts }}",
acct = "{{ acct }}", acct = "{{ acct }}",
perms = {{ perms }}, perms = {{ perms }},
dgrid = {{ dgrid|tojson }},
themes = {{ themes }}, themes = {{ themes }},
dtheme = "{{ dtheme }}", dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}", srvinf = "{{ srv_info }}",

View File

@@ -261,6 +261,7 @@ var Ls = {
"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out", "mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
"mm_e5xx": "Could not play audio; server error ", "mm_e5xx": "Could not play audio; server error ",
"mm_nof": "not finding any more audio files nearby", "mm_nof": "not finding any more audio files nearby",
"mm_pwrsv": "<p>it looks like playback is being interrupted by your phone's power-saving settings!</p>" + '<p>please go to <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">the app settings of your browser</a> and then <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">allow unrestricted battery usage</a> to fix it.</p><p>(probably a good idea to use a separate browser dedicated for just music streaming...)</p>',
"mm_hnf": "that song no longer exists", "mm_hnf": "that song no longer exists",
"im_hnf": "that image no longer exists", "im_hnf": "that image no longer exists",
@@ -721,6 +722,7 @@ var Ls = {
"mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.", "mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.",
"mm_e5xx": "Avspilling feilet: ", "mm_e5xx": "Avspilling feilet: ",
"mm_nof": "finner ikke flere sanger i nærheten", "mm_nof": "finner ikke flere sanger i nærheten",
"mm_pwrsv": "<p>det ser ut som musikken ble avbrutt av telefonen sine strømsparings-innstillinger!</p>" + '<p>ta en tur innom <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">app-innstillingene til nettleseren din</a> og så <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">tillat ubegrenset batteriforbruk</a></p><p>(sikkert smart å ha en egen nettleser kun for musikkspilling...)</p>',
"mm_hnf": "sangen finnes ikke lenger", "mm_hnf": "sangen finnes ikke lenger",
"im_hnf": "bildet finnes ikke lenger", "im_hnf": "bildet finnes ikke lenger",
@@ -952,6 +954,7 @@ ebi('ops').innerHTML = (
// media player // media player
ebi('widget').innerHTML = ( ebi('widget').innerHTML = (
'<div id="wtoggle">' + '<div id="wtoggle">' +
'<span id="wfs"></span>' +
'<span id="wfm"><a' + '<span id="wfm"><a' +
' href="#" id="fren" tt="' + L.wt_ren + '">✎<span>name</span></a><a' + ' href="#" id="fren" tt="' + L.wt_ren + '">✎<span>name</span></a><a' +
' href="#" id="fdel" tt="' + L.wt_del + '">⌫<span>del.</span></a><a' + ' href="#" id="fdel" tt="' + L.wt_del + '">⌫<span>del.</span></a><a' +
@@ -1482,7 +1485,8 @@ var mpl = (function () {
ebi('np_title').textContent = np.title || ''; ebi('np_title').textContent = np.title || '';
ebi('np_dur').textContent = np['.dur'] || ''; ebi('np_dur').textContent = np['.dur'] || '';
ebi('np_url').textContent = get_vpath() + np.file.split('?')[0]; ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
ebi('np_img').setAttribute('src', cover); // dont give last.fm the pwd if (!MOBILE)
ebi('np_img').setAttribute('src', cover || ''); // dont give last.fm the pwd
navigator.mediaSession.metadata = new MediaMetadata(tags); navigator.mediaSession.metadata = new MediaMetadata(tags);
navigator.mediaSession.setActionHandler('play', mplay); navigator.mediaSession.setActionHandler('play', mplay);
@@ -1499,6 +1503,7 @@ var mpl = (function () {
if (!r.os_ctl) if (!r.os_ctl)
return; return;
// dead code; left for debug
navigator.mediaSession.metadata = null; navigator.mediaSession.metadata = null;
navigator.mediaSession.playbackState = "paused"; navigator.mediaSession.playbackState = "paused";
@@ -1538,12 +1543,14 @@ var re_au_native = can_ogg ? /\.(aac|flac|m4a|mp3|ogg|opus|wav)$/i :
// extract songs + add play column // extract songs + add play column
var mpo = { "au": null, "au2": null, "acs": null };
var t_fchg = 0;
function MPlayer() { function MPlayer() {
var r = this; var r = this;
r.id = Date.now(); r.id = Date.now();
r.au = null; r.au = mpo.au;
r.au = null; r.au2 = mpo.au2;
r.au2 = null; r.acs = mpo.acs;
r.tracks = {}; r.tracks = {};
r.order = []; r.order = [];
r.cd_pause = 0; r.cd_pause = 0;
@@ -1833,6 +1840,7 @@ var pbar = (function () {
html_txt = 'a', html_txt = 'a',
lastmove = 0, lastmove = 0,
mousepos = 0, mousepos = 0,
t_redraw = 0,
gradh = -1, gradh = -1,
grad; grad;
@@ -1901,6 +1909,9 @@ var pbar = (function () {
bctx = bc.ctx, bctx = bc.ctx,
apos, adur; apos, adur;
if (!widget.is_open)
return;
bctx.clearRect(0, 0, bc.w, bc.h); bctx.clearRect(0, 0, bc.w, bc.h);
if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos) if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos)
@@ -1963,6 +1974,10 @@ var pbar = (function () {
w = 8, w = 8,
apos, adur; apos, adur;
if (t_redraw) {
clearTimeout(t_redraw);
t_redraw = 0;
}
pctx.clearRect(0, 0, pc.w, pc.h); pctx.clearRect(0, 0, pc.w, pc.h);
if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos) if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos)
@@ -1977,17 +1992,30 @@ var pbar = (function () {
} }
var sm = bc.w * 1.0 / adur, var sm = bc.w * 1.0 / adur,
t1 = s2ms(adur),
t2 = s2ms(apos),
x = sm * apos; x = sm * apos;
if (w && html_txt != t2) {
ebi('np_pos').textContent = html_txt = t2;
if (mpl.os_ctl)
navigator.mediaSession.setPositionState({
'duration': adur,
'position': apos,
'playbackRate': 1
});
}
if (!widget.is_open)
return;
pctx.fillStyle = '#573'; pctx.fillRect((x - w / 2) - 1, 0, w + 2, pc.h); pctx.fillStyle = '#573'; pctx.fillRect((x - w / 2) - 1, 0, w + 2, pc.h);
pctx.fillStyle = '#dfc'; pctx.fillRect((x - w / 2), 0, w, pc.h); pctx.fillStyle = '#dfc'; pctx.fillRect((x - w / 2), 0, w, pc.h);
pctx.lineWidth = 2.5; pctx.lineWidth = 2.5;
pctx.fillStyle = '#fff'; pctx.fillStyle = '#fff';
var t1 = s2ms(adur), var m1 = pctx.measureText(t1),
t2 = s2ms(apos),
m1 = pctx.measureText(t1),
m1b = pctx.measureText(t1 + ":88"), m1b = pctx.measureText(t1 + ":88"),
m2 = pctx.measureText(t2), m2 = pctx.measureText(t2),
yt = pc.h / 3 * 2.1, yt = pc.h / 3 * 2.1,
@@ -2001,15 +2029,8 @@ var pbar = (function () {
pctx.fillText(t1, xt1, yt); pctx.fillText(t1, xt1, yt);
pctx.fillText(t2, xt2, yt); pctx.fillText(t2, xt2, yt);
if (w && html_txt != t2) { if (sm > 10)
ebi('np_pos').textContent = html_txt = t2; t_redraw = setTimeout(r.drawpos, sm > 50 ? 20 : 50);
if (mpl.os_ctl)
navigator.mediaSession.setPositionState({
'duration': adur,
'position': apos,
'playbackRate': 1
});
}
}; };
window.addEventListener('resize', r.onresize); window.addEventListener('resize', r.onresize);
@@ -2159,17 +2180,31 @@ function song_skip(n) {
else else
play(mp.order[n == -1 ? mp.order.length - 1 : 0]); play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
} }
function next_song_sig(e) {
t_fchg = document.hasFocus() ? 0 : Date.now();
return next_song_cmn(e);
}
function next_song(e) { function next_song(e) {
t_fchg = 0;
return next_song_cmn(e);
}
function next_song_cmn(e) {
ev(e); ev(e);
if (mp.order.length) { if (mp.order.length) {
mpl.traversals = 0; mpl.traversals = 0;
return song_skip(1); return song_skip(1);
} }
if (mpl.traversals++ < 5) { if (mpl.traversals++ < 5) {
treectl.ls_cb = next_song; if (MOBILE && t_fchg && Date.now() - t_fchg > 30 * 1000)
modal.alert(L.mm_pwrsv);
t_fchg = document.hasFocus() ? 0 : Date.now();
treectl.ls_cb = next_song_cmn;
return tree_neigh(1); return tree_neigh(1);
} }
toast.inf(10, L.mm_nof); toast.inf(10, L.mm_nof);
mpl.traversals = 0;
t_fchg = 0;
} }
function prev_song(e) { function prev_song(e) {
ev(e); ev(e);
@@ -2285,10 +2320,16 @@ var mpui = (function () {
return; return;
} }
var paint = !MOBILE || document.hasFocus();
var pos = mp.au.currentTime;
if (!isNum(pos))
pos = 0;
// indicate playback state in ui // indicate playback state in ui
widget.paused(mp.au.paused); widget.paused(mp.au.paused);
if (++nth > 69) { if (paint && ++nth > 69) {
// android-chrome breaks aspect ratio with unannounced viewport changes // android-chrome breaks aspect ratio with unannounced viewport changes
nth = 0; nth = 0;
if (MOBILE) { if (MOBILE) {
@@ -2297,20 +2338,28 @@ var mpui = (function () {
vbar.onresize(); vbar.onresize();
} }
} }
else { else if (paint) {
// draw current position in song // draw current position in song
if (!mp.au.paused) if (!mp.au.paused)
pbar.drawpos(); pbar.drawpos();
// occasionally draw buffered regions // occasionally draw buffered regions
if (++nth % 5 == 0) if (nth % 5 == 0)
pbar.drawbuf(); pbar.drawbuf();
} }
if (pos > 0.3 && t_fchg) {
// cannot check document.hasFocus to avoid false positives;
// it continues on power-on, doesn't need to be in-browser
if (MOBILE && Date.now() - t_fchg > 30 * 1000)
modal.alert(L.mm_pwrsv);
t_fchg = 0;
}
// preload next song // preload next song
if (mpl.preload && preloaded != mp.au.rsrc) { if (mpl.preload && preloaded != mp.au.rsrc) {
var pos = mp.au.currentTime, var len = mp.au.duration,
len = mp.au.duration,
rem = pos > 1 ? len - pos : 999, rem = pos > 1 ? len - pos : 999,
full = null; full = null;
@@ -2483,7 +2532,7 @@ var afilt = (function () {
if (mp.acs) if (mp.acs)
mp.acs.disconnect(); mp.acs.disconnect();
mp.acs = null; mp.acs = mpo.acs = null;
}; };
r.apply = function () { r.apply = function () {
@@ -2703,6 +2752,7 @@ function play(tid, is_ev, seek) {
tn = 0; tn = 0;
} }
else if (mpl.pb_mode == 'next') { else if (mpl.pb_mode == 'next') {
t_fchg = document.hasFocus() ? 0 : Date.now();
treectl.ls_cb = next_song; treectl.ls_cb = next_song;
return tree_neigh(1); return tree_neigh(1);
} }
@@ -2722,7 +2772,9 @@ function play(tid, is_ev, seek) {
if (mp.au) { if (mp.au) {
mp.au.pause(); mp.au.pause();
clmod(ebi('a' + mp.au.tid), 'act'); var el = ebi('a' + mp.au.tid);
if (el)
clmod(el, 'act');
} }
else { else {
mp.au = new Audio(); mp.au = new Audio();
@@ -2730,7 +2782,7 @@ function play(tid, is_ev, seek) {
mp.au.onerror = evau_error; mp.au.onerror = evau_error;
mp.au.onprogress = pbar.drawpos; mp.au.onprogress = pbar.drawpos;
mp.au.onplaying = mpui.progress_updater; mp.au.onplaying = mpui.progress_updater;
mp.au.onended = next_song; mp.au.onended = next_song_sig;
widget.open(); widget.open();
} }
@@ -2747,7 +2799,7 @@ function play(tid, is_ev, seek) {
mp.au.onerror = evau_error; mp.au.onerror = evau_error;
mp.au.onprogress = pbar.drawpos; mp.au.onprogress = pbar.drawpos;
mp.au.onplaying = mpui.progress_updater; mp.au.onplaying = mpui.progress_updater;
mp.au.onended = next_song; mp.au.onended = next_song_sig;
t = mp.au.currentTime; t = mp.au.currentTime;
if (isNum(t) && t > 0.1) if (isNum(t) && t > 0.1)
mp.au.currentTime = 0; mp.au.currentTime = 0;
@@ -2807,7 +2859,7 @@ function play(tid, is_ev, seek) {
toast.err(0, esc(L.mm_playerr + basenames(ex))); toast.err(0, esc(L.mm_playerr + basenames(ex)));
} }
clmod(ebi(oid), 'act'); clmod(ebi(oid), 'act');
setTimeout(next_song, 5000); setTimeout(next_song_sig, 5000);
} }
@@ -2895,7 +2947,7 @@ function autoplay_blocked(seek) {
modal.confirm('<h6>' + L.mm_hashplay + '</h6>\n«' + esc(fn) + '»', function () { modal.confirm('<h6>' + L.mm_hashplay + '</h6>\n«' + esc(fn) + '»', function () {
// chrome 91 may permanently taint on a failed play() // chrome 91 may permanently taint on a failed play()
// depending on win10 settings or something? idk // depending on win10 settings or something? idk
mp.au = null; mp.au = mpo.au = null;
play(tid, true, seek); play(tid, true, seek);
mp.fade_in(); mp.fade_in();
@@ -3028,6 +3080,8 @@ function eval_hash() {
// compact media player // compact media player
function setacmp() { function setacmp() {
clmod(ebi('widget'), 'cmp', props.mcmp); clmod(ebi('widget'), 'cmp', props.mcmp);
pbar.onresize();
vbar.onresize();
} }
bcfg_bind(props, 'mcmp', 'au_compact', false, setacmp); bcfg_bind(props, 'mcmp', 'au_compact', false, setacmp);
setacmp(); setacmp();
@@ -3234,7 +3288,9 @@ var fileman = (function () {
if (r.clip === null) if (r.clip === null)
r.clip = jread('fman_clip', []).slice(1); r.clip = jread('fman_clip', []).slice(1);
var nsel = msel.getsel().length; var sel = msel.getsel(),
nsel = sel.length;
clmod(bren, 'en', nsel); clmod(bren, 'en', nsel);
clmod(bdel, 'en', nsel); clmod(bdel, 'en', nsel);
clmod(bcut, 'en', nsel); clmod(bcut, 'en', nsel);
@@ -3246,9 +3302,51 @@ var fileman = (function () {
clmod(bpst, 'hide', !(have_mv && has(perms, 'write'))); clmod(bpst, 'hide', !(have_mv && has(perms, 'write')));
clmod(ebi('wfm'), 'act', QS('#wfm a.en:not(.hide)')); clmod(ebi('wfm'), 'act', QS('#wfm a.en:not(.hide)'));
var wfs = ebi('wfs'), h = '';
try {
wfs.innerHTML = h = r.fsi(sel);
}
catch (ex) { }
clmod(wfs, 'act', h);
bpst.setAttribute('tt', L.ft_paste.format(r.clip.length)); bpst.setAttribute('tt', L.ft_paste.format(r.clip.length));
}; };
r.fsi = function (sel) {
if (!sel.length)
return '';
var lf = treectl.lsc.files,
nf = 0,
sz = 0,
dur = 0,
ntab = new Set();
for (var a = 0; a < sel.length; a++)
ntab.add(sel[a].vp.split('/').pop());
for (var a = 0; a < lf.length; a++) {
if (!ntab.has(lf[a].href.split('?')[0]))
continue;
var f = lf[a];
nf++;
sz += f.sz;
if (f.tags && f.tags['.dur'])
dur += f.tags['.dur']
}
if (!nf)
return '';
var ret = '{0}<br />{1}<small>F</small>'.format(humansize(sz), nf);
if (dur)
ret += ' ' + s2ms(dur);
return ret;
};
r.rename = function (e) { r.rename = function (e) {
ev(e); ev(e);
if (clgot(bren, 'hide')) if (clgot(bren, 'hide'))
@@ -4430,7 +4528,7 @@ var thegrid = (function () {
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty); bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel); bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
bcfg_bind(r, 'en', 'griden', false, function (v) { bcfg_bind(r, 'en', 'griden', dgrid, function (v) {
v ? loadgrid() : r.setvis(true); v ? loadgrid() : r.setvis(true);
pbar.onresize(); pbar.onresize();
vbar.onresize(); vbar.onresize();
@@ -5630,6 +5728,12 @@ var treectl = (function () {
seen = {}; seen = {};
r.lsc = res; r.lsc = res;
if (res.unlist) {
var ptn = new RegExp(res.unlist);
for (var a = nodes.length - 1; a >= 0; a--)
if (ptn.exec(nodes[a].href.split('?')[0]))
nodes.splice(a, 1);
}
nodes = sortfiles(nodes); nodes = sortfiles(nodes);
window.removeEventListener('scroll', r.tscroll); window.removeEventListener('scroll', r.tscroll);
r.trunc = nodes.length > r.nvis && location.hash.length < 2; r.trunc = nodes.length > r.nvis && location.hash.length < 2;
@@ -6975,6 +7079,7 @@ function sandbox(tgt, rules, cls, html) {
'},1)</script></body></html>'; '},1)</script></body></html>';
var fr = mknod('iframe'); var fr = mknod('iframe');
fr.setAttribute('title', 'folder ' + tid + 'logue');
fr.setAttribute('sandbox', rules ? 'allow-' + rules.replace(/ /g, ' allow-') : ''); fr.setAttribute('sandbox', rules ? 'allow-' + rules.replace(/ /g, ' allow-') : '');
fr.setAttribute('srcdoc', html); fr.setAttribute('srcdoc', html);
tgt.innerHTML = ''; tgt.innerHTML = '';
@@ -7277,21 +7382,25 @@ ebi('files').onclick = ebi('docul').onclick = function (e) {
function reload_mp() { function reload_mp() {
if (mp && mp.au) { if (mp && mp.au) {
if (afilt) mpo.au = mp.au;
afilt.stop(); mpo.au2 = mp.au2;
mpo.acs = mp.acs;
mp.au.pause();
mp.au = null;
mpl.unbuffer(); mpl.unbuffer();
} }
mpl.stop();
var plays = QSA('tr>td:first-child>a.play'); var plays = QSA('tr>td:first-child>a.play');
for (var a = plays.length - 1; a >= 0; a--) for (var a = plays.length - 1; a >= 0; a--)
plays[a].parentNode.innerHTML = '-'; plays[a].parentNode.innerHTML = '-';
mp = new MPlayer(); mp = new MPlayer();
if (afilt) if (mp.au && mp.au.tid) {
afilt.acst = {}; var el = QS('a#a' + mp.au.tid);
if (el)
clmod(el, 'act', 1);
el = el && el.closest('tr');
if (el)
clmod(el, 'play', 1);
}
setTimeout(pbar.onresize, 1); setTimeout(pbar.onresize, 1);
} }

View File

@@ -110,10 +110,21 @@
<div class="os win"> <div class="os win">
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p> <p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
{% if args.ftp %}
<p>connect with plaintext FTP:</p>
<pre> <pre>
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }} rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b>
</pre> </pre>
{% endif %}
{% if args.ftps %}
<p>connect with TLS-encrypted FTPS:</p>
<pre>
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
</pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %}
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p> <p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
<pre> <pre>
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }} explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
@@ -121,10 +132,21 @@
</div> </div>
<div class="os lin"> <div class="os lin">
{% if args.ftp %}
<p>connect with plaintext FTP:</p>
<pre> <pre>
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }} rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b>
</pre> </pre>
{% endif %}
{% if args.ftps %}
<p>connect with TLS-encrypted FTPS:</p>
<pre>
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
</pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %}
<p>emergency alternative (gnome/gui-only):</p> <p>emergency alternative (gnome/gui-only):</p>
<!-- gnome-bug: ignores vp --> <!-- gnome-bug: ignores vp -->
<pre> <pre>
@@ -159,7 +181,7 @@
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p> <p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
{% endif %} {% endif %}
<p> <p>
you can use <a href="{{ r }}/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers) you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
</p> </p>

View File

@@ -73,7 +73,7 @@ html {
#toastb { #toastb {
max-height: 70vh; max-height: 70vh;
overflow-y: auto; overflow-y: auto;
padding: 1px; padding: .1em;
} }
#toast.scroll #toastb { #toast.scroll #toastb {
overflow-y: scroll; overflow-y: scroll;

View File

@@ -1826,6 +1826,7 @@ function up2k_init(subtle) {
timer.rm(etafun); timer.rm(etafun);
timer.rm(donut.do); timer.rm(donut.do);
ebi('u2tabw').style.minHeight = '0px';
utw_minh = 0; utw_minh = 0;
} }

View File

@@ -159,8 +159,8 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined; window.onerror = undefined;
var html = [ var html = [
'<h1>you hit a bug!</h1>', '<h1>you hit a bug!</h1>',
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>', '<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>', '<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>', '<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
'<p><b>UA:</b> ' + esc(navigator.userAgent + '') '<p><b>UA:</b> ' + esc(navigator.userAgent + '')
]; ];
@@ -225,7 +225,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
'#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' + '#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' + '#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' + '#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
'#exbox a{text-decoration:underline;color:#fc0} ' + '#exbox a{text-decoration:underline;color:#fc0;background:#222;border:none} ' +
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' + '#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' + '#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' + '#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
@@ -742,7 +742,7 @@ function get_pwd() {
if (pwd.length < 2) if (pwd.length < 2)
return null; return null;
return pwd[1].split(';')[0]; return decodeURIComponent(pwd[1].split(';')[0]);
} }
@@ -1769,7 +1769,6 @@ function cprop(name) {
function bchrome() { function bchrome() {
console.log(document.documentElement.className);
var v, o = QS('meta[name=theme-color]'); var v, o = QS('meta[name=theme-color]');
if (!o) if (!o)
return; return;
@@ -1787,16 +1786,17 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
if (xhr.status < 400 && xhr.status >= 200) if (xhr.status < 400 && xhr.status >= 200)
return true; return true;
if (xhr.status == 403) var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
fun = toast[lvl || 'err'],
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
if (xhr.status == 403 && !is_cf)
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag); return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
if (xhr.status == 404) if (xhr.status == 404)
return toast.err(0, prefix + e404, tag); return toast.err(0, prefix + e404, tag);
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText, if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
fun = toast[lvl || 'err'];
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
var now = Date.now(), td = now - cf_cha_t; var now = Date.now(), td = now - cf_cha_t;
if (td < 15000) if (td < 15000)
return; return;

View File

@@ -1,3 +1,156 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0513-0000 `v1.7.2` hard resolve
## new features
* print a warning if `c:\`, `c:\windows*`, or all of `/` are shared
* upgraded the docker image to v3.18 which enables the [chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)
* in config files, allow trailing `:` in section headers
## bugfixes
* when `--hardlink` (or the volflag) is set, resolve symlinks before hardlinking
* uploads could fail due to relative symlinks
* really minor ux fixes
* left-align `GET` in access logs
* the upload panel didn't always shrink back down after uploads completed
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0507-1834 `v1.7.1` CräzY;PWDs
## new features
* webdav:
* support write-only folders
* option `--dav-auth` / volflag `davauth` forces clients to always auth
* helps clients such as `davfs2` see all folders if the root is anon-readable but some subfolders are not
* alternatively you could configure your client to always send the password in the `PW` header
* include usernames in http request logs
* audio player:
* consumes less power on phones when the screen is off
* smoother playback cursor on short songs
## bugfixes
* the characters `;` and `%` can now be used in passwords
* but non-ascii characters (such as the ä in the release title) can, in fact, not
* verify that all accounts have unique passwords on startup (#25)
## other changes
* ftpd: log incorrect passwords only, not correct ones
* `up2k.py` (the upload, folder-sync, and file-search client) has been renamed to [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy)
* `u2c` as in `up2k client`, or `up2k CLI`, or `upload-to-copyparty` -- good name
* now the only things named "up2k" are the web-ui and the server backend which is way less confusing
* upgrade packaging from [setup.py](https://github.com/9001/copyparty/blob/hovudstraum/setup.py) to [pyproject.toml](https://github.com/9001/copyparty/blob/hovudstraum/pyproject.toml)
* no practical consequences aside from a warm fuzzy feeling of being in the future
* the docker images ~~will be~~ got rebuilt 2023-05-11 ~~in a few days (when [alpine](https://alpinelinux.org/) 3.18 is released)~~ enabling [the chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0429-2114 `v1.7.0` unlinked
don't get excited! nothing new and revolutionary, but `xvol` and `xdev` changed behavior so there's an above-average chance of fresh bugs
## new features
* (#24): `xvol` and `xdev`, previously just hints to the filesystem indexer, now actively block access as well:
* `xvol` stops users following symlinks leaving the volumes they have access to
* so if you symlink `/home/ed/music` into `/srv/www/music` it'll get blocked
* ...unless both folders are accessible through volumes, and the user has read-access to both
* `xdev` stops users crossing the filesystem boundary of the volumes they have access to
* so if you symlink another HDD into a volume it'll get blocked, but you can still symlink from other places on the same FS
* enabling these will add a slight performance hit; the unlikely worst-case is `14%` slower directory listings, `35%` slower download-as-tar
* file selection summary (num files, size, audio duration) in the bottom right
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): more aggressive resolving with `--rh`
* [add a warning](https://github.com/9001/copyparty#fix-unreliable-playback-on-android) that the default powersave settings in android may stop playing music during album changes
* also appears [in the media player](https://user-images.githubusercontent.com/241032/235327191-7aaefff9-5d41-4e42-b71f-042a8247f29d.png) if the issue is detected at runtime (playback halts for 30sec while screen is off)
## bugfixes
* (#23): stop autodeleting empty folders when moving or deleting files
* but files which expire / [self-destruct](https://github.com/9001/copyparty#self-destruct) still clean up parent directories like before
* ftp-server: some clients could fail to `mkdir` at first attempt (and also complain during rmdir)
## other changes
* new version of [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.7.0/copyparty-winpe64.exe) since the ftp-server fix might be relevant
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0426-2300 `v1.6.15` unexpected boost
## new features
* 30% faster folder listings due to [the very last thing](https://github.com/9001/copyparty/commit/55c74ad164633a0a64dceb51f7f534da0422cbb5) i'd ever expect to be a bottleneck, [thx perf](https://docs.python.org/3.12/howto/perf_profiling.html)
* option to see the lastmod timestamps of symlinks instead of the target files
* makes the turbo mode of [u2cli, the commandline uploader and folder-sync tool](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) more turbo since copyparty dedupes uploads by symlinking to an existing copy and the symlink is stamped with the deduped file's lastmod
* **webdav:** enabled by default (because rclone will want this), can be disabled with arg `--dav-rt` or volflag `davrt`
* **http:** disabled by default, can be enabled per-request with urlparam `lt`
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): option `--rh` to resolve server hostname only once at start of upload
* fantastic for buggy networks, but it'll break TLS
## bugfixes
* new arg `--s-tbody` specifies the network timeout before a dead connection gets dropped (default 3min)
* before there was no timeout at all, which could hang uploads or possibly consume all server resources
* ...but this is only relevant if your copyparty is directly exposed to the internet with no reverse proxy
* with nginx/caddy/etc you can disable the timeout with `--s-tbody 0` for a 3% performance boost (*wow!*)
* iPhone audio transcoder could turn bad and stop transcoding
* ~~maybe android phones no longer pause playback at the end of an album~~
* nope, that was due to [android's powersaver](https://github.com/9001/copyparty#fix-unreliable-playback-on-android), oh well
* ***bonus unintended feature:*** navigate into other folders while a song is playing
* [installing from the source tarball](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#build-from-release-tarball) should be ok now
* good base for making distro packages probably
## other changes
* since the network timeout fix is relevant for the single usecase that [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.15/copyparty-winpe64.exe) covers, there is now a new version of that
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0424-0609 `v1.6.14` unsettable flags
## new features
* unset a volflag (override a global option) by negating it (setting volflag `-flagname`)
* new argument `--cert` to specify TLS certificate location
* defaults to `~/.config/copyparty/cert.pem` like before
## bugfixes
* in zip/tar downloads, always use the parent-folder name as the archive root
* more reliable ftp authentication when providing password as username
* connect-page: fix rclone ftps example
## other changes
* stop suggesting `--http-only` and `--https-only` for performance since the difference is negligible
* mention how some antivirus (avast, avg, mcafee) thinks that pillow's webp encoder is a virus, affecting `copyparty.exe`
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0420-2141 `v1.6.12` as seen on nixos
## new features
* @chinponya [made](https://github.com/9001/copyparty/pull/22) a copyparty [Nix package](https://github.com/9001/copyparty#nix-package) and a [NixOS module](https://github.com/9001/copyparty#nixos-module)! nice 🎉
* with [systemd-based hardening](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nixos/modules/copyparty.nix#L230-L270) instead of [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh)
* complements the [arch package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) very well w
## bugfixes
* fix an sqlite fd leak
* with enough simultaneous traffic, copyparty could run out of file descriptors since it relied on the gc to close sqlite cursors
* now there's a pool of cursors shared between the tcp connections instead, limited to the number of CPU cores
* performance mostly unaffected (or slightly improved) compared to before, except for a 20% reduction only during max server load caused by directory-listings or searches
* ~~somehow explicitly closing the cursors didn't always work... maybe this was actually a python bug :\\/~~
* yes, it does incomplete cleanup if opening a WAL database fails
* multirange requests would fail with an error; now they get a 200 as expected (since they're kinda useless and not worth the overhead)
* [the only software i've ever seen do that](https://apps.kde.org/discover/) now works as intended
* expand `~/` filesystem paths in all remaining args: `-c`, `-lo`, `--hist`, `--ssl-log`, and the `hist` volflag
* never use IPv6-format IPv4 (`::ffff:127.0.0.1`) in responses
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): don't enter delete stage if some of the uploads failed
* audio player in safari on touchbar macbooks
* songs would play backwards because the touchbar keeps spamming play/pause
* playback would stop when the preloader kicks in because safari sees the new audio object and freaks out
## other changes
* added [windows quickstart / service example](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/windows.md)
* updated pyinstaller (it makes smaller exe files now)
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0401-2112 `v1.6.11` not joke # 2023-0401-2112 `v1.6.11` not joke

View File

@@ -4,8 +4,9 @@
* [future plans](#future-plans) - some improvement ideas * [future plans](#future-plans) - some improvement ideas
* [design](#design) * [design](#design)
* [up2k](#up2k) - quick outline of the up2k protocol * [up2k](#up2k) - quick outline of the up2k protocol
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/) * [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right? * [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [hashed passwords](#hashed-passwords) - regarding the curious decisions
* [http api](#http-api) * [http api](#http-api)
* [read](#read) * [read](#read)
* [write](#write) * [write](#write)
@@ -17,6 +18,7 @@
* [building](#building) * [building](#building)
* [dev env setup](#dev-env-setup) * [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx) * [just the sfx](#just-the-sfx)
* [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
* [complete release](#complete-release) * [complete release](#complete-release)
* [todo](#todo) - roughly sorted by priority * [todo](#todo) - roughly sorted by priority
* [discarded ideas](#discarded-ideas) * [discarded ideas](#discarded-ideas)
@@ -67,14 +69,14 @@ regarding the frequent server log message during uploads;
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled * on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left * client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left
## why not tus ### why not tus
I didn't know about [tus](https://tus.io/) when I made this, but: I didn't know about [tus](https://tus.io/) when I made this, but:
* up2k has the advantage that it supports parallel uploading of non-contiguous chunks straight into the final file -- [tus does a merge at the end](https://tus.io/protocols/resumable-upload.html#concatenation) which is slow and taxing on the server HDD / filesystem (unless i'm misunderstanding) * up2k has the advantage that it supports parallel uploading of non-contiguous chunks straight into the final file -- [tus does a merge at the end](https://tus.io/protocols/resumable-upload.html#concatenation) which is slow and taxing on the server HDD / filesystem (unless i'm misunderstanding)
* up2k has the slight disadvantage of requiring the client to hash the entire file before an upload can begin, but this has the benefit of immediately skipping duplicate files * up2k has the slight disadvantage of requiring the client to hash the entire file before an upload can begin, but this has the benefit of immediately skipping duplicate files
* and the hashing happens in a separate thread anyways so it's usually not a bottleneck * and the hashing happens in a separate thread anyways so it's usually not a bottleneck
## why chunk-hashes ### why chunk-hashes
a single sha512 would be better, right? a single sha512 would be better, right?
@@ -84,13 +86,22 @@ as a result, the hashes are much less useful than they could have been (search t
however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such) however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such)
* both the [browser uploader](https://github.com/9001/copyparty#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) does this now, allowing for fast uploading even from plaintext http * both the [browser uploader](https://github.com/9001/copyparty#uploading) and the [commandline one](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) does this now, allowing for fast uploading even from plaintext http
hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported
* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids * blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids
# hashed passwords
regarding the curious decisions
there is a static salt for all passwords;
* because most copyparty APIs allow users to authenticate using only their password, leaving the username unknown, which makes per-account salts impossible
* the drawback of this is that an attacker can bruteforce all accounts in parallel, however most copyparty instances only have a handful of accounts in the first place, and it can be compensated by increasing the hashing cost anyways
# http api # http api
* table-column `params` = URL parameters; `?foo=bar&qux=...` * table-column `params` = URL parameters; `?foo=bar&qux=...`
@@ -110,6 +121,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles | | GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
| GET | `?ls=t` | list files/folders at URL as plaintext | | GET | `?ls=t` | list files/folders at URL as plaintext |
| GET | `?ls=v` | list files/folders at URL, terminal-formatted | | GET | `?ls=v` | list files/folders at URL, terminal-formatted |
| GET | `?lt` | in listings, use symlink timestamps rather than targets |
| GET | `?b` | list files/folders at URL as simplified HTML | | GET | `?b` | list files/folders at URL as simplified HTML |
| GET | `?tree=.` | list one level of subdirectories inside URL | | GET | `?tree=.` | list one level of subdirectories inside URL |
| GET | `?tree` | list one level of subdirectories for each level until URL | | GET | `?tree` | list one level of subdirectories for each level until URL |
@@ -226,39 +238,55 @@ pip install mutagen # audio metadata
pip install pyftpdlib # ftp server pip install pyftpdlib # ftp server
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install pyvips # faster thumbnails
pip install psutil # better cleanup of stuck metadata parsers on windows
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
``` ```
## just the sfx ## just the sfx
first grab the web-dependencies from a previous sfx (assuming you don't need to modify something in those): if you just want to modify the copyparty source code (py/html/css/js) then this is the easiest approach
```sh build the sfx using any of the following examples:
rm -rf copyparty/web/deps
curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
python3 x.py --version
rm x.py
cp -R /tmp/pe-copyparty.$(id -u)/copyparty/web/deps copyparty/web/
```
or you could build the web-dependencies from source instead (NB: does not include prismjs, need to grab that manually):
```sh
make -C scripts/deps-docker
```
then build the sfx using any of the following examples:
```sh ```sh
./scripts/make-sfx.sh # regular edition ./scripts/make-sfx.sh # regular edition
./scripts/make-sfx.sh fast # build faster (worse js/css compression)
./scripts/make-sfx.sh gz no-cm # gzip-compressed + no fancy markdown editor ./scripts/make-sfx.sh gz no-cm # gzip-compressed + no fancy markdown editor
``` ```
## build from release tarball
uses the included prebuilt webdeps
if you downloaded a [release](https://github.com/9001/copyparty/releases) source tarball from github (for example [copyparty-1.6.15.tar.gz](https://github.com/9001/copyparty/releases/download/v1.6.15/copyparty-1.6.15.tar.gz) so not the autogenerated one) you can build it like so,
```bash
python3 -m pip install --user -U build setuptools wheel jinja2 strip_hints
bash scripts/run-tests.sh python3 # optional
python3 -m build
```
if you are unable to use `build`, you can use the old setuptools approach instead,
```bash
python3 setup.py install --user setuptools wheel jinja2
python3 setup.py build
# you now have a wheel which you can install. or extract and repackage:
python3 setup.py install --skip-build --prefix=/usr --root=$HOME/pe/copyparty
```
## complete release ## complete release
also builds the sfx so skip the sfx section above also builds the sfx so skip the sfx section above
*WARNING: `rls.sh` has not yet been updated with the docker-images and arch/nix packaging*
does everything completely from scratch, straight from your local repo
in the `scripts` folder: in the `scripts` folder:
* run `make -C deps-docker` to build all dependencies * run `make -C deps-docker` to build all dependencies

View File

@@ -15,7 +15,7 @@ open up notepad and save the following as `c:\users\you\documents\party.conf` (f
```yaml ```yaml
[global] [global]
lo: c:\users\you\logs\cpp-%Y-%m%d.xz # log to file lo: ~/logs/cpp-%Y-%m%d.xz # log to c:\users\you\logs\
e2dsa, e2ts, no-dedup, z # sets 4 flags; see expl. e2dsa, e2ts, no-dedup, z # sets 4 flags; see expl.
p: 80, 443 # listen on ports 80 and 443, not 3923 p: 80, 443 # listen on ports 80 and 443, not 3923
theme: 2 # default theme: protonmail-monokai theme: 2 # default theme: protonmail-monokai
@@ -47,8 +47,7 @@ open up notepad and save the following as `c:\users\you\documents\party.conf` (f
### config explained: [global] ### config explained: [global]
the `[global]` section accepts any config parameters you can see when running copyparty (either the exe or the sfx.py) with `--help`, so this is the same as running copyparty with arguments `--lo c:\users\you\logs\copyparty-%Y-%m%d.xz -e2dsa -e2ts --no-dedup -z -p 80,443 --theme 2 --lang nor` the `[global]` section accepts any config parameters you can see when running copyparty (either the exe or the sfx.py) with `--help`, so this is the same as running copyparty with arguments `--lo c:\users\you\logs\copyparty-%Y-%m%d.xz -e2dsa -e2ts --no-dedup -z -p 80,443 --theme 2 --lang nor`
* `lo: c:\users\you\logs\cpp-%Y-%m%d.xz` writes compressed logs (the compression will make them delayed) * `lo: ~/logs/cpp-%Y-%m%d.xz` writes compressed logs (the compression will make them delayed)
* sorry that `~/logs/` doesn't work currently, good oversight
* `e2dsa` enables the upload deduplicator and file indexer, which enables searching * `e2dsa` enables the upload deduplicator and file indexer, which enables searching
* `e2ts` enables music metadata indexing, making albums / titles etc. searchable too * `e2ts` enables music metadata indexing, making albums / titles etc. searchable too
* `no-dedup` writes full dupes to disk instead of symlinking, since lots of windows software doesn't handle symlinks well * `no-dedup` writes full dupes to disk instead of symlinking, since lots of windows software doesn't handle symlinks well

View File

@@ -194,6 +194,9 @@ sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /de
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2 for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
# better sine
sox -DnV -r8000 -b8 -c1 /dev/shm/a.wav synth 1.1 sin 400 vol 0.02
# play icon calibration pics # play icon calibration pics
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done

View File

@@ -0,0 +1,2 @@
vsftpd a.conf -olisten=YES -olisten_port=3921 -orun_as_launching_user=YES -obackground=NO -olog_ftp_protocol=YES

View File

@@ -72,7 +72,7 @@ rclone.exe mount --vfs-cache-mode writes --vfs-cache-max-age 5s --attr-timeout 5
# sync folders to/from copyparty # sync folders to/from copyparty
note that the up2k client [up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy) (available on the "connect" page of your copyparty server) does uploads much faster and safer, but rclone is bidirectional and more ubiquitous note that the up2k client [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) (available on the "connect" page of your copyparty server) does uploads much faster and safer, but rclone is bidirectional and more ubiquitous
``` ```
rclone sync /usr/share/icons/ cpp-rw:fds/ rclone sync /usr/share/icons/ cpp-rw:fds/

View File

@@ -287,7 +287,7 @@ symbol legend,
* `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled * `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled
* `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../` * `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../`
* `a`/copyparty remarks: * `a`/copyparty remarks:
* one-way folder sync from local to server can be done efficiently with [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py), or with webdav and conventional rsync * one-way folder sync from local to server can be done efficiently with [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy), or with webdav and conventional rsync
* can hot-reload config files (with just a few exceptions) * can hot-reload config files (with just a few exceptions)
* can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead * can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead
* [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative * [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative

146
pyproject.toml Normal file
View File

@@ -0,0 +1,146 @@
[project]
name = "copyparty"
description = """
Portable file server with accelerated resumable uploads, \
deduplication, WebDAV, FTP, zeroconf, media indexer, \
video thumbnails, audio transcoding, and write-only folders"""
readme = "README.md"
authors = [{ name = "ed", email = "copyparty@ocv.me" }]
license = { text = "MIT" }
requires-python = ">=3.3"
dependencies = ["Jinja2"]
dynamic = ["version"]
classifiers = [
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: Jython",
"Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console",
"Environment :: No Input/Output (Daemon)",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: System Administrators",
"Topic :: Communications :: File Sharing",
"Topic :: Internet :: File Transfer Protocol (FTP)",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
]
[project.urls]
"Source Code" = "https://github.com/9001/copyparty"
"Bug Tracker" = "https://github.com/9001/copyparty/issues"
"Demo Server" = "https://a.ocv.me/pub/demo/"
[project.optional-dependencies]
thumbnails = ["Pillow"]
thumbnails2 = ["pyvips"]
audiotags = ["mutagen"]
ftpd = ["pyftpdlib"]
ftps = ["pyftpdlib", "pyopenssl"]
pwhash = ["argon2-cffi"]
[project.scripts]
copyparty = "copyparty.__main__:main"
"u2c" = "copyparty.web.a.u2c:main"
"partyfuse" = "copyparty.web.a.partyfuse:main"
# =====================================================================
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
# requires = ["hatchling"]
# build-backend = "hatchling.build"
[tool.hatch.version]
source = "code"
path = "copyparty/__version__.py"
[tool.setuptools.dynamic]
version = { attr = "copyparty.__version__.__version__" }
[tool.setuptools.packages.find]
include = ["copyparty*"]
[tool.setuptools.package-data]
copyparty = [
"res/COPYING.txt",
"res/insecure.pem",
"web/*.gz",
"web/*.js",
"web/*.css",
"web/*.html",
"web/a/*.bat",
"web/dd/*.png",
"web/deps/*.gz",
"web/deps/*.woff*",
]
# =====================================================================
[tool.black]
required-version = '21.12b0'
target-version = ['py27']
[tool.isort]
profile = "black"
include_trailing_comma = true
[tool.bandit]
skips = ["B104", "B110", "B112"]
# =====================================================================
[tool.pylint.MAIN]
py-version = "3.11"
jobs = 2
[tool.pylint."MESSAGES CONTROL"]
disable = [
"missing-module-docstring",
"missing-class-docstring",
"missing-function-docstring",
"import-outside-toplevel",
"wrong-import-position",
"raise-missing-from",
"bare-except",
"broad-exception-raised",
"broad-exception-caught",
"invalid-name",
"line-too-long",
"too-many-lines",
"consider-using-f-string",
"pointless-string-statement",
]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"
# =====================================================================
[tool.mypy]
python_version = "3.11"
files = ["copyparty"]
show_error_codes = true
show_column_numbers = true
pretty = true
strict = true
local_partial_types = true
strict_equality = true
warn_unreachable = true
ignore_missing_imports = true
follow_imports = "silent"
[[tool.mypy.overrides]]
no_implicit_reexport = false

View File

@@ -34,7 +34,7 @@ set -e
# 4823 copyparty-extras/copyparty-repack.sh # 4823 copyparty-extras/copyparty-repack.sh
# `- source files from github # `- source files from github
# #
# 23663 copyparty-extras/up2k.py # 23663 copyparty-extras/u2c.py
# `- standalone utility to upload or search for files # `- standalone utility to upload or search for files
# #
# 32280 copyparty-extras/partyfuse.py # 32280 copyparty-extras/partyfuse.py
@@ -147,7 +147,7 @@ repack sfx-lite "re no-dd no-cm no-hl gz"
# copy lite-sfx.py to ./copyparty, # copy lite-sfx.py to ./copyparty,
# delete extracted source code # delete extracted source code
( cd copyparty-extras/ ( cd copyparty-extras/
mv copyparty-*/bin/up2k.py . mv copyparty-*/bin/u2c.py .
mv copyparty-*/bin/partyfuse.py . mv copyparty-*/bin/partyfuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9} rm -rf copyparty-{0..9}*.*.*{0..9}

View File

@@ -1,5 +1,4 @@
# TODO easymde embeds codemirror on 3.17 due to new npm probably FROM alpine:3.18
FROM alpine:3.16
WORKDIR /z WORKDIR /z
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
ver_hashwasm=4.9.0 \ ver_hashwasm=4.9.0 \

View File

@@ -5,11 +5,13 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-ac" \ org.opencontainers.image.title="copyparty-ac" \
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)" org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
RUN apk --no-cache add \ RUN apk --no-cache add !pyc \
wget \ wget \
py3-pillow \ py3-pillow \
ffmpeg \ ffmpeg \
&& rm -rf /tmp/pyc \
&& mkdir /cfg /w \ && mkdir /cfg /w \
&& chmod 777 /cfg /w \ && chmod 777 /cfg /w \
&& echo % /cfg > initcfg && echo % /cfg > initcfg

View File

@@ -5,26 +5,27 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-dj" \ org.opencontainers.image.title="copyparty-dj" \
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection" org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
COPY i/bin/mtag/install-deps.sh ./ COPY i/bin/mtag/install-deps.sh ./
COPY i/bin/mtag/audio-bpm.py /mtag/ COPY i/bin/mtag/audio-bpm.py /mtag/
COPY i/bin/mtag/audio-key.py /mtag/ COPY i/bin/mtag/audio-key.py /mtag/
RUN apk add -U \ RUN apk add -U !pyc \
wget \ wget \
py3-pillow py3-pip py3-cffi \ py3-pillow py3-pip py3-cffi \
ffmpeg \ ffmpeg \
vips-jxl vips-heif vips-poppler vips-magick \ vips-jxl vips-heif vips-poppler vips-magick \
py3-numpy fftw libsndfile \ py3-numpy fftw libsndfile \
vamp-sdk vamp-sdk-libs \ vamp-sdk vamp-sdk-libs \
&& python3 -m pip install pyvips \ && apk add -t .bd \
&& apk --no-cache add -t .bd \
bash wget gcc g++ make cmake patchelf \ bash wget gcc g++ make cmake patchelf \
python3-dev ffmpeg-dev fftw-dev libsndfile-dev \ python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
py3-wheel py3-numpy-dev \ py3-wheel py3-numpy-dev \
vamp-sdk-dev \ vamp-sdk-dev \
&& python3 -m pip install pyvips \
&& bash install-deps.sh \ && bash install-deps.sh \
&& apk del py3-pip .bd \ && apk del py3-pip .bd \
&& rm -rf /var/cache/apk/* \ && rm -rf /var/cache/apk/* /tmp/pyc \
&& chmod 777 /root \ && chmod 777 /root \
&& ln -s /root/vamp /root/.local / \ && ln -s /root/vamp /root/.local / \
&& mkdir /cfg /w \ && mkdir /cfg /w \

View File

@@ -5,10 +5,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-im" \ org.opencontainers.image.title="copyparty-im" \
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)" org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
RUN apk --no-cache add \ RUN apk --no-cache add !pyc \
wget \ wget \
py3-pillow py3-mutagen \ py3-pillow py3-mutagen \
&& rm -rf /tmp/pyc \
&& mkdir /cfg /w \ && mkdir /cfg /w \
&& chmod 777 /cfg /w \ && chmod 777 /cfg /w \
&& echo % /cfg > initcfg && echo % /cfg > initcfg

View File

@@ -5,14 +5,19 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-iv" \ org.opencontainers.image.title="copyparty-iv" \
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)" org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
RUN apk --no-cache add \ RUN apk add -U !pyc \
wget \ wget \
py3-pillow py3-pip py3-cffi \ py3-pillow py3-pip py3-cffi \
ffmpeg \ ffmpeg \
vips-jxl vips-heif vips-poppler vips-magick \ vips-jxl vips-heif vips-poppler vips-magick \
&& apk add -t .bd \
bash wget gcc g++ make cmake patchelf \
python3-dev py3-wheel \
&& python3 -m pip install pyvips \ && python3 -m pip install pyvips \
&& apk del py3-pip \ && apk del py3-pip .bd \
&& rm -rf /var/cache/apk/* /tmp/pyc \
&& mkdir /cfg /w \ && mkdir /cfg /w \
&& chmod 777 /cfg /w \ && chmod 777 /cfg /w \
&& echo % /cfg > initcfg && echo % /cfg > initcfg

View File

@@ -5,9 +5,11 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min" \ org.opencontainers.image.title="copyparty-min" \
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding" org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
RUN apk --no-cache add \ RUN apk --no-cache add !pyc \
python3 \ python3 \
&& rm -rf /tmp/pyc \
&& mkdir /cfg /w \ && mkdir /cfg /w \
&& chmod 777 /cfg /w \ && chmod 777 /cfg /w \
&& echo % /cfg > initcfg && echo % /cfg > initcfg

View File

@@ -5,10 +5,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \ org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min-pip" \ org.opencontainers.image.title="copyparty-min-pip" \
org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding" org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
RUN apk --no-cache add python3 py3-pip \ RUN apk --no-cache add python3 py3-pip !pyc \
&& python3 -m pip install copyparty \ && python3 -m pip install copyparty \
&& apk del py3-pip \ && apk del py3-pip \
&& rm -rf /tmp/pyc \
&& mkdir /cfg /w \ && mkdir /cfg /w \
&& chmod 777 /cfg /w \ && chmod 777 /cfg /w \
&& echo % /cfg > initcfg && echo % /cfg > initcfg

View File

@@ -95,7 +95,7 @@ filt=
[ $(jobs -p | wc -l) -lt $(nproc) ] && break [ $(jobs -p | wc -l) -lt $(nproc) ] && break
while [ -e .blk ]; do sleep 0.2; done while [ -e .blk ]; do sleep 0.2; done
done done
aa="$(printf '%7s' $a)" aa="$(printf '%11s' $a-$i)"
# arm takes forever so make it top priority # arm takes forever so make it top priority
[ ${a::3} == arm ] && nice= || nice=nice [ ${a::3} == arm ] && nice= || nice=nice

View File

@@ -73,14 +73,17 @@ pydir="$(
} }
function have() { function have() {
python -c "import $1; $1; $1.__version__" python -c "import $1; $1; getattr($1,'__version__',0)"
} }
function load_env() { function load_env() {
. buildenv/bin/activate . buildenv/bin/activate
have setuptools have setuptools
have wheel have wheel
have build
have twine have twine
have jinja2
have strip_hints
} }
load_env || { load_env || {
@@ -88,19 +91,32 @@ load_env || {
deactivate || true deactivate || true
rm -rf buildenv rm -rf buildenv
python3 -m venv buildenv python3 -m venv buildenv
(. buildenv/bin/activate && pip install twine wheel) (. buildenv/bin/activate && pip install \
setuptools wheel build twine jinja2 strip_hints )
load_env load_env
} }
# cleanup
rm -rf unt build/pypi
# grab licenses # grab licenses
scripts/genlic.sh copyparty/res/COPYING.txt scripts/genlic.sh copyparty/res/COPYING.txt
# remove type hints to support python < 3.9 # clean-ish packaging env
rm -rf build/pypi rm -rf build/pypi
mkdir -p build/pypi mkdir -p build/pypi
cp -pR setup.py README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/ cp -pR pyproject.toml README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/
tar -c docs/lics.txt scripts/genlic.sh build/*.txt | tar -xC build/pypi/ tar -c docs/lics.txt scripts/genlic.sh build/*.txt | tar -xC build/pypi/
cd build/pypi cd build/pypi
# delete junk
find -name '*.pyc' -delete
find -name __pycache__ -delete
find -name py.typed -delete
find -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
# remove type hints to support python < 3.9
f=../strip-hints-0.1.10.tar.gz f=../strip-hints-0.1.10.tar.gz
[ -e $f ] || [ -e $f ] ||
(url=https://files.pythonhosted.org/packages/9c/d4/312ddce71ee10f7e0ab762afc027e07a918f1c0e1be5b0069db5b0e7542d/strip-hints-0.1.10.tar.gz; (url=https://files.pythonhosted.org/packages/9c/d4/312ddce71ee10f7e0ab762afc027e07a918f1c0e1be5b0069db5b0e7542d/strip-hints-0.1.10.tar.gz;
@@ -132,24 +148,13 @@ while IFS= read -r x; do
done done
rm -rf contrib rm -rf contrib
[ $fast ] && sed -ri s/5730/10/ copyparty/web/Makefile [ $fast ] && sed -ri s/573/10/ copyparty/web/Makefile
(cd copyparty/web && make -j$(nproc) && rm Makefile) (cd copyparty/web && make -j$(nproc) && rm Makefile)
# build # build
./setup.py clean2 python3 -m build
./setup.py sdist bdist_wheel --universal
[ "$mode" == t ] && twine upload -r pypitest dist/* [ "$mode" == t ] && twine upload -r pypitest dist/*
[ "$mode" == u ] && twine upload -r pypi dist/* [ "$mode" == u ] && twine upload -r pypi dist/*
cat <<EOF true
all done!
to clean up the source tree:
cd ~/dev/copyparty
./setup.py clean2
EOF

View File

@@ -2,10 +2,14 @@
set -e set -e
echo echo
berr() { p=$(head -c 72 </dev/zero | tr '\0' =); printf '\n%s\n\n' $p; cat; printf '\n%s\n\n' $p; }
help() { exec cat <<'EOF' help() { exec cat <<'EOF'
# optional args: # optional args:
# #
# `fast` builds faster, with cheaper js/css compression
#
# `clean` uses files from git (everything except web/deps), # `clean` uses files from git (everything except web/deps),
# so local changes won't affect the produced sfx # so local changes won't affect the produced sfx
# #
@@ -42,6 +46,13 @@ help() { exec cat <<'EOF'
# #
# `no-dd` saves ~2k by removing the mouse cursor # `no-dd` saves ~2k by removing the mouse cursor
# #
# _____________________________________________________________________
# build behavior:
#
# `dl-wd` automatically downloads webdeps if necessary
#
# `ign-wd` allows building an sfx without webdeps
#
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
# #
# if you are on windows, you can use msys2: # if you are on windows, you can use msys2:
@@ -109,6 +120,8 @@ while [ ! -z "$1" ]; do
no-hl) no_hl=1 ; ;; no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;; no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;; no-cm) no_cm=1 ; ;;
dl-wd) dl_wd=1 ; ;;
ign-wd) ign_wd=1 ; ;;
fast) zopf= ; ;; fast) zopf= ; ;;
ultra) ultra=1 ; ;; ultra) ultra=1 ; ;;
lang) shift;langs="$1"; ;; lang) shift;langs="$1"; ;;
@@ -223,7 +236,7 @@ necho() {
# enable this to dynamically remove type hints at startup, # enable this to dynamically remove type hints at startup,
# in case a future python version can use them for performance # in case a future python version can use them for performance
true || ( true && (
necho collecting strip-hints necho collecting strip-hints
f=../build/strip-hints-0.1.10.tar.gz f=../build/strip-hints-0.1.10.tar.gz
[ -e $f ] || [ -e $f ] ||
@@ -283,12 +296,56 @@ necho() {
rm -f copyparty/stolen/*/README.md rm -f copyparty/stolen/*/README.md
# remove type hints before build instead # remove type hints before build instead
(cd copyparty; "$pybin" ../../scripts/strip_hints/a.py; rm uh) (cd copyparty; PYTHONPATH="..:$PYTHONPATH" "$pybin" ../../scripts/strip_hints/a.py; rm uh)
licfile=$(realpath copyparty/res/COPYING.txt) licfile=$(realpath copyparty/res/COPYING.txt)
(cd ../scripts; ./genlic.sh "$licfile") (cd ../scripts; ./genlic.sh "$licfile")
} }
[ ! -e copyparty/web/deps/mini-fa.woff ] && [ $dl_wd ] && {
echo "could not find webdeps; downloading..."
url=https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
wget -Ox.py "$url" || curl -L "$url" >x.py
echo "extracting webdeps..."
wdsrc="$("$pybin" x.py --version 2>&1 | tee /dev/stderr | awk '/sfxdir:/{sub(/.*: /,"");print;exit}')"
[ "$wdsrc" ] || {
echo failed to discover tempdir of reference copyparty-sfx.py
exit 1
}
rm -rf copyparty/web/deps
cp -pvR "$wdsrc/copyparty/web/deps" copyparty/web/
# also copy it out into the source-tree for next time
rm -rf ../copyparty/web/deps
cp -pR copyparty/web/deps ../copyparty/web
rm x.py
}
[ -e copyparty/web/deps/mini-fa.woff ] || [ $ign_wd ] || { berr <<'EOF'
ERROR:
could not find webdeps; the front-end will not be fully functional
please choose one of the following:
A) add the argument "dl-wd" to fix it automatically; this will
download copyparty-sfx.py and extract the webdeps from there
B) build the webdeps from source: make -C scripts/deps-docker
C) add the argument "ign-wd" to continue building the sfx without webdeps
alternative A is a good choice if you are only intending to
modify the copyparty source code (py/html/css/js) and do not
plan to make any changes to the mostly-third-party webdeps
there may be additional hints in the devnotes:
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
EOF
exit 1
}
ver= ver=
[ -z "$repack" ] && [ -z "$repack" ] &&
git describe --tags >/dev/null 2>/dev/null && { git describe --tags >/dev/null 2>/dev/null && {
@@ -421,7 +478,7 @@ while IFS= read -r f; do
done done
# up2k goes from 28k to 22k laff # up2k goes from 28k to 22k laff
awk 'BEGIN{gensub(//,"",1)}' </dev/null && awk 'BEGIN{gensub(//,"",1)}' </dev/null 2>/dev/null &&
echo entabbening && echo entabbening &&
find | grep -E '\.css$' | while IFS= read -r f; do find | grep -E '\.css$' | while IFS= read -r f; do
awk '{ awk '{
@@ -435,7 +492,9 @@ find | grep -E '\.css$' | while IFS= read -r f; do
1 1
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t ' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
tmv "$f" tmv "$f"
done done ||
echo "WARNING: your awk does not have gensub, so the sfx will not have optimal compression"
unexpand -h 2>/dev/null && unexpand -h 2>/dev/null &&
find | grep -E '\.(js|html)$' | while IFS= read -r f; do find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t unexpand -t 4 --first-only <"$f" >t
@@ -529,7 +588,7 @@ sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
for n in {1..50}; do for n in {1..50}; do
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | (shuf||gshuf) ) >list || true (grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | (shuf||gshuf) ) >list || true
s=$( (sha1sum||shasum) < list | cut -c-16) s=$( (sha1sum||shasum) < list | cut -c-16)
grep -q $s "$zdir/h" && continue grep -q $s "$zdir/h" 2>/dev/null && continue
echo $s >> "$zdir/h" echo $s >> "$zdir/h"
break break
done done

View File

@@ -64,6 +64,8 @@ git archive hovudstraum | tar -xC "$rls_dir"
echo ">>> export untracked deps" echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir" tar -c copyparty/web/deps | tar -xC "$rls_dir"
scripts/genlic.sh "$rls_dir/copyparty/res/COPYING.txt"
cd "$rls_dir" cd "$rls_dir"
find -type d -exec chmod 755 '{}' \+ find -type d -exec chmod 755 '{}' \+
find -type f -exec chmod 644 '{}' \+ find -type f -exec chmod 644 '{}' \+
@@ -93,7 +95,8 @@ rm \
.gitattributes \ .gitattributes \
.gitignore .gitignore
mv LICENSE LICENSE.txt cp -pv LICENSE LICENSE.txt
mv setup.py{,.disabled}
# the regular cleanup memes # the regular cleanup memes
find -name '*.pyc' -delete find -name '*.pyc' -delete

View File

@@ -11,30 +11,12 @@ update_arch_pkgbuild() {
rm -rf x rm -rf x
mkdir x mkdir x
(echo "$self/../dist/copyparty-sfx.py" sha=$(sha256sum "$self/../dist/copyparty-$ver.tar.gz" | awk '{print$1}')
awk -v self="$self" '
/^\)/{o=0}
/^source=/{o=1;next}
{
sub(/..pkgname./,"copyparty");
sub(/.*pkgver./,self "/..");
sub(/^ +"/,"");sub(/"/,"")
}
o&&!/https/' PKGBUILD
) |
xargs sha256sum > x/sums
(awk -v ver=$ver ' awk -v ver=$ver -v sha=$sha '
/^pkgver=/{sub(/[0-9\.]+/,ver)}; /^pkgver=/{sub(/[0-9\.]+/,ver)};
/^sha256sums=/{exit}; /^sha256sums=/{sub(/[0-9a-f]{64}/,sha)};
1' PKGBUILD 1' PKGBUILD >a
echo -n 'sha256sums=('
p=; cat x/sums | while read s _; do
echo "$p\"$s\""
p=' '
done
awk '/^sha256sums=/{o=1} o&&/^\)/{o=2} o==2' PKGBUILD
) >a
mv a PKGBUILD mv a PKGBUILD
rm -rf x rm -rf x

View File

@@ -1,13 +1,13 @@
d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl 17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
d68c78bc83f4f48c604912b2d1ca4772b0e6ed676cd2eb439411e0a74d63fe215aac93dd9dab04ed341909a4a6a1efc13ec982516e3cb0fc7c355055e63d9178 pyinstaller-5.10.1-py3-none-win32.whl 2410f79f25b55829169fdd45611c04f51932f7701c0601df64ade0eb545c96ba950b7be186eb082482506bc689fcde5fe09c1f6f7cd77c2107028959b7e0d06f pyinstaller-5.12.0-py3-none-win32.whl
fe62705893c86eeb2d5b841da8debe05dedda98364dec190b487e718caad8a8735503bf93739a7a27ea793a835bf976fb919ceec1424b8fc550b936bae4a54e9 pyinstaller-5.10.1-py3-none-win_amd64.whl 62f4f3dda0526ea88cfc5af1806c7b53094672f4237d64c088626c226ad2fbc7549f6c9c6bbe5b228b1f87faf1e5c343ec468c485e4c17fe6d79c6b1f570153a pyinstaller-5.12.0-py3-none-win_amd64.whl
61c543983ff67e2bdff94d2d6198023679437363db8c660fa81683aff87c5928cd800720488e18d09be89fe45d6ab99be3ccb912cb2e03e2bca385b4338e1e42 pyinstaller_hooks_contrib-2023.2-py2.py3-none-any.whl 2612c263f73a02eab41404ba96e0c7cf8be4475104668b47dfbae50fadf977b3621dd4102682b301264d82b6e130d95ea84a28bf2106a626a1a2845dac16df47 pyinstaller_hooks_contrib-2023.3-py2.py3-none-any.whl
132a5380f33a245f2e744413a0e1090bc42b7356376de5121397cec5976b04b79f7c9ebe28af222c9c7b01461f7d7920810d220e337694727e0d7cd9e91fa667 pywin32_ctypes-0.2.0-py2.py3-none-any.whl 132a5380f33a245f2e744413a0e1090bc42b7356376de5121397cec5976b04b79f7c9ebe28af222c9c7b01461f7d7920810d220e337694727e0d7cd9e91fa667 pywin32_ctypes-0.2.0-py2.py3-none-any.whl
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl 3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
4b6e9ae967a769fe32be8cf0bc0d5a213b138d1e0344e97656d08a3d15578d81c06c45b334c872009db2db8f39db0c77c94ff6c35168d5e13801917667c08678 upx-4.0.2-win32.zip 4b6e9ae967a769fe32be8cf0bc0d5a213b138d1e0344e97656d08a3d15578d81c06c45b334c872009db2db8f39db0c77c94ff6c35168d5e13801917667c08678 upx-4.0.2-win32.zip
# up2k (win7) # u2c (win7)
a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl
2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl 2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
@@ -24,7 +24,7 @@ c06b3295d1d0b0f0a6f9a6cd0be861b9b643b4a5ea37857f0bd41c45deaf27bb927b71922dab74e6
ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a25067512b8f75538c67c987cf3944bfa0229e3cb677e2fb81e763e zipp-3.10.0-py3-none-any.whl ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a25067512b8f75538c67c987cf3944bfa0229e3cb677e2fb81e763e zipp-3.10.0-py3-none-any.whl
# win10 # win10
00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl 00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl
b1db6f5a79fc15391547643e5973cf5946c0acfa6febb68bc90fc3f66369681100cc100f32dd04256dcefa510e7864c718515a436a4af3a10fe205c413c7e693 MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl 7f8f4daa4f4f2dbf24cdd534b2952ee3fba6334eb42b37465ccda3aa1cccc3d6204aa6bfffb8a83bf42ec59c702b5b5247d4c8ee0d4df906334ae53072ef8c4c MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
4a20aeb52d4fde6aabcba05ee261595eeb5482c72ee27332690f34dd6e7a49c0b3ba3813202ac15c9d21e29f1cd803f2e79ccc1c45ec314fcd0a937016bcbc56 mutagen-1.46.0-py3-none-any.whl 4a20aeb52d4fde6aabcba05ee261595eeb5482c72ee27332690f34dd6e7a49c0b3ba3813202ac15c9d21e29f1cd803f2e79ccc1c45ec314fcd0a937016bcbc56 mutagen-1.46.0-py3-none-any.whl
78414808cb9a5fa74e7b23360b8f46147952530e3cc78a3ad4b80be3e26598080537ac691a1be1f35b7428a22c1f65a6adf45986da2752fbe9d9819d77a58bf8 Pillow-9.5.0-cp311-cp311-win_amd64.whl 78414808cb9a5fa74e7b23360b8f46147952530e3cc78a3ad4b80be3e26598080537ac691a1be1f35b7428a22c1f65a6adf45986da2752fbe9d9819d77a58bf8 Pillow-9.5.0-cp311-cp311-win_amd64.whl
4b7711b950858f459d47145b88ccde659279c6af47144d58a1c54ea2ce4b80ec43eb7f69c68f12f8f6bc54c86a44e77441993257f7ad43aab364655de5c51bb1 python-3.11.2-amd64.exe a48ee8992eee60a0d620dced71b9f96596f5dd510e3024015aca55884cdb3f9e2405734bfc13f3f40b79106a77bc442cce02ac4c8f5d16207448052b368fd52a python-3.11.4-amd64.exe

View File

@@ -13,7 +13,7 @@ https://pypi.org/project/MarkupSafe/#files
https://pypi.org/project/mutagen/#files https://pypi.org/project/mutagen/#files
https://pypi.org/project/Pillow/#files https://pypi.org/project/Pillow/#files
# up2k (win7) additionals # u2c (win7) additionals
https://pypi.org/project/certifi/#files https://pypi.org/project/certifi/#files
https://pypi.org/project/charset-normalizer/#files # cp37-cp37m-win32.whl https://pypi.org/project/charset-normalizer/#files # cp37-cp37m-win32.whl
https://pypi.org/project/idna/#files https://pypi.org/project/idna/#files

View File

@@ -18,9 +18,9 @@ VSVersionInfo(
[StringStruct('CompanyName', 'ocv.me'), [StringStruct('CompanyName', 'ocv.me'),
StringStruct('FileDescription', 'copyparty uploader / filesearch command'), StringStruct('FileDescription', 'copyparty uploader / filesearch command'),
StringStruct('FileVersion', '1.2.3'), StringStruct('FileVersion', '1.2.3'),
StringStruct('InternalName', 'up2k'), StringStruct('InternalName', 'u2c'),
StringStruct('LegalCopyright', '2019, ed'), StringStruct('LegalCopyright', '2019, ed'),
StringStruct('OriginalFilename', 'up2k.exe'), StringStruct('OriginalFilename', 'u2c.exe'),
StringStruct('ProductName', 'copyparty up2k client'), StringStruct('ProductName', 'copyparty up2k client'),
StringStruct('ProductVersion', '1.2.3')]) StringStruct('ProductVersion', '1.2.3')])
]), ]),

View File

@@ -14,7 +14,7 @@ uname -s | grep -E 'WOW64|NT-10' && echo need win7-32 && exit 1
dl() { curl -fkLO "$1"; } dl() { curl -fkLO "$1"; }
cd ~/Downloads cd ~/Downloads
dl https://192.168.123.1:3923/cpp/bin/up2k.py dl https://192.168.123.1:3923/cpp/bin/u2c.py
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.ico dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.ico
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.rc dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.rc
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.spec dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.spec
@@ -37,12 +37,12 @@ grep -E '^from .ssl_ import' $APPDATA/python/python37/site-packages/urllib3/util
echo golfed echo golfed
} }
read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < up2k.py) read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < u2c.py)
sed -r 's/1,2,3,0/'$a,$b,0,0'/;s/1\.2\.3/'$a.$b.0/ <up2k.rc >up2k.rc2 sed -r 's/1,2,3,0/'$a,$b,0,0'/;s/1\.2\.3/'$a.$b.0/ <up2k.rc >up2k.rc2
#python uncomment.py up2k.py #python uncomment.py u2c.py
$APPDATA/python/python37/scripts/pyinstaller -y --clean --upx-dir=. up2k.spec $APPDATA/python/python37/scripts/pyinstaller -y --clean --upx-dir=. up2k.spec
./dist/up2k.exe --version ./dist/u2c.exe --version
curl -fkT dist/up2k.exe -HPW:wark https://192.168.123.1:3923/ curl -fkT dist/u2c.exe -HPW:wark https://192.168.123.1:3923/

View File

@@ -5,7 +5,7 @@ block_cipher = None
a = Analysis( a = Analysis(
['up2k.py'], ['u2c.py'],
pathex=[], pathex=[],
binaries=[], binaries=[],
datas=[], datas=[],
@@ -49,7 +49,7 @@ a = Analysis(
# this is the only change to the autogenerated specfile: # this is the only change to the autogenerated specfile:
xdll = ["libcrypto-1_1.dll"] xdll = ["libcrypto-1_1.dll"]
a.binaries = TOC([x for x in a.binaries if x[0] not in xdll]) a.binaries = [x for x in a.binaries if x[0] not in xdll]
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
@@ -60,7 +60,7 @@ exe = EXE(
a.zipfiles, a.zipfiles,
a.datas, a.datas,
[], [],
name='up2k', name='u2c',
debug=False, debug=False,
bootloader_ignore_signals=False, bootloader_ignore_signals=False,
strip=False, strip=False,

View File

@@ -11,4 +11,4 @@ ex=(
encodings.{zlib_codec,base64_codec,bz2_codec,charmap,hex_codec,palmos,punycode,rot_13} encodings.{zlib_codec,base64_codec,bz2_codec,charmap,hex_codec,palmos,punycode,rot_13}
); );
cex=(); for a in "${ex[@]}"; do cex+=(--exclude "$a"); done cex=(); for a in "${ex[@]}"; do cex+=(--exclude "$a"); done
$APPDATA/python/python37/scripts/pyi-makespec --version-file up2k.rc2 -i up2k.ico -n up2k -c -F up2k.py "${cex[@]}" $APPDATA/python/python37/scripts/pyi-makespec --version-file up2k.rc2 -i up2k.ico -n u2c -c -F u2c.py "${cex[@]}"

View File

@@ -1,7 +1,7 @@
#!/bin/bash #!/bin/bash
set -e set -e
parallel=2 parallel=1
[ -e make-sfx.sh ] || cd scripts [ -e make-sfx.sh ] || cd scripts
[ -e make-sfx.sh ] && [ -e deps-docker ] || { [ -e make-sfx.sh ] && [ -e deps-docker ] || {

View File

@@ -11,6 +11,7 @@ copyparty/broker_mp.py,
copyparty/broker_mpw.py, copyparty/broker_mpw.py,
copyparty/broker_thr.py, copyparty/broker_thr.py,
copyparty/broker_util.py, copyparty/broker_util.py,
copyparty/cert.py,
copyparty/cfg.py, copyparty/cfg.py,
copyparty/dxml.py, copyparty/dxml.py,
copyparty/fsutil.py, copyparty/fsutil.py,
@@ -22,7 +23,9 @@ copyparty/ico.py,
copyparty/mdns.py, copyparty/mdns.py,
copyparty/mtag.py, copyparty/mtag.py,
copyparty/multicast.py, copyparty/multicast.py,
copyparty/pwhash.py,
copyparty/res, copyparty/res,
copyparty/res/__init__.py,
copyparty/res/COPYING.txt, copyparty/res/COPYING.txt,
copyparty/res/insecure.pem, copyparty/res/insecure.pem,
copyparty/smbd.py, copyparty/smbd.py,
@@ -62,7 +65,7 @@ copyparty/web,
copyparty/web/a, copyparty/web/a,
copyparty/web/a/__init__.py, copyparty/web/a/__init__.py,
copyparty/web/a/partyfuse.py, copyparty/web/a/partyfuse.py,
copyparty/web/a/up2k.py, copyparty/web/a/u2c.py,
copyparty/web/a/webdav-cfg.bat, copyparty/web/a/webdav-cfg.bat,
copyparty/web/baguettebox.js, copyparty/web/baguettebox.js,
copyparty/web/browser.css, copyparty/web/browser.css,

View File

@@ -16,6 +16,8 @@ cat $f | awk '
h=0 h=0
}; };
}; };
/```/{o=!o}
o{next}
/^#/{s=1;rs=0;pr()} /^#/{s=1;rs=0;pr()}
/^#* *(nix package)/{rs=1} /^#* *(nix package)/{rs=1}
/^#* *(install on android|dev env setup|just the sfx|complete release|optional gpl stuff|nixos module)|`$/{s=rs} /^#* *(install on android|dev env setup|just the sfx|complete release|optional gpl stuff|nixos module)|`$/{s=rs}

View File

@@ -7,6 +7,11 @@ import sys
from shutil import rmtree from shutil import rmtree
from setuptools import setup, Command from setuptools import setup, Command
_ = """
this probably still works but is no longer in use;
pyproject.toml and scripts/make-pypi-release.sh
are in charge of packaging wheels now
"""
NAME = "copyparty" NAME = "copyparty"
VERSION = None VERSION = None
@@ -135,9 +140,10 @@ args = {
"audiotags": ["mutagen"], "audiotags": ["mutagen"],
"ftpd": ["pyftpdlib"], "ftpd": ["pyftpdlib"],
"ftps": ["pyftpdlib", "pyopenssl"], "ftps": ["pyftpdlib", "pyopenssl"],
"pwhash": ["argon2-cffi"],
}, },
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]}, "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/partyfuse.py", "bin/up2k.py"], "scripts": ["bin/partyfuse.py", "bin/u2c.py"],
"cmdclass": {"clean2": clean2}, "cmdclass": {"clean2": clean2},
} }

View File

@@ -122,7 +122,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames() tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except: except:
tar = [] tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar] tar = [x.split("/", 1)[1] for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar] tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar] tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]] tar_ok = [x[0] for x in tar if x[1]]

View File

@@ -98,7 +98,7 @@ class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None): def __init__(self, a=None, v=None, c=None):
ka = {} ka = {}
ex = "daw dav_inf dav_mac dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol" ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand smb vc xdev xlink xvol"
ka.update(**{k: False for k in ex.split()}) ka.update(**{k: False for k in ex.split()})
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip" ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
@@ -107,10 +107,13 @@ class Cfg(Namespace):
ex = "css_browser hist js_browser no_forget no_hash no_idx" ex = "css_browser hist js_browser no_forget no_hash no_idx"
ka.update(**{k: None for k in ex.split()}) ka.update(**{k: None for k in ex.split()})
ex = "s_thead s_tbody"
ka.update(**{k: 9 for k in ex.split()})
ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo" ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
ka.update(**{k: 0 for k in ex.split()}) ka.update(**{k: 0 for k in ex.split()})
ex = "doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles R RS SR" ex = "ah_alg doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles unlist R RS SR"
ka.update(**{k: "" for k in ex.split()}) ka.update(**{k: "" for k in ex.split()})
ex = "xad xar xau xbd xbr xbu xiu xm" ex = "xad xar xau xbd xbr xbu xiu xm"