Compare commits
125 Commits
| SHA1 |
|---|
| e4acddc23b |
| 2b2d8e4e02 |
| 5501d49032 |
| fa54b2eec4 |
| cb0160021f |
| 93a723d588 |
| 8ebe1fb5e8 |
| 2acdf685b1 |
| 9f122ccd16 |
| 03be26fafc |
| df5d309d6e |
| c355f9bd91 |
| 9c28ba417e |
| 705b58c741 |
| 510302d667 |
| 025a537413 |
| 60a1ff0fc0 |
| f94a0b1bff |
| 4ccfeeb2cd |
| 2646f6a4f2 |
| b286ab539e |
| 2cca6e0922 |
| db51f1b063 |
| d979c47f50 |
| e64b87b99b |
| b985011a00 |
| c2ed2314c8 |
| cd496658c3 |
| deca082623 |
| 0ea8bb7c83 |
| 1fb251a4c2 |
| 4295923b76 |
| 572aa4b26c |
| b1359f039f |
| 867d8ee49e |
| 04c86e8a89 |
| bc0cb43ef9 |
| 769454fdce |
| 4ee81af8f6 |
| 8b0e66122f |
| 8a98efb929 |
| b6fd555038 |
| 7eb413ad51 |
| 4421d509eb |
| 793ffd7b01 |
| 1e22222c60 |
| 544e0549bc |
| 83178d0836 |
| c44f5f5701 |
| 138f5bc989 |
| e4759f86ef |
| d71416437a |
| a84c583b2c |
| cdacdccdb8 |
| d3ccd3f174 |
| cb6de0387d |
| abff40519d |
| 55c74ad164 |
| 673b4f7e23 |
| d11e02da49 |
| 8790f89e08 |
| 33442026b8 |
| 03193de6d0 |
| 8675ff40f3 |
| d88889d3fc |
| 6f244d4335 |
| cacca663b3 |
| d5109be559 |
| d999f06bb9 |
| a1a8a8c7b5 |
| fdd6f3b4a6 |
| f5191973df |
| ddbaebe779 |
| 42099baeff |
| 2459965ca8 |
| 6acf436573 |
| f217e1ce71 |
| 418000aee3 |
| dbbba9625b |
| 397bc92fbc |
| 6e615dcd03 |
| 9ac5908b33 |
| 50912480b9 |
| 24b9b8319d |
| b0f4f0b653 |
| 05bbd41c4b |
| 8f5f8a3cda |
| c8938fc033 |
| 1550350e05 |
| 5cc190c026 |
| d6a0a738ce |
| f5fe3678ee |
| f2a7925387 |
| fa953ced52 |
| f0000d9861 |
| 4e67516719 |
| 29db7a6270 |
| 852499e296 |
| f1775fd51c |
| 4bb306932a |
| 2a37e81bd8 |
| 6a312ca856 |
| e7f3e475a2 |
| 854ba0ec06 |
| 209b49d771 |
| 949baae539 |
| 5f4ea27586 |
| 099cc97247 |
| 592b7d6315 |
| 0880bf55a1 |
| 4cbffec0ec |
| cc355417d4 |
| e2bc573e61 |
| 41c0376177 |
| c01cad091e |
| eb349f339c |
| 24d8caaf3e |
| 5ac2c20959 |
| bb72e6bf30 |
| d8142e866a |
| 7b7979fd61 |
| 749616d09d |
| 5485c6d7ca |
| b7aea38d77 |
| 0ecd9f99e6 |
.github/pull_request_template.md (vendored): 2 lines changed
@@ -1,2 +1,2 @@
-Please include the following text somewhere in this PR description:
+To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
 This PR complies with the DCO; https://developercertificate.org/
.gitignore (vendored): 7 lines changed
@@ -21,6 +21,9 @@ copyparty.egg-info/
|
||||
# winmerge
|
||||
*.bak
|
||||
|
||||
# apple pls
|
||||
.DS_Store
|
||||
|
||||
# derived
|
||||
copyparty/res/COPYING.txt
|
||||
copyparty/web/deps/
|
||||
@@ -34,3 +37,7 @@ up.*.txt
|
||||
.hist/
|
||||
scripts/docker/*.out
|
||||
scripts/docker/*.err
|
||||
/perf.*
|
||||
|
||||
# nix build output link
|
||||
result
|
||||
|
||||
.vscode/launch.py (vendored): 10 lines changed
@@ -30,9 +30,17 @@ except:
|
||||
|
||||
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
|
||||
|
||||
sfx = ""
|
||||
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
|
||||
sfx = sys.argv[1]
|
||||
sys.argv = [sys.argv[0]] + sys.argv[2:]
|
||||
|
||||
argv += sys.argv[1:]
|
||||
|
||||
if re.search(" -j ?[0-9]", " ".join(argv)):
|
||||
if sfx:
|
||||
argv = [sys.executable, sfx] + argv
|
||||
sp.check_call(argv)
|
||||
elif re.search(" -j ?[0-9]", " ".join(argv)):
|
||||
argv = [sys.executable, "-m", "copyparty"] + argv
|
||||
sp.check_call(argv)
|
||||
else:
|
||||
|
||||
.vscode/settings.json (vendored): 32 lines changed
@@ -35,34 +35,18 @@
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.banditEnabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"python.linting.mypyArgs": [
|
||||
"--ignore-missing-imports",
|
||||
"--follow-imports=silent",
|
||||
"--show-column-numbers",
|
||||
"--strict"
|
||||
],
|
||||
"python.linting.flake8Args": [
|
||||
"--max-line-length=120",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
|
||||
],
|
||||
"python.linting.banditArgs": [
|
||||
"--ignore=B104"
|
||||
],
|
||||
"python.linting.pylintArgs": [
|
||||
"--disable=missing-module-docstring",
|
||||
"--disable=missing-class-docstring",
|
||||
"--disable=missing-function-docstring",
|
||||
"--disable=import-outside-toplevel",
|
||||
"--disable=wrong-import-position",
|
||||
"--disable=raise-missing-from",
|
||||
"--disable=bare-except",
|
||||
"--disable=broad-except",
|
||||
"--disable=invalid-name",
|
||||
"--disable=line-too-long",
|
||||
"--disable=consider-using-f-string"
|
||||
"--ignore=B104,B110,B112"
|
||||
],
|
||||
// python3 -m isort --py=27 --profile=black copyparty/
|
||||
"python.formatting.provider": "black",
|
||||
"python.formatting.provider": "none",
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
},
|
||||
"editor.formatOnSave": true,
|
||||
"[html]": {
|
||||
"editor.formatOnSave": false,
|
||||
@@ -74,10 +58,6 @@
|
||||
"files.associations": {
|
||||
"*.makefile": "makefile"
|
||||
},
|
||||
"python.formatting.blackArgs": [
|
||||
"-t",
|
||||
"py27"
|
||||
],
|
||||
"python.linting.enabled": true,
|
||||
"python.pythonPath": "/usr/bin/python3"
|
||||
}
|
||||
README.md: 349 lines changed
@@ -1,29 +1,16 @@
|
||||
# 💾🎉 copyparty
|
||||
|
||||
* portable file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
|
||||
* MIT-Licensed, 2019-05-26, ed @ irc.rizon.net
|
||||
turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser
|
||||
|
||||
* server only needs Python (2 or 3), all dependencies optional
|
||||
* 🔌 protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
|
||||
* 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)
|
||||
|
||||
## summary
|
||||
|
||||
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
|
||||
|
||||
* server only needs Python (`2.7` or `3.3+`), all dependencies optional
|
||||
* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
|
||||
* protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
|
||||
|
||||
**[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
|
||||
👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
|
||||
|
||||
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
|
||||
|
||||
|
||||
## get the app
|
||||
|
||||
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
|
||||
|
||||
(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)
|
||||
|
||||
|
||||
## readme toc
|
||||
|
||||
* top
|
||||
@@ -52,6 +39,9 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [self-destruct](#self-destruct) - uploads can be given a lifetime
|
||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||
* [media player](#media-player) - plays almost every audio format there is
|
||||
* [audio equalizer](#audio-equalizer) - bass boosted
|
||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
||||
* [other tricks](#other-tricks)
|
||||
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
|
||||
@@ -80,18 +70,26 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [themes](#themes)
|
||||
* [complete examples](#complete-examples)
|
||||
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
|
||||
* [packages](#packages) - the party might be closer than you think
|
||||
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
|
||||
* [nixos module](#nixos-module)
|
||||
* [browser support](#browser-support) - TLDR: yes
|
||||
* [client examples](#client-examples) - interact with copyparty using non-browser clients
|
||||
* [folder sync](#folder-sync) - sync folders to/from copyparty
|
||||
* [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem
|
||||
* [android app](#android-app) - upload to copyparty with one tap
|
||||
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
|
||||
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
* [client-side](#client-side) - when uploading files
|
||||
* [security](#security) - some notes on hardening
|
||||
* [gotchas](#gotchas) - behavior that might be unexpected
|
||||
* [cors](#cors) - cross-site request config
|
||||
* [https](#https) - both HTTP and HTTPS are accepted
|
||||
* [recovering from crashes](#recovering-from-crashes)
|
||||
* [client crashes](#client-crashes)
|
||||
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api)
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](./docs/devnotes.md#http-api)
|
||||
* [dependencies](#dependencies) - mandatory deps
|
||||
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
||||
* [optional gpl stuff](#optional-gpl-stuff)
|
||||
@@ -106,8 +104,9 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
|
||||
just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
|
||||
|
||||
* or install through pypi (python3 only): `python3 -m pip install --user -U copyparty`
|
||||
* or install through pypi: `python3 -m pip install --user -U copyparty`
|
||||
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
||||
* or install [on arch](#arch-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
|
||||
* or if you are on android, [install copyparty in termux](#install-on-android)
|
||||
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
|
||||
* docker has all deps built-in, so skip this step:
|
||||
@@ -127,6 +126,8 @@ enable thumbnails (images/audio/video), media indexing, and audio transcoding by
|
||||
|
||||
running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
|
||||
|
||||
or see [some usage examples](#complete-examples) for inspiration, or the [complete windows example](./docs/examples/windows.md)
|
||||
|
||||
some recommended options:
|
||||
* `-e2dsa` enables general [file indexing](#file-indexing)
|
||||
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen)
|
||||
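As a rough sketch of how these recommended options combine on one command line (the account name, password, and music path below are placeholders, not taken from the diff above):

```sh
# index files and tags, share ~/music read-only, and give one account write access
python3 copyparty-sfx.py -e2dsa -e2ts -a ed:hunter2 -v ~/music:music:r:rw,ed
```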
@@ -139,10 +140,11 @@ some recommended options:
|
||||
|
||||
you may also want these, especially on servers:
|
||||
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
|
||||
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
|
||||
|
||||
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
||||
```
|
||||
@@ -177,7 +179,7 @@ firewall-cmd --reload
|
||||
* ☑ write-only folders
|
||||
* ☑ [unpost](#unpost): undo/delete accidental uploads
|
||||
* ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
|
||||
* ☑ symlink/discard existing files (content-matching)
|
||||
* ☑ symlink/discard duplicates (content-matching)
|
||||
* download
|
||||
* ☑ single files in browser
|
||||
* ☑ [folders as zip / tar files](#zip-downloads)
|
||||
@@ -199,7 +201,7 @@ firewall-cmd --reload
|
||||
* ☑ search by name/path/date/size
|
||||
* ☑ [search by ID3-tags etc.](#searching)
|
||||
* client support
|
||||
* ☑ [sync folder to server](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy)
|
||||
* ☑ [folder sync](#folder-sync)
|
||||
* ☑ [curl-friendly](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png)
|
||||
* markdown
|
||||
* ☑ [viewer](#markdown-viewer)
|
||||
@@ -276,9 +278,11 @@ server notes:
|
||||
|
||||
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
|
||||
|
||||
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
|
||||
|
||||
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
|
||||
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
|
||||
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
|
||||
* "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
|
||||
|
||||
* Windows: folders cannot be accessed if the name ends with `.`
|
||||
* python or windows bug
|
||||
@@ -301,7 +305,7 @@ upgrade notes
|
||||
* http-api: delete/move is now `POST` instead of `GET`
|
||||
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
||||
* `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB
|
||||
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) if you use that
|
||||
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) if you use that
|
||||
* **devs:** update third-party up2k clients (if those even exist)
|
||||
|
||||
|
||||
@@ -467,6 +471,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (
|
||||
## thumbnails
|
||||
|
||||
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
|
||||
* can be made default globally with `--grid` or per-volume with volflag `grid`
|
||||
|
||||

|
||||
|
||||
@@ -477,6 +482,7 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
|
||||
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||
|
||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
|
||||
|
||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||
* indicated by the audio files having the ▶ icon instead of 💾
|
||||
@@ -508,7 +514,7 @@ you can also zip a selection of files or folders by clicking them in the browser
|
||||
|
||||
## uploading
|
||||
|
||||
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy))
|
||||
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy))
|
||||
|
||||
this initiates an upload using `up2k`; there are two uploaders available:
|
||||
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
|
||||
@@ -658,12 +664,68 @@ or a mix of both:
|
||||
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
|
||||
|
||||
|
||||
## media player
|
||||
|
||||
plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
|
||||
|
||||
the following audio formats are usually always playable, even without FFmpeg: `aac|flac|m4a|mp3|ogg|opus|wav`
|
||||
|
||||
some highlights:
|
||||
* OS integration; control playback from your phone's lockscreen ([windows](https://user-images.githubusercontent.com/241032/233213022-298a98ba-721a-4cf1-a3d4-f62634bc53d5.png) // [iOS](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) // [android](https://user-images.githubusercontent.com/241032/233212311-a7368590-08c7-4f9f-a1af-48ccf3f36fad.png))
|
||||
* shows the audio waveform in the seekbar
|
||||
* not perfectly gapless but can get really close (see settings + eq below); good enough to enjoy gapless albums as intended
|
||||
|
||||
click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)
|
||||
|
||||
open the `[🎺]` media-player-settings tab to configure it,
|
||||
* switches:
|
||||
* `[preload]` starts loading the next track when it's about to end, reduces the silence between songs
|
||||
* `[full]` does a full preload by downloading the entire next file; good for unreliable connections, bad for slow connections
|
||||
* `[~s]` toggles the seekbar waveform display
|
||||
* `[/np]` enables buttons to copy the now-playing info as an irc message
|
||||
* `[os-ctl]` makes it possible to control audio playback from the lockscreen of your device (enables [mediasession](https://developer.mozilla.org/en-US/docs/Web/API/MediaSession))
|
||||
* `[seek]` allows seeking with lockscreen controls (buggy on some devices)
|
||||
* `[art]` shows album art on the lockscreen
|
||||
* `[🎯]` keeps the playing song scrolled into view (good when using the player as a taskbar dock)
|
||||
* `[⟎]` shrinks the playback controls
|
||||
* playback mode:
|
||||
* `[loop]` keeps looping the folder
|
||||
* `[next]` plays into the next folder
|
||||
* transcode:
|
||||
* `[flac]` converts `flac` and `wav` files into opus
|
||||
* `[aac]` converts `aac` and `m4a` files into opus
|
||||
* `[oth]` converts all other known formats into opus
|
||||
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
|
||||
* "tint" reduces the contrast of the playback bar
|
||||
|
||||
|
||||
### audio equalizer
|
||||
|
||||
bass boosted
|
||||
|
||||
can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)
|
||||
|
||||
has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)
|
||||
|
||||
|
||||
### fix unreliable playback on android
|
||||
|
||||
due to phone / app settings, android phones may randomly stop playing music when the power saver kicks in, especially at the end of an album -- you can fix it by [disabling power saving](https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png) in the [app settings](https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png) of the browser you use for music streaming (preferably a dedicated one)
|
||||
|
||||
|
||||
## markdown viewer
|
||||
|
||||
and there are *two* editors
|
||||
|
||||

|
||||
|
||||
there is a built-in extension for inline clickable thumbnails;
|
||||
* enable it by adding `<!-- th -->` somewhere in the doc
|
||||
* add thumbnails with `!th[l](your.jpg)` where `l` means left-align (`r` = right-align)
|
||||
* a single line with `---` clears the float / inlining
|
||||
* in the case of README.md being displayed below a file listing, thumbnails will open in the gallery viewer
|
||||
|
||||
other notes,
|
||||
* the document preview has a max-width which is the same as an A4 paper when printed
|
||||
|
||||
|
||||
@@ -768,6 +830,13 @@ an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit T
|
||||
* some older software (filezilla on debian-stable) cannot passive-mode with TLS
|
||||
* login with any username + your password, or put your password in the username field
|
||||
|
||||
some recommended FTP / FTPS clients; `wark` = example password:
|
||||
* https://winscp.net/eng/download.php
|
||||
* https://filezilla-project.org/ struggles a bit with ftps in active-mode, but is fine otherwise
|
||||
* https://rclone.org/ does FTPS with `tls=false explicit_tls=true`
|
||||
* `lftp -u k,wark -p 3921 127.0.0.1 -e ls`
|
||||
* `lftp -u k,wark -p 3990 127.0.0.1 -e 'set ssl:verify-certificate no; ls'`
|
||||
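Building on the `lftp` lines above, an upload might look roughly like this; password `wark` and port 3921 are the same example values used above, and the target folder `inc` is a placeholder:

```sh
# upload a file over plain FTP, then disconnect
lftp -u k,wark -p 3921 127.0.0.1 -e 'cd inc; put song.flac; bye'
```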
|
||||
|
||||
## webdav server
|
||||
|
||||
@@ -862,14 +931,13 @@ through arguments:
|
||||
* `--xlink` enables deduplication across volumes
|
||||
|
||||
the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
|
||||
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
|
||||
* `-v ~/music::r:c,e2ds,e2tsr` does a full reindex of everything on startup
|
||||
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
|
||||
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
|
||||
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
|
||||
* `-v ~/music::r:c,d2ts` same except only affecting tags
|
||||
|
||||
note:
|
||||
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
|
||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
||||
@@ -891,7 +959,11 @@ avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, ski
|
||||
|
||||
and/or you can `--xvol` / `:c,xvol` to ignore all symlinks leaving the volume's top directory, but still allow bind-mounts pointing elsewhere
|
||||
|
||||
**NB: only affects the indexer** -- users can still access anything inside a volume, unless shadowed by another volume
|
||||
* symlinks are permitted with `xvol` if they point into another volume where the user has the same level of access
|
||||
|
||||
these options will reduce performance; unlikely worst-case estimates are 14% reduction for directory listings, 35% for download-as-tar
|
||||
|
||||
as of copyparty v1.7.0 these options also prevent file access at runtime -- in previous versions it was just hints for the indexer
|
||||
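A sketch of those flags in context; the NAS path is a placeholder:

```sh
# index the NAS share but never follow mounts or symlinks that leave it
python3 copyparty-sfx.py -e2dsa --xdev --xvol -v /mnt/nas:nas:r
```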
|
||||
### periodic rescan
|
||||
|
||||
@@ -1087,7 +1159,33 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
|
||||
## complete examples
|
||||
|
||||
* read-only music server
|
||||
* see [running on windows](./docs/examples/windows.md) for a fancy windows setup
|
||||
|
||||
* or use any of the examples below, just replace `python copyparty-sfx.py` with `copyparty.exe` if you're using the exe edition
|
||||
|
||||
* allow anyone to download or upload files into the current folder:
|
||||
`python copyparty-sfx.py`
|
||||
|
||||
* enable searching and music indexing with `-e2dsa -e2ts`
|
||||
|
||||
* start an FTP server on port 3921 with `--ftp 3921`
|
||||
|
||||
* announce it on your LAN with `-z` so it appears in windows/Linux file managers
|
||||
|
||||
* anyone can upload, but nobody can see any files (even the uploader):
|
||||
`python copyparty-sfx.py -e2dsa -v .::w`
|
||||
|
||||
* block uploads if there's less than 4 GiB free disk space with `--df 4`
|
||||
|
||||
* show a popup on new uploads with `--xau bin/hooks/notify.py`
|
||||
|
||||
* anyone can upload, and receive "secret" links for each upload they do:
|
||||
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
|
||||
|
||||
* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
|
||||
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
|
||||
|
||||
* read-only music server:
|
||||
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
|
||||
|
||||
* ...with bpm and key scanning
|
||||
@@ -1102,19 +1200,135 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
|
||||
## reverse-proxy
|
||||
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx or apache
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx, caddy, or apache
|
||||
|
||||
you can either:
|
||||
* give copyparty its own domain or subdomain (recommended)
|
||||
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
|
||||
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
|
||||
|
||||
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
|
||||
|
||||
example webserver configs:
|
||||
|
||||
* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
|
||||
|
||||
|
||||
# packages
|
||||
|
||||
the party might be closer than you think
|
||||
|
||||
|
||||
## arch package
|
||||
|
||||
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
|
||||
|
||||
## nix package
|
||||
|
||||
`nix profile install github:9001/copyparty`
|
||||
|
||||
requires a [flake-enabled](https://nixos.wiki/wiki/Flakes) installation of nix
|
||||
|
||||
some recommended dependencies are enabled by default; [override the package](https://github.com/9001/copyparty/blob/hovudstraum/contrib/package/nix/copyparty/default.nix#L3-L22) if you want to add/remove some features/deps
|
||||
|
||||
`ffmpeg-full` was chosen over `ffmpeg-headless` mainly because we need `withWebp` (and `withOpenmpt` is also nice) and being able to use a cached build felt more important than optimizing for size at the time -- PRs welcome if you disagree 👍
|
||||
|
||||
|
||||
## nixos module
|
||||
|
||||
for this setup, you will need a [flake-enabled](https://nixos.wiki/wiki/Flakes) installation of NixOS.
|
||||
|
||||
```nix
|
||||
{
|
||||
# add copyparty flake to your inputs
|
||||
inputs.copyparty.url = "github:9001/copyparty";
|
||||
|
||||
# ensure that copyparty is an allowed argument to the outputs function
|
||||
outputs = { self, nixpkgs, copyparty }: {
|
||||
nixosConfigurations.yourHostName = nixpkgs.lib.nixosSystem {
|
||||
modules = [
|
||||
# load the copyparty NixOS module
|
||||
copyparty.nixosModules.default
|
||||
({ pkgs, ... }: {
|
||||
# add the copyparty overlay to expose the package to the module
|
||||
nixpkgs.overlays = [ copyparty.overlays.default ];
|
||||
# (optional) install the package globally
|
||||
environment.systemPackages = [ pkgs.copyparty ];
|
||||
# configure the copyparty module
|
||||
services.copyparty.enable = true;
|
||||
})
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
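With the flake wired up as above, applying the configuration is the usual flake-based rebuild; `yourHostName` is the attribute name from the example:

```sh
# build and activate the NixOS configuration that includes the copyparty module
sudo nixos-rebuild switch --flake .#yourHostName
```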
|
||||
copyparty on NixOS is configured via `services.copyparty` options, for example:
|
||||
```nix
|
||||
services.copyparty = {
|
||||
enable = true;
|
||||
# directly maps to values in the [global] section of the copyparty config.
|
||||
# see `copyparty --help` for available options
|
||||
settings = {
|
||||
i = "0.0.0.0";
|
||||
# use lists to set multiple values
|
||||
p = [ 3210 3211 ];
|
||||
# use booleans to set binary flags
|
||||
no-reload = true;
|
||||
# using 'false' will do nothing and omit the value when generating a config
|
||||
ignored-flag = false;
|
||||
};
|
||||
|
||||
# create users
|
||||
accounts = {
|
||||
# specify the account name as the key
|
||||
ed = {
|
||||
# provide the path to a file containing the password, keeping it out of /nix/store
|
||||
# must be readable by the copyparty service user
|
||||
passwordFile = "/run/keys/copyparty/ed_password";
|
||||
};
|
||||
# or do both in one go
|
||||
k.passwordFile = "/run/keys/copyparty/k_password";
|
||||
};
|
||||
|
||||
# create a volume
|
||||
volumes = {
|
||||
# create a volume at "/" (the webroot), which will
|
||||
"/" = {
|
||||
# share the contents of "/srv/copyparty"
|
||||
path = "/srv/copyparty";
|
||||
# see `copyparty --help-accounts` for available options
|
||||
access = {
|
||||
# everyone gets read-access, but
|
||||
r = "*";
|
||||
# users "ed" and "k" get read-write
|
||||
rw = [ "ed" "k" ];
|
||||
};
|
||||
# see `copyparty --help-flags` for available options
|
||||
flags = {
|
||||
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
|
||||
fk = 4;
|
||||
# scan for new files every 60sec
|
||||
scan = 60;
|
||||
# volflag "e2d" enables the uploads database
|
||||
e2d = true;
|
||||
# "d2t" disables multimedia parsers (in case the uploads are malicious)
|
||||
d2t = true;
|
||||
# skips hashing file contents if path matches *.iso
|
||||
nohash = "\.iso$";
|
||||
};
|
||||
};
|
||||
};
|
||||
# you may increase the open file limit for the process
|
||||
openFilesLimit = 8192;
|
||||
};
|
||||
```
|
||||
|
||||
the passwordFile at /run/keys/copyparty/ could for example be generated by [agenix](https://github.com/ryantm/agenix), or you could just dump it in the nix store instead if that's acceptable
|
||||
|
||||
|
||||
# browser support
|
||||
|
||||
TLDR: yes
|
||||
@@ -1185,10 +1399,10 @@ interact with copyparty using non-browser clients
|
||||
* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
|
||||
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
|
||||
|
||||
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, folder sync, autoresume of aborted/broken uploads
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py)
|
||||
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
|
||||
* python: [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, [folder sync](#folder-sync), autoresume of aborted/broken uploads
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [u2c.py](http://127.0.0.1:3923/.cpr/a/u2c.py)
|
||||
* see [./bin/README.md#u2cpy](bin/README.md#u2cpy)
|
||||
|
||||
* FUSE: mount a copyparty server as a local filesystem
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
@@ -1207,17 +1421,27 @@ you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, ur
|
||||
NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
|
||||
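Putting those notes together, a curl upload that keeps the original filename and authenticates with the `PW` header might look like this (URL, folder, and password are placeholders):

```sh
# trailing slash = keep the local filename; the PW header carries the password
curl -T movie.mkv -H 'PW: hunter2' http://127.0.0.1:3923/inc/
```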
|
||||
|
||||
## folder sync
|
||||
|
||||
sync folders to/from copyparty
|
||||
|
||||
the commandline uploader [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) with `--dr` is the best way to sync a folder to copyparty; verifies checksums and does files in parallel, and deletes unexpected files on the server after upload has finished which makes file-renames really cheap (it'll rename serverside and skip uploading)
|
||||
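A hedged sketch of such a sync run; the server URL and local folder are placeholders, and `--dr` is the delete-remote option described above:

```sh
# mirror ./Music into the /music volume, deleting server-side files that no longer exist locally
python3 u2c.py --dr http://127.0.0.1:3923/music/ ./Music/
```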
|
||||
alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare
|
||||
|
||||
* starting from rclone v1.63 (currently [in beta](https://beta.rclone.org/?filter=latest)), rclone will also be faster than u2c.py
|
||||
|
||||
|
||||
## mount as drive
|
||||
|
||||
a remote copyparty server as a local filesystem; go to the control-panel and click `connect` to see a list of commands to do that
|
||||
|
||||
alternatively, some alternatives roughly sorted by speed (unreproducible benchmark), best first:
|
||||
|
||||
* [rclone-http](./docs/rclone.md) (25s), read-only
|
||||
* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE ([v1.63-beta](https://beta.rclone.org/?filter=latest))
|
||||
* [rclone-http](./docs/rclone.md) (26s), read-only
|
||||
* [partyfuse.py](./bin/#partyfusepy) (35s), read-only
|
||||
* [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
|
||||
* [rclone-webdav](./docs/rclone.md) (51s), read/WRITE
|
||||
* copyparty-1.5.0's webdav server is faster than rclone-1.60.0 (69s)
|
||||
* [partyfuse.py](./bin/#partyfusepy) (71s), read-only
|
||||
* davfs2 (103s), read/WRITE, *very fast* on small files
|
||||
* [win10-webdav](#webdav-server) (138s), read/WRITE
|
||||
* [win10-smb2](#smb-server) (387s), read/WRITE
|
||||
@@ -1225,6 +1449,27 @@ alternatively, some alternatives roughly sorted by speed (unreproducible benchma
|
||||
most clients will fail to mount the root of a copyparty server unless there is a root volume (so you get the admin-panel instead of a browser when accessing it) -- in that case, mount a specific volume instead
|
||||
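For instance, the read-only rclone-http option can be tried without any config file; the mountpoint and URL below are placeholders, and exact flags may vary by rclone version (see ./docs/rclone.md for the faster webdav setup):

```sh
# mount the server read-only via rclone's http backend
rclone mount :http: ~/mnt/copyparty --http-url http://127.0.0.1:3923
```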
|
||||
|
||||
# android app
|
||||
|
||||
upload to copyparty with one tap
|
||||
|
||||
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
|
||||
|
||||
the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet
|
||||
|
||||
if you want to run the copyparty server on your android device, see [install on android](#install-on-android)
|
||||
|
||||
|
||||
# iOS shortcuts
|
||||
|
||||
there is no iPhone app, but the following shortcuts are almost as good:
|
||||
|
||||
* [upload to copyparty](https://www.icloud.com/shortcuts/41e98dd985cb4d3bb433222bc1e9e770) ([offline](https://github.com/9001/copyparty/raw/hovudstraum/contrib/ios/upload-to-copyparty.shortcut)) ([png](https://user-images.githubusercontent.com/241032/226118053-78623554-b0ed-482e-98e4-6d57ada58ea4.png)) based on the [original](https://www.icloud.com/shortcuts/ab415d5b4de3467b9ce6f151b439a5d7) by [Daedren](https://github.com/Daedren) (thx!)
|
||||
* can strip exif, upload files, pics, vids, links, clipboard
|
||||
* can download links and rehost the target file on copyparty (see first comment inside the shortcut)
|
||||
* pics become lowres if you share from gallery to shortcut, so better to launch the shortcut and pick stuff from there
|
||||
|
||||
|
||||
# performance
|
||||
|
||||
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
@@ -1232,15 +1477,16 @@ defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
below are some tweaks roughly ordered by usefulness:
|
||||
|
||||
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
||||
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
|
||||
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
||||
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
||||
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
|
||||
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
|
||||
* huge amount of short-lived connections
|
||||
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
|
||||
* lots of connections (many users or heavy clients)
|
||||
* simultaneous downloads and uploads saturating a 20gbps connection
|
||||
|
||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||
* using [pypy](https://www.pypy.org/) instead of [cpython](https://www.python.org/) *can* be 70% faster for some workloads, but slower for many others
|
||||
* and pypy can sometimes crash on startup with `-j0` (TODO make issue)
|
||||
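For example, a throughput-focused launch combining a few of these tweaks could look like the following; the log filename is a placeholder:

```sh
# multiprocessing, no console logging, logs redirected to a file instead
python3 copyparty-sfx.py -j0 -q -lo copyparty.log
```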
|
||||
|
||||
## client-side
|
||||
@@ -1250,7 +1496,7 @@ when uploading files,
|
||||
* chrome is recommended, at least compared to firefox:
|
||||
* up to 90% faster when hashing, especially on SSDs
|
||||
* up to 40% faster when uploading over extremely fast internets
|
||||
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again
|
||||
* but [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) can be 40% faster than chrome again
|
||||
|
||||
* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
|
||||
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
|
||||
@@ -1320,6 +1566,18 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
|
||||
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
|
||||
|
||||
|
||||
## https
|
||||
|
||||
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
|
||||
|
||||
copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well
|
||||
|
||||
if [cfssl](https://github.com/cloudflare/cfssl/releases/latest) is installed, copyparty will automatically create a CA and server-cert on startup
|
||||
* the certs are written to `--crt-dir` for distribution, see `--help` for the other `--crt` options
|
||||
* this will be a self-signed certificate so you must install your `ca.pem` into all your browsers/devices
|
||||
* if you want to avoid the hassle of distributing certs manually, please consider using a reverse proxy
|
||||
|
||||
|
||||
# recovering from crashes
|
||||
|
||||
## client crashes
|
||||
@@ -1342,7 +1600,7 @@ however you can hit `F12` in the up2k tab and use the devtools to see how far yo
|
||||
|
||||
# HTTP API
|
||||
|
||||
see [devnotes](#./docs/devnotes.md#http-api)
|
||||
see [devnotes](./docs/devnotes.md#http-api)
|
||||
|
||||
|
||||
# dependencies
|
||||
@@ -1387,7 +1645,7 @@ these are standalone programs and will never be imported / evaluated by copypart
|
||||
|
||||
the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
|
||||
|
||||
you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](#./docs/devnotes.md#sfx-repack)
|
||||
you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](./docs/devnotes.md#sfx-repack)
|
||||
|
||||
|
||||
## copyparty.exe
|
||||
@@ -1401,6 +1659,7 @@ can be convenient on machines where installing python is problematic, however is
|
||||
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe
|
||||
|
||||
* on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works
|
||||
* some antivirus may freak out (false-positive), possibly [Avast, AVG, and McAfee](https://www.virustotal.com/gui/file/52391a1e9842cf70ad243ef83844d46d29c0044d101ee0138fcdd3c8de2237d6/detection)
|
||||
|
||||
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# [`up2k.py`](up2k.py)
|
||||
# [`u2c.py`](u2c.py)
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* sync local folder to server
|
||||
|
||||
@@ -10,6 +10,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
|
||||
# after upload
|
||||
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
|
||||
* [notify2.py](notify2.py) uses the json API to show more context
|
||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||
|
||||
|
||||
@@ -13,9 +13,15 @@ example usage as global config:
|
||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute after upload
|
||||
f = fork; don't wait for it to finish
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = provide upload information as json; not just the filename
|
||||
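For completeness, the volflag variant above spelled out as one full command line; the account `ed` and its password are placeholders consistent with the example:

```sh
# /inc is readable by all, writable by ed, and every finished upload triggers the discord hook
python3 copyparty-sfx.py -a ed:hunter2 -v 'srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py'
```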
@@ -30,6 +36,7 @@ then use this to design your message: https://discohook.org/
|
||||
|
||||
def main():
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5"
|
||||
|
||||
# read info from copyparty
|
||||
inf = json.loads(sys.argv[1])
|
||||
|
||||
bin/hooks/image-noexif.py (new executable file): 72 lines
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
remove exif tags from uploaded images; the eventhook edition of
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py
|
||||
|
||||
dependencies:
|
||||
exiftool / perl-Image-ExifTool
|
||||
|
||||
being an upload hook, this will take effect after upload completion
|
||||
but before copyparty has hashed/indexed the file, which means that
|
||||
copyparty will never index the original file, so deduplication will
|
||||
not work as expected... which is mostly OK but ehhh
|
||||
|
||||
note: modifies the file in-place, so don't set the `f` (fork) flag
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau bin/hooks/image-noexif.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
explained:
|
||||
share fs-path srv/inc at /inc (readable by all, read-write for user ed)
|
||||
running this xau (execute-after-upload) plugin for all uploaded files
|
||||
"""
|
||||
|
||||
|
||||
# filetypes to process; ignores everything else
|
||||
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
ext = fp.lower().split(".")[-1]
|
||||
if ext not in EXTS:
|
||||
return
|
||||
|
||||
cwd, fn = os.path.split(fp)
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-overwrite_original",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
print("image-noexif: stripped")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
||||
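Either of the usages from the docstring can be spelled out as a full launch; here is a sketch of the global-config variant, with the volume layout and account taken from the docstring example:

```sh
# strip exif from every matching upload in all volumes; no `f` flag since the file is edited in-place
python3 copyparty-sfx.py --xau bin/hooks/image-noexif.py -a ed:hunter2 -v srv/inc:inc:r:rw,ed
```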
@@ -17,8 +17,12 @@ depdencies:
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau f,bin/hooks/notify.py
|
||||
-v srv/inc:inc:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
|
||||
@@ -15,9 +15,13 @@ and also supports --xm (notify on 📟 message)
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xm f,j,bin/hooks/notify2.py
|
||||
--xau f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:c,xm=f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:c,xau=f,j,bin/hooks/notify2.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads / msgs with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
|
||||
@@ -10,7 +10,12 @@ example usage as global config:
|
||||
--xbu c,bin/hooks/reject-extension.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xbu=c,bin/hooks/reject-extension.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
|
||||
@@ -17,7 +17,12 @@ example usage as global config:
|
||||
--xau c,bin/hooks/reject-mimetype.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
|
||||
@@ -15,9 +15,15 @@ example usage as global config:
|
||||
--xm f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all messages with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message-to-server-log
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide message information as json; not just the text
|
||||
c3 = mute all output
|
||||
|
||||
@@ -18,7 +18,12 @@ example usage as global config:
|
||||
--xiu i5,j,bin/hooks/xiu-sha.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xiu=i5,j,bin/hooks/xiu-sha.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
|
||||
@@ -15,7 +15,12 @@ example usage as global config:
|
||||
--xiu i1,j,bin/hooks/xiu.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xiu=i1,j,bin/hooks/xiu.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
|
||||
@@ -16,6 +16,10 @@ dep: ffmpeg
|
||||
"""
|
||||
|
||||
|
||||
# save beat timestamps to ".beats/filename.txt"
|
||||
SAVE = False
|
||||
|
||||
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
@@ -23,12 +27,11 @@ def det(tf):
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"300",
|
||||
b"-t", b"360",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
@@ -47,10 +50,29 @@ def det(tf):
|
||||
print(c["list"][0]["label"].split(" ")[0])
|
||||
return
|
||||
|
||||
# throws if detection failed:
|
||||
bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
|
||||
bpm = round(60 * ((len(cl) - 1) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
# throws if detection failed:
|
||||
beats = [float(x["timestamp"]) for x in cl]
|
||||
bds = [b - a for a, b in zip(beats, beats[1:])]
|
||||
bds.sort()
|
||||
n0 = int(len(bds) * 0.2)
|
||||
n1 = int(len(bds) * 0.75) + 1
|
||||
bds = bds[n0:n1]
|
||||
bpm = sum(bds)
|
||||
bpm = round(60 * (len(bds) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
|
||||
if SAVE:
|
||||
fdir, fname = os.path.split(sys.argv[1])
|
||||
bdir = os.path.join(fdir, ".beats")
|
||||
try:
|
||||
os.mkdir(fsenc(bdir))
|
||||
except:
|
||||
pass
|
||||
|
||||
fp = os.path.join(bdir, fname) + ".txt"
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
txt = "\n".join([f"{x:.2f}" for x in beats])
|
||||
f.write(txt.encode("utf-8"))
|
||||
|
||||
|
||||
def main():
|
||||
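In other words, the updated detection shown above sorts the beat-to-beat intervals, drops the shortest 20% and roughly the longest 25% (likely doubled or missed beats), and derives the tempo from what remains, in effect `bpm = 60 * len(bds) / sum(bds)`, i.e. 60 divided by the trimmed mean beat interval. This should be more robust than the old first-to-last-timestamp estimate, and the optional `.beats/` dump makes the raw timestamps inspectable.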
|
||||
@@ -118,6 +118,15 @@ mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# create a dev
|
||||
(cd $jail; mkdir -p dev; cd dev
|
||||
[ -e null ] || mknod -m 666 null c 1 3
|
||||
[ -e zero ] || mknod -m 666 zero c 1 5
|
||||
[ -e random ] || mknod -m 444 random c 1 8
|
||||
[ -e urandom ] || mknod -m 444 urandom c 1 9
|
||||
)
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
S_VERSION = "1.5"
|
||||
S_BUILD_DT = "2023-03-12"
|
||||
S_VERSION = "1.9"
|
||||
S_BUILD_DT = "2023-05-07"
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
u2c.py: upload to copyparty
|
||||
2021, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.12
|
||||
@@ -21,6 +21,7 @@ import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import socket
|
||||
import base64
|
||||
import hashlib
|
||||
import platform
|
||||
@@ -58,6 +59,7 @@ PY2 = sys.version_info < (3,)
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import quote, unquote
|
||||
from urlparse import urlsplit, urlunsplit
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
@@ -65,6 +67,7 @@ else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
unicode = str
|
||||
|
||||
@@ -337,6 +340,32 @@ class CTermsize(object):
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def undns(url):
|
||||
usp = urlsplit(url)
|
||||
hn = usp.hostname
|
||||
gai = None
|
||||
eprint("resolving host [{0}] ...".format(hn), end="")
|
||||
try:
|
||||
gai = socket.getaddrinfo(hn, None)
|
||||
hn = gai[0][4][0]
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
|
||||
eprint(t.format(repr(gai)))
|
||||
raise
|
||||
|
||||
if usp.port:
|
||||
hn = "{0}:{1}".format(hn, usp.port)
|
||||
if usp.username or usp.password:
|
||||
hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)
|
||||
|
||||
usp = usp._replace(netloc=hn)
|
||||
url = urlunsplit(usp)
|
||||
eprint(" {0}".format(url))
|
||||
return url
|
||||
|
||||
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
with os.scandir(top) as dh:
|
||||
@@ -653,6 +682,7 @@ class Ctl(object):
|
||||
return nfiles, nbytes
|
||||
|
||||
def __init__(self, ar, stats=None):
|
||||
self.ok = False
|
||||
self.ar = ar
|
||||
self.stats = stats or self._scan()
|
||||
if not self.stats:
|
||||
@@ -700,6 +730,8 @@ class Ctl(object):
|
||||
|
||||
self._fancy()
|
||||
|
||||
self.ok = True
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
@@ -850,7 +882,7 @@ class Ctl(object):
|
||||
print(" ls ~{0}".format(srd))
|
||||
zb = self.ar.url.encode("utf-8")
|
||||
zb += quotep(rd.replace(b"\\", b"/"))
|
||||
r = req_ses.get(zb + b"?ls&dots", headers=headers)
|
||||
r = req_ses.get(zb + b"?ls<&dots", headers=headers)
|
||||
if not r:
|
||||
raise Exception("HTTP {0}".format(r.status_code))
|
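The code above fetches a server-side folder listing before deciding what to do next; roughly the same request can be made by hand with requests (URL hypothetical, folder assumed to be readable without auth; the plain ?ls&dots form is used here since requests may percent-encode the '<' added in this diff):

import requests

r = requests.get("https://example.com/inc/?ls&dots")
r.raise_for_status()
print(r.text)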
||||
|
||||
@@ -928,7 +960,7 @@ class Ctl(object):
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
if not VT100:
|
||||
upath = upath[4:]
|
||||
upath = upath.lstrip("\\?")
|
||||
|
||||
hs, sprs = handshake(self.ar, file, search)
|
||||
if search:
|
||||
@@ -1009,8 +1041,9 @@ class Ctl(object):
|
||||
file, cid = task
|
||||
try:
|
||||
upload(file, cid, self.ar.a, stats)
|
||||
except:
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
except Exception as ex:
|
||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||
eprint(t.format(file.name, cid[:8], ex))
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
@@ -1049,6 +1082,8 @@ def main():
|
||||
print(ver)
|
||||
return
|
||||
|
||||
sys.argv = [x for x in sys.argv if x != "--ws"]
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
|
||||
NOTE:
|
||||
@@ -1067,7 +1102,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
|
||||
ap = app.add_argument_group("compatibility")
|
||||
ap.add_argument("--cls", action="store_true", help="clear screen before start")
|
||||
ap.add_argument("--ws", action="store_true", help="copyparty is running on windows; wait before deleting files after uploading")
|
||||
ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
|
||||
|
||||
ap = app.add_argument_group("folder sync")
|
||||
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
|
||||
@@ -1091,7 +1126,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ar = app.parse_args()
|
||||
finally:
|
||||
if EXE and not sys.argv[1:]:
|
||||
print("*** hit enter to exit ***")
|
||||
eprint("*** hit enter to exit ***")
|
||||
try:
|
||||
input()
|
||||
except:
|
||||
@@ -1124,20 +1159,28 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
with open(fn, "rb") as f:
|
||||
ar.a = f.read().decode("utf-8").strip()
|
||||
|
||||
for n in range(ar.rh):
|
||||
try:
|
||||
ar.url = undns(ar.url)
|
||||
break
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
if n > ar.rh - 2:
|
||||
raise
|
||||
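The loop above is a compact retry pattern: try up to --rh times, let Ctrl-C through, and only re-raise once the final attempt has also failed. The same pattern in isolation, as a sketch:

def retry(fn, tries):
    # swallow failures except on the last attempt; never swallow Ctrl-C
    for i in range(tries):
        try:
            return fn()
        except KeyboardInterrupt:
            raise
        except Exception:
            if i >= tries - 1:
                raise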
|
||||
if ar.cls:
|
||||
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
|
||||
ctl = Ctl(ar)
|
||||
|
||||
if ar.dr and not ar.drd:
|
||||
if ar.dr and not ar.drd and ctl.ok:
|
||||
print("\npass 2/2: delete")
|
||||
if getattr(ctl, "up_br") and ar.ws:
|
||||
# wait for up2k to set mtimes if there were uploads
|
||||
time.sleep(4)
|
||||
|
||||
ar.drd = True
|
||||
ar.z = True
|
||||
Ctl(ar, ctl.stats)
|
||||
ctl = Ctl(ar, ctl.stats)
|
||||
|
||||
sys.exit(0 if ctl.ok else 1)
|
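With ctl.ok now propagated into the exit status, the uploader can be driven from scripts; a rough usage sketch (the positional-argument order and the URL are assumptions here, not taken from this diff):

import subprocess

# --dl deletes local files after a successful upload, as defined above
rc = subprocess.call(["python3", "u2c.py", "--dl", "https://example.com/inc/", "/home/me/outbox/"])
if rc != 0:
    print("upload (or the delete pass) failed")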
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@@ -1,7 +1,6 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# if you are doing location-based proxying (such as `/stuff` below)
|
||||
|
||||
@@ -1,14 +1,44 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cat >/dev/null <<'EOF'
|
||||
|
||||
NOTE: copyparty is now able to do this automatically;
|
||||
however you may wish to use this script instead if
|
||||
you have specific needs (or if copyparty breaks)
|
||||
|
||||
this script generates a new self-signed TLS certificate and
|
||||
replaces the default insecure one that comes with copyparty
|
||||
|
||||
as it is trivial to impersonate a copyparty server using the
|
||||
default certificate, it is highly recommended to do this
|
||||
|
||||
this will create a self-signed CA, and a Server certificate
|
||||
which gets signed by that CA -- you can run it multiple times
|
||||
with different server-FQDNs / IPs to create additional certs
|
||||
for all your different servers / (non-)copyparty services
|
||||
|
||||
EOF
|
||||
|
||||
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server fqdn and/or IPs, comma-separated"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
[ -z "$srv_fqdn" ] && { cat <<'EOF'
|
||||
need arg 1: ca name
|
||||
need arg 2: server fqdn and/or IPs, comma-separated
|
||||
optional arg 3: if set, write cert into copyparty cfg
|
||||
|
||||
example:
|
||||
./cfssl.sh PartyCo partybox.local y
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
command -v cfssljson 2>/dev/null || {
|
||||
echo please install cfssl and try again
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -59,12 +89,14 @@ show() {
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_fqdn.pem"
|
||||
|
||||
echo
|
||||
echo "successfully generated new certificates"
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
echo "successfully replaced copyparty certificate"
|
||||
}
|
||||
|
||||
|
||||
|
||||
BIN contrib/ios/upload-to-copyparty.shortcut (Normal file)
Binary file not shown.
@@ -1,7 +1,6 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# -nc must match or exceed the webserver's max number of concurrent clients;
|
||||
@@ -9,7 +8,7 @@
|
||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||
#
|
||||
# you may also consider adding -j0 for CPU-intensive configurations
|
||||
# (not that i can really think of any good examples)
|
||||
# (5'000 requests per second, or 20gbps upload/download in parallel)
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
@@ -39,3 +38,9 @@ server {
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
}
|
||||
|
||||
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
|
||||
client_max_body_size 1024M;
|
||||
client_header_timeout 610m;
|
||||
client_body_timeout 610m;
|
||||
send_timeout 610m;
|
||||
|
||||
281 contrib/nixos/modules/copyparty.nix (Normal file)
@@ -0,0 +1,281 @@
|
||||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
mkKeyValue = key: value:
|
||||
if value == true then
|
||||
# sets with a true boolean value are coerced to just the key name
|
||||
key
|
||||
else if value == false then
|
||||
# or omitted completely when false
|
||||
""
|
||||
else
|
||||
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
|
||||
|
||||
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
|
||||
|
||||
mkValueString = value:
|
||||
if isList value then
|
||||
(concatStringsSep ", " (map mkValueString value))
|
||||
else if isAttrs value then
|
||||
"\n" + (mkAttrsString value)
|
||||
else
|
||||
(generators.mkValueStringDefault { } value);
|
||||
|
||||
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
|
||||
|
||||
mkSection = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${mkAttrsString attrs}
|
||||
'';
|
||||
|
||||
mkVolume = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${attrs.path}
|
||||
${mkAttrsString {
|
||||
accs = attrs.access;
|
||||
flags = attrs.flags;
|
||||
}}
|
||||
'';
|
||||
|
||||
passwordPlaceholder = name: "{{password-${name}}}";
|
||||
|
||||
accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name);
|
||||
|
||||
configStr = ''
|
||||
${mkSection "global" cfg.settings}
|
||||
${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)}
|
||||
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
||||
'';
|
||||
|
||||
name = "copyparty";
|
||||
cfg = config.services.copyparty;
|
||||
configFile = pkgs.writeText "${name}.conf" configStr;
|
||||
runtimeConfigPath = "/run/${name}/${name}.conf";
|
||||
home = "/var/lib/${name}";
|
||||
defaultShareDir = "${home}/data";
|
||||
in {
|
||||
options.services.copyparty = {
|
||||
enable = mkEnableOption "web-based file manager";
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = pkgs.copyparty;
|
||||
defaultText = "pkgs.copyparty";
|
||||
description = ''
|
||||
Package of the application to run, exposed for overriding purposes.
|
||||
'';
|
||||
};
|
||||
|
||||
openFilesLimit = mkOption {
|
||||
default = 4096;
|
||||
type = types.either types.int types.str;
|
||||
description = "Number of files to allow copyparty to open.";
|
||||
};
|
||||
|
||||
settings = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Global settings to apply.
|
||||
Directly maps to values in the [global] section of the copyparty config.
|
||||
See `${getExe cfg.package} --help` for more details.
|
||||
'';
|
||||
default = {
|
||||
i = "127.0.0.1";
|
||||
no-reload = true;
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
i = "0.0.0.0";
|
||||
no-reload = true;
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
accounts = mkOption {
|
||||
type = types.attrsOf (types.submodule ({ ... }: {
|
||||
options = {
|
||||
passwordFile = mkOption {
|
||||
type = types.str;
|
||||
description = ''
|
||||
Runtime file path to a file containing the user password.
|
||||
Must be readable by the copyparty user.
|
||||
'';
|
||||
example = "/run/keys/copyparty/ed";
|
||||
};
|
||||
};
|
||||
}));
|
||||
description = ''
|
||||
A set of copyparty accounts to create.
|
||||
'';
|
||||
default = { };
|
||||
example = literalExpression ''
|
||||
{
|
||||
ed.passwordFile = "/run/keys/copyparty/ed";
|
||||
};
|
||||
'';
|
||||
};
|
||||
|
||||
volumes = mkOption {
|
||||
type = types.attrsOf (types.submodule ({ ... }: {
|
||||
options = {
|
||||
path = mkOption {
|
||||
type = types.str;
|
||||
description = ''
|
||||
Path of a directory to share.
|
||||
'';
|
||||
};
|
||||
access = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of permissions and the users to apply them to.
|
||||
|
||||
The key must be a string containing any combination of allowed permission:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
|
||||
For example: "rwmd"
|
||||
|
||||
The value must be one of:
|
||||
an account name, defined in `accounts`
|
||||
a list of account names
|
||||
"*", which means "any account"
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
'';
|
||||
};
|
||||
flags = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of volume flags to apply.
|
||||
See `${getExe cfg.package} --help-flags` for more details.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
|
||||
fk = 4;
|
||||
# scan for new files every 60sec
|
||||
scan = 60;
|
||||
# volflag "e2d" enables the uploads database
|
||||
e2d = true;
|
||||
# "d2t" disables multimedia parsers (in case the uploads are malicious)
|
||||
d2t = true;
|
||||
# skips hashing file contents if path matches *.iso
|
||||
nohash = "\.iso$";
|
||||
};
|
||||
'';
|
||||
default = { };
|
||||
};
|
||||
};
|
||||
}));
|
||||
description = "A set of copyparty volumes to create";
|
||||
default = {
|
||||
"/" = {
|
||||
path = defaultShareDir;
|
||||
access = { r = "*"; };
|
||||
};
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
"/" = {
|
||||
path = ${defaultShareDir};
|
||||
access = {
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
};
|
||||
};
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
systemd.services.copyparty = {
|
||||
description = "http file sharing hub";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
PYTHONUNBUFFERED = "true";
|
||||
XDG_CONFIG_HOME = "${home}/.config";
|
||||
};
|
||||
|
||||
preStart = let
|
||||
replaceSecretCommand = name: attrs:
|
||||
"${getExe pkgs.replace-secret} '${
|
||||
passwordPlaceholder name
|
||||
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
||||
in ''
|
||||
set -euo pipefail
|
||||
install -m 600 ${configFile} ${runtimeConfigPath}
|
||||
${concatStringsSep "\n"
|
||||
(mapAttrsToList replaceSecretCommand cfg.accounts)}
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
||||
|
||||
# Hardening options
|
||||
User = "copyparty";
|
||||
Group = "copyparty";
|
||||
RuntimeDirectory = name;
|
||||
RuntimeDirectoryMode = "0700";
|
||||
StateDirectory = [ name "${name}/data" "${name}/.config" ];
|
||||
StateDirectoryMode = "0700";
|
||||
WorkingDirectory = home;
|
||||
TemporaryFileSystem = "/:ro";
|
||||
BindReadOnlyPaths = [
|
||||
"/nix/store"
|
||||
"-/etc/resolv.conf"
|
||||
"-/etc/nsswitch.conf"
|
||||
"-/etc/hosts"
|
||||
"-/etc/localtime"
|
||||
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
||||
BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
||||
# Would re-mount paths ignored by temporary root
|
||||
#ProtectSystem = "strict";
|
||||
ProtectHome = true;
|
||||
PrivateTmp = true;
|
||||
PrivateDevices = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectControlGroups = true;
|
||||
RestrictSUIDSGID = true;
|
||||
PrivateMounts = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectHostname = true;
|
||||
ProtectClock = true;
|
||||
ProtectProc = "invisible";
|
||||
ProcSubset = "pid";
|
||||
RestrictNamespaces = true;
|
||||
RemoveIPC = true;
|
||||
UMask = "0077";
|
||||
LimitNOFILE = cfg.openFilesLimit;
|
||||
NoNewPrivileges = true;
|
||||
LockPersonality = true;
|
||||
RestrictRealtime = true;
|
||||
};
|
||||
};
|
||||
|
||||
users.groups.copyparty = { };
|
||||
users.users.copyparty = {
|
||||
description = "Service user for copyparty";
|
||||
group = "copyparty";
|
||||
home = home;
|
||||
isSystemUser = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,14 +1,14 @@
|
||||
# Maintainer: icxes <dev.null@need.moe>
|
||||
pkgname=copyparty
|
||||
pkgver="1.6.8"
|
||||
pkgver="1.7.2"
|
||||
pkgrel=1
|
||||
pkgdesc="Portable file sharing hub"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("python" "lsof")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"python-jinja: faster html generator"
|
||||
depends=("python" "lsof" "python-jinja")
|
||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"python-mutagen: music tags (alternative)"
|
||||
"python-pillow: thumbnails for images"
|
||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||
@@ -17,34 +17,31 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
||||
"python-pyopenssl: ftps functionality"
|
||||
"python-impacket-git: smb support (bad idea)"
|
||||
)
|
||||
source=("${url}/releases/download/v${pkgver}/${pkgname}-sfx.py"
|
||||
"${pkgname}.conf"
|
||||
"${pkgname}.service"
|
||||
"prisonparty.service"
|
||||
"index.md"
|
||||
"https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/bin/prisonparty.sh"
|
||||
"https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE"
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}.d/init" )
|
||||
sha256sums=("744037ef9ff70e7a6ec066272b4bb24e259df6faa4472ab92a768d2f801ec2aa"
|
||||
"b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2"
|
||||
"f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc"
|
||||
"c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff"
|
||||
"dba701de9fd584405917e923ea1e59dbb249b96ef23bad479cf4e42740b774c8"
|
||||
"23054bb206153a1ed34038accaf490b8068f9c856e423c2f2595b148b40c0a0c"
|
||||
"cb2ce3d6277bf2f5a82ecf336cc44963bc6490bcf496ffbd75fc9e21abaa75f3"
|
||||
)
|
||||
sha256sums=("fb261d45ce7cf146a3f620d1e3109eb5c584f8950e61a872e2d92d7b7447bae0")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
pushd copyparty/web
|
||||
make -j$(nproc)
|
||||
rm Makefile
|
||||
popd
|
||||
|
||||
python3 -m build -wn
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "${srcdir}/"
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python3 -m installer -d "$pkgdir" dist/*.whl
|
||||
|
||||
install -dm755 "${pkgdir}/etc/${pkgname}.d"
|
||||
install -Dm755 "${pkgname}-sfx.py" "${pkgdir}/usr/bin/${pkgname}"
|
||||
install -Dm755 "prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||
install -Dm644 "${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
|
||||
install -Dm644 "${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
|
||||
install -Dm644 "prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
|
||||
install -Dm644 "index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||
install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||
install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
|
||||
install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
|
||||
install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
|
||||
install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||
|
||||
find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
|
||||
|
||||
55 contrib/package/nix/copyparty/default.nix (Normal file)
@@ -0,0 +1,55 @@
|
||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, pillow, pyvips, ffmpeg, mutagen,
|
||||
|
||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||
withThumbnails ? true,
|
||||
|
||||
# create thumbnails with PyVIPS; even faster, uses more memory
|
||||
# -- can be combined with Pillow to support more filetypes
|
||||
withFastThumbnails ? false,
|
||||
|
||||
# enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus
|
||||
# -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface
|
||||
# -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both
|
||||
withMediaProcessing ? true,
|
||||
|
||||
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
|
||||
withBasicAudioMetadata ? false,
|
||||
|
||||
# enable FTPS support in the FTP server
|
||||
withFTPS ? false,
|
||||
|
||||
# samba/cifs server; dangerous and buggy, enable if you really need it
|
||||
withSMB ? false,
|
||||
|
||||
}:
|
||||
|
||||
let
|
||||
pinData = lib.importJSON ./pin.json;
|
||||
pyEnv = python.withPackages (ps:
|
||||
with ps; [
|
||||
jinja2
|
||||
]
|
||||
++ lib.optional withSMB impacket
|
||||
++ lib.optional withFTPS pyopenssl
|
||||
++ lib.optional withThumbnails pillow
|
||||
++ lib.optional withFastThumbnails pyvips
|
||||
++ lib.optional withMediaProcessing ffmpeg
|
||||
++ lib.optional withBasicAudioMetadata mutagen
|
||||
);
|
||||
in stdenv.mkDerivation {
|
||||
pname = "copyparty";
|
||||
version = pinData.version;
|
||||
src = fetchurl {
|
||||
url = pinData.url;
|
||||
hash = pinData.hash;
|
||||
};
|
||||
buildInputs = [ makeWrapper ];
|
||||
dontUnpack = true;
|
||||
dontBuild = true;
|
||||
installPhase = ''
|
||||
install -Dm755 $src $out/share/copyparty-sfx.py
|
||||
makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
|
||||
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
||||
--add-flags "$out/share/copyparty-sfx.py"
|
||||
'';
|
||||
}
|
||||
5 contrib/package/nix/copyparty/pin.json (Normal file)
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.7.2/copyparty-sfx.py",
|
||||
"version": "1.7.2",
|
||||
"hash": "sha256-h22sRRO/bpydgRVmSVD05ZLuzsUxBCWU3izt9Eg9bf0="
|
||||
}
|
||||
77 contrib/package/nix/copyparty/update.py (Executable file)
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Update the Nix package pin
|
||||
#
|
||||
# Usage: ./update.py [PATH]
|
||||
# When [PATH] is not set, it will fetch the latest release from the repo.
|
||||
# With [PATH] set, it will hash the given file and generate the URL,
|
||||
# based on the version contained within the file
|
||||
|
||||
import base64
|
||||
import json
|
||||
import hashlib
|
||||
import sys
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
OUTPUT_FILE = Path("pin.json")
|
||||
TARGET_ASSET = "copyparty-sfx.py"
|
||||
HASH_TYPE = "sha256"
|
||||
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"
|
||||
DOWNLOAD_URL = lambda version: f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}"
|
||||
|
||||
|
||||
def get_formatted_hash(binary):
|
||||
hasher = hashlib.new("sha256")
|
||||
hasher.update(binary)
|
||||
asset_hash = hasher.digest()
|
||||
encoded_hash = base64.b64encode(asset_hash).decode("ascii")
|
||||
return f"{HASH_TYPE}-{encoded_hash}"
|
||||
|
||||
|
||||
def version_from_sfx(binary):
|
||||
result = re.search(b'^VER = "(.*)"$', binary, re.MULTILINE)
|
||||
if result:
|
||||
return result.groups(1)[0].decode("ascii")
|
||||
|
||||
raise ValueError("version not found in provided file")
|
||||
|
||||
|
||||
def remote_release_pin():
|
||||
import requests
|
||||
|
||||
response = requests.get(LATEST_RELEASE_URL).json()
|
||||
version = response["tag_name"].lstrip("v")
|
||||
asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET][0]
|
||||
download_url = asset_info["browser_download_url"]
|
||||
asset = requests.get(download_url)
|
||||
formatted_hash = get_formatted_hash(asset.content)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def local_release_pin(path):
|
||||
asset = path.read_bytes()
|
||||
version = version_from_sfx(asset)
|
||||
download_url = DOWNLOAD_URL(version)
|
||||
formatted_hash = get_formatted_hash(asset)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1:
|
||||
asset_path = Path(sys.argv[1])
|
||||
result = local_release_pin(asset_path)
|
||||
else:
|
||||
result = remote_release_pin()
|
||||
|
||||
print(result)
|
||||
json_result = json.dumps(result, indent=4)
|
||||
OUTPUT_FILE.write_text(json_result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
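For reference, the "hash" field written to pin.json is an SRI-style string (algorithm prefix plus base64 digest), which is the format Nix's fetchurl accepts; a quick way to reproduce it for a downloaded asset (filename assumed):

import base64, hashlib

digest = hashlib.sha256(open("copyparty-sfx.py", "rb").read()).digest()
print("sha256-" + base64.b64encode(digest).decode("ascii"))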
||||
208 contrib/plugins/rave.js (Normal file)
@@ -0,0 +1,208 @@
|
||||
/* untz untz untz untz */
|
||||
|
||||
(function () {
|
||||
|
||||
var can, ctx, W, H, fft, buf, bars, barw, pv,
|
||||
hue = 0,
|
||||
ibeat = 0,
|
||||
beats = [9001],
|
||||
beats_url = '',
|
||||
uofs = 0,
|
||||
ops = ebi('ops'),
|
||||
raving = false,
|
||||
recalc = 0,
|
||||
cdown = 0,
|
||||
FC = 0.9,
|
||||
css = `<style>
|
||||
|
||||
#fft {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: -1;
|
||||
}
|
||||
body {
|
||||
box-shadow: inset 0 0 0 white;
|
||||
}
|
||||
#ops>a,
|
||||
#path>a {
|
||||
display: inline-block;
|
||||
}
|
||||
/*
|
||||
body.untz {
|
||||
animation: untz-body 200ms ease-out;
|
||||
}
|
||||
@keyframes untz-body {
|
||||
0% {inset 0 0 20em white}
|
||||
100% {inset 0 0 0 white}
|
||||
}
|
||||
*/
|
||||
:root, html.a, html.b, html.c, html.d, html.e {
|
||||
--row-alt: rgba(48,52,78,0.2);
|
||||
}
|
||||
#files td {
|
||||
background: none;
|
||||
}
|
||||
|
||||
</style>`;
|
||||
|
||||
QS('body').appendChild(mknod('div', null, css));
|
||||
|
||||
function rave_load() {
|
||||
console.log('rave_load');
|
||||
can = mknod('canvas', 'fft');
|
||||
QS('body').appendChild(can);
|
||||
ctx = can.getContext('2d');
|
||||
|
||||
fft = new AnalyserNode(actx, {
|
||||
"fftSize": 2048,
|
||||
"maxDecibels": 0,
|
||||
"smoothingTimeConstant": 0.7,
|
||||
});
|
||||
ibeat = 0;
|
||||
beats = [9001];
|
||||
buf = new Uint8Array(fft.frequencyBinCount);
|
||||
bars = buf.length * FC;
|
||||
afilt.filters.push(fft);
|
||||
if (!raving) {
|
||||
raving = true;
|
||||
raver();
|
||||
}
|
||||
beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
|
||||
console.log("reading beats from", beats_url);
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', beats_url, true);
|
||||
xhr.onload = readbeats;
|
||||
xhr.url = beats_url;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function rave_unload() {
|
||||
qsr('#fft');
|
||||
can = null;
|
||||
}
|
||||
|
||||
function readbeats() {
|
||||
if (this.url != beats_url)
|
||||
return console.log('old beats??', this.url, beats_url);
|
||||
|
||||
var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g);
|
||||
if (sbeats.length < 3)
|
||||
return;
|
||||
|
||||
beats = [];
|
||||
for (var a = 0; a < sbeats.length; a++)
|
||||
beats.push(parseFloat(sbeats[a]));
|
||||
|
||||
var end = beats.slice(-2),
|
||||
t = end[1],
|
||||
d = t - end[0];
|
||||
|
||||
while (d > 0.1 && t < 1200)
|
||||
beats.push(t += d);
|
||||
}
|
||||
|
||||
function hrand() {
|
||||
return Math.random() - 0.5;
|
||||
}
|
||||
|
||||
function raver() {
|
||||
if (!can) {
|
||||
raving = false;
|
||||
return;
|
||||
}
|
||||
|
||||
requestAnimationFrame(raver);
|
||||
if (!mp || !mp.au || mp.au.paused)
|
||||
return;
|
||||
|
||||
if (--uofs >= 0) {
|
||||
document.body.style.marginLeft = hrand() * uofs + 'px';
|
||||
ebi('tree').style.marginLeft = hrand() * uofs + 'px';
|
||||
for (var a of QSA('#ops>a, #path>a, #pctl>a'))
|
||||
a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)'
|
||||
}
|
||||
|
||||
if (--recalc < 0) {
|
||||
recalc = 60;
|
||||
var tree = ebi('tree'),
|
||||
x = tree.style.display == 'none' ? 0 : tree.offsetWidth;
|
||||
|
||||
//W = can.width = window.innerWidth - x;
|
||||
//H = can.height = window.innerHeight;
|
||||
//H = ebi('widget').offsetTop;
|
||||
W = can.width = bars;
|
||||
H = can.height = 512;
|
||||
barw = 1; //parseInt(0.8 + W / bars);
|
||||
can.style.left = x + 'px';
|
||||
can.style.width = (window.innerWidth - x) + 'px';
|
||||
can.style.height = ebi('widget').offsetTop + 'px';
|
||||
}
|
||||
|
||||
//if (--cdown == 1)
|
||||
// clmod(ops, 'untz');
|
||||
|
||||
fft.getByteFrequencyData(buf);
|
||||
|
||||
var imax = 0, vmax = 0;
|
||||
for (var a = 10; a < 50; a++)
|
||||
if (vmax < buf[a]) {
|
||||
vmax = buf[a];
|
||||
imax = a;
|
||||
}
|
||||
|
||||
hue = hue * 0.93 + imax * 0.07;
|
||||
|
||||
ctx.fillStyle = 'rgba(0,0,0,0)';
|
||||
ctx.fillRect(0, 0, W, H);
|
||||
ctx.clearRect(0, 0, W, H);
|
||||
ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)';
|
||||
|
||||
var x = 0, mul = (H / 256) * 0.5;
|
||||
for (var a = 0; a < buf.length * FC; a++) {
|
||||
var v = buf[a] * mul * (1 + 0.69 * a / buf.length);
|
||||
ctx.fillRect(x, H - v, barw, v);
|
||||
x += barw;
|
||||
}
|
||||
|
||||
var t = mp.au.currentTime + 0.05;
|
||||
|
||||
if (ibeat >= beats.length || beats[ibeat] > t)
|
||||
return;
|
||||
|
||||
while (ibeat < beats.length && beats[ibeat++] < t)
|
||||
continue;
|
||||
|
||||
return untz();
|
||||
|
||||
var cv = 0;
|
||||
for (var a = 0; a < 128; a++)
|
||||
cv += buf[a];
|
||||
|
||||
if (cv - pv > 1000) {
|
||||
console.log(pv, cv, cv - pv);
|
||||
if (cdown < 0) {
|
||||
clmod(ops, 'untz', 1);
|
||||
cdown = 20;
|
||||
}
|
||||
}
|
||||
pv = cv;
|
||||
}
|
||||
|
||||
function untz() {
|
||||
console.log('untz');
|
||||
uofs = 14;
|
||||
document.body.animate([
|
||||
{ boxShadow: 'inset 0 0 1em #f0c' },
|
||||
{ boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 },
|
||||
{ boxShadow: 'inset 0 0 0 #f0c' },
|
||||
], { duration: 200, iterations: 1 });
|
||||
}
|
||||
|
||||
afilt.plugs.push({
|
||||
"en": true,
|
||||
"load": rave_load,
|
||||
"unload": rave_unload
|
||||
});
|
||||
|
||||
})();
|
||||
@@ -1,3 +1,6 @@
|
||||
# NOTE: this is now a built-in feature in copyparty
|
||||
# but you may still want this if you have specific needs
|
||||
#
|
||||
# systemd service which generates a new TLS certificate on each boot,
|
||||
# that way the one-year expiry time won't cause any issues --
|
||||
# just have everyone trust the ca.pem once every 10 years
|
||||
|
||||
@@ -2,12 +2,16 @@
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||
# cp -pv copyparty.service /etc/systemd/system/
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
|
||||
# firewall-cmd --reload
|
||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||
#
|
||||
# if it fails to start, first check this: systemctl status copyparty
|
||||
# then try starting it while viewing logs: journalctl -fan 100
|
||||
#
|
||||
# you may want to:
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
# remove the nft lines to only listen on port 3923
|
||||
@@ -18,6 +22,7 @@
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
# add '-e2dsa' to enable filesystem scanning + indexing
|
||||
# add '-e2ts' to enable metadata indexing
|
||||
# remove '--ansi' to disable colored logs
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
@@ -44,7 +49,7 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# setup forwarding from ports 80 and 443 to port 3923
|
||||
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
|
||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
||||
ExecStartPre=+nft add table ip nat
|
||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
||||
@@ -55,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# copyparty settings
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -6,6 +6,10 @@ import platform
|
||||
import sys
|
||||
import time
|
||||
|
||||
# fmt: off
|
||||
_:tuple[int,int]=(0,0) # _____________________________________________________________________ hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-) ************************************************************************************************************************************************
|
||||
# fmt: on
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except:
|
||||
@@ -27,7 +31,12 @@ WINDOWS: Any = (
|
||||
else False
|
||||
)
|
||||
|
||||
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
|
||||
VT100 = "--ansi" in sys.argv or (
|
||||
os.environ.get("NO_COLOR", "").lower() in ("", "0", "false")
|
||||
and sys.stdout.isatty()
|
||||
and "--no-ansi" not in sys.argv
|
||||
and (not WINDOWS or WINDOWS >= [10, 0, 14393])
|
||||
)
|
||||
# introduced in anniversary update
|
||||
|
||||
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
|
||||
|
||||
@@ -10,11 +10,9 @@ __url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import filecmp
|
||||
import locale
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
@@ -186,7 +184,7 @@ def init_E(E: EnvParams) -> None:
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
tf.extractall(tdn)
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
|
||||
return tdn
|
||||
|
||||
@@ -201,7 +199,7 @@ def init_E(E: EnvParams) -> None:
|
||||
E.mod = _unpack()
|
||||
|
||||
if sys.platform == "win32":
|
||||
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP")
|
||||
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
|
||||
E.cfg = os.path.normpath(bdir + "/copyparty")
|
||||
elif sys.platform == "darwin":
|
||||
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
@@ -242,6 +240,24 @@ def get_srvname() -> str:
|
||||
return ret
|
||||
|
||||
|
||||
def get_fk_salt(cert_path) -> str:
|
||||
fp = os.path.join(E.cfg, "fk-salt.txt")
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
if os.path.exists(cert_path):
|
||||
print("salt from cert")
|
||||
return unicode(os.path.getmtime(cert_path))
|
||||
else:
|
||||
print("salt from os.random")
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
return ret.decode("utf-8")
|
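The fallback branch above persists 18 random bytes as base64 (exactly 24 characters, no padding) so the filekey salt stays stable across restarts; for illustration:

import base64, os

salt = base64.b64encode(os.urandom(18))  # 18 bytes -> exactly 24 base64 chars
print(len(salt), salt.decode("utf-8"))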
||||
|
||||
|
||||
def ensure_locale() -> None:
|
||||
safe = "en_US.UTF-8"
|
||||
for x in [
|
||||
@@ -261,30 +277,22 @@ def ensure_locale() -> None:
|
||||
warn(t.format(safe))
|
||||
|
||||
|
||||
def ensure_cert() -> None:
|
||||
def ensure_webdeps() -> None:
|
||||
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
|
||||
if os.path.exists(ap):
|
||||
return
|
||||
|
||||
warn(
|
||||
"""could not find webdeps;
|
||||
if you are running the sfx, or exe, or pypi package, or docker image,
|
||||
then this is a bug! Please let me know so I can fix it, thanks :-)
|
||||
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
|
||||
|
||||
however, if you are a dev, or running copyparty from source, and you want
|
||||
full client functionality, you will need to build or obtain the webdeps:
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
being massive memers (https://www.chromium.org/blink/webcrypto)
|
||||
|
||||
i feel awful about this and so should they
|
||||
"""
|
||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||
if not os.path.exists(cert_cfg):
|
||||
shutil.copy(cert_insec, cert_cfg)
|
||||
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
lprint(
|
||||
"\033[33musing default TLS certificate; https will be insecure."
|
||||
+ "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
# speaking of the default cert,
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
)
|
||||
|
||||
|
||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
@@ -499,8 +507,12 @@ def get_sects():
|
||||
"""
|
||||
volflags are appended to volume definitions, for example,
|
||||
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
|
||||
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub"""
|
||||
)
|
||||
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub\033[0m
|
||||
|
||||
if global config defines a volflag for all volumes,
|
||||
you can unset it for a specific volume with -flag
|
||||
"""
|
||||
).rstrip()
|
||||
+ build_flags_desc(),
|
||||
],
|
||||
[
|
||||
@@ -600,9 +612,9 @@ def get_sects():
|
||||
|
||||
\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what
|
||||
|
||||
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
|
||||
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
|
||||
|
||||
\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
|
||||
\033[32mwal\033[0m = another 21x faster on HDDs yet 90% as safe; same pitfall as \033[33mswal\033[0m except more likely
|
||||
|
||||
\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash
|
||||
|
||||
@@ -691,22 +703,43 @@ def add_network(ap):
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
|
||||
|
||||
|
||||
def add_tls(ap):
|
||||
def add_tls(ap, cert_path):
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
|
||||
|
||||
def add_cert(ap, cert_path):
|
||||
cert_dir = os.path.dirname(cert_path)
|
||||
ap2 = ap.add_argument_group('TLS certificate generator options')
|
||||
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
||||
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
|
||||
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
|
||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
|
||||
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
|
||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf options")
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
@@ -751,6 +784,7 @@ def add_ftp(ap):
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
@@ -758,9 +792,11 @@ def add_ftp(ap):
|
||||
|
||||
def add_webdav(ap):
|
||||
ap2 = ap.add_argument_group('WebDAV options')
|
||||
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||
ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
|
||||
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
||||
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
|
||||
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
||||
|
||||
|
||||
def add_smb(ap):
|
||||
@@ -814,6 +850,8 @@ def add_safety(ap, fk_salt):
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
@@ -843,6 +881,8 @@ def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
@@ -874,7 +914,7 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; case-insensitive if -e2d")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
@@ -900,15 +940,13 @@ def add_db_general(ap, hcores):
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
@@ -938,9 +976,11 @@ def add_db_metadata(ap):
|
||||
|
||||
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
@@ -987,10 +1027,9 @@ def run_argparse(
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||
|
||||
fk_salt = get_fk_salt(cert_path)
|
||||
|
||||
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
|
||||
|
||||
@@ -1000,7 +1039,8 @@ def run_argparse(
|
||||
|
||||
add_general(ap, nc, srvname)
|
||||
add_network(ap)
|
||||
add_tls(ap)
|
||||
add_tls(ap, cert_path)
|
||||
add_cert(ap, cert_path)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
@@ -1084,8 +1124,8 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
print("pybin: {}\n".format(pybin), end="")
|
||||
|
||||
ensure_locale()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
ensure_webdeps()
|
||||
|
||||
for k, v in zip(argv[1:], argv[2:]):
|
||||
if k == "-c" and os.path.isfile(v):
|
||||
@@ -1153,13 +1193,18 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
except:
|
||||
sys.exit(1)
|
||||
|
||||
if al.ansi:
|
||||
al.no_ansi = False
|
||||
elif not al.no_ansi:
|
||||
al.ansi = VT100
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
try:
|
||||
disable_quickedit()
|
||||
except:
|
||||
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||
|
||||
if not VT100:
|
||||
if al.ansi:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs: list[str] = []
|
||||
@@ -1238,6 +1283,7 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
configure_ssl_ciphers(al)
|
||||
else:
|
||||
warn("ssl module does not exist; cannot enable https")
|
||||
al.http_only = True
|
||||
|
||||
if PY2 and WINDOWS and al.e2d:
|
||||
warn(
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (1, 6, 9)
|
||||
CODENAME = "cors k"
|
||||
BUILD_DT = (2023, 3, 16)
|
||||
VERSION = (1, 7, 3)
|
||||
CODENAME = "unlinked"
|
||||
BUILD_DT = (2023, 6, 11)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -67,9 +67,9 @@ class AXS(object):
|
||||
self.upget: set[str] = set(upget or [])
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "AXS({})".format(
|
||||
return "AXS(%s)" % (
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
"%s=%r" % (k, self.__dict__[k])
|
||||
for k in "uread uwrite umove udel uget upget".split()
|
||||
)
|
||||
)
|
||||
@@ -285,6 +285,8 @@ class VFS(object):
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.axs = axs
|
||||
self.flags = flags # config options
|
||||
self.root = self
|
||||
self.dev = 0 # st_dev
|
||||
self.nodes: dict[str, VFS] = {} # child nodes
|
||||
self.histtab: dict[str, str] = {} # all realpath->histpath
|
||||
self.dbv: Optional[VFS] = None # closest full/non-jump parent
|
||||
@@ -297,26 +299,42 @@ class VFS(object):
|
||||
self.apget: dict[str, list[str]] = {}
|
||||
|
||||
if realpath:
|
||||
rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
|
||||
vp = vpath + ("/" if vpath else "")
|
||||
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
self.all_aps = [(rp, self)]
|
||||
self.all_vps = [(vp, self)]
|
||||
else:
|
||||
self.histpath = ""
|
||||
self.all_vols = {}
|
||||
self.all_aps = []
|
||||
self.all_vps = []
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "VFS({})".format(
|
||||
return "VFS(%s)" % (
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
"%s=%r" % (k, self.__dict__[k])
|
||||
for k in "realpath vpath axs flags".split()
|
||||
)
|
||||
)
|
||||
|
||||
def get_all_vols(self, outdict: dict[str, "VFS"]) -> None:
|
||||
def get_all_vols(
|
||||
self,
|
||||
vols: dict[str, "VFS"],
|
||||
aps: list[tuple[str, "VFS"]],
|
||||
vps: list[tuple[str, "VFS"]],
|
||||
) -> None:
|
||||
if self.realpath:
|
||||
outdict[self.vpath] = self
|
||||
vols[self.vpath] = self
|
||||
rp = self.realpath
|
||||
rp += "" if rp.endswith(os.sep) else os.sep
|
||||
vp = self.vpath + ("/" if self.vpath else "")
|
||||
aps.append((rp, self))
|
||||
vps.append((vp, self))
|
||||
|
||||
for v in self.nodes.values():
|
||||
v.get_all_vols(outdict)
|
||||
v.get_all_vols(vols, aps, vps)
|
||||
|
||||
def add(self, src: str, dst: str) -> "VFS":
|
||||
"""get existing, or add new path to the vfs"""
|
||||
@@ -356,7 +374,8 @@ class VFS(object):
|
||||
flags = {k: v for k, v in self.flags.items()}
|
||||
hist = flags.get("hist")
|
||||
if hist and hist != "-":
|
||||
flags["hist"] = "{}/{}".format(hist.rstrip("/"), name)
|
||||
zs = "{}/{}".format(hist.rstrip("/"), name)
|
||||
flags["hist"] = os.path.expanduser(zs) if zs.startswith("~") else zs
|
||||
|
||||
return flags
|
||||
|
||||
@@ -389,7 +408,11 @@ class VFS(object):
|
||||
self, vpath: str, uname: str
|
||||
) -> tuple[bool, bool, bool, bool, bool, bool]:
|
||||
"""can Read,Write,Move,Delete,Get,Upget"""
|
||||
vn, _ = self._find(undot(vpath))
|
||||
if vpath:
|
||||
vn, _ = self._find(undot(vpath))
|
||||
else:
|
||||
vn = self
|
||||
|
||||
c = vn.axs
|
||||
return (
|
||||
uname in c.uread or "*" in c.uread,
|
||||
@@ -544,9 +567,20 @@ class VFS(object):
|
||||
self.log("vfs.walk", t.format(seen[-1], fsroot, self.vpath, rem), 3)
|
||||
return
|
||||
|
||||
if "xdev" in self.flags or "xvol" in self.flags:
|
||||
rm1 = []
|
||||
for le in vfs_ls:
|
||||
ap = absreal(os.path.join(fsroot, le[0]))
|
||||
vn2 = self.chk_ap(ap)
|
||||
if not vn2 or not vn2.get("", uname, True, False):
|
||||
rm1.append(le)
|
||||
_ = [vfs_ls.remove(x) for x in rm1] # type: ignore
|
||||
|
||||
seen = seen[:] + [fsroot]
|
||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
# if lstat: ignore folder symlinks since copyparty will never make those
|
||||
# (and we definitely don't want to descend into them)
|
||||
|
||||
rfiles.sort()
|
||||
rdirs.sort()
|
||||
@@ -577,6 +611,7 @@ class VFS(object):
|
||||
|
||||
def zipgen(
|
||||
self,
|
||||
vpath: str,
|
||||
vrem: str,
|
||||
flt: set[str],
|
||||
uname: str,
|
||||
@@ -588,7 +623,7 @@ class VFS(object):
|
||||
|
||||
# if multiselect: add all items to archive root
|
||||
# if single folder: the folder itself is the top-level item
|
||||
folder = "" if flt or not wrap else (vrem.split("/")[-1].lstrip(".") or "top")
|
||||
folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
|
||||
|
||||
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
||||
for _, _, vpath, apath, files, rd, vd in g:
|
||||
@@ -639,6 +674,44 @@ class VFS(object):
|
||||
for d in [{"vp": v, "ap": a, "st": n} for v, a, n in ret2]:
|
||||
yield d
|
||||
|
||||
def chk_ap(self, ap: str, st: Optional[os.stat_result] = None) -> Optional["VFS"]:
|
||||
aps = ap + os.sep
|
||||
if "xdev" in self.flags and not ANYWIN:
|
||||
if not st:
|
||||
ap2 = ap.replace("\\", "/") if ANYWIN else ap
|
||||
while ap2:
|
||||
try:
|
||||
st = bos.stat(ap2)
|
||||
break
|
||||
except:
|
||||
if "/" not in ap2:
|
||||
raise
|
||||
ap2 = ap2.rsplit("/", 1)[0]
|
||||
assert st
|
||||
|
||||
vdev = self.dev
|
||||
if not vdev:
|
||||
vdev = self.dev = bos.stat(self.realpath).st_dev
|
||||
|
||||
if vdev != st.st_dev:
|
||||
if self.log:
|
||||
t = "xdev: {}[{}] => {}[{}]"
|
||||
self.log("vfs", t.format(vdev, self.realpath, st.st_dev, ap), 3)
|
||||
|
||||
return None
|
||||
|
||||
if "xvol" in self.flags:
|
||||
for vap, vn in self.root.all_aps:
|
||||
if aps.startswith(vap):
|
||||
return vn
|
||||
|
||||
if self.log:
|
||||
self.log("vfs", "xvol: [{}]".format(ap), 3)
|
||||
|
||||
return None
|
||||
|
||||
return self
|
||||
|
||||
|
||||
if WINDOWS:
|
||||
re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
@@ -768,6 +841,9 @@ class AuthSrv(object):
|
||||
if not ln.split("#")[0].strip():
|
||||
continue
|
||||
|
||||
if re.match(r"^\[.*\]:$", ln):
|
||||
ln = ln[:-1]
|
||||
|
||||
subsection = ln in (catx, catf)
|
||||
if ln.startswith("[") or subsection:
|
||||
self._e()
|
||||
@@ -857,7 +933,7 @@ class AuthSrv(object):
|
||||
zd = split_cfg_ln(ln)
|
||||
fstr = ""
|
||||
for sk, sv in zd.items():
|
||||
bad = re.sub(r"[a-z0-9_]", "", sk)
|
||||
bad = re.sub(r"[a-z0-9_-]", "", sk).lstrip("-")
|
||||
if bad:
|
||||
err = "bad characters [{}] in volflag name [{}]; "
|
||||
err = err.format(bad, sk)
|
||||
@@ -933,7 +1009,14 @@ class AuthSrv(object):
|
||||
value: Union[str, bool, list[str]],
|
||||
is_list: bool,
|
||||
) -> None:
|
||||
desc = flagdescs.get(name, "?").replace("\n", " ")
|
||||
desc = flagdescs.get(name.lstrip("-"), "?").replace("\n", " ")
|
||||
|
||||
if re.match("^-[^-]+$", name):
|
||||
t = "└─unset volflag [{}] ({})"
|
||||
self._e(t.format(name[1:], desc))
|
||||
flags[name] = True
|
||||
return
|
||||
|
||||
if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split():
|
||||
if value is True:
|
||||
t = "└─add volflag [{}] = {} ({})"
|
||||
@@ -1058,7 +1141,13 @@ class AuthSrv(object):
|
||||
|
||||
assert vfs
|
||||
vfs.all_vols = {}
|
||||
vfs.get_all_vols(vfs.all_vols)
|
||||
vfs.all_aps = []
|
||||
vfs.all_vps = []
|
||||
vfs.get_all_vols(vfs.all_vols, vfs.all_aps, vfs.all_vps)
|
||||
for vol in vfs.all_vols.values():
|
||||
vol.all_aps.sort(key=lambda x: len(x[0]), reverse=True)
|
||||
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
|
||||
vol.root = vfs
|
||||
|
||||
for perm in "read write move del get pget".split():
|
||||
axs_key = "u" + perm
|
||||
@@ -1092,6 +1181,14 @@ class AuthSrv(object):
|
||||
if LEELOO_DALLAS in all_users:
|
||||
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
|
||||
|
||||
seenpwds = {}
|
||||
for usr, pwd in acct.items():
|
||||
if pwd in seenpwds:
|
||||
t = "accounts [{}] and [{}] have the same password; this is not supported"
|
||||
self.log(t.format(seenpwds[pwd], usr), 1)
|
||||
raise Exception("invalid config")
|
||||
seenpwds[pwd] = usr
|
||||
|
||||
promote = []
|
||||
demote = []
|
||||
for vol in vfs.all_vols.values():
|
||||
@@ -1101,6 +1198,9 @@ class AuthSrv(object):
|
||||
if vflag == "-":
|
||||
pass
|
||||
elif vflag:
|
||||
if vflag.startswith("~"):
|
||||
vflag = os.path.expanduser(vflag)
|
||||
|
||||
vol.histpath = uncyg(vflag) if WINDOWS else vflag
|
||||
elif self.args.hist:
|
||||
for nch in range(len(hid)):
|
||||
@@ -1436,6 +1536,12 @@ class AuthSrv(object):
|
||||
self.log(t, 1)
|
||||
errors = True
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
for k in list(vol.flags.keys()):
|
||||
if re.match("^-[^-]+$", k):
|
||||
vol.flags.pop(k[1:], None)
|
||||
vol.flags.pop(k)
|
||||
|
||||
if errors:
|
||||
sys.exit(1)
|
||||
|
||||
@@ -1475,6 +1581,12 @@ class AuthSrv(object):
|
||||
if t:
|
||||
self.log("\n\033[{}\033[0m\n".format(t))
|
||||
|
||||
zv, _ = vfs.get("/", "*", False, False)
|
||||
zs = zv.realpath.lower()
|
||||
if zs in ("/", "c:\\") or zs.startswith(r"c:\windows"):
|
||||
t = "you are sharing a system directory: {}\n"
|
||||
self.log(t.format(zv.realpath), c=1)
|
||||
|
||||
try:
|
||||
zv, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == zv.realpath:
|
||||
|
||||
217
copyparty/cert.py
Normal file
217
copyparty/cert.py
Normal file
@@ -0,0 +1,217 @@
|
||||
import os
|
||||
import errno
|
||||
import time
|
||||
import json
|
||||
import shutil
|
||||
import filecmp
|
||||
import calendar
|
||||
|
||||
from .util import runcmd, Netdev
|
||||
|
||||
|
||||
HAVE_CFSSL = True
|
||||
|
||||
|
||||
def ensure_cert(log: "RootLogger", args) -> None:
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
being massive memers (https://www.chromium.org/blink/webcrypto)
|
||||
|
||||
i feel awful about this and so should they
|
||||
"""
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
||||
if not os.path.isfile(args.cert):
|
||||
if cert_appdata != args.cert:
|
||||
raise Exception("certificate file does not exist: " + args.cert)
|
||||
|
||||
shutil.copy(cert_insec, args.cert)
|
||||
|
||||
with open(args.cert, "rb") as f:
|
||||
buf = f.read()
|
||||
o1 = buf.find(b" PRIVATE KEY-")
|
||||
o2 = buf.find(b" CERTIFICATE-")
|
||||
m = "unsupported certificate format: "
|
||||
if o1 < 0:
|
||||
raise Exception(m + "no private key inside pem")
|
||||
if o2 < 0:
|
||||
raise Exception(m + "no server certificate inside pem")
|
||||
if o1 > o2:
|
||||
raise Exception(m + "private key must appear before server certificate")
|
||||
|
||||
try:
|
||||
if filecmp.cmp(args.cert, cert_insec):
|
||||
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
||||
log("cert", t.format(args.cert), 3)
|
||||
except:
|
||||
pass
|
||||
|
||||
# speaking of the default cert,
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def _read_crt(args, fn):
|
||||
try:
|
||||
if not os.path.exists(os.path.join(args.crt_dir, fn)):
|
||||
return 0, {}
|
||||
|
||||
acmd = ["cfssl-certinfo", "-cert", fn]
|
||||
rc, so, se = runcmd(acmd, cwd=args.crt_dir)
|
||||
if rc:
|
||||
return 0, {}
|
||||
|
||||
inf = json.loads(so)
|
||||
zs = inf["not_after"]
|
||||
expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
|
||||
return expiry, inf
|
||||
except OSError as ex:
|
||||
if ex.errno == errno.ENOENT:
|
||||
raise
|
||||
return 0, {}
|
||||
except:
|
||||
return 0, {}
|
||||
|
||||
|
||||
def _gen_ca(log: "RootLogger", args):
|
||||
expiry = _read_crt(args, "ca.pem")[0]
|
||||
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
|
||||
return
|
||||
|
||||
backdate = "{}m".format(int(args.crt_back * 60))
|
||||
expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
|
||||
cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
|
||||
algo, ksz = args.crt_alg.split("-")
|
||||
req = {
|
||||
"CN": cn,
|
||||
"CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
|
||||
"key": {"algo": algo, "size": int(ksz)},
|
||||
"names": [{"O": cn}],
|
||||
}
|
||||
sin = json.dumps(req).encode("utf-8")
|
||||
log("cert", "creating new ca ...", 6)
|
||||
|
||||
cmd = "cfssl gencert -initca -"
|
||||
rc, so, se = runcmd(cmd.split(), 30, sin=sin)
|
||||
if rc:
|
||||
raise Exception("failed to create ca-cert: {}, {}".format(rc, se), 3)
|
||||
|
||||
cmd = "cfssljson -bare ca"
|
||||
sin = so.encode("utf-8")
|
||||
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)
|
||||
|
||||
bname = os.path.join(args.crt_dir, "ca")
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
log("cert", "new ca OK", 2)
|
||||
|
||||
|
||||
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
names = args.crt_ns.split(",") if args.crt_ns else []
|
||||
if not args.crt_exact:
|
||||
for n in names[:]:
|
||||
names.append("*.{}".format(n))
|
||||
if not args.crt_noip:
|
||||
for ip in netdevs.keys():
|
||||
names.append(ip.split("/")[0])
|
||||
if args.crt_nolo:
|
||||
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
|
||||
if not names:
|
||||
names = ["127.0.0.1"]
|
||||
if "127.0.0.1" in names or "::1" in names:
|
||||
names.append("localhost")
|
||||
names = list({x: 1 for x in names}.keys())
|
||||
|
||||
try:
|
||||
expiry, inf = _read_crt(args, "srv.pem")
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
for n in names:
|
||||
if n not in inf["sans"]:
|
||||
raise Exception("does not have {}".format(n))
|
||||
if expired:
|
||||
raise Exception("old server-cert has expired")
|
||||
if not filecmp.cmp(args.cert, cert_insec):
|
||||
return
|
||||
except Exception as ex:
|
||||
log("cert", "will create new server-cert; {}".format(ex))
|
||||
|
||||
log("cert", "creating server-cert ...", 6)
|
||||
|
||||
backdate = "{}m".format(int(args.crt_back * 60))
|
||||
expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
|
||||
cfg = {
|
||||
"signing": {
|
||||
"default": {
|
||||
"backdate": backdate,
|
||||
"expiry": expiry,
|
||||
"usages": ["signing", "key encipherment", "server auth"],
|
||||
}
|
||||
}
|
||||
}
|
||||
with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
|
||||
f.write(json.dumps(cfg).encode("utf-8"))
|
||||
|
||||
cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
|
||||
algo, ksz = args.crt_alg.split("-")
|
||||
req = {
|
||||
"key": {"algo": algo, "size": int(ksz)},
|
||||
"names": [{"O": cn}],
|
||||
}
|
||||
sin = json.dumps(req).encode("utf-8")
|
||||
|
||||
cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
|
||||
acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
|
||||
rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to create cert: {}, {}".format(rc, se))
|
||||
|
||||
cmd = "cfssljson -bare srv"
|
||||
sin = so.encode("utf-8")
|
||||
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to translate cert: {}, {}".format(rc, se))
|
||||
|
||||
bname = os.path.join(args.crt_dir, "srv")
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
|
||||
ca = f.read()
|
||||
|
||||
with open(bname + ".key", "rb") as f:
|
||||
skey = f.read()
|
||||
|
||||
with open(bname + ".pem", "rb") as f:
|
||||
scrt = f.read()
|
||||
|
||||
with open(args.cert, "wb") as f:
|
||||
f.write(skey + scrt + ca)
|
||||
|
||||
log("cert", "new server-cert OK", 2)
|
||||
|
||||
|
||||
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
global HAVE_CFSSL
|
||||
|
||||
if args.http_only:
|
||||
return
|
||||
|
||||
if args.no_crt or not HAVE_CFSSL:
|
||||
ensure_cert(log, args)
|
||||
return
|
||||
|
||||
try:
|
||||
_gen_ca(log, args)
|
||||
_gen_srv(log, args, netdevs)
|
||||
except Exception as ex:
|
||||
HAVE_CFSSL = False
|
||||
log("cert", "could not create TLS certificates: {}".format(ex), 3)
|
||||
if getattr(ex, "errno", 0) == errno.ENOENT:
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
|
||||
log("cert", t, 6)
|
||||
|
||||
ensure_cert(log, args)
|
||||
@@ -13,6 +13,8 @@ def vf_bmap() -> dict[str, str]:
|
||||
"no_dedup": "copydupes",
|
||||
"no_dupe": "nodupe",
|
||||
"no_forget": "noforget",
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
}
|
||||
for k in (
|
||||
"dotsrch",
|
||||
@@ -22,6 +24,7 @@ def vf_bmap() -> dict[str, str]:
|
||||
"e2v",
|
||||
"e2vu",
|
||||
"e2vp",
|
||||
"grid",
|
||||
"hardlink",
|
||||
"magic",
|
||||
"no_sb_md",
|
||||
@@ -38,7 +41,7 @@ def vf_bmap() -> dict[str, str]:
|
||||
def vf_vmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple values"""
|
||||
ret = {}
|
||||
for k in ("lg_sbf", "md_sbf"):
|
||||
for k in ("lg_sbf", "md_sbf", "unlist"):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
@@ -106,7 +109,7 @@ flagcats = {
|
||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||
"xlink": "cross-volume dupe detection / linking",
|
||||
"xdev": "do not descend into other filesystems",
|
||||
"xvol": "skip symlinks leaving the volume root",
|
||||
"xvol": "do not follow symlinks leaving the volume root",
|
||||
"dotsrch": "show dotfiles in search results",
|
||||
"nodotsrch": "hide dotfiles in search results (default)",
|
||||
},
|
||||
@@ -131,6 +134,8 @@ flagcats = {
|
||||
"xm=CMD": "execute CMD on message",
|
||||
},
|
||||
"client and ux": {
|
||||
"grid": "show grid/thumbnails by default",
|
||||
"unlist": "dont list files matching REGEX",
|
||||
"html_head=TXT": "includes TXT in the <head>",
|
||||
"robots": "allows indexing by search engines (default)",
|
||||
"norobots": "kindly asks search engines to leave",
|
||||
@@ -142,7 +147,9 @@ flagcats = {
|
||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||
},
|
||||
"others": {
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission'
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
|
||||
"davauth": "ask webdav clients to login for all folders",
|
||||
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
@@ -14,8 +15,8 @@ from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||
from .bos import bos
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
Daemon,
|
||||
Pebkac,
|
||||
@@ -46,6 +47,12 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
class FSE(FilesystemError):
|
||||
def __init__(self, msg: str, severity: int = 0) -> None:
|
||||
super(FilesystemError, self).__init__(msg)
|
||||
self.severity = severity
|
||||
|
||||
|
||||
class FtpAuth(DummyAuthorizer):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
super(FtpAuth, self).__init__()
|
||||
@@ -55,6 +62,7 @@ class FtpAuth(DummyAuthorizer):
|
||||
self, username: str, password: str, handler: Any
|
||||
) -> None:
|
||||
handler.username = "{}:{}".format(username, password)
|
||||
handler.uname = "*"
|
||||
|
||||
ip = handler.addr[0]
|
||||
if ip.startswith("::ffff:"):
|
||||
@@ -86,14 +94,14 @@ class FtpAuth(DummyAuthorizer):
|
||||
|
||||
raise AuthenticationFailed("Authentication failed.")
|
||||
|
||||
handler.username = uname
|
||||
handler.uname = handler.username = uname
|
||||
|
||||
def get_home_dir(self, username: str) -> str:
|
||||
return "/"
|
||||
|
||||
def has_user(self, username: str) -> bool:
|
||||
asrv = self.hub.asrv
|
||||
return username in asrv.acct
|
||||
return username in asrv.acct or username in asrv.iacct
|
||||
|
||||
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
|
||||
return True # handled at filesystem layer
|
||||
@@ -112,11 +120,11 @@ class FtpFs(AbstractedFS):
|
||||
def __init__(
|
||||
self, root: str, cmd_channel: Any
|
||||
) -> None: # pylint: disable=super-init-not-called
|
||||
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.h = cmd_channel # type: FTPHandler
|
||||
self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.hub: "SvcHub" = cmd_channel.hub
|
||||
self.args = cmd_channel.args
|
||||
|
||||
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
|
||||
self.uname = cmd_channel.uname
|
||||
|
||||
self.cwd = "/" # pyftpdlib convention of leading slash
|
||||
self.root = "/var/lib/empty"
|
||||
@@ -127,10 +135,6 @@ class FtpFs(AbstractedFS):
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
|
||||
def die(self, msg):
|
||||
self.h.die(msg)
|
||||
raise Exception()
|
||||
|
||||
def v2a(
|
||||
self,
|
||||
vpath: str,
|
||||
@@ -140,21 +144,34 @@ class FtpFs(AbstractedFS):
|
||||
d: bool = False,
|
||||
) -> tuple[str, VFS, str]:
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
vpath = vpath.replace("\\", "/").strip("/")
|
||||
rd, fn = os.path.split(vpath)
|
||||
if ANYWIN and relchk(rd):
|
||||
logging.warning("malicious vpath: %s", vpath)
|
||||
self.die("Unsupported characters in filepath")
|
||||
t = "Unsupported characters in [{}]"
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
||||
vpath = vjoin(rd, fn)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||
if not vfs.realpath:
|
||||
self.die("No filesystem mounted at this path")
|
||||
t = "No filesystem mounted at [{}]"
|
||||
raise FSE(t.format(vpath))
|
||||
|
||||
if "xdev" in vfs.flags or "xvol" in vfs.flags:
|
||||
ap = vfs.canonical(rem)
|
||||
avfs = vfs.chk_ap(ap)
|
||||
t = "Permission denied in [{}]"
|
||||
if not avfs:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
cr, cw, cm, cd, _, _ = avfs.can_access("", self.h.uname)
|
||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
return os.path.join(vfs.realpath, rem), vfs, rem
|
||||
except Pebkac as ex:
|
||||
self.die(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def rv2a(
|
||||
self,
|
||||
@@ -177,7 +194,7 @@ class FtpFs(AbstractedFS):
|
||||
def validpath(self, path: str) -> bool:
|
||||
if "/.hist/" in path:
|
||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||
self.die("Access to this file is forbidden")
|
||||
raise FSE("Access to this file is forbidden", 1)
|
||||
|
||||
return True
|
||||
|
||||
@@ -194,7 +211,7 @@ class FtpFs(AbstractedFS):
|
||||
td = 0
|
||||
|
||||
if td < -1 or td > self.args.ftp_wt:
|
||||
self.die("Cannot open existing file for writing")
|
||||
raise FSE("Cannot open existing file for writing")
|
||||
|
||||
self.validpath(ap)
|
||||
return open(fsenc(ap), mode)
|
||||
@@ -203,9 +220,17 @@ class FtpFs(AbstractedFS):
|
||||
nwd = join(self.cwd, path)
|
||||
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
|
||||
ap = vfs.canonical(rem)
|
||||
if not bos.path.isdir(ap):
|
||||
try:
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise Exception()
|
||||
except:
|
||||
# returning 550 is library-default and suitable
|
||||
self.die("Failed to change directory")
|
||||
raise FSE("No such file or directory")
|
||||
|
||||
avfs = vfs.chk_ap(ap, st)
|
||||
if not avfs:
|
||||
raise FSE("Permission denied", 1)
|
||||
|
||||
self.cwd = nwd
|
||||
(
|
||||
@@ -215,16 +240,18 @@ class FtpFs(AbstractedFS):
|
||||
self.can_delete,
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
||||
) = avfs.can_access("", self.h.uname)
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, w=True)[0]
|
||||
bos.makedirs(ap) # filezilla expects this
|
||||
|
||||
def listdir(self, path: str) -> list[str]:
|
||||
vpath = join(self.cwd, path).lstrip("/")
|
||||
vpath = join(self.cwd, path)
|
||||
try:
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
|
||||
ap, vfs, rem = self.v2a(vpath, True, False)
|
||||
if not bos.path.isdir(ap):
|
||||
raise FSE("No such file or directory", 1)
|
||||
|
||||
fsroot, vfs_ls1, vfs_virt = vfs.ls(
|
||||
rem,
|
||||
@@ -240,8 +267,12 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
vfs_ls.sort()
|
||||
return vfs_ls
|
||||
except:
|
||||
if vpath:
|
||||
except Exception as ex:
|
||||
# panic on malicious names
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
if vpath.strip("/"):
|
||||
# display write-only folders as empty
|
||||
return []
|
||||
|
||||
@@ -251,31 +282,35 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
def rmdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, d=True)[0]
|
||||
bos.rmdir(ap)
|
||||
try:
|
||||
bos.rmdir(ap)
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
def remove(self, path: str) -> None:
|
||||
if self.args.no_del:
|
||||
self.die("The delete feature is disabled in server config")
|
||||
raise FSE("The delete feature is disabled in server config")
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [])
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
|
||||
except Exception as ex:
|
||||
self.die(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def rename(self, src: str, dst: str) -> None:
|
||||
if not self.can_move:
|
||||
self.die("Not allowed for user " + self.h.username)
|
||||
raise FSE("Not allowed for user " + self.h.uname)
|
||||
|
||||
if self.args.no_mv:
|
||||
self.die("The rename/move feature is disabled in server config")
|
||||
raise FSE("The rename/move feature is disabled in server config")
|
||||
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||
except Exception as ex:
|
||||
self.die(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def chmod(self, path: str, mode: str) -> None:
|
||||
pass
|
||||
@@ -284,7 +319,10 @@ class FtpFs(AbstractedFS):
|
||||
try:
|
||||
ap = self.rv2a(path, r=True)[0]
|
||||
return bos.stat(ap)
|
||||
except:
|
||||
except FSE as ex:
|
||||
if ex.severity:
|
||||
raise
|
||||
|
||||
ap = self.rv2a(path)[0]
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
@@ -304,7 +342,10 @@ class FtpFs(AbstractedFS):
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISREG(st.st_mode)
|
||||
except:
|
||||
except Exception as ex:
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
return False # expected for mojibake in ftp_SIZE()
|
||||
|
||||
def islink(self, path: str) -> bool:
|
||||
@@ -315,7 +356,10 @@ class FtpFs(AbstractedFS):
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
except:
|
||||
except Exception as ex:
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
return True
|
||||
|
||||
def getsize(self, path: str) -> int:
|
||||
@@ -344,10 +388,12 @@ class FtpHandler(FTPHandler):
|
||||
abstracted_fs = FtpFs
|
||||
hub: "SvcHub"
|
||||
args: argparse.Namespace
|
||||
uname: str
|
||||
|
||||
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
|
||||
self.hub: "SvcHub" = FtpHandler.hub
|
||||
self.args: argparse.Namespace = FtpHandler.args
|
||||
self.uname = "*"
|
||||
|
||||
if PY2:
|
||||
FTPHandler.__init__(self, conn, server, ioloop)
|
||||
@@ -363,14 +409,10 @@ class FtpHandler(FTPHandler):
|
||||
# reduce non-debug logging
|
||||
self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")]
|
||||
|
||||
def die(self, msg):
|
||||
self.respond("550 {}".format(msg))
|
||||
raise FilesystemError(msg)
|
||||
|
||||
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
||||
# Optional[str]
|
||||
vp = join(self.fs.cwd, file).lstrip("/")
|
||||
ap, vfs, rem = self.fs.v2a(vp)
|
||||
ap, vfs, rem = self.fs.v2a(vp, w=True)
|
||||
self.vfs_map[ap] = vp
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
@@ -379,14 +421,14 @@ class FtpHandler(FTPHandler):
|
||||
ap,
|
||||
vfs.canonical(rem),
|
||||
"",
|
||||
self.username,
|
||||
self.uname,
|
||||
0,
|
||||
0,
|
||||
self.cli_ip,
|
||||
0,
|
||||
"",
|
||||
):
|
||||
self.die("Upload blocked by xbu server config")
|
||||
raise FSE("Upload blocked by xbu server config")
|
||||
|
||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||
@@ -408,7 +450,7 @@ class FtpHandler(FTPHandler):
|
||||
# print("xfer_end: {} => {}".format(ap, vp))
|
||||
if vp:
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
@@ -418,7 +460,7 @@ class FtpHandler(FTPHandler):
|
||||
fn,
|
||||
self.cli_ip,
|
||||
time.time(),
|
||||
self.username,
|
||||
self.uname,
|
||||
)
|
||||
|
||||
return FTPHandler.log_transfer(
|
||||
@@ -452,7 +494,7 @@ class Ftpd(object):
|
||||
print(t.format(pybin))
|
||||
sys.exit(1)
|
||||
|
||||
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
|
||||
h1.certfile = self.args.cert
|
||||
h1.tls_control_required = True
|
||||
h1.tls_data_required = True
|
||||
|
||||
@@ -460,9 +502,9 @@ class Ftpd(object):
|
||||
|
||||
for h_lp in hs:
|
||||
h2, lp = h_lp
|
||||
h2.hub = hub
|
||||
h2.args = hub.args
|
||||
h2.authorizer = FtpAuth(hub)
|
||||
FtpHandler.hub = h2.hub = hub
|
||||
FtpHandler.args = h2.args = hub.args
|
||||
FtpHandler.authorizer = h2.authorizer = FtpAuth(hub)
|
||||
|
||||
if self.args.ftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
||||
@@ -486,6 +528,9 @@ class Ftpd(object):
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in ips:
|
||||
for h, lp in hs:
|
||||
|
||||
@@ -135,6 +135,7 @@ class HttpCli(object):
|
||||
self.ouparam: dict[str, str] = {}
|
||||
self.uparam: dict[str, str] = {}
|
||||
self.cookies: dict[str, str] = {}
|
||||
self.avn: Optional[VFS] = None
|
||||
self.vpath = " "
|
||||
self.uname = " "
|
||||
self.pw = " "
|
||||
@@ -219,7 +220,7 @@ class HttpCli(object):
|
||||
|
||||
try:
|
||||
self.s.settimeout(2)
|
||||
headerlines = read_header(self.sr)
|
||||
headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
|
||||
self.in_hdr_recv = False
|
||||
if not headerlines:
|
||||
return False
|
||||
@@ -264,9 +265,10 @@ class HttpCli(object):
|
||||
self.is_https = (
|
||||
self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls
|
||||
)
|
||||
self.host = self.headers.get("host") or "{}:{}".format(
|
||||
*list(self.s.getsockname()[:2])
|
||||
)
|
||||
self.host = self.headers.get("host") or ""
|
||||
if not self.host:
|
||||
zs = "%s:%s" % self.s.getsockname()[:2]
|
||||
self.host = zs[7:] if zs.startswith("::ffff:") else zs
|
||||
|
||||
n = self.args.rproxy
|
||||
if n:
|
||||
@@ -329,7 +331,7 @@ class HttpCli(object):
|
||||
for k in arglist.split("&"):
|
||||
if "=" in k:
|
||||
k, zs = k.split("=", 1)
|
||||
uparam[k.lower()] = zs.strip()
|
||||
uparam[k.lower()] = unquotep(zs.strip().replace("+", " "))
|
||||
else:
|
||||
uparam[k.lower()] = ""
|
||||
|
||||
@@ -402,10 +404,21 @@ class HttpCli(object):
|
||||
self.get_pwd_cookie(self.pw)
|
||||
|
||||
if self.is_rclone:
|
||||
# dots: always include dotfiles if permitted
|
||||
# lt: probably more important showing the correct timestamps of any dupes it just uploaded rather than the lastmod time of any non-copyparty-managed symlinks
|
||||
# b: basic-browser if it tries to parse the html listing
|
||||
uparam["dots"] = ""
|
||||
uparam["lt"] = ""
|
||||
uparam["b"] = ""
|
||||
cookies["b"] = ""
|
||||
|
||||
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
|
||||
if "xdev" in vn.flags or "xvol" in vn.flags:
|
||||
ap = vn.canonical(rem)
|
||||
avn = vn.chk_ap(ap)
|
||||
else:
|
||||
avn = vn
|
||||
|
||||
(
|
||||
self.can_read,
|
||||
self.can_write,
|
||||
@@ -413,7 +426,12 @@ class HttpCli(object):
|
||||
self.can_delete,
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
) = self.asrv.vfs.can_access(self.vpath, self.uname)
|
||||
) = (
|
||||
avn.can_access("", self.uname) if avn else [False] * 6
|
||||
)
|
||||
self.avn = avn
|
||||
|
||||
self.s.settimeout(self.args.s_tbody or None)
|
||||
|
||||
try:
|
||||
cors_k = self._cors()
|
||||
@@ -529,7 +547,7 @@ class HttpCli(object):
|
||||
mime: Optional[str] = None,
|
||||
headers: Optional[dict[str, str]] = None,
|
||||
) -> None:
|
||||
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
|
||||
response = ["%s %s %s" % (self.http_ver, status, HTTPCODE[status])]
|
||||
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + unicode(length))
|
||||
@@ -553,11 +571,10 @@ class HttpCli(object):
|
||||
self.out_headers["Content-Type"] = mime
|
||||
|
||||
for k, zs in list(self.out_headers.items()) + self.out_headerlist:
|
||||
response.append("{}: {}".format(k, zs))
|
||||
response.append("%s: %s" % (k, zs))
|
||||
|
||||
try:
|
||||
# best practice to separate headers and body into different packets
|
||||
self.s.settimeout(None)
|
||||
self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying headers")
|
||||
@@ -620,7 +637,7 @@ class HttpCli(object):
|
||||
if not kv:
|
||||
return ""
|
||||
|
||||
r = ["{}={}".format(k, quotep(zs)) if zs else k for k, zs in kv.items()]
|
||||
r = ["%s=%s" % (k, quotep(zs)) if zs else k for k, zs in kv.items()]
|
||||
return "?" + "&".join(r)
|
||||
|
||||
def redirect(
|
||||
@@ -709,7 +726,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_get(self) -> bool:
|
||||
if self.do_log:
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
logmsg = "%-4s %s @%s" % (self.mode, self.req, self.uname)
|
||||
|
||||
if "range" in self.headers:
|
||||
try:
|
||||
@@ -808,17 +825,20 @@ class HttpCli(object):
|
||||
|
||||
def handle_propfind(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("PFIND " + self.req)
|
||||
self.log("PFIND %s @%s" % (self.req, self.uname))
|
||||
|
||||
if self.args.no_dav:
|
||||
raise Pebkac(405, "WebDAV is disabled in server config")
|
||||
|
||||
if not self.can_read and not self.can_write and not self.can_get:
|
||||
if self.vpath:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
raise Pebkac(401, "authenticate")
|
||||
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False, err=401)
|
||||
tap = vn.canonical(rem)
|
||||
|
||||
self.uparam["h"] = ""
|
||||
if "davauth" in vn.flags and self.uname == "*":
|
||||
self.can_read = self.can_write = self.can_get = False
|
||||
|
||||
if not self.can_read and not self.can_write and not self.can_get:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
raise Pebkac(401, "authenticate")
|
||||
|
||||
from .dxml import parse_xml
|
||||
|
||||
@@ -862,17 +882,30 @@ class HttpCli(object):
|
||||
]
|
||||
|
||||
props = set(props_lst)
|
||||
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False, err=401)
|
||||
depth = self.headers.get("depth", "infinity").lower()
|
||||
|
||||
if depth == "infinity":
|
||||
try:
|
||||
topdir = {"vp": "", "st": bos.stat(tap)}
|
||||
except OSError as ex:
|
||||
if ex.errno not in (errno.ENOENT, errno.ENOTDIR):
|
||||
raise
|
||||
raise Pebkac(404)
|
||||
|
||||
if depth == "0" or not self.can_read or not stat.S_ISDIR(topdir["st"].st_mode):
|
||||
fgen = []
|
||||
|
||||
elif depth == "infinity":
|
||||
if not self.args.dav_inf:
|
||||
self.log("client wants --dav-inf", 3)
|
||||
zb = b'<?xml version="1.0" encoding="utf-8"?>\n<D:error xmlns:D="DAV:"><D:propfind-finite-depth/></D:error>'
|
||||
self.reply(zb, 403, "application/xml; charset=utf-8")
|
||||
return True
|
||||
|
||||
# this will return symlink-target timestamps
|
||||
# because lstat=true would not recurse into subfolders
|
||||
# and this is a rare case where we actually want that
|
||||
fgen = vn.zipgen(
|
||||
rem,
|
||||
rem,
|
||||
set(),
|
||||
self.uname,
|
||||
@@ -884,7 +917,11 @@ class HttpCli(object):
|
||||
|
||||
elif depth == "1":
|
||||
_, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, [[True, False]]
|
||||
rem,
|
||||
self.uname,
|
||||
not self.args.no_scandir,
|
||||
[[True, False]],
|
||||
lstat="davrt" not in vn.flags,
|
||||
)
|
||||
if not self.args.ed:
|
||||
names = set(exclude_dotfiles([x[0] for x in vfs_ls]))
|
||||
@@ -896,21 +933,11 @@ class HttpCli(object):
|
||||
ls += [{"vp": v, "st": zsr} for v in vfs_virt]
|
||||
fgen = ls # type: ignore
|
||||
|
||||
elif depth == "0":
|
||||
fgen = [] # type: ignore
|
||||
|
||||
else:
|
||||
t = "invalid depth value '{}' (must be either '0' or '1'{})"
|
||||
t2 = " or 'infinity'" if self.args.dav_inf else ""
|
||||
raise Pebkac(412, t.format(depth, t2))
|
||||
|
||||
try:
|
||||
topdir = {"vp": "", "st": os.stat(vn.canonical(rem))}
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.ENOENT:
|
||||
raise
|
||||
raise Pebkac(404)
|
||||
|
||||
fgen = itertools.chain([topdir], fgen) # type: ignore
|
||||
vtop = vjoin(self.args.R, vjoin(vn.vpath, rem))
|
||||
|
||||
@@ -925,14 +952,23 @@ class HttpCli(object):
|
||||
for x in fgen:
|
||||
rp = vjoin(vtop, x["vp"])
|
||||
st: os.stat_result = x["st"]
|
||||
mtime = st.st_mtime
|
||||
if stat.S_ISLNK(st.st_mode):
|
||||
try:
|
||||
st = bos.stat(os.path.join(tap, x["vp"]))
|
||||
except:
|
||||
continue
|
||||
|
||||
isdir = stat.S_ISDIR(st.st_mode)
|
||||
|
||||
t = "<D:response><D:href>/{}{}</D:href><D:propstat><D:prop>"
|
||||
ret += t.format(quotep(rp), "/" if isdir and rp else "")
|
||||
ret += "<D:response><D:href>/%s%s</D:href><D:propstat><D:prop>" % (
|
||||
quotep(rp),
|
||||
"/" if isdir and rp else "",
|
||||
)
|
||||
|
||||
pvs: dict[str, str] = {
|
||||
"displayname": html_escape(rp.split("/")[-1]),
|
||||
"getlastmodified": formatdate(st.st_mtime, usegmt=True),
|
||||
"getlastmodified": formatdate(mtime, usegmt=True),
|
||||
"resourcetype": '<D:collection xmlns:D="DAV:"/>' if isdir else "",
|
||||
"supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>',
|
||||
}
|
||||
@@ -944,13 +980,13 @@ class HttpCli(object):
|
||||
if k not in props:
|
||||
continue
|
||||
elif v:
|
||||
ret += "<D:{0}>{1}</D:{0}>".format(k, v)
|
||||
ret += "<D:%s>%s</D:%s>" % (k, v, k)
|
||||
else:
|
||||
ret += "<D:{}/>".format(k)
|
||||
ret += "<D:%s/>" % (k,)
|
||||
|
||||
ret += "</D:prop><D:status>HTTP/1.1 200 OK</D:status></D:propstat>"
|
||||
|
||||
missing = ["<D:{}/>".format(x) for x in props if x not in pvs]
|
||||
missing = ["<D:%s/>" % (x,) for x in props if x not in pvs]
|
||||
if missing and clen:
|
||||
t = "<D:propstat><D:prop>{}</D:prop><D:status>HTTP/1.1 404 Not Found</D:status></D:propstat>"
|
||||
ret += t.format("".join(missing))
|
||||
@@ -969,7 +1005,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_proppatch(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("PPATCH " + self.req)
|
||||
self.log("PPATCH %s @%s" % (self.req, self.uname))
|
||||
|
||||
if self.args.no_dav:
|
||||
raise Pebkac(405, "WebDAV is disabled in server config")
|
||||
@@ -1027,7 +1063,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_lock(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("LOCK " + self.req)
|
||||
self.log("LOCK %s @%s" % (self.req, self.uname))
|
||||
|
||||
if self.args.no_dav:
|
||||
raise Pebkac(405, "WebDAV is disabled in server config")
|
||||
@@ -1093,7 +1129,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_unlock(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("UNLOCK " + self.req)
|
||||
self.log("UNLOCK %s @%s" % (self.req, self.uname))
|
||||
|
||||
if self.args.no_dav:
|
||||
raise Pebkac(405, "WebDAV is disabled in server config")
|
||||
@@ -1110,26 +1146,24 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
if self.do_log:
|
||||
self.log("MKCOL " + self.req)
|
||||
self.log("MKCOL %s @%s" % (self.req, self.uname))
|
||||
|
||||
return self._mkdir(self.vpath)
|
||||
try:
|
||||
return self._mkdir(self.vpath, True)
|
||||
except Pebkac as ex:
|
||||
if ex.code >= 500:
|
||||
raise
|
||||
|
||||
self.reply(b"", ex.code)
|
||||
return True
|
||||
|
||||
def handle_move(self) -> bool:
|
||||
dst = self.headers["destination"]
|
||||
dst = re.sub("^https?://[^/]+", "", dst).lstrip()
|
||||
dst = unquotep(dst)
|
||||
if not self._mv(self.vpath, dst):
|
||||
if not self._mv(self.vpath, dst.lstrip("/")):
|
||||
return False
|
||||
|
||||
# up2k only cares about files and removes all empty folders;
|
||||
# clients naturally expect empty folders to survive a rename
|
||||
vn, rem = self.asrv.vfs.get(dst, self.uname, False, False)
|
||||
dabs = vn.canonical(rem)
|
||||
try:
|
||||
bos.makedirs(dabs)
|
||||
except:
|
||||
pass
|
||||
|
||||
return True
|
||||
|
||||
def _applesan(self) -> bool:
|
||||
@@ -1164,7 +1198,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_options(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("OPTIONS " + self.req)
|
||||
self.log("OPTIONS %s @%s" % (self.req, self.uname))
|
||||
|
||||
oh = self.out_headers
|
||||
oh["Allow"] = ", ".join(self.conn.hsrv.mallow)
|
||||
@@ -1179,11 +1213,11 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
def handle_delete(self) -> bool:
|
||||
self.log("DELETE " + self.req)
|
||||
self.log("DELETE %s @%s" % (self.req, self.uname))
|
||||
return self.handle_rm([])
|
||||
|
||||
def handle_put(self) -> bool:
|
||||
self.log("PUT " + self.req)
|
||||
self.log("PUT %s @%s" % (self.req, self.uname))
|
||||
|
||||
if not self.can_write:
|
||||
t = "user {} does not have write-access here"
|
||||
@@ -1194,7 +1228,6 @@ class HttpCli(object):
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.settimeout(None)
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
@@ -1202,11 +1235,10 @@ class HttpCli(object):
|
||||
return self.handle_stash(True)
|
||||
|
||||
def handle_post(self) -> bool:
|
||||
self.log("POST " + self.req)
|
||||
self.log("POST %s @%s" % (self.req, self.uname))
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.settimeout(None)
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
@@ -1426,9 +1458,9 @@ class HttpCli(object):
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
if is_put and not (self.args.no_dav or self.args.nw):
|
||||
if is_put and not (self.args.no_dav or self.args.nw) and bos.path.exists(path):
|
||||
# allow overwrite if...
|
||||
# * volflag 'daw' is set
|
||||
# * volflag 'daw' is set, or client is definitely webdav
|
||||
# * and account has delete-access
|
||||
# or...
|
||||
# * file exists, is empty, sufficiently new
|
||||
@@ -1438,9 +1470,11 @@ class HttpCli(object):
|
||||
if self.args.dotpart:
|
||||
tnam = "." + tnam
|
||||
|
||||
if (vfs.flags.get("daw") and self.can_delete) or (
|
||||
if (
|
||||
self.can_delete
|
||||
and (vfs.flags.get("daw") or "x-oc-mtime" in self.headers)
|
||||
) or (
|
||||
not bos.path.exists(os.path.join(fdir, tnam))
|
||||
and bos.path.exists(path)
|
||||
and not bos.path.getsize(path)
|
||||
and bos.path.getmtime(path) >= time.time() - self.args.blank_wt
|
||||
):
|
||||
@@ -1464,6 +1498,16 @@ class HttpCli(object):
|
||||
if self.args.nw:
|
||||
return post_sz, sha_hex, sha_b64, remains, path, ""
|
||||
|
||||
at = mt = time.time() - lifetime
|
||||
cli_mt = self.headers.get("x-oc-mtime")
|
||||
if cli_mt:
|
||||
try:
|
||||
mt = int(cli_mt)
|
||||
times = (int(time.time()), mt)
|
||||
bos.utime(path, times, False)
|
||||
except:
|
||||
pass
|
||||
|
||||
if nameless and "magic" in vfs.flags:
|
||||
try:
|
||||
ext = self.conn.hsrv.magician.ext(path)
|
||||
@@ -1486,7 +1530,6 @@ class HttpCli(object):
|
||||
fn = fn2
|
||||
path = path2
|
||||
|
||||
at = time.time() - lifetime
|
||||
if xau and not runhook(
|
||||
self.log,
|
||||
xau,
|
||||
@@ -1494,7 +1537,7 @@ class HttpCli(object):
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
at,
|
||||
mt,
|
||||
post_sz,
|
||||
self.ip,
|
||||
at,
|
||||
@@ -1554,6 +1597,11 @@ class HttpCli(object):
|
||||
t = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
|
||||
|
||||
h = {"Location": url} if is_put and url else {}
|
||||
|
||||
if "x-oc-mtime" in self.headers:
|
||||
h["X-OC-MTime"] = "accepted"
|
||||
t = "" # some webdav clients expect/prefer this
|
||||
|
||||
self.reply(t.encode("utf-8"), 201, headers=h)
|
||||
return True
|
||||
|
||||
@@ -1595,7 +1643,7 @@ class HttpCli(object):
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return "{} {} n{}".format(spd1, spd2, self.conn.nreq)
|
||||
return "%s %s n%s" % (spd1, spd2, self.conn.nreq)
|
||||
|
||||
def handle_post_multipart(self) -> bool:
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
@@ -1642,7 +1690,7 @@ class HttpCli(object):
|
||||
items = [unquotep(x) for x in items if items]
|
||||
|
||||
self.parser.drop()
|
||||
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
|
||||
return self.tx_zip(k, v, "", vn, rem, items, self.args.ed)
|
||||
|
||||
def handle_post_json(self) -> bool:
|
||||
try:
|
||||
@@ -1727,7 +1775,7 @@ class HttpCli(object):
|
||||
|
||||
def handle_search(self, body: dict[str, Any]) -> bool:
|
||||
idx = self.conn.get_u2idx()
|
||||
if not hasattr(idx, "p_end"):
|
||||
if not idx or not hasattr(idx, "p_end"):
|
||||
raise Pebkac(500, "sqlite3 is not available on the server; cannot search")
|
||||
|
||||
vols = []
|
||||
@@ -1954,7 +2002,7 @@ class HttpCli(object):
|
||||
sanitized = sanitize_fn(new_dir, "", [])
|
||||
return self._mkdir(vjoin(self.vpath, sanitized))
|
||||
|
||||
def _mkdir(self, vpath: str) -> bool:
|
||||
def _mkdir(self, vpath: str, dav: bool = False) -> bool:
|
||||
nullwrite = self.args.nw
|
||||
vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
|
||||
self._assert_safe_rem(rem)
|
||||
@@ -1980,7 +2028,12 @@ class HttpCli(object):
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
self.out_headers["X-New-Dir"] = quotep(vpath.split("/")[-1])
|
||||
self.redirect(vpath, status=201)
|
||||
|
||||
if dav:
|
||||
self.reply(b"", 201)
|
||||
else:
|
||||
self.redirect(vpath, status=201)
|
||||
|
||||
return True
|
||||
|
||||
def handle_new_md(self) -> bool:
|
||||
@@ -2524,8 +2577,12 @@ class HttpCli(object):
|
||||
hrange = self.headers.get("range")
|
||||
|
||||
# let's not support 206 with compression
|
||||
if do_send and not is_compressed and hrange and file_sz:
|
||||
# and multirange / multipart is also not-impl (mostly because calculating contentlength is a pain)
|
||||
if do_send and not is_compressed and hrange and file_sz and "," not in hrange:
|
||||
try:
|
||||
if not hrange.lower().startswith("bytes"):
|
||||
raise Exception()
|
||||
|
||||
a, b = hrange.split("=", 1)[1].split("-")
|
||||
|
||||
if a.strip():
|
||||
@@ -2620,7 +2677,14 @@ class HttpCli(object):
|
||||
return ret
|
||||
|
||||
def tx_zip(
|
||||
self, fmt: str, uarg: str, vn: VFS, rem: str, items: list[str], dots: bool
|
||||
self,
|
||||
fmt: str,
|
||||
uarg: str,
|
||||
vpath: str,
|
||||
vn: VFS,
|
||||
rem: str,
|
||||
items: list[str],
|
||||
dots: bool,
|
||||
) -> bool:
|
||||
if self.args.no_zip:
|
||||
raise Pebkac(400, "not enabled")
|
||||
@@ -2663,7 +2727,7 @@ class HttpCli(object):
|
||||
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
|
||||
|
||||
fgen = vn.zipgen(
|
||||
rem, set(items), self.uname, False, dots, not self.args.no_scandir
|
||||
vpath, rem, set(items), self.uname, dots, False, not self.args.no_scandir
|
||||
)
|
||||
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
||||
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
|
||||
@@ -2714,7 +2778,7 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
def tx_md(self, fs_path: str) -> bool:
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
logmsg = " %s @%s " % (self.req, self.uname)
|
||||
|
||||
if not self.can_write:
|
||||
if "edit" in self.uparam or "edit2" in self.uparam:
|
||||
@@ -2990,8 +3054,8 @@ class HttpCli(object):
|
||||
ret = self.gen_tree(top, dst)
|
||||
if self.is_vproxied:
|
||||
parents = self.args.R.split("/")
|
||||
for parent in parents[::-1]:
|
||||
ret = {"k{}".format(parent): ret, "a": []}
|
||||
for parent in reversed(parents):
|
||||
ret = {"k%s" % (parent,): ret, "a": []}
|
||||
|
||||
zs = json.dumps(ret)
|
||||
self.reply(zs.encode("utf-8"), mime="application/json")
|
||||
@@ -3043,7 +3107,7 @@ class HttpCli(object):
|
||||
raise Pebkac(403, "the unpost feature is disabled in server config")
|
||||
|
||||
idx = self.conn.get_u2idx()
|
||||
if not hasattr(idx, "p_end"):
|
||||
if not idx or not hasattr(idx, "p_end"):
|
||||
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
|
||||
|
||||
filt = self.uparam.get("filter")
|
||||
@@ -3129,7 +3193,9 @@ class HttpCli(object):
|
||||
nlim = int(self.uparam.get("lim") or 0)
|
||||
lim = [nlim, nlim] if nlim else []
|
||||
|
||||
x = self.conn.hsrv.broker.ask("up2k.handle_rm", self.uname, self.ip, req, lim)
|
||||
x = self.conn.hsrv.broker.ask(
|
||||
"up2k.handle_rm", self.uname, self.ip, req, lim, False
|
||||
)
|
||||
self.loud_reply(x.get())
|
||||
return True
|
||||
|
||||
@@ -3146,7 +3212,7 @@ class HttpCli(object):
|
||||
# x-www-form-urlencoded (url query part) uses
|
||||
# either + or %20 for 0x20 so handle both
|
||||
dst = unquotep(dst.replace("+", " "))
|
||||
return self._mv(self.vpath, dst)
|
||||
return self._mv(self.vpath, dst.lstrip("/"))
|
||||
|
||||
def _mv(self, vsrc: str, vdst: str) -> bool:
|
||||
if not self.can_move:
|
||||
@@ -3262,9 +3328,10 @@ class HttpCli(object):
|
||||
|
||||
is_dir = stat.S_ISDIR(st.st_mode)
|
||||
icur = None
|
||||
if e2t or (e2d and is_dir):
|
||||
if is_dir and (e2t or e2d):
|
||||
idx = self.conn.get_u2idx()
|
||||
icur = idx.get_cur(dbv.realpath)
|
||||
if idx and hasattr(idx, "p_end"):
|
||||
icur = idx.get_cur(dbv.realpath)
|
||||
|
||||
if self.can_read:
|
||||
th_fmt = self.uparam.get("th")
|
||||
@@ -3392,6 +3459,7 @@ class HttpCli(object):
|
||||
break
|
||||
|
||||
vf = vn.flags
|
||||
unlist = vf.get("unlist", "")
|
||||
ls_ret = {
|
||||
"dirs": [],
|
||||
"files": [],
|
||||
@@ -3402,6 +3470,7 @@ class HttpCli(object):
|
||||
"itag": e2t,
|
||||
"lifetime": vn.flags.get("lifetime") or 0,
|
||||
"frand": bool(vn.flags.get("rand")),
|
||||
"unlist": unlist,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
"readme": readme,
|
||||
@@ -3433,6 +3502,8 @@ class HttpCli(object):
|
||||
"readme": readme,
|
||||
"title": html_escape(self.vpath, crlf=True) or "💾🎉",
|
||||
"srv_info": srv_infot,
|
||||
"dgrid": "grid" in vf,
|
||||
"unlist": unlist,
|
||||
"dtheme": self.args.theme,
|
||||
"themes": self.args.themes,
|
||||
"turbolvl": self.args.turbo,
|
||||
@@ -3466,10 +3537,14 @@ class HttpCli(object):
|
||||
for k in ["zip", "tar"]:
|
||||
v = self.uparam.get(k)
|
||||
if v is not None:
|
||||
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||
return self.tx_zip(k, v, self.vpath, vn, rem, [], self.args.ed)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, [[True, False], [False, True]]
|
||||
rem,
|
||||
self.uname,
|
||||
not self.args.no_scandir,
|
||||
[[True, False], [False, True]],
|
||||
lstat="lt" in self.uparam,
|
||||
)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
ls_names = [x[0] for x in vfs_ls]
|
||||
@@ -3513,7 +3588,8 @@ class HttpCli(object):
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
try:
|
||||
inf = stats.get(fn) or bos.stat(fspath)
|
||||
linf = stats.get(fn) or bos.lstat(fspath)
|
||||
inf = bos.stat(fspath) if stat.S_ISLNK(linf.st_mode) else linf
|
||||
except:
|
||||
self.log("broken symlink: {}".format(repr(fspath)))
|
||||
continue
|
||||
@@ -3524,19 +3600,26 @@ class HttpCli(object):
|
||||
if self.args.no_zip:
|
||||
margin = "DIR"
|
||||
else:
|
||||
margin = '<a href="{}?zip" rel="nofollow">zip</a>'.format(
|
||||
quotep(href)
|
||||
)
|
||||
margin = '<a href="%s?zip" rel="nofollow">zip</a>' % (quotep(href),)
|
||||
elif fn in hist:
|
||||
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
||||
base, html_escape(hist[fn][2], quot=True, crlf=True), hist[fn][0]
|
||||
margin = '<a href="%s.hist/%s">#%s</a>' % (
|
||||
base,
|
||||
html_escape(hist[fn][2], quot=True, crlf=True),
|
||||
hist[fn][0],
|
||||
)
|
||||
else:
|
||||
margin = "-"
|
||||
|
||||
sz = inf.st_size
|
||||
zd = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = zd.strftime("%Y-%m-%d %H:%M:%S")
|
||||
zd = datetime.utcfromtimestamp(linf.st_mtime)
|
||||
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
||||
zd.year,
|
||||
zd.month,
|
||||
zd.day,
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
)
|
||||
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
@@ -3546,7 +3629,7 @@ class HttpCli(object):
|
||||
ext = "%"
|
||||
|
||||
if add_fk:
|
||||
href = "{}?k={}".format(
|
||||
href = "%s?k=%s" % (
|
||||
quotep(href),
|
||||
self.gen_fk(
|
||||
self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
|
||||
@@ -3562,7 +3645,7 @@ class HttpCli(object):
|
||||
"sz": sz,
|
||||
"ext": ext,
|
||||
"dt": dt,
|
||||
"ts": int(inf.st_mtime),
|
||||
"ts": int(linf.st_mtime),
|
||||
}
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
@@ -3650,7 +3733,12 @@ class HttpCli(object):
|
||||
dirs.sort(key=itemgetter("name"))
|
||||
|
||||
if is_js:
|
||||
j2a["ls0"] = {"dirs": dirs, "files": files, "taglist": taglist}
|
||||
j2a["ls0"] = {
|
||||
"dirs": dirs,
|
||||
"files": files,
|
||||
"taglist": taglist,
|
||||
"unlist": unlist,
|
||||
}
|
||||
j2a["files"] = []
|
||||
else:
|
||||
j2a["files"] = dirs + files
|
||||
|
||||
@@ -8,12 +8,6 @@ import socket
|
||||
import threading # typechk
|
||||
import time
|
||||
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
from . import util as Util
|
||||
from .__init__ import TYPE_CHECKING, EnvParams
|
||||
from .authsrv import AuthSrv # typechk
|
||||
@@ -54,7 +48,6 @@ class HttpConn(object):
|
||||
self.args: argparse.Namespace = hsrv.args # mypy404
|
||||
self.E: EnvParams = self.args.E
|
||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||
self.cert_path = hsrv.cert_path
|
||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||
self.iphash: HMaccas = hsrv.broker.iphash
|
||||
self.bans: dict[str, int] = hsrv.bans
|
||||
@@ -103,17 +96,18 @@ class HttpConn(object):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def get_u2idx(self) -> U2idx:
|
||||
# one u2idx per tcp connection;
|
||||
def get_u2idx(self) -> Optional[U2idx]:
|
||||
# grab from a pool of u2idx instances;
|
||||
# sqlite3 fully parallelizes under python threads
|
||||
# but avoid running out of FDs by creating too many
|
||||
if not self.u2idx:
|
||||
self.u2idx = U2idx(self)
|
||||
self.u2idx = self.hsrv.get_u2idx(str(self.addr))
|
||||
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self) -> bool:
|
||||
method = None
|
||||
if self.cert_path:
|
||||
if True:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
@@ -147,7 +141,7 @@ class HttpConn(object):
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
elif self.args.http_only:
|
||||
is_https = False
|
||||
else:
|
||||
# raise Exception("asdf")
|
||||
@@ -161,7 +155,7 @@ class HttpConn(object):
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.cert_path)
|
||||
ctx.load_cert_chain(self.args.cert)
|
||||
if self.args.ssl_ver:
|
||||
ctx.options &= ~self.args.ssl_flags_en
|
||||
ctx.options |= self.args.ssl_flags_de
|
||||
@@ -215,3 +209,7 @@ class HttpConn(object):
|
||||
self.cli = HttpCli(self)
|
||||
if not self.cli.run():
|
||||
return
|
||||
|
||||
if self.u2idx:
|
||||
self.hsrv.put_u2idx(str(self.addr), self.u2idx)
|
||||
self.u2idx = None
|
||||
|
||||
@@ -11,7 +11,7 @@ import time
|
||||
|
||||
import queue
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
|
||||
|
||||
try:
|
||||
MNFE = ModuleNotFoundError
|
||||
@@ -33,13 +33,30 @@ except MNFE:
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
sys.executable
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
except SyntaxError:
|
||||
if EXE:
|
||||
raise
|
||||
|
||||
print(
|
||||
"""\033[1;31m
|
||||
your jinja2 version is incompatible with your python version;\033[33m
|
||||
please try to replace it with an older version:\033[0m
|
||||
* {} -m pip install --user jinja2==2.11.3
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
sys.executable
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .u2idx import U2idx
|
||||
from .util import (
|
||||
E_SCK,
|
||||
FHC,
|
||||
@@ -111,6 +128,9 @@ class HttpSrv(object):
|
||||
self.cb_ts = 0.0
|
||||
self.cb_v = ""
|
||||
|
||||
self.u2idx_free: dict[str, U2idx] = {}
|
||||
self.u2idx_n = 0
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
@@ -128,12 +148,6 @@ class HttpSrv(object):
|
||||
|
||||
self.ssdp = SSDPr(broker)
|
||||
|
||||
cert_path = os.path.join(self.E.cfg, "cert.pem")
|
||||
if bos.path.exists(cert_path):
|
||||
self.cert_path = cert_path
|
||||
else:
|
||||
self.cert_path = ""
|
||||
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
|
||||
@@ -445,6 +459,9 @@ class HttpSrv(object):
|
||||
self.clients.remove(cli)
|
||||
self.ncli -= 1
|
||||
|
||||
if cli.u2idx:
|
||||
self.put_u2idx(str(addr), cli.u2idx)
|
||||
|
||||
def cachebuster(self) -> str:
|
||||
if time.time() - self.cb_ts < 1:
|
||||
return self.cb_v
|
||||
@@ -466,3 +483,31 @@ class HttpSrv(object):
|
||||
self.cb_v = v.decode("ascii")[-4:]
|
||||
self.cb_ts = time.time()
|
||||
return self.cb_v
|
||||
|
||||
def get_u2idx(self, ident: str) -> Optional[U2idx]:
|
||||
utab = self.u2idx_free
|
||||
for _ in range(100): # 5/0.05 = 5sec
|
||||
with self.mutex:
|
||||
if utab:
|
||||
if ident in utab:
|
||||
return utab.pop(ident)
|
||||
|
||||
return utab.pop(list(utab.keys())[0])
|
||||
|
||||
if self.u2idx_n < CORES:
|
||||
self.u2idx_n += 1
|
||||
return U2idx(self)
|
||||
|
||||
time.sleep(0.05)
|
||||
# not using conditional waits, on a hunch that
|
||||
# average performance will be faster like this
|
||||
# since most servers won't be fully saturated
|
||||
|
||||
return None
|
||||
|
||||
def put_u2idx(self, ident: str, u2idx: U2idx) -> None:
|
||||
with self.mutex:
|
||||
while ident in self.u2idx_free:
|
||||
ident += "a"
|
||||
|
||||
self.u2idx_free[ident] = u2idx
|
||||
|
||||
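The new get_u2idx/put_u2idx pair above turns the per-connection search index into a small shared pool: at most CORES instances ever exist, a client preferably gets back the instance it used last, and a caller that finds the pool saturated gives up after roughly five seconds instead of blocking. A self-contained sketch of that borrow/return pattern, with a hypothetical IdxPool class and simplified types (not the actual copyparty API):

    import threading
    import time

    CORES = 4  # stand-in for the detected core count


    class IdxPool(object):
        def __init__(self) -> None:
            self.mutex = threading.Lock()
            self.free = {}  # ident -> idle instance
            self.n = 0      # instances created so far

        def get(self, ident):
            for _ in range(100):  # 100 * 0.05s = 5s upper bound
                with self.mutex:
                    if self.free:
                        if ident in self.free:
                            return self.free.pop(ident)  # reuse "our" instance
                        return self.free.pop(next(iter(self.free)))
                    if self.n < CORES:
                        self.n += 1
                        return object()  # stand-in for constructing a new U2idx
                time.sleep(0.05)
            return None  # pool saturated; caller degrades gracefully

        def put(self, ident, idx) -> None:
            with self.mutex:
                while ident in self.free:  # don't clobber another returned handle
                    ident += "a"
                self.free[ident] = idx


    pool = IdxPool()
    h = pool.get("127.0.0.1:50000")
    try:
        pass  # ... run the search against h here ...
    finally:
        if h:
            pool.put("127.0.0.1:50000", h)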
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import errno
|
||||
import random
|
||||
import select
|
||||
import socket
|
||||
@@ -277,6 +278,18 @@ class MDNS(MCast):
|
||||
zf = time.time() + 2
|
||||
self.probing = zf # cant unicast so give everyone an extra sec
|
||||
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3
|
||||
|
||||
try:
|
||||
self.run2()
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.EBADF:
|
||||
raise
|
||||
|
||||
self.log("stopping due to {}".format(ex), "90")
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def run2(self) -> None:
|
||||
last_hop = time.time()
|
||||
ihop = self.args.mc_hop
|
||||
while self.running:
|
||||
@@ -314,8 +327,6 @@ class MDNS(MCast):
|
||||
self.log(t.format(self.hn[:-1]), 2)
|
||||
self.probing = 0
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def stop(self, panic=False) -> None:
|
||||
self.running = False
|
||||
for srv in self.srv.values():
|
||||
|
||||
0  copyparty/res/__init__.py  (new file)
@@ -261,7 +261,7 @@ class SMB(object):
|
||||
yeet("blocked delete (no-del-acc): " + vpath)
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [])
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
|
||||
|
||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||
if not self.args.smbw:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import errno
|
||||
import re
|
||||
import select
|
||||
import socket
|
||||
@@ -129,6 +130,17 @@ class SSDPd(MCast):
|
||||
srv.hport = hp
|
||||
|
||||
self.log("listening")
|
||||
try:
|
||||
self.run2()
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.EBADF:
|
||||
raise
|
||||
|
||||
self.log("stopping due to {}".format(ex), "90")
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def run2(self) -> None:
|
||||
while self.running:
|
||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
@@ -148,8 +160,6 @@ class SSDPd(MCast):
|
||||
)
|
||||
self.log(t, 6)
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def stop(self) -> None:
|
||||
self.running = False
|
||||
for srv in self.srv.values():
|
||||
|
||||
@@ -28,7 +28,7 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
@@ -80,6 +80,7 @@ class SvcHub(object):
|
||||
self.dargs = dargs
|
||||
self.argv = argv
|
||||
self.E: EnvParams = args.E
|
||||
self.no_ansi = args.no_ansi
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.stop_req = False
|
||||
@@ -128,6 +129,9 @@ class SvcHub(object):
|
||||
args.no_robots = True
|
||||
args.force_js = True
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception("bad config")
|
||||
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
self._setup_logfile(printed)
|
||||
@@ -177,9 +181,6 @@ class SvcHub(object):
|
||||
|
||||
self.log("root", "max clients: {}".format(self.args.nc))
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception("bad config")
|
||||
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
@@ -350,6 +351,19 @@ class SvcHub(object):
|
||||
|
||||
al.th_covers = set(al.th_covers.split(","))
|
||||
|
||||
for k in "c".split(" "):
|
||||
vl = getattr(al, k)
|
||||
if not vl:
|
||||
continue
|
||||
|
||||
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
|
||||
setattr(al, k, vl)
|
||||
|
||||
for k in "lo hist ssl_log".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if vs and vs.startswith("~"):
|
||||
setattr(al, k, os.path.expanduser(vs))
|
||||
|
||||
return True
|
||||
|
||||
def _setlimits(self) -> None:
|
||||
@@ -634,8 +648,14 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg))
|
||||
zd = datetime.utcnow()
|
||||
ts = "%04d-%04d-%06d.%03d" % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // 1000,
|
||||
)
|
||||
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
@@ -662,26 +682,36 @@ class SvcHub(object):
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
self._set_next_day()
|
||||
if self.logf:
|
||||
self.logf.write(zs)
|
||||
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}\n"
|
||||
fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
|
||||
if self.no_ansi:
|
||||
fmt = "%s %-21s %s\n"
|
||||
if "\033" in msg:
|
||||
msg = ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}\033[0m".format(c, msg)
|
||||
msg = "\033[3%sm%s\033[0m" % (c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
msg = "\033[%sm%s\033[0m" % (c, msg)
|
||||
else:
|
||||
msg = "{}{}\033[0m".format(c, msg)
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
msg = fmt.format(ts, src, msg)
|
||||
zd = datetime.utcfromtimestamp(now)
|
||||
ts = "%02d:%02d:%02d.%03d" % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // 1000,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
|
||||
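The logging hunks above swap strftime and str.format for plain %-formatting when building timestamps. A small harness to compare the two approaches on your own machine; the numbers are platform-dependent, so this is only a way to measure, not a claimed result:

    import timeit
    from datetime import datetime

    zd = datetime.utcfromtimestamp(1234567890.123456)

    def with_strftime():
        return zd.strftime("%H:%M:%S.%f")[:-3]

    def with_percent():
        return "%02d:%02d:%02d.%03d" % (
            zd.hour, zd.minute, zd.second, zd.microsecond // 1000)

    assert with_strftime() == with_percent()  # same output either way
    print("strftime:", timeit.timeit(with_strftime, number=200000))
    print("percent: ", timeit.timeit(with_percent, number=200000))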
@@ -7,8 +7,9 @@ import socket
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||
from .stolen.qrcodegen import QrCode
|
||||
from .cert import gencert
|
||||
from .util import (
|
||||
E_ACCESS,
|
||||
E_ADDR_IN_USE,
|
||||
@@ -295,6 +296,7 @@ class TcpSrv(object):
|
||||
def _distribute_netdevs(self):
|
||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
@@ -322,7 +324,7 @@ class TcpSrv(object):
|
||||
if k not in netdevs:
|
||||
removed = "{} = {}".format(k, v)
|
||||
|
||||
t = "network change detected:\n added {}\nremoved {}"
|
||||
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
|
||||
self.log("tcpsrv", t.format(added, removed), 3)
|
||||
self.netdevs = netdevs
|
||||
self._distribute_netdevs()
|
||||
@@ -501,7 +503,7 @@ class TcpSrv(object):
|
||||
zoom = 1
|
||||
|
||||
qr = qrc.render(zoom, pad)
|
||||
if not VT100:
|
||||
if self.args.no_ansi:
|
||||
return "{}\n{}".format(txt, qr)
|
||||
|
||||
halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m"
|
||||
|
||||
@@ -16,10 +16,10 @@ from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
BytesIO,
|
||||
Cooldown,
|
||||
Daemon,
|
||||
FFMPEG_URL,
|
||||
Pebkac,
|
||||
afsenc,
|
||||
fsenc,
|
||||
@@ -274,6 +274,10 @@ class ThumbSrv(object):
|
||||
|
||||
tdir, tfn = os.path.split(tpath)
|
||||
ttpath = os.path.join(tdir, "w", tfn)
|
||||
try:
|
||||
bos.unlink(ttpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
for fun in funs:
|
||||
try:
|
||||
@@ -561,13 +565,24 @@ class ThumbSrv(object):
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
try:
|
||||
dur = ret[".dur"][1]
|
||||
except:
|
||||
dur = 0
|
||||
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
|
||||
tmp_opus = tpath + ".opus"
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
except:
|
||||
pass
|
||||
|
||||
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
|
||||
caf_src = abspath if src_opus else tmp_opus
|
||||
|
||||
if not want_caf or not src_opus:
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -584,7 +599,32 @@ class ThumbSrv(object):
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
if want_caf:
|
||||
# iOS fails to play some "insufficiently complex" files
|
||||
# (average file shorter than 8 seconds), so of course we
|
||||
# fix that by mixing in some inaudible pink noise :^)
|
||||
# 6.3 sec seems like the cutoff so lets do 7, and
|
||||
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
|
||||
if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-filter_complex", b"anoisesrc=a=0.001:d=7:c=pink,asplit[l][r]; [l][r]amerge[s]; [0:a:0][s]amix",
|
||||
b"-map_metadata", b"-1",
|
||||
b"-ac", b"2",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
elif want_caf:
|
||||
# simple remux should be safe
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -601,6 +641,12 @@ class ThumbSrv(object):
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
if tmp_opus != tpath:
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
except:
|
||||
pass
|
||||
|
||||
def poke(self, tdir: str) -> None:
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
|
||||
@@ -34,14 +34,14 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpconn import HttpConn
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, conn: "HttpConn") -> None:
|
||||
self.log_func = conn.log_func
|
||||
self.asrv = conn.asrv
|
||||
self.args = conn.args
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.log_func = hsrv.log
|
||||
self.asrv = hsrv.asrv
|
||||
self.args = hsrv.args
|
||||
self.timeout = self.args.srch_time
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -51,7 +51,7 @@ class U2idx(object):
|
||||
self.active_id = ""
|
||||
self.active_cur: Optional["sqlite3.Cursor"] = None
|
||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = 0.0
|
||||
@@ -101,7 +101,8 @@ class U2idx(object):
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
cur = sqlite3.connect(uri, 2, uri=True).cursor()
|
||||
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("up")').fetchone()
|
||||
self.log("ro: {}".format(db_path))
|
||||
except:
|
||||
@@ -112,7 +113,7 @@ class U2idx(object):
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
|
||||
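All three sqlite3.connect calls above gain check_same_thread=False because the cursors are now pooled and may be handed to a different worker thread than the one that created them; by default the sqlite3 module raises ProgrammingError in that case. A minimal illustration (in the real code the pool's mutex is what keeps this safe; here the threads simply run one after another):

    import sqlite3
    import threading

    db = sqlite3.connect(":memory:", check_same_thread=False)
    db.execute("create table t (x int)")

    def worker():
        # allowed only because of check_same_thread=False;
        # without the flag, sqlite3 raises ProgrammingError here
        db.execute("insert into t values (1)")

    t = threading.Thread(target=worker)
    t.start()
    t.join()
    print(db.execute("select count(*) from t").fetchone()[0])  # 1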
@@ -73,8 +73,8 @@ if True: # pylint: disable=using-constant-test
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
zs = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
|
||||
CV_EXTS = set(zs.split(","))
|
||||
zsg = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
|
||||
CV_EXTS = set(zsg.split(","))
|
||||
|
||||
|
||||
class Dbw(object):
|
||||
@@ -380,11 +380,11 @@ class Up2k(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
fvp = "{}/{}".format(rd, fn).strip("/")
|
||||
fvp = ("%s/%s" % (rd, fn)).strip("/")
|
||||
if vp:
|
||||
fvp = "{}/{}".format(vp, fvp)
|
||||
fvp = "%s/%s" % (vp, fvp)
|
||||
|
||||
self._handle_rm(LEELOO_DALLAS, "", fvp, [])
|
||||
self._handle_rm(LEELOO_DALLAS, "", fvp, [], True)
|
||||
nrm += 1
|
||||
|
||||
if nrm:
|
||||
@@ -1032,7 +1032,7 @@ class Up2k(object):
|
||||
zh.update(cv.encode("utf-8", "replace"))
|
||||
zh.update(spack(b"<d", cst.st_mtime))
|
||||
dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
|
||||
sql = "select d from dh where d = ? and h = ?"
|
||||
sql = "select d from dh where d=? and +h=?"
|
||||
try:
|
||||
c = db.c.execute(sql, (rd, dhash))
|
||||
drd = rd
|
||||
@@ -1316,9 +1316,9 @@ class Up2k(object):
|
||||
|
||||
w, drd, dfn = zb[:-1].decode("utf-8").split("\x00")
|
||||
with self.mutex:
|
||||
q = "select mt, sz from up where w = ? and rd = ? and fn = ?"
|
||||
q = "select mt, sz from up where rd=? and fn=? and +w=?"
|
||||
try:
|
||||
mt, sz = cur.execute(q, (w, drd, dfn)).fetchone()
|
||||
mt, sz = cur.execute(q, (drd, dfn, w)).fetchone()
|
||||
except:
|
||||
# file moved/deleted since spooling
|
||||
continue
|
||||
@@ -2223,7 +2223,7 @@ class Up2k(object):
|
||||
q = r"select * from up where w = ?"
|
||||
argv = [wark]
|
||||
else:
|
||||
q = r"select * from up where substr(w,1,16) = ? and w = ?"
|
||||
q = r"select * from up where substr(w,1,16)=? and +w=?"
|
||||
argv = [wark[:16], wark]
|
||||
|
||||
c2 = cur.execute(q, tuple(argv))
|
||||
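The rewritten queries above do two things: the columns that should drive the lookup (rd/fn, or the 16-character wark prefix) come first, and the remaining equality gets a unary plus (+w=?, +h=?). In SQLite, prefixing a column with + keeps that term from being used as an index constraint, which steers the planner toward the intended index, presumably the one on (rd, fn) or on substr(w,1,16) rather than one on the full column. A quick way to see the effect, using a simplified table and a made-up index:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table up (w text, rd text, fn text)")
    db.execute("create index up_w on up (w)")

    for q in (
        "select * from up where w = ?",    # can use the index on w
        "select * from up where +w = ?",   # unary + hides w from the planner -> full scan
    ):
        plan = db.execute("explain query plan " + q, ("x",)).fetchall()
        print(q, "->", plan[-1][-1])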
@@ -2564,7 +2564,7 @@ class Up2k(object):
|
||||
|
||||
try:
|
||||
if "hardlink" in flags:
|
||||
os.link(fsenc(src), fsenc(dst))
|
||||
os.link(fsenc(absreal(src)), fsenc(dst))
|
||||
linked = True
|
||||
except Exception as ex:
|
||||
self.log("cannot hardlink: " + repr(ex))
|
||||
@@ -2897,7 +2897,9 @@ class Up2k(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def handle_rm(self, uname: str, ip: str, vpaths: list[str], lim: list[int]) -> str:
|
||||
def handle_rm(
|
||||
self, uname: str, ip: str, vpaths: list[str], lim: list[int], rm_up: bool
|
||||
) -> str:
|
||||
n_files = 0
|
||||
ok = {}
|
||||
ng = {}
|
||||
@@ -2906,7 +2908,7 @@ class Up2k(object):
|
||||
self.log("hit delete limit of {} files".format(lim[1]), 3)
|
||||
break
|
||||
|
||||
a, b, c = self._handle_rm(uname, ip, vp, lim)
|
||||
a, b, c = self._handle_rm(uname, ip, vp, lim, rm_up)
|
||||
n_files += a
|
||||
for k in b:
|
||||
ok[k] = 1
|
||||
@@ -2920,7 +2922,7 @@ class Up2k(object):
|
||||
return "deleted {} files (and {}/{} folders)".format(n_files, iok, iok + ing)
|
||||
|
||||
def _handle_rm(
|
||||
self, uname: str, ip: str, vpath: str, lim: list[int]
|
||||
self, uname: str, ip: str, vpath: str, lim: list[int], rm_up: bool
|
||||
) -> tuple[int, list[str], list[str]]:
|
||||
self.db_act = time.time()
|
||||
try:
|
||||
@@ -3027,16 +3029,22 @@ class Up2k(object):
|
||||
if xad:
|
||||
runhook(self.log, xad, abspath, vpath, "", uname, 0, 0, ip, 0, "")
|
||||
|
||||
ok: list[str] = []
|
||||
ng: list[str] = []
|
||||
if is_dir:
|
||||
ok, ng = rmdirs(self.log_func, scandir, True, atop, 1)
|
||||
else:
|
||||
ok = ng = []
|
||||
|
||||
ok2, ng2 = rmdirs_up(os.path.dirname(atop), ptop)
|
||||
if rm_up:
|
||||
ok2, ng2 = rmdirs_up(os.path.dirname(atop), ptop)
|
||||
else:
|
||||
ok2 = ng2 = []
|
||||
|
||||
return n_files, ok + ok2, ng + ng2
|
||||
|
||||
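The new rm_up flag decides whether, after a delete, empty parent directories should also be pruned; the SMB and filesystem-scan callers above now pass an explicit value. A rough stand-in for what that upward pruning looks like, under the assumption that rmdirs_up walks from the deleted entry toward the volume root and stops at the first non-empty directory (simplified, not the real implementation):

    import os

    def prune_up(path: str, top: str) -> list:
        """remove now-empty parent dirs of `path`, never going above `top`"""
        removed = []
        path = os.path.abspath(path)
        top = os.path.abspath(top)
        while path.startswith(top + os.sep):
            try:
                os.rmdir(path)  # raises OSError if non-empty; that is the stop signal
            except OSError:
                break
            removed.append(path)
            path = os.path.dirname(path)
        return removed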
def handle_mv(self, uname: str, svp: str, dvp: str) -> str:
|
||||
if svp == dvp or dvp.startswith(svp + "/"):
|
||||
raise Pebkac(400, "mv: cannot move parent into subfolder")
|
||||
|
||||
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
|
||||
svn, srem = svn.get_dbv(srem)
|
||||
sabs = svn.canonical(srem, False)
|
||||
@@ -3090,8 +3098,21 @@ class Up2k(object):
|
||||
|
||||
curs.clear()
|
||||
|
||||
rmdirs(self.log_func, scandir, True, sabs, 1)
|
||||
rmdirs_up(os.path.dirname(sabs), svn.realpath)
|
||||
rm_ok, rm_ng = rmdirs(self.log_func, scandir, True, sabs, 1)
|
||||
|
||||
for zsl in (rm_ok, rm_ng):
|
||||
for ap in reversed(zsl):
|
||||
if not ap.startswith(sabs):
|
||||
raise Pebkac(500, "mv_d: bug at {}, top {}".format(ap, sabs))
|
||||
|
||||
rem = ap[len(sabs) :].replace(os.sep, "/").lstrip("/")
|
||||
vp = vjoin(dvp, rem)
|
||||
try:
|
||||
dvn, drem = self.asrv.vfs.get(vp, uname, False, True)
|
||||
bos.mkdir(dvn.canonical(drem))
|
||||
except:
|
||||
pass
|
||||
|
||||
return "k"
|
||||
|
||||
def _mv_file(
|
||||
@@ -3300,9 +3321,16 @@ class Up2k(object):
|
||||
"""
|
||||
dupes = []
|
||||
sabs = djoin(sptop, srem)
|
||||
q = "select rd, fn from up where substr(w,1,16)=? and w=?"
|
||||
|
||||
if self.no_expr_idx:
|
||||
q = r"select rd, fn from up where w = ?"
|
||||
argv = (wark,)
|
||||
else:
|
||||
q = r"select rd, fn from up where substr(w,1,16)=? and +w=?"
|
||||
argv = (wark[:16], wark)
|
||||
|
||||
for ptop, cur in self.cur.items():
|
||||
for rd, fn in cur.execute(q, (wark[:16], wark)):
|
||||
for rd, fn in cur.execute(q, argv):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
|
||||
@@ -296,11 +296,11 @@ REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
pybin = sys.executable or ""
|
||||
if EXE:
|
||||
pybin = ""
|
||||
for p in "python3 python".split():
|
||||
for zsg in "python3 python".split():
|
||||
try:
|
||||
p = shutil.which(p)
|
||||
if p:
|
||||
pybin = p
|
||||
zsg = shutil.which(zsg)
|
||||
if zsg:
|
||||
pybin = zsg
|
||||
break
|
||||
except:
|
||||
pass
|
||||
@@ -537,7 +537,7 @@ class _Unrecv(object):
|
||||
self.log = log
|
||||
self.buf: bytes = b""
|
||||
|
||||
def recv(self, nbytes: int) -> bytes:
|
||||
def recv(self, nbytes: int, spins: int = 1) -> bytes:
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
@@ -548,6 +548,10 @@ class _Unrecv(object):
|
||||
ret = self.s.recv(nbytes)
|
||||
break
|
||||
except socket.timeout:
|
||||
spins -= 1
|
||||
if spins <= 0:
|
||||
ret = b""
|
||||
break
|
||||
continue
|
||||
except:
|
||||
ret = b""
|
||||
@@ -590,7 +594,7 @@ class _LUnrecv(object):
|
||||
self.log = log
|
||||
self.buf = b""
|
||||
|
||||
def recv(self, nbytes: int) -> bytes:
|
||||
def recv(self, nbytes: int, spins: int) -> bytes:
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
@@ -609,7 +613,7 @@ class _LUnrecv(object):
|
||||
def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
|
||||
"""read an exact number of bytes"""
|
||||
try:
|
||||
ret = self.recv(nbytes)
|
||||
ret = self.recv(nbytes, 1)
|
||||
err = False
|
||||
except:
|
||||
ret = b""
|
||||
@@ -617,7 +621,7 @@ class _LUnrecv(object):
|
||||
|
||||
while not err and len(ret) < nbytes:
|
||||
try:
|
||||
ret += self.recv(nbytes - len(ret))
|
||||
ret += self.recv(nbytes - len(ret), 1)
|
||||
except OSError:
|
||||
err = True
|
||||
|
||||
@@ -1292,7 +1296,7 @@ class MultipartParser(object):
|
||||
rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
|
||||
(only the fallback non-js uploader relies on these filenames)
|
||||
"""
|
||||
for ln in read_header(self.sr):
|
||||
for ln in read_header(self.sr, 2, 2592000):
|
||||
self.log(ln)
|
||||
|
||||
m = self.re_ctype.match(ln)
|
||||
@@ -1492,15 +1496,15 @@ def get_boundary(headers: dict[str, str]) -> str:
|
||||
return m.group(2)
|
||||
|
||||
|
||||
def read_header(sr: Unrecv) -> list[str]:
|
||||
def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
||||
t0 = time.time()
|
||||
ret = b""
|
||||
while True:
|
||||
if time.time() - t0 > 120:
|
||||
if time.time() - t0 >= t_tot:
|
||||
return []
|
||||
|
||||
try:
|
||||
ret += sr.recv(1024)
|
||||
ret += sr.recv(1024, t_idle // 2)
|
||||
except:
|
||||
if not ret:
|
||||
return []
|
||||
@@ -1549,7 +1553,7 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
|
||||
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
return base64.urlsafe_b64encode(
|
||||
hashlib.sha512(
|
||||
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
|
||||
("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
|
||||
).digest()
|
||||
).decode("ascii")
|
||||
|
||||
@@ -1589,7 +1593,7 @@ def gen_filekey_dbg(
|
||||
|
||||
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
||||
v = v.replace(";", "")
|
||||
v = v.replace("%", "%25").replace(";", "%3B")
|
||||
if dur:
|
||||
exp = formatdate(time.time() + dur, usegmt=True)
|
||||
else:
|
||||
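gencookie now percent-encodes % before ; instead of silently dropping semicolons, which keeps cookie values reversible. A tiny round-trip check of that encoding order (the inverse here is simplified; the real unescape_cookie decodes arbitrary %XX sequences):

    def esc_cookie(v: str) -> str:
        # '%' must be escaped first, otherwise the '%' introduced by '%3B'
        # would itself get double-encoded
        return v.replace("%", "%25").replace(";", "%3B")

    def unesc_cookie(v: str) -> str:
        # simplified inverse, assuming only these two escapes are present
        return v.replace("%3B", ";").replace("%25", "%")

    for s in ("plain", "a;b", "50%;off", "x%3By"):
        assert unesc_cookie(esc_cookie(s)) == s
    print("ok")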
@@ -1658,7 +1662,7 @@ def uncyg(path: str) -> str:
|
||||
if len(path) > 2 and path[2] != "/":
|
||||
return path
|
||||
|
||||
return "{}:\\{}".format(path[1], path[3:])
|
||||
return "%s:\\%s" % (path[1], path[3:])
|
||||
|
||||
|
||||
def undot(path: str) -> str:
|
||||
@@ -1701,7 +1705,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
|
||||
|
||||
bad = ["con", "prn", "aux", "nul"]
|
||||
for n in range(1, 10):
|
||||
bad += "com{0} lpt{0}".format(n).split(" ")
|
||||
bad += ("com%s lpt%s" % (n, n)).split(" ")
|
||||
|
||||
if fn.lower().split(".")[0] in bad:
|
||||
fn = "_" + fn
|
||||
@@ -2015,6 +2019,8 @@ def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> Non
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
log("shut({}): {}".format(fd, ex), "90")
|
||||
finally:
|
||||
td = time.time() - t0
|
||||
if td >= 1:
|
||||
@@ -2266,7 +2272,7 @@ def rmdirs(
|
||||
dirs = [os.path.join(top, x) for x in dirs]
|
||||
ok = []
|
||||
ng = []
|
||||
for d in dirs[::-1]:
|
||||
for d in reversed(dirs):
|
||||
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||
ok += a
|
||||
ng += b
|
||||
@@ -2316,7 +2322,7 @@ def unescape_cookie(orig: str) -> str:
|
||||
ret += chr(int(esc[1:], 16))
|
||||
except:
|
||||
ret += esc
|
||||
esc = ""
|
||||
esc = ""
|
||||
|
||||
else:
|
||||
ret += ch
|
||||
|
||||
@@ -6,7 +6,7 @@ pk: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
un: $(addsuffix .un, $(wildcard *.gz))
|
||||
|
||||
%.gz: %
|
||||
pigz -11 -J 34 -I 5730 $<
|
||||
pigz -11 -J 34 -I 573 $<
|
||||
|
||||
%.un: %
|
||||
pigz -d $<
|
||||
|
||||
1  copyparty/web/a/u2c.py  (symbolic link)
@@ -0,0 +1 @@
|
||||
../../../bin/u2c.py
|
||||
@@ -1 +0,0 @@
|
||||
../../../bin/up2k.py
|
||||
@@ -27,8 +27,8 @@ window.baguetteBox = (function () {
|
||||
isOverlayVisible = false,
|
||||
touch = {}, // start-pos
|
||||
touchFlag = false, // busy
|
||||
re_i = /.+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||
re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
|
||||
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
|
||||
anims = ['slideIn', 'fadeIn', 'none'],
|
||||
data = {}, // all galleries
|
||||
imagesElements = [],
|
||||
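The tightened re_i/re_v patterns only consider the path part of the URL (^[^?]+), so an image-like extension inside the query string no longer drags a text file into the gallery. The same effect reproduced in Python for a quick check; the character classes and alternation are identical in both regex dialects:

    import re

    exts = r"(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)"
    old_re = re.compile(r".+\." + exts + r"(\?|$)", re.I)
    new_re = re.compile(r"^[^?]+\." + exts + r"(\?|$)", re.I)

    url = "/notes/readme.txt?cache=no.png"
    print(bool(old_re.search(url)), bool(new_re.search(url)))  # True False
    print(bool(new_re.search("/pics/cat.jpg?k=abc")))          # True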
@@ -580,6 +580,7 @@ window.baguetteBox = (function () {
|
||||
function hideOverlay(e) {
|
||||
ev(e);
|
||||
playvid(false);
|
||||
removeFromCache('#files');
|
||||
if (options.noScrollbars) {
|
||||
document.documentElement.style.overflowY = 'auto';
|
||||
document.body.style.overflowY = 'auto';
|
||||
@@ -812,10 +813,16 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function vid() {
|
||||
if (currentIndex >= imagesElements.length)
|
||||
return;
|
||||
|
||||
return imagesElements[currentIndex].querySelector('video');
|
||||
}
|
||||
|
||||
function vidimg() {
|
||||
if (currentIndex >= imagesElements.length)
|
||||
return;
|
||||
|
||||
return imagesElements[currentIndex].querySelector('img, video');
|
||||
}
|
||||
|
||||
|
||||
@@ -1074,6 +1074,9 @@ html.np_open #ggrid>a.au:before {
|
||||
background: var(--bg-d3);
|
||||
box-shadow: -.2em .2em 0 var(--f-sel-sh), -.2em -.2em 0 var(--f-sel-sh);
|
||||
}
|
||||
#player {
|
||||
display: none;
|
||||
}
|
||||
#widget {
|
||||
position: fixed;
|
||||
font-size: 1.4em;
|
||||
@@ -1156,10 +1159,10 @@ html.y #widget.open {
|
||||
background: #fff;
|
||||
background: var(--bg-u3);
|
||||
}
|
||||
#wfm, #wzip, #wnp {
|
||||
#wfs, #wfm, #wzip, #wnp {
|
||||
display: none;
|
||||
}
|
||||
#wzip, #wnp {
|
||||
#wfs, #wzip, #wnp {
|
||||
margin-right: .2em;
|
||||
padding-right: .2em;
|
||||
border: 1px solid var(--bg-u5);
|
||||
@@ -1171,6 +1174,7 @@ html.y #widget.open {
|
||||
padding-left: .2em;
|
||||
border-left-width: .1em;
|
||||
}
|
||||
#wfs.act,
|
||||
#wfm.act {
|
||||
display: inline-block;
|
||||
}
|
||||
@@ -1194,6 +1198,13 @@ html.y #widget.open {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
#wfs {
|
||||
font-size: .36em;
|
||||
text-align: right;
|
||||
line-height: 1.3em;
|
||||
padding: 0 .3em 0 0;
|
||||
border-width: 0 .25em 0 0;
|
||||
}
|
||||
#wfm span,
|
||||
#wnp span {
|
||||
font-size: .6em;
|
||||
@@ -1740,6 +1751,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
display: none;
|
||||
}
|
||||
.ghead {
|
||||
background: var(--bg-u2);
|
||||
border-radius: .3em;
|
||||
padding: .2em .5em;
|
||||
line-height: 2.3em;
|
||||
@@ -2936,6 +2948,7 @@ html.b #treepar {
|
||||
html.b #wrap {
|
||||
margin-top: 2em;
|
||||
}
|
||||
html.by .ghead,
|
||||
html.bz .ghead {
|
||||
background: var(--bg);
|
||||
padding: .2em 0;
|
||||
|
||||
@@ -138,6 +138,7 @@
|
||||
TS = "{{ ts }}",
|
||||
acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
dgrid = {{ dgrid|tojson }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
|
||||
@@ -261,6 +261,7 @@ var Ls = {
|
||||
"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
|
||||
"mm_e5xx": "Could not play audio; server error ",
|
||||
"mm_nof": "not finding any more audio files nearby",
|
||||
"mm_pwrsv": "<p>it looks like playback is being interrupted by your phone's power-saving settings!</p>" + '<p>please go to <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">the app settings of your browser</a> and then <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">allow unrestricted battery usage</a> to fix it.</p><p>(probably a good idea to use a separate browser dedicated for just music streaming...)</p>',
|
||||
"mm_hnf": "that song no longer exists",
|
||||
|
||||
"im_hnf": "that image no longer exists",
|
||||
@@ -721,6 +722,7 @@ var Ls = {
|
||||
"mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.",
|
||||
"mm_e5xx": "Avspilling feilet: ",
|
||||
"mm_nof": "finner ikke flere sanger i nærheten",
|
||||
"mm_pwrsv": "<p>det ser ut som musikken ble avbrutt av telefonen sine strømsparings-innstillinger!</p>" + '<p>ta en tur innom <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png">app-innstillingene til nettleseren din</a> og så <a target="_blank" href="https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png">tillat ubegrenset batteriforbruk</a></p><p>(sikkert smart å ha en egen nettleser kun for musikkspilling...)</p>',
|
||||
"mm_hnf": "sangen finnes ikke lenger",
|
||||
|
||||
"im_hnf": "bildet finnes ikke lenger",
|
||||
@@ -952,6 +954,7 @@ ebi('ops').innerHTML = (
|
||||
// media player
|
||||
ebi('widget').innerHTML = (
|
||||
'<div id="wtoggle">' +
|
||||
'<span id="wfs"></span>' +
|
||||
'<span id="wfm"><a' +
|
||||
' href="#" id="fren" tt="' + L.wt_ren + '">✎<span>name</span></a><a' +
|
||||
' href="#" id="fdel" tt="' + L.wt_del + '">⌫<span>del.</span></a><a' +
|
||||
@@ -974,6 +977,17 @@ ebi('widget').innerHTML = (
|
||||
' <canvas id="pvol" width="288" height="38"></canvas>' +
|
||||
' <canvas id="barpos"></canvas>' +
|
||||
' <canvas id="barbuf"></canvas>' +
|
||||
'</div>' +
|
||||
'<div id="np_inf">' +
|
||||
' <img id="np_img"></span>' +
|
||||
' <span id="np_url"></span>' +
|
||||
' <span id="np_circle"></span>' +
|
||||
' <span id="np_album"></span>' +
|
||||
' <span id="np_tn"></span>' +
|
||||
' <span id="np_artist"></span>' +
|
||||
' <span id="np_title"></span>' +
|
||||
' <span id="np_pos"></span>' +
|
||||
' <span id="np_dur"></span>' +
|
||||
'</div>'
|
||||
);
|
||||
|
||||
@@ -1405,10 +1419,17 @@ var mpl = (function () {
|
||||
};
|
||||
|
||||
r.pp = function () {
|
||||
var adur, apos, playing = mp.au && !mp.au.paused;
|
||||
|
||||
clmod(ebi('np_inf'), 'playing', playing);
|
||||
|
||||
if (mp.au && isNum(adur = mp.au.duration) && isNum(apos = mp.au.currentTime) && apos >= 0)
|
||||
ebi('np_pos').textContent = s2ms(apos);
|
||||
|
||||
if (!r.os_ctl)
|
||||
return;
|
||||
|
||||
navigator.mediaSession.playbackState = mp.au && !mp.au.paused ? "playing" : "paused";
|
||||
navigator.mediaSession.playbackState = playing ? "playing" : "paused";
|
||||
};
|
||||
|
||||
function setaufollow() {
|
||||
@@ -1423,9 +1444,10 @@ var mpl = (function () {
|
||||
var np = get_np()[0],
|
||||
fns = np.file.split(' - '),
|
||||
artist = (np.circle && np.circle != np.artist ? np.circle + ' // ' : '') + (np.artist || (fns.length > 1 ? fns[0] : '')),
|
||||
tags = {
|
||||
title: np.title || fns.pop()
|
||||
};
|
||||
title = np.title || fns.pop(),
|
||||
cover = '',
|
||||
pcover = '',
|
||||
tags = { title: title };
|
||||
|
||||
if (artist)
|
||||
tags.artist = artist;
|
||||
@@ -1446,18 +1468,29 @@ var mpl = (function () {
|
||||
|
||||
if (cover) {
|
||||
cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j';
|
||||
pcover = cover;
|
||||
|
||||
var pwd = get_pwd();
|
||||
if (pwd)
|
||||
cover += '&pw=' + uricom_enc(pwd);
|
||||
pcover += '&pw=' + uricom_enc(pwd);
|
||||
|
||||
tags.artwork = [{ "src": cover, type: "image/jpeg" }];
|
||||
tags.artwork = [{ "src": pcover, type: "image/jpeg" }];
|
||||
}
|
||||
}
|
||||
|
||||
ebi('np_circle').textContent = np.circle || '';
|
||||
ebi('np_album').textContent = np.album || '';
|
||||
ebi('np_tn').textContent = np['.tn'] || '';
|
||||
ebi('np_artist').textContent = np.artist || (fns.length > 1 ? fns[0] : '');
|
||||
ebi('np_title').textContent = np.title || '';
|
||||
ebi('np_dur').textContent = np['.dur'] || '';
|
||||
ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
|
||||
if (!MOBILE)
|
||||
ebi('np_img').setAttribute('src', cover || ''); // dont give last.fm the pwd
|
||||
|
||||
navigator.mediaSession.metadata = new MediaMetadata(tags);
|
||||
navigator.mediaSession.setActionHandler('play', playpause);
|
||||
navigator.mediaSession.setActionHandler('pause', playpause);
|
||||
navigator.mediaSession.setActionHandler('play', mplay);
|
||||
navigator.mediaSession.setActionHandler('pause', mpause);
|
||||
navigator.mediaSession.setActionHandler('seekbackward', r.os_seek ? function () { seek_au_rel(-10); } : null);
|
||||
navigator.mediaSession.setActionHandler('seekforward', r.os_seek ? function () { seek_au_rel(10); } : null);
|
||||
navigator.mediaSession.setActionHandler('previoustrack', prev_song);
|
||||
@@ -1467,11 +1500,18 @@ var mpl = (function () {
|
||||
r.announce = announce;
|
||||
|
||||
r.stop = function () {
|
||||
if (!r.os_ctl || !navigator.mediaSession.metadata)
|
||||
if (!r.os_ctl)
|
||||
return;
|
||||
|
||||
// dead code; left for debug
|
||||
navigator.mediaSession.metadata = null;
|
||||
navigator.mediaSession.playbackState = "paused";
|
||||
|
||||
var hs = 'play pause seekbackward seekforward previoustrack nexttrack'.split(/ /g);
|
||||
for (var a = 0; a < hs.length; a++)
|
||||
navigator.mediaSession.setActionHandler(hs[a], null);
|
||||
|
||||
navigator.mediaSession.setPositionState();
|
||||
};
|
||||
|
||||
r.unbuffer = function (url) {
|
||||
@@ -1503,14 +1543,17 @@ var re_au_native = can_ogg ? /\.(aac|flac|m4a|mp3|ogg|opus|wav)$/i :
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
var mpo = { "au": null, "au2": null, "acs": null };
|
||||
var t_fchg = 0;
|
||||
function MPlayer() {
|
||||
var r = this;
|
||||
r.id = Date.now();
|
||||
r.au = null;
|
||||
r.au = null;
|
||||
r.au2 = null;
|
||||
r.au = mpo.au;
|
||||
r.au2 = mpo.au2;
|
||||
r.acs = mpo.acs;
|
||||
r.tracks = {};
|
||||
r.order = [];
|
||||
r.cd_pause = 0;
|
||||
|
||||
var re_audio = have_acode && mpl.ac_oth ? re_au_all : re_au_native,
|
||||
trs = QSA('#files tbody tr');
|
||||
@@ -1567,6 +1610,7 @@ function MPlayer() {
|
||||
r.ftid = -1;
|
||||
r.ftimer = null;
|
||||
r.fade_in = function () {
|
||||
r.nopause();
|
||||
r.fvol = 0;
|
||||
r.fdir = 0.025 * r.vol * (CHROME ? 1.5 : 1);
|
||||
if (r.au) {
|
||||
@@ -1599,9 +1643,9 @@ function MPlayer() {
|
||||
r.au.pause();
|
||||
mpl.pp();
|
||||
|
||||
var t = mp.au.currentTime - 0.8;
|
||||
var t = r.au.currentTime - 0.8;
|
||||
if (isNum(t))
|
||||
mp.au.currentTime = Math.max(t, 0);
|
||||
r.au.currentTime = Math.max(t, 0);
|
||||
}
|
||||
else if (r.fvol > r.vol)
|
||||
r.fvol = r.vol;
|
||||
@@ -1647,8 +1691,14 @@ function MPlayer() {
|
||||
drop();
|
||||
});
|
||||
|
||||
mp.au2.preload = "auto";
|
||||
mp.au2.src = mp.au2.rsrc = url;
|
||||
r.nopause();
|
||||
r.au2.onloadeddata = r.au2.onloadedmetadata = r.nopause;
|
||||
r.au2.preload = "auto";
|
||||
r.au2.src = r.au2.rsrc = url;
|
||||
};
|
||||
|
||||
r.nopause = function () {
|
||||
r.cd_pause = Date.now();
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1787,8 +1837,10 @@ function glossy_grad(can, h, s, l) {
|
||||
var pbar = (function () {
|
||||
var r = {},
|
||||
bau = null,
|
||||
html_txt = 'a',
|
||||
lastmove = 0,
|
||||
mousepos = 0,
|
||||
t_redraw = 0,
|
||||
gradh = -1,
|
||||
grad;
|
||||
|
||||
@@ -1857,6 +1909,9 @@ var pbar = (function () {
|
||||
bctx = bc.ctx,
|
||||
apos, adur;
|
||||
|
||||
if (!widget.is_open)
|
||||
return;
|
||||
|
||||
bctx.clearRect(0, 0, bc.w, bc.h);
|
||||
|
||||
if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos)
|
||||
@@ -1919,6 +1974,10 @@ var pbar = (function () {
|
||||
w = 8,
|
||||
apos, adur;
|
||||
|
||||
if (t_redraw) {
|
||||
clearTimeout(t_redraw);
|
||||
t_redraw = 0;
|
||||
}
|
||||
pctx.clearRect(0, 0, pc.w, pc.h);
|
||||
|
||||
if (!mp || !mp.au || !isNum(adur = mp.au.duration) || !isNum(apos = mp.au.currentTime) || apos < 0 || adur < apos)
|
||||
@@ -1933,17 +1992,30 @@ var pbar = (function () {
|
||||
}
|
||||
|
||||
var sm = bc.w * 1.0 / adur,
|
||||
t1 = s2ms(adur),
|
||||
t2 = s2ms(apos),
|
||||
x = sm * apos;
|
||||
|
||||
if (w && html_txt != t2) {
|
||||
ebi('np_pos').textContent = html_txt = t2;
|
||||
if (mpl.os_ctl)
|
||||
navigator.mediaSession.setPositionState({
|
||||
'duration': adur,
|
||||
'position': apos,
|
||||
'playbackRate': 1
|
||||
});
|
||||
}
|
||||
|
||||
if (!widget.is_open)
|
||||
return;
|
||||
|
||||
pctx.fillStyle = '#573'; pctx.fillRect((x - w / 2) - 1, 0, w + 2, pc.h);
|
||||
pctx.fillStyle = '#dfc'; pctx.fillRect((x - w / 2), 0, w, pc.h);
|
||||
|
||||
pctx.lineWidth = 2.5;
|
||||
pctx.fillStyle = '#fff';
|
||||
|
||||
var t1 = s2ms(adur),
|
||||
t2 = s2ms(apos),
|
||||
m1 = pctx.measureText(t1),
|
||||
var m1 = pctx.measureText(t1),
|
||||
m1b = pctx.measureText(t1 + ":88"),
|
||||
m2 = pctx.measureText(t2),
|
||||
yt = pc.h / 3 * 2.1,
|
||||
@@ -1956,6 +2028,9 @@ var pbar = (function () {
|
||||
pctx.strokeText(t2, xt2, yt);
|
||||
pctx.fillText(t1, xt1, yt);
|
||||
pctx.fillText(t2, xt2, yt);
|
||||
|
||||
if (sm > 10)
|
||||
t_redraw = setTimeout(r.drawpos, sm > 50 ? 20 : 50);
|
||||
};
|
||||
|
||||
window.addEventListener('resize', r.onresize);
|
||||
@@ -2085,6 +2160,7 @@ function seek_au_sec(seek) {
|
||||
if (!isNum(seek))
|
||||
return;
|
||||
|
||||
mp.nopause();
|
||||
mp.au.currentTime = seek;
|
||||
|
||||
if (mp.au.paused)
|
||||
@@ -2104,17 +2180,31 @@ function song_skip(n) {
|
||||
else
|
||||
play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
|
||||
}
|
||||
function next_song_sig(e) {
|
||||
t_fchg = document.hasFocus() ? 0 : Date.now();
|
||||
return next_song_cmn(e);
|
||||
}
|
||||
function next_song(e) {
|
||||
t_fchg = 0;
|
||||
return next_song_cmn(e);
|
||||
}
|
||||
function next_song_cmn(e) {
|
||||
ev(e);
|
||||
if (mp.order.length) {
|
||||
mpl.traversals = 0;
|
||||
return song_skip(1);
|
||||
}
|
||||
if (mpl.traversals++ < 5) {
|
||||
treectl.ls_cb = next_song;
|
||||
if (MOBILE && t_fchg && Date.now() - t_fchg > 30 * 1000)
|
||||
modal.alert(L.mm_pwrsv);
|
||||
|
||||
t_fchg = document.hasFocus() ? 0 : Date.now();
|
||||
treectl.ls_cb = next_song_cmn;
|
||||
return tree_neigh(1);
|
||||
}
|
||||
toast.inf(10, L.mm_nof);
|
||||
mpl.traversals = 0;
|
||||
t_fchg = 0;
|
||||
}
|
||||
function prev_song(e) {
|
||||
ev(e);
|
||||
@@ -2157,6 +2247,25 @@ function playpause(e) {
|
||||
};
|
||||
|
||||
|
||||
function mplay(e) {
|
||||
if (mp.au && !mp.au.paused)
|
||||
return;
|
||||
|
||||
playpause(e);
|
||||
}
|
||||
|
||||
|
||||
function mpause(e) {
|
||||
if (mp.cd_pause > Date.now() - 100)
|
||||
return;
|
||||
|
||||
if (mp.au && mp.au.paused)
|
||||
return;
|
||||
|
||||
playpause(e);
|
||||
}
|
||||
|
||||
|
||||
// hook up the widget buttons
|
||||
(function () {
|
||||
ebi('bplay').onclick = playpause;
|
||||
@@ -2211,10 +2320,16 @@ var mpui = (function () {
|
||||
return;
|
||||
}
|
||||
|
||||
var paint = !MOBILE || document.hasFocus();
|
||||
|
||||
var pos = mp.au.currentTime;
|
||||
if (!isNum(pos))
|
||||
pos = 0;
|
||||
|
||||
// indicate playback state in ui
|
||||
widget.paused(mp.au.paused);
|
||||
|
||||
if (++nth > 69) {
|
||||
if (paint && ++nth > 69) {
|
||||
// android-chrome breaks aspect ratio with unannounced viewport changes
|
||||
nth = 0;
|
||||
if (MOBILE) {
|
||||
@@ -2223,20 +2338,28 @@ var mpui = (function () {
|
||||
vbar.onresize();
|
||||
}
|
||||
}
|
||||
else {
|
||||
else if (paint) {
|
||||
// draw current position in song
|
||||
if (!mp.au.paused)
|
||||
pbar.drawpos();
|
||||
|
||||
// occasionally draw buffered regions
|
||||
if (++nth % 5 == 0)
|
||||
if (nth % 5 == 0)
|
||||
pbar.drawbuf();
|
||||
}
|
||||
|
||||
if (pos > 0.3 && t_fchg) {
|
||||
// cannot check document.hasFocus to avoid false positives;
|
||||
// it continues on power-on, doesn't need to be in-browser
|
||||
if (MOBILE && Date.now() - t_fchg > 30 * 1000)
|
||||
modal.alert(L.mm_pwrsv);
|
||||
|
||||
t_fchg = 0;
|
||||
}
|
||||
|
||||
// preload next song
|
||||
if (mpl.preload && preloaded != mp.au.rsrc) {
|
||||
var pos = mp.au.currentTime,
|
||||
len = mp.au.duration,
|
||||
var len = mp.au.duration,
|
||||
rem = pos > 1 ? len - pos : 999,
|
||||
full = null;
|
||||
|
||||
@@ -2302,12 +2425,14 @@ function start_actx() {
|
||||
}
|
||||
}
|
||||
|
||||
var audio_eq = (function () {
|
||||
var afilt = (function () {
|
||||
var r = {
|
||||
"en": false,
|
||||
"eqen": false,
|
||||
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
|
||||
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
|
||||
"filters": [],
|
||||
"filterskip": [],
|
||||
"plugs": [],
|
||||
"amp": 0,
|
||||
"chw": 1,
|
||||
"last_au": null,
|
||||
@@ -2396,6 +2521,10 @@ var audio_eq = (function () {
|
||||
r.filters[a].disconnect();
|
||||
|
||||
r.filters = [];
|
||||
r.filterskip = [];
|
||||
|
||||
for (var a = 0; a < r.plugs.length; a++)
|
||||
r.plugs[a].unload();
|
||||
|
||||
if (!mp)
|
||||
return;
|
||||
@@ -2403,7 +2532,7 @@ var audio_eq = (function () {
|
||||
if (mp.acs)
|
||||
mp.acs.disconnect();
|
||||
|
||||
mp.acs = null;
|
||||
mp.acs = mpo.acs = null;
|
||||
};
|
||||
|
||||
r.apply = function () {
|
||||
@@ -2413,18 +2542,34 @@ var audio_eq = (function () {
|
||||
if (!actx)
|
||||
bcfg_set('au_eq', false);
|
||||
|
||||
if (!actx || !mp.au || (!r.en && !mp.acs))
|
||||
var plug = false;
|
||||
for (var a = 0; a < r.plugs.length; a++)
|
||||
if (r.plugs[a].en)
|
||||
plug = true;
|
||||
|
||||
if (!actx || !mp.au || (!r.eqen && !plug && !mp.acs))
|
||||
return;
|
||||
|
||||
r.stop();
|
||||
mp.au.id = mp.au.id || Date.now();
|
||||
mp.acs = r.acst[mp.au.id] = r.acst[mp.au.id] || actx.createMediaElementSource(mp.au);
|
||||
|
||||
if (!r.en) {
|
||||
mp.acs.connect(actx.destination);
|
||||
return;
|
||||
}
|
||||
if (r.eqen)
|
||||
add_eq();
|
||||
|
||||
for (var a = 0; a < r.plugs.length; a++)
|
||||
if (r.plugs[a].en)
|
||||
r.plugs[a].load();
|
||||
|
||||
for (var a = 0; a < r.filters.length; a++)
|
||||
if (!has(r.filterskip, a))
|
||||
r.filters[a].connect(a ? r.filters[a - 1] : actx.destination);
|
||||
|
||||
mp.acs.connect(r.filters.length ?
|
||||
r.filters[r.filters.length - 1] : actx.destination);
|
||||
}
|
||||
|
||||
function add_eq() {
|
||||
var min, max;
|
||||
min = max = r.gains[0];
|
||||
for (var a = 1; a < r.gains.length; a++) {
|
||||
@@ -2455,9 +2600,6 @@ var audio_eq = (function () {
|
||||
fi.gain.value = r.amp + 0.94; // +.137 dB measured; now -.25 dB and almost bitperfect
|
||||
r.filters.push(fi);
|
||||
|
||||
for (var a = r.filters.length - 1; a >= 0; a--)
|
||||
r.filters[a].connect(a > 0 ? r.filters[a - 1] : actx.destination);
|
||||
|
||||
if (Math.round(r.chw * 25) != 25) {
|
||||
var split = actx.createChannelSplitter(2),
|
||||
merge = actx.createChannelMerger(2),
|
||||
@@ -2482,11 +2624,10 @@ var audio_eq = (function () {
|
||||
split.connect(lg2, 0);
|
||||
split.connect(rg1, 1);
|
||||
split.connect(rg2, 1);
|
||||
r.filterskip.push(r.filters.length);
|
||||
r.filters.push(split);
|
||||
mp.acs.channelCountMode = 'explicit';
|
||||
}
|
||||
|
||||
mp.acs.connect(r.filters[r.filters.length - 1]);
|
||||
}
|
||||
|
||||
function eq_step(e) {
|
||||
@@ -2581,7 +2722,7 @@ var audio_eq = (function () {
|
||||
txt[a].onkeydown = eq_keydown;
|
||||
}
|
||||
|
||||
bcfg_bind(r, 'en', 'au_eq', false, r.apply);
|
||||
bcfg_bind(r, 'eqen', 'au_eq', false, r.apply);
|
||||
|
||||
r.draw();
|
||||
return r;
|
||||
@@ -2611,6 +2752,7 @@ function play(tid, is_ev, seek) {
|
||||
tn = 0;
|
||||
}
|
||||
else if (mpl.pb_mode == 'next') {
|
||||
t_fchg = document.hasFocus() ? 0 : Date.now();
|
||||
treectl.ls_cb = next_song;
|
||||
return tree_neigh(1);
|
||||
}
|
||||
@@ -2630,7 +2772,9 @@ function play(tid, is_ev, seek) {
|
||||
|
||||
if (mp.au) {
|
||||
mp.au.pause();
|
||||
clmod(ebi('a' + mp.au.tid), 'act');
|
||||
var el = ebi('a' + mp.au.tid);
|
||||
if (el)
|
||||
clmod(el, 'act');
|
||||
}
|
||||
else {
|
||||
mp.au = new Audio();
|
||||
@@ -2638,7 +2782,7 @@ function play(tid, is_ev, seek) {
|
||||
mp.au.onerror = evau_error;
|
||||
mp.au.onprogress = pbar.drawpos;
|
||||
mp.au.onplaying = mpui.progress_updater;
|
||||
mp.au.onended = next_song;
|
||||
mp.au.onended = next_song_sig;
|
||||
widget.open();
|
||||
}
|
||||
|
||||
@@ -2655,7 +2799,7 @@ function play(tid, is_ev, seek) {
|
||||
mp.au.onerror = evau_error;
|
||||
mp.au.onprogress = pbar.drawpos;
|
||||
mp.au.onplaying = mpui.progress_updater;
|
||||
mp.au.onended = next_song;
|
||||
mp.au.onended = next_song_sig;
|
||||
t = mp.au.currentTime;
|
||||
if (isNum(t) && t > 0.1)
|
||||
mp.au.currentTime = 0;
|
||||
@@ -2663,7 +2807,7 @@ function play(tid, is_ev, seek) {
|
||||
else
|
||||
mp.au.src = mp.au.rsrc = url;
|
||||
|
||||
audio_eq.apply();
|
||||
afilt.apply();
|
||||
|
||||
setTimeout(function () {
|
||||
mpl.unbuffer(url);
|
||||
@@ -2686,6 +2830,7 @@ function play(tid, is_ev, seek) {
|
||||
scroll2playing();
|
||||
|
||||
try {
|
||||
mp.nopause();
|
||||
mp.au.play();
|
||||
if (mp.au.paused)
|
||||
autoplay_blocked(seek);
|
||||
@@ -2714,7 +2859,7 @@ function play(tid, is_ev, seek) {
|
||||
toast.err(0, esc(L.mm_playerr + basenames(ex)));
|
||||
}
|
||||
clmod(ebi(oid), 'act');
|
||||
setTimeout(next_song, 5000);
|
||||
setTimeout(next_song_sig, 5000);
|
||||
}
|
||||
|
||||
|
||||
@@ -2802,7 +2947,7 @@ function autoplay_blocked(seek) {
|
||||
modal.confirm('<h6>' + L.mm_hashplay + '</h6>\n«' + esc(fn) + '»', function () {
|
||||
// chrome 91 may permanently taint on a failed play()
|
||||
// depending on win10 settings or something? idk
|
||||
mp.au = null;
|
||||
mp.au = mpo.au = null;
|
||||
|
||||
play(tid, true, seek);
|
||||
mp.fade_in();
|
||||
@@ -3141,7 +3286,9 @@ var fileman = (function () {
|
||||
if (r.clip === null)
|
||||
r.clip = jread('fman_clip', []).slice(1);
|
||||
|
||||
var nsel = msel.getsel().length;
|
||||
var sel = msel.getsel(),
|
||||
nsel = sel.length;
|
||||
|
||||
clmod(bren, 'en', nsel);
|
||||
clmod(bdel, 'en', nsel);
|
||||
clmod(bcut, 'en', nsel);
|
||||
@@ -3153,9 +3300,51 @@ var fileman = (function () {
|
||||
clmod(bpst, 'hide', !(have_mv && has(perms, 'write')));
|
||||
clmod(ebi('wfm'), 'act', QS('#wfm a.en:not(.hide)'));
|
||||
|
||||
var wfs = ebi('wfs'), h = '';
|
||||
try {
|
||||
wfs.innerHTML = h = r.fsi(sel);
|
||||
}
|
||||
catch (ex) { }
|
||||
clmod(wfs, 'act', h);
|
||||
|
||||
bpst.setAttribute('tt', L.ft_paste.format(r.clip.length));
|
||||
};
|
||||
|
||||
r.fsi = function (sel) {
|
||||
if (!sel.length)
|
||||
return '';
|
||||
|
||||
var lf = treectl.lsc.files,
|
||||
nf = 0,
|
||||
sz = 0,
|
||||
dur = 0,
|
||||
ntab = new Set();
|
||||
|
||||
for (var a = 0; a < sel.length; a++)
|
||||
ntab.add(sel[a].vp.split('/').pop());
|
||||
|
||||
for (var a = 0; a < lf.length; a++) {
|
||||
if (!ntab.has(lf[a].href.split('?')[0]))
|
||||
continue;
|
||||
|
||||
var f = lf[a];
|
||||
nf++;
|
||||
sz += f.sz;
|
||||
if (f.tags && f.tags['.dur'])
|
||||
dur += f.tags['.dur']
|
||||
}
|
||||
|
||||
if (!nf)
|
||||
return '';
|
||||
|
||||
var ret = '{0}<br />{1}<small>F</small>'.format(humansize(sz), nf);
|
||||
|
||||
if (dur)
|
||||
ret += ' ' + s2ms(dur);
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
r.rename = function (e) {
|
||||
ev(e);
|
||||
if (clgot(bren, 'hide'))
|
||||
@@ -4150,6 +4339,21 @@ var thegrid = (function () {
|
||||
ev(e);
|
||||
}
|
||||
|
||||
r.imshow = function (url) {
|
||||
var sel = '#ggrid>a'
|
||||
if (!thegrid.en) {
|
||||
thegrid.bagit('#files');
|
||||
sel = '#files a[id]';
|
||||
}
|
||||
var ims = QSA(sel);
|
||||
for (var a = 0, aa = ims.length; a < aa; a++) {
|
||||
var iu = ims[a].getAttribute('href').split('?')[0].split('/').slice(-1)[0];
|
||||
if (iu == url)
|
||||
return ims[a].click();
|
||||
}
|
||||
baguetteBox.hide();
|
||||
};
|
||||
|
||||
r.loadsel = function () {
|
||||
if (r.dirty)
|
||||
return;
|
||||
@@ -4289,19 +4493,19 @@ var thegrid = (function () {
|
||||
}
|
||||
|
||||
r.dirty = false;
|
||||
r.bagit();
|
||||
r.bagit('#ggrid');
|
||||
r.loadsel();
|
||||
setTimeout(r.tippen, 20);
|
||||
}
|
||||
|
||||
r.bagit = function () {
|
||||
r.bagit = function (isrc) {
|
||||
if (!window.baguetteBox)
|
||||
return;
|
||||
|
||||
if (r.bbox)
|
||||
baguetteBox.destroy();
|
||||
|
||||
r.bbox = baguetteBox.run('#ggrid', {
|
||||
r.bbox = baguetteBox.run(isrc, {
|
||||
captions: function (g) {
|
||||
var idx = -1,
|
||||
h = '' + g;
|
||||
@@ -4322,7 +4526,7 @@ var thegrid = (function () {
|
||||
|
||||
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
|
||||
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
|
||||
bcfg_bind(r, 'en', 'griden', false, function (v) {
|
||||
bcfg_bind(r, 'en', 'griden', dgrid, function (v) {
|
||||
v ? loadgrid() : r.setvis(true);
|
||||
pbar.onresize();
|
||||
vbar.onresize();
|
||||
@@ -5522,6 +5726,12 @@ var treectl = (function () {
|
||||
seen = {};
|
||||
|
||||
r.lsc = res;
|
||||
if (res.unlist) {
|
||||
var ptn = new RegExp(res.unlist);
|
||||
for (var a = nodes.length - 1; a >= 0; a--)
|
||||
if (ptn.exec(nodes[a].href.split('?')[0]))
|
||||
nodes.splice(a, 1);
|
||||
}
|
||||
nodes = sortfiles(nodes);
|
||||
window.removeEventListener('scroll', r.tscroll);
|
||||
r.trunc = nodes.length > r.nvis && location.hash.length < 2;
|
||||
@@ -6674,22 +6884,27 @@ var globalcss = (function () {
|
||||
|
||||
var dcs = document.styleSheets;
|
||||
for (var a = 0; a < dcs.length; a++) {
|
||||
var base = dcs[a].href,
|
||||
var ds, base = '';
|
||||
try {
|
||||
base = dcs[a].href;
|
||||
if (!base)
|
||||
continue;
|
||||
|
||||
ds = dcs[a].cssRules;
|
||||
|
||||
if (!base)
|
||||
continue;
|
||||
|
||||
base = base.replace(/[^/]+$/, '');
|
||||
for (var b = 0; b < ds.length; b++) {
|
||||
var css = ds[b].cssText.split(/\burl\(/g);
|
||||
ret += css[0];
|
||||
for (var c = 1; c < css.length; c++) {
|
||||
var delim = (/^["']/.exec(css[c])) ? css[c].slice(0, 1) : '';
|
||||
ret += 'url(' + delim + ((css[c].slice(0, 8).indexOf('://') + 1 || css[c].startsWith('/')) ? '' : base) +
|
||||
css[c].slice(delim ? 1 : 0);
|
||||
base = base.replace(/[^/]+$/, '');
|
||||
for (var b = 0; b < ds.length; b++) {
|
||||
var css = ds[b].cssText.split(/\burl\(/g);
|
||||
ret += css[0];
|
||||
for (var c = 1; c < css.length; c++) {
|
||||
var delim = (/^["']/.exec(css[c])) ? css[c].slice(0, 1) : '';
|
||||
ret += 'url(' + delim + ((css[c].slice(0, 8).indexOf('://') + 1 || css[c].startsWith('/')) ? '' : base) +
|
||||
css[c].slice(delim ? 1 : 0);
|
||||
}
|
||||
ret += '\n';
|
||||
}
|
||||
ret += '\n';
|
||||
}
|
||||
catch (ex) {
|
||||
console.log('could not read css', a, base);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
@@ -6775,6 +6990,7 @@ function show_md(md, name, div, url, depth) {
|
||||
|
||||
els[a].setAttribute('href', '#md-' + href.slice(1));
|
||||
}
|
||||
md_th_set();
|
||||
set_tabindex();
|
||||
var hash = location.hash;
|
||||
if (hash.startsWith('#md-'))
|
||||
@@ -6853,6 +7069,7 @@ function sandbox(tgt, rules, cls, html) {
|
||||
'window.onblur=function(){say("ilost #' + tid + '")};' +
|
||||
'var el="' + want + '"&&ebi("' + want + '");' +
|
||||
'if(el)say("iscroll #' + tid + ' "+el.offsetTop);' +
|
||||
'md_th_set();' +
|
||||
(cls == 'mdo' && md_plug.post ?
|
||||
'const x={' + md_plug.post + '};' +
|
||||
'if(x.render)x.render(ebi("b"));' +
|
||||
@@ -6860,6 +7077,7 @@ function sandbox(tgt, rules, cls, html) {
|
||||
'},1)</script></body></html>';
|
||||
|
||||
var fr = mknod('iframe');
|
||||
fr.setAttribute('title', 'folder ' + tid + 'logue');
|
||||
fr.setAttribute('sandbox', rules ? 'allow-' + rules.replace(/ /g, ' allow-') : '');
|
||||
fr.setAttribute('srcdoc', html);
|
||||
tgt.innerHTML = '';
|
||||
@@ -6885,6 +7103,9 @@ window.addEventListener("message", function (e) {
|
||||
else if (t[0] == 'igot' || t[0] == 'ilost') {
|
||||
clmod(QS(t[1] + '>iframe'), 'focus', t[0] == 'igot');
|
||||
}
|
||||
else if (t[0] == 'imshow') {
|
||||
thegrid.imshow(e.data.slice(7));
|
||||
}
|
||||
} catch (ex) {
|
||||
console.log('msg-err: ' + ex);
|
||||
}
|
||||
@@ -7159,21 +7380,25 @@ ebi('files').onclick = ebi('docul').onclick = function (e) {
|
||||
|
||||
function reload_mp() {
|
||||
if (mp && mp.au) {
|
||||
if (audio_eq)
|
||||
audio_eq.stop();
|
||||
|
||||
mp.au.pause();
|
||||
mp.au = null;
|
||||
mpo.au = mp.au;
|
||||
mpo.au2 = mp.au2;
|
||||
mpo.acs = mp.acs;
|
||||
mpl.unbuffer();
|
||||
}
|
||||
mpl.stop();
|
||||
var plays = QSA('tr>td:first-child>a.play');
|
||||
for (var a = plays.length - 1; a >= 0; a--)
|
||||
plays[a].parentNode.innerHTML = '-';
|
||||
|
||||
mp = new MPlayer();
|
||||
if (audio_eq)
|
||||
audio_eq.acst = {};
|
||||
if (mp.au && mp.au.tid) {
|
||||
var el = QS('a#a' + mp.au.tid);
|
||||
if (el)
|
||||
clmod(el, 'act', 1);
|
||||
|
||||
el = el && el.closest('tr');
|
||||
if (el)
|
||||
clmod(el, 'play', 1);
|
||||
}
|
||||
|
||||
setTimeout(pbar.onresize, 1);
|
||||
}
|
||||
|
||||
@@ -231,11 +231,11 @@ function convert_markdown(md_text, dest_dom) {
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var href = nodes[a].getAttribute('href');
|
||||
var txt = nodes[a].textContent;
|
||||
var txt = nodes[a].innerHTML;
|
||||
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
else if (href !== txt && !nodes[a].className)
|
||||
nodes[a].className = 'vis';
|
||||
}
|
||||
|
||||
|
||||
@@ -43,10 +43,9 @@
<h1>WebDAV</h1>

<div class="os win">
<p><em>note: rclone-FTP is a bit faster, so {% if args.ftp or args.ftps %}try that first{% else %}consider enabling FTP in server settings{% endif %}</em></p>
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
<pre>
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
</pre>
{% if s %}
@@ -69,9 +68,9 @@
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
</pre>
<p>or you can use rclone instead, which is much slower but doesn't require root:</p>
<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
<pre>
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
</pre>
{% if s %}
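
for reference, here is roughly what the generated webdav command looks like once the template variables above are filled in -- the hostname, port, remote name and password are made-up placeholders:

```sh
# hypothetical rendered output of the connect-page template above,
# for a plain-http server on 192.168.1.2:3923 with password hunter2
rclone config create party-dav webdav url=http://192.168.1.2:3923 vendor=owncloud pacer_min_sleep=0.01ms user=k pass=hunter2
rclone mount --vfs-cache-mode writes --dir-cache-time 5s party-dav: W:
```
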
@@ -111,10 +110,21 @@

<div class="os win">
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
{% if args.ftp %}
<p>connect with plaintext FTP:</p>
<pre>
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b>
</pre>
{% endif %}
{% if args.ftps %}
<p>connect with TLS-encrypted FTPS:</p>
<pre>
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
</pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %}
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
<pre>
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
@@ -122,10 +132,21 @@
</div>

<div class="os lin">
{% if args.ftp %}
<p>connect with plaintext FTP:</p>
<pre>
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b>
</pre>
{% endif %}
{% if args.ftps %}
<p>connect with TLS-encrypted FTPS:</p>
<pre>
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
</pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %}
<p>emergency alternative (gnome/gui-only):</p>
<!-- gnome-bug: ignores vp -->
<pre>
@@ -160,7 +181,7 @@
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
{% endif %}
<p>
you can use <a href="{{ r }}/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
</p>

@@ -451,6 +451,20 @@ html.y textarea:focus {
padding: .2em .5em;
border: .12em solid #aaa;
}
.mdo .mdth,
.mdo .mdthl,
.mdo .mdthr {
margin: .5em .5em .5em 0;
}
.mdthl {
float: left;
}
.mdthr {
float: right;
}
hr {
clear: both;
}

@media screen {
.mdo {

@@ -1826,6 +1826,7 @@ function up2k_init(subtle) {

timer.rm(etafun);
timer.rm(donut.do);
ebi('u2tabw').style.minHeight = '0px';
utw_minh = 0;
}

@@ -160,7 +160,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
var html = [
'<h1>you hit a bug!</h1>',
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a></p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
];
@@ -742,7 +742,7 @@ function get_pwd() {
if (pwd.length < 2)
return null;

return pwd[1].split(';')[0];
return decodeURIComponent(pwd[1].split(';')[0]);
}

@@ -1602,6 +1602,14 @@ function load_md_plug(md_text, plug_type, defer) {
if (defer)
md_plug[plug_type] = null;

if (plug_type == 'pre')
try {
md_text = md_thumbs(md_text);
}
catch (ex) {
toast.warn(30, '' + ex);
}

if (!have_emp)
return md_text;

@@ -1642,6 +1650,47 @@ function load_md_plug(md_text, plug_type, defer) {

return md;
}
function md_thumbs(md) {
if (!/(^|\n)<!-- th -->/.exec(md))
return md;

// `!th[flags](some.jpg)`
// flags: nothing or "l" or "r"

md = md.split(/!th\[/g);
for (var a = 1; a < md.length; a++) {
if (!/^[^\]!()]*\]\([^\][!()]+\)/.exec(md[a])) {
md[a] = '!th[' + md[a];
continue;
}

var o1 = md[a].indexOf(']('),
o2 = md[a].indexOf(')', o1),
alt = md[a].slice(0, o1),
flags = alt.split(','),
url = md[a].slice(o1 + 2, o2),
float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';

if (!/[?&]cache/.exec(url))
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';

md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
}
return md.join('');
}
function md_th_set() {
var els = QSA('.mdth');
for (var a = 0, aa = els.length; a < aa; a++)
els[a].onclick = md_th_click;
}
function md_th_click(e) {
ev(e);
var url = this.getAttribute('href').split('?')[0];
if (window.sb_md)
window.parent.postMessage("imshow " + url, "*");
else
thegrid.imshow(url);
}

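putting the `md_thumbs` parser above into words: a markdown file opts in with a `<!-- th -->` marker at the start of a line, and can then use `!th[flags](url)` where the flags are empty, `l`, or `r`. a hypothetical document using it (filenames made up) would look like:

```md
<!-- th -->

# vacation pics

!th[l](beach.jpg) this paragraph wraps around a left-floating, clickable thumbnail of beach.jpg

!th[r](sunset.jpg) and this one gets a right-floating thumbnail

!th[](group-photo.jpg) no flag means no float
```
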
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
@@ -1720,7 +1769,6 @@ function cprop(name) {


function bchrome() {
console.log(document.documentElement.className);
var v, o = QS('meta[name=theme-color]');
if (!o)
return;

@@ -1,3 +1,221 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0513-0000 `v1.7.2` hard resolve

## new features
* print a warning if `c:\`, `c:\windows*`, or all of `/` are shared
* upgraded the docker image to v3.18 which enables the [chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)
* in config files, allow trailing `:` in section headers

## bugfixes
* when `--hardlink` (or the volflag) is set, resolve symlinks before hardlinking
  * uploads could fail due to relative symlinks
* really minor ux fixes
  * left-align `GET` in access logs
  * the upload panel didn't always shrink back down after uploads completed



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0507-1834 `v1.7.1` CräzY;PWDs

## new features
* webdav:
  * support write-only folders
  * option `--dav-auth` / volflag `davauth` forces clients to always auth
    * helps clients such as `davfs2` see all folders if the root is anon-readable but some subfolders are not
    * alternatively you could configure your client to always send the password in the `PW` header (see the rclone example below this list)
* include usernames in http request logs
* audio player:
  * consumes less power on phones when the screen is off
  * smoother playback cursor on short songs

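a hedged sketch of the `PW`-header alternative mentioned in the webdav bullet above, following the `headers = Cookie,cppwd=hunter2` pattern from docs/rclone.md further down; hostname and password are placeholders:

```sh
# make rclone attach the password to every request via the PW header,
# instead of relying on the initial auth; adjust url and password to taste
rclone config create party-dav webdav url=http://192.168.1.2:3923/ vendor=owncloud pacer_min_sleep=0.01ms headers=PW,hunter2
```
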
## bugfixes
* the characters `;` and `%` can now be used in passwords
  * but non-ascii characters (such as the ä in the release title) can, in fact, not
* verify that all accounts have unique passwords on startup (#25)

## other changes
* ftpd: log incorrect passwords only, not correct ones
* `up2k.py` (the upload, folder-sync, and file-search client) has been renamed to [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy)
  * `u2c` as in `up2k client`, or `up2k CLI`, or `upload-to-copyparty` -- good name
  * now the only things named "up2k" are the web-ui and the server backend which is way less confusing
* upgrade packaging from [setup.py](https://github.com/9001/copyparty/blob/hovudstraum/setup.py) to [pyproject.toml](https://github.com/9001/copyparty/blob/hovudstraum/pyproject.toml)
  * no practical consequences aside from a warm fuzzy feeling of being in the future
* the docker images ~~will be~~ got rebuilt 2023-05-11 ~~in a few days (when [alpine](https://alpinelinux.org/) 3.18 is released)~~ enabling [the chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0429-2114 `v1.7.0` unlinked

don't get excited! nothing new and revolutionary, but `xvol` and `xdev` changed behavior so there's an above-average chance of fresh bugs

## new features
* (#24): `xvol` and `xdev`, previously just hints to the filesystem indexer, now actively block access as well:
  * `xvol` stops users following symlinks leaving the volumes they have access to
    * so if you symlink `/home/ed/music` into `/srv/www/music` it'll get blocked
    * ...unless both folders are accessible through volumes, and the user has read-access to both
  * `xdev` stops users crossing the filesystem boundary of the volumes they have access to
    * so if you symlink another HDD into a volume it'll get blocked, but you can still symlink from other places on the same FS
  * enabling these will add a slight performance hit; the unlikely worst-case is `14%` slower directory listings, `35%` slower download-as-tar
* file selection summary (num files, size, audio duration) in the bottom right
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): more aggressive resolving with `--rh`
* [add a warning](https://github.com/9001/copyparty#fix-unreliable-playback-on-android) that the default powersave settings in android may stop playing music during album changes
  * also appears [in the media player](https://user-images.githubusercontent.com/241032/235327191-7aaefff9-5d41-4e42-b71f-042a8247f29d.png) if the issue is detected at runtime (playback halts for 30sec while screen is off)

## bugfixes
* (#23): stop autodeleting empty folders when moving or deleting files
  * but files which expire / [self-destruct](https://github.com/9001/copyparty#self-destruct) still clean up parent directories like before
* ftp-server: some clients could fail to `mkdir` at first attempt (and also complain during rmdir)

## other changes
* new version of [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.7.0/copyparty-winpe64.exe) since the ftp-server fix might be relevant



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0426-2300 `v1.6.15` unexpected boost

## new features
* 30% faster folder listings due to [the very last thing](https://github.com/9001/copyparty/commit/55c74ad164633a0a64dceb51f7f534da0422cbb5) i'd ever expect to be a bottleneck, [thx perf](https://docs.python.org/3.12/howto/perf_profiling.html)
* option to see the lastmod timestamps of symlinks instead of the target files
  * makes the turbo mode of [u2cli, the commandline uploader and folder-sync tool](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) more turbo since copyparty dedupes uploads by symlinking to an existing copy and the symlink is stamped with the deduped file's lastmod
  * **webdav:** enabled by default (because rclone will want this), can be disabled with arg `--dav-rt` or volflag `davrt`
  * **http:** disabled by default, can be enabled per-request with urlparam `lt`
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): option `--rh` to resolve server hostname only once at start of upload
  * fantastic for buggy networks, but it'll break TLS

## bugfixes
* new arg `--s-tbody` specifies the network timeout before a dead connection gets dropped (default 3min)
  * before there was no timeout at all, which could hang uploads or possibly consume all server resources
  * ...but this is only relevant if your copyparty is directly exposed to the internet with no reverse proxy
  * with nginx/caddy/etc you can disable the timeout with `--s-tbody 0` for a 3% performance boost (*wow!*)
* iPhone audio transcoder could turn bad and stop transcoding
* ~~maybe android phones no longer pause playback at the end of an album~~
  * nope, that was due to [android's powersaver](https://github.com/9001/copyparty#fix-unreliable-playback-on-android), oh well
  * ***bonus unintended feature:*** navigate into other folders while a song is playing
* [installing from the source tarball](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#build-from-release-tarball) should be ok now
  * good base for making distro packages probably

## other changes
* since the network timeout fix is relevant for the single usecase that [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.15/copyparty-winpe64.exe) covers, there is now a new version of that



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0424-0609 `v1.6.14` unsettable flags

## new features
* unset a volflag (override a global option) by negating it (setting volflag `-flagname`) -- see the sketch after this list
* new argument `--cert` to specify TLS certificate location
  * defaults to `~/.config/copyparty/cert.pem` like before

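a hedged sketch of the two features above on the command line -- the `-v src:dst:perm:c,volflag` volume form and spelling the negation as `c,-flagname` are assumptions pieced together from the bullets, so verify against `--help`:

```sh
# assumption: volflags attach with ":c,NAME", so "-e2ts" would turn
# metadata-indexing back off for just the write-only /inc volume,
# and --cert points at a custom TLS certificate instead of the default one
python3 copyparty-sfx.py -e2dsa -e2ts --cert ~/pki/party.pem \
  -v ~/music:music:r \
  -v ~/inc:inc:w:c,-e2ts
```
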
## bugfixes
* in zip/tar downloads, always use the parent-folder name as the archive root
* more reliable ftp authentication when providing password as username
* connect-page: fix rclone ftps example

## other changes
* stop suggesting `--http-only` and `--https-only` for performance since the difference is negligible
* mention how some antivirus (avast, avg, mcafee) thinks that pillow's webp encoder is a virus, affecting `copyparty.exe`



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0420-2141 `v1.6.12` as seen on nixos

## new features
* @chinponya [made](https://github.com/9001/copyparty/pull/22) a copyparty [Nix package](https://github.com/9001/copyparty#nix-package) and a [NixOS module](https://github.com/9001/copyparty#nixos-module)! nice 🎉
  * with [systemd-based hardening](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nixos/modules/copyparty.nix#L230-L270) instead of [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh)
  * complements the [arch package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) very well w

## bugfixes
* fix an sqlite fd leak
  * with enough simultaneous traffic, copyparty could run out of file descriptors since it relied on the gc to close sqlite cursors
  * now there's a pool of cursors shared between the tcp connections instead, limited to the number of CPU cores
  * performance mostly unaffected (or slightly improved) compared to before, except for a 20% reduction only during max server load caused by directory-listings or searches
  * ~~somehow explicitly closing the cursors didn't always work... maybe this was actually a python bug :\/~~
    * yes, it does incomplete cleanup if opening a WAL database fails
* multirange requests would fail with an error; now they get a 200 as expected (since they're kinda useless and not worth the overhead)
  * [the only software i've ever seen do that](https://apps.kde.org/discover/) now works as intended
* expand `~/` filesystem paths in all remaining args: `-c`, `-lo`, `--hist`, `--ssl-log`, and the `hist` volflag
* never use IPv6-format IPv4 (`::ffff:127.0.0.1`) in responses
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py): don't enter delete stage if some of the uploads failed
* audio player in safari on touchbar macbooks
  * songs would play backwards because the touchbar keeps spamming play/pause
  * playback would stop when the preloader kicks in because safari sees the new audio object and freaks out

## other changes
* added [windows quickstart / service example](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/windows.md)
* updated pyinstaller (it makes smaller exe files now)



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0401-2112 `v1.6.11` not joke

## new features
* new event-hook: [exif stripper](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/image-noexif.py)
* [markdown thumbnails](https://a.ocv.me/pub/demo/pics-vids/README.md?v) -- see [readme](https://github.com/9001/copyparty#markdown-viewer)
* soon: support for [web-scrobbler](https://github.com/web-scrobbler/web-scrobbler/) - the [Last.fm](https://www.last.fm/user/tripflag) browser extension
  * will update here + readme with more info when [the v3](https://github.com/web-scrobbler/web-scrobbler/projects/5) is out

## bugfixes
* more sqlite query-planner twiddling
  * deleting files is MUCH faster now, and uploads / bootup might be a bit better too
* webdav optimizations / compliance
  * should make some webdav clients run faster than before
  * in very related news, the webdav-client in [rclone](https://github.com/rclone/rclone/) v1.63 ([currently beta](https://beta.rclone.org/?filter=latest)) will be ***FAST!***
    * does cool stuff such as [bidirectional sync](https://github.com/9001/copyparty#folder-sync) between copyparty and a local folder
* [bpm detector](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/audio-bpm.py) is a bit more accurate
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) / commandline uploader: better error messages if something goes wrong
* readme rendering could fail in firefox if certain addons were installed (not sure which)
* event-hooks: more accurate usage examples

## other changes
* @chinponya automated the prismjs build step (thx!)
* updated some js deps (markedjs, codemirror)
* copyparty.exe: updated Pillow to 9.5.0
* and finally [the joke](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/rave.js) (looks [like this](https://cd.ocv.me/b/d2/d21/#af-9b927c42))



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0320-2156 `v1.6.10` rclone sync

## new features
* [iPhone "app"](https://github.com/9001/copyparty#ios-shortcuts) (upload shortcut) -- thanks @Daedren !
  * can strip exif, upload files, pics, vids, links, clipboard
  * can download links and rehost the target file on your server
* support `rclone sync` to [sync folders](https://github.com/9001/copyparty#folder-sync) to/from copyparty
* let webdav clients set lastmodified times during upload
* let webdav clients replace files during upload

## bugfixes
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh): FFmpeg transcoding was slow because there was no `/dev/urandom`
* iphones would fail to play *some* songs (low-bitrate and/or shorter than ~7 seconds)
  * due to either an iOS bug or an FFmpeg bug in the caf remuxing idk
  * fixed by mixing in white noise into songs if an iPhone asks for them
* small correction in the docker readme regarding rootless podman



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0316-2106 `v1.6.9` index.html

## new features
* option to show `index.html` instead of the folder listing (see the example after this list)
  * arg `--ih` makes it default-enabled
  * clients can enable/disable it in the `[⚙️]` settings tab
  * url-param `?v` skips it for a particular folder
* faster folder-thumbnail validation on startup (mostly on conventional HDDs)

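a hedged sketch of the `--ih` / `?v` combination above from the command line; `-v .::r` is the usual share-current-folder-read-only volume spec, but double-check the exact syntax against `--help`:

```sh
# serve the current folder read-only, with index.html rendering on by default
python3 copyparty-sfx.py --ih -v .::r

# ...and ask one folder for the plain listing anyway by appending ?v
curl 'http://127.0.0.1:3923/somefolder/?v'
```
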
## bugfixes
* "load more" button didn't always show up when search results got truncated
* ux: tooltips could block buttons on android



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0312-1610 `v1.6.8` folder thumbs

@@ -4,6 +4,7 @@
* [future plans](#future-plans) - some improvement ideas
* [design](#design)
  * [up2k](#up2k) - quick outline of the up2k protocol
  * [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
  * [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [http api](#http-api)
  * [read](#read)
@@ -16,6 +17,7 @@
* [building](#building)
  * [dev env setup](#dev-env-setup)
  * [just the sfx](#just-the-sfx)
  * [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
  * [complete release](#complete-release)
* [todo](#todo) - roughly sorted by priority
  * [discarded ideas](#discarded-ideas)
@@ -66,6 +68,13 @@ regarding the frequent server log message during uploads;
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left

## why not tus

I didn't know about [tus](https://tus.io/) when I made this, but:
* up2k has the advantage that it supports parallel uploading of non-contiguous chunks straight into the final file -- [tus does a merge at the end](https://tus.io/protocols/resumable-upload.html#concatenation) which is slow and taxing on the server HDD / filesystem (unless i'm misunderstanding)
* up2k has the slight disadvantage of requiring the client to hash the entire file before an upload can begin, but this has the benefit of immediately skipping duplicate files
  * and the hashing happens in a separate thread anyways so it's usually not a bottleneck

## why chunk-hashes

a single sha512 would be better, right?
@@ -76,7 +85,7 @@ as a result, the hashes are much less useful than they could have been (search t

however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such)

* both the [browser uploader](https://github.com/9001/copyparty#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) does this now, allowing for fast uploading even from plaintext http
* both the [browser uploader](https://github.com/9001/copyparty#uploading) and the [commandline one](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) does this now, allowing for fast uploading even from plaintext http

hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported

@@ -102,6 +111,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
| GET | `?ls=t` | list files/folders at URL as plaintext |
| GET | `?ls=v` | list files/folders at URL, terminal-formatted |
| GET | `?lt` | in listings, use symlink timestamps rather than targets |
| GET | `?b` | list files/folders at URL as simplified HTML |
| GET | `?tree=.` | list one level of subdirectories inside URL |
| GET | `?tree` | list one level of subdirectories for each level until URL |
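
a couple of concrete invocations of the listing endpoints above, using the `pw` url-param auth mentioned in the hunk header; the server address, folder name and password are placeholders, and combining the flags as plain url-params is an assumption:

```sh
# plaintext folder listing, authenticating with the pw url-param
curl 'http://127.0.0.1:3923/music/?ls=t&pw=hunter2'

# JSON listing including dotfiles, showing symlink timestamps instead of target timestamps
curl 'http://127.0.0.1:3923/music/?ls&dots&lt&pw=hunter2'

# one level of subfolders below /music
curl 'http://127.0.0.1:3923/music/?tree=.&pw=hunter2'
```
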
@@ -218,39 +228,55 @@ pip install mutagen # audio metadata
pip install pyftpdlib # ftp server
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install pyvips # faster thumbnails
pip install psutil # better cleanup of stuck metadata parsers on windows
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
```


## just the sfx

first grab the web-dependencies from a previous sfx (assuming you don't need to modify something in those):
if you just want to modify the copyparty source code (py/html/css/js) then this is the easiest approach

```sh
rm -rf copyparty/web/deps
curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
python3 x.py --version
rm x.py
cp -R /tmp/pe-copyparty.$(id -u)/copyparty/web/deps copyparty/web/
```

or you could build the web-dependencies from source instead (NB: does not include prismjs, need to grab that manually):
```sh
make -C scripts/deps-docker
```

then build the sfx using any of the following examples:
build the sfx using any of the following examples:

```sh
./scripts/make-sfx.sh # regular edition
./scripts/make-sfx.sh fast # build faster (worse js/css compression)
./scripts/make-sfx.sh gz no-cm # gzip-compressed + no fancy markdown editor
```


## build from release tarball

uses the included prebuilt webdeps

if you downloaded a [release](https://github.com/9001/copyparty/releases) source tarball from github (for example [copyparty-1.6.15.tar.gz](https://github.com/9001/copyparty/releases/download/v1.6.15/copyparty-1.6.15.tar.gz) so not the autogenerated one) you can build it like so,

```bash
python3 -m pip install --user -U build setuptools wheel jinja2 strip_hints
bash scripts/run-tests.sh python3 # optional
python3 -m build
```

if you are unable to use `build`, you can use the old setuptools approach instead,

```bash
python3 setup.py install --user setuptools wheel jinja2
python3 setup.py build
# you now have a wheel which you can install. or extract and repackage:
python3 setup.py install --skip-build --prefix=/usr --root=$HOME/pe/copyparty
```


## complete release

also builds the sfx so skip the sfx section above

*WARNING: `rls.sh` has not yet been updated with the docker-images and arch/nix packaging*

does everything completely from scratch, straight from your local repo

in the `scripts` folder:

* run `make -C deps-docker` to build all dependencies

docs/examples/README.md (new file, +4)
@@ -0,0 +1,4 @@
copyparty server config examples

[windows.md](windows.md) -- running copyparty as a service on windows

docs/examples/windows.md (new file, +115)
@@ -0,0 +1,115 @@
# running copyparty on windows

this is a complete example / quickstart for running copyparty on windows, optionally as a service (autostart on boot)

you will definitely need either [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (comfy, portable, more features) or [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) (smaller, safer)

* if you decided to grab `copyparty-sfx.py` instead of the exe you will also need to install the ["Latest Python 3 Release"](https://www.python.org/downloads/windows/)

then you probably want to download [FFmpeg](https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip) and put `ffmpeg.exe` and `ffprobe.exe` in your PATH (so for example `C:\Windows\System32\`) -- this enables thumbnails, audio transcoding, and making music metadata searchable


## the config file

open up notepad and save the following as `c:\users\you\documents\party.conf` (for example)

```yaml
[global]
  lo: ~/logs/cpp-%Y-%m%d.xz  # log to c:\users\you\logs\
  e2dsa, e2ts, no-dedup, z   # sets 4 flags; see expl.
  p: 80, 443                 # listen on ports 80 and 443, not 3923
  theme: 2                   # default theme: protonmail-monokai
  lang: nor                  # default language: viking

[accounts]                   # usernames and passwords
  kevin: shangalabangala     # kevin's password

[/]                          # create a volume available at /
  c:\pub                     # sharing this filesystem location
  accs:                      # and set permissions:
    r: *                     # everyone can read/download files,
    rwmd: kevin              # kevin can read/write/move/delete

[/inc]                       # create another volume at /inc
  c:\pub\inc                 # sharing this filesystem location
  accs:                      # permissions:
    w: *                     # everyone can upload, but not browse
    rwmd: kevin              # kevin is admin here too

[/music]                     # and a third volume at /music
  ~/music                    # which shares c:\users\you\music
  accs:
    r: *
    rwmd: kevin
```


### config explained: [global]

the `[global]` section accepts any config parameters you can see when running copyparty (either the exe or the sfx.py) with `--help`, so this is the same as running copyparty with arguments `--lo c:\users\you\logs\copyparty-%Y-%m%d.xz -e2dsa -e2ts --no-dedup -z -p 80,443 --theme 2 --lang nor`
* `lo: ~/logs/cpp-%Y-%m%d.xz` writes compressed logs (the compression will make them delayed)
* `e2dsa` enables the upload deduplicator and file indexer, which enables searching
* `e2ts` enables music metadata indexing, making albums / titles etc. searchable too
* `no-dedup` writes full dupes to disk instead of symlinking, since lots of windows software doesn't handle symlinks well
  * but the improved upload speed from `e2dsa` is not affected
* `z` enables zeroconf, making the server available at `http://HOSTNAME.local/` from any other machine in the LAN
* `p: 80,443` listens on the ports `80` and `443` instead of the default `3923`
* `lang: nor` sets default language to viking


### config explained: [accounts]

the `[accounts]` section defines all the user accounts, which can then be referenced when granting people access to the different volumes


### config explained: volumes

then we create three volumes, one at `/`, one at `/inc`, and one at `/music`
* `/` and `/music` are readable without requiring people to login (`r: *`) but you need to login as kevin to write/move/delete files (`rwmd: kevin`)
* anyone can upload to `/inc` but you must be logged in as kevin to see the files inside


## run copyparty

to test your config it's best to just run copyparty in a console to watch the output:

```batch
copyparty.exe -c party.conf
```

or if you wanna use `copyparty-sfx.py` instead of the exe (understandable),

```batch
%localappdata%\programs\python\python311\python.exe copyparty-sfx.py -c party.conf
```

(please adjust `python311` to match the python version you installed, i'm not good enough at windows to make that bit generic)


## run it as a service

to run this as a service you need [NSSM](https://nssm.cc/ci/nssm-2.24-101-g897c7ad.zip), so put the exe somewhere in your PATH

then either do this for `copyparty.exe`:
```batch
nssm install cpp %homedrive%%homepath%\downloads\copyparty.exe -c %homedrive%%homepath%\documents\party.conf
```

or do this for `copyparty-sfx.py`:
```batch
nssm install cpp %localappdata%\programs\python\python311\python.exe %homedrive%%homepath%\downloads\copyparty-sfx.py -c %homedrive%%homepath%\documents\party.conf
```

then after creating the service, modify it so it runs with your own windows account (so file permissions don't get wonky and paths expand as expected):
```batch
nssm set cpp ObjectName .\yourAccountName yourWindowsPassword
nssm start cpp
```

and that's it, all good

if it doesn't start, enable stderr logging so you can see what went wrong:
```batch
nssm set cpp AppStderr %homedrive%%homepath%\logs\cppsvc.err
nssm set cpp AppStderrCreationDisposition 2
```
@@ -194,6 +194,9 @@ sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /de
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav

# better sine
sox -DnV -r8000 -b8 -c1 /dev/shm/a.wav synth 1.1 sin 400 vol 0.02

# play icon calibration pics
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done

docs/protocol-reference.sh (new file, +2)
@@ -0,0 +1,2 @@
vsftpd a.conf -olisten=YES -olisten_port=3921 -orun_as_launching_user=YES -obackground=NO -olog_ftp_protocol=YES

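presumably the copyparty counterpart for that protocol comparison would be started along these lines (assumption: `--ftp` takes the listening port, as the `args.ftp` usage in the connect-page templates earlier suggests -- double-check `--help`):

```sh
# copyparty with its FTP server on the same port as the vsftpd reference above
python3 copyparty-sfx.py --ftp 3921 -v .::r
```
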
@@ -14,6 +14,10 @@ when server is on another machine (1gbit LAN),

# creating the config file

the copyparty "connect" page at `/?hc` (so for example http://127.0.0.1:3923/?hc) will generate commands to autoconfigure rclone for your server

**if you prefer to configure rclone manually, continue reading:**

replace `hunter2` with your password, or remove the `hunter2` lines if you allow anonymous access

@@ -22,14 +26,16 @@ replace `hunter2` with your password, or remove the `hunter2` lines if you allow
(
echo [cpp-rw]
echo type = webdav
echo vendor = other
echo vendor = owncloud
echo url = http://127.0.0.1:3923/
echo headers = Cookie,cppwd=hunter2
echo pacer_min_sleep = 0.01ms
echo(
echo [cpp-ro]
echo type = http
echo url = http://127.0.0.1:3923/
echo headers = Cookie,cppwd=hunter2
echo pacer_min_sleep = 0.01ms
) > %userprofile%\.config\rclone\rclone.conf
```

@@ -41,14 +47,16 @@ also install the windows dependencies: [winfsp](https://github.com/billziss-gh/w
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp-rw]
type = webdav
vendor = other
vendor = owncloud
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=hunter2
pacer_min_sleep = 0.01ms

[cpp-ro]
type = http
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=hunter2
pacer_min_sleep = 0.01ms
EOF
```

@@ -62,6 +70,15 @@ rclone.exe mount --vfs-cache-mode writes --vfs-cache-max-age 5s --attr-timeout 5
```


# sync folders to/from copyparty

note that the up2k client [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) (available on the "connect" page of your copyparty server) does uploads much faster and safer, but rclone is bidirectional and more ubiquitous

```
rclone sync /usr/share/icons/ cpp-rw:fds/
```


# use rclone as server too, replacing copyparty

feels out of place but is too good not to mention
@@ -70,3 +87,26 @@ feels out of place but is too good not to mention
rclone.exe serve http --read-only .
rclone.exe serve webdav .
```


# devnotes

copyparty supports and expects [the following](https://github.com/rclone/rclone/blob/46484022b08f8756050aa45505ea0db23e62df8b/backend/webdav/webdav.go#L575-L578) from rclone,

```go
case "owncloud":
f.canStream = true
f.precision = time.Second
f.useOCMtime = true
f.hasOCMD5 = true
f.hasOCSHA1 = true
```

notably,
* `useOCMtime` enables the `x-oc-mtime` header to retain mtime of uploads from rclone
* `canStream` is supported but not required by us
* `hasOCMD5` / `hasOCSHA1` is conveniently dontcare on both ends

there's a scary comment mentioning PROPSET of lastmodified which is not something we wish to support

and if `vendor=owncloud` ever stops working, try `vendor=fastmail` instead

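in case that fallback is ever needed, the equivalent one-liner for the `[cpp-rw]` remote above would look something like this (same `hunter2` placeholder):

```sh
# same remote as [cpp-rw] above, but with the fastmail fallback vendor
rclone config create cpp-rw webdav vendor=fastmail url=http://127.0.0.1:3923/ headers=Cookie,cppwd=hunter2 pacer_min_sleep=0.01ms
```
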
docs/versus.md (377)
@@ -17,6 +17,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho

### ...in reviews:
* ✅ = advantages over copyparty
* 💾 = what copyparty offers as an alternative
* 🔵 = similarities
* ⚠️ = disadvantages (something copyparty does "better")

@@ -46,6 +47,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
* [kodbox](#kodbox)
* [filebrowser](#filebrowser)
* [filegator](#filegator)
* [sftpgo](#sftpgo)
* [updog](#updog)
* [goshs](#goshs)
* [gimme-that](#gimme-that)
@@ -53,6 +55,7 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
* [linx](#linx)
* [h5ai](#h5ai)
* [autoindex](#autoindex)
* [miniserve](#miniserve)
* [briefly considered](#briefly-considered)

@@ -89,6 +92,7 @@ the softwares,
* `i` = [kodbox](https://github.com/kalcaddle/kodbox)
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
* `k` = [filegator](https://github.com/filegator/filegator)
* `l` = [sftpgo](https://github.com/drakkan/sftpgo)

some softwares not in the matrixes,
* [updog](#updog)
@@ -96,6 +100,9 @@ some softwares not in the matrixes,
* [gimme-that](#gimmethat)
* [ass](#ass)
* [linx](#linx)
* [h5ai](#h5ai)
* [autoindex](#autoindex)
* [miniserve](#miniserve)

symbol legend,
* `█` = absolutely
@@ -106,62 +113,64 @@ symbol legend,

## general

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ |
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | |
| good documentation | | | | █ | █ | █ | █ | | | █ | █ |
| runs on iOS | ╱ | | | | | ╱ | | | | | |
| runs on Android | █ | | | | | █ | | | | | |
| runs on WinXP | █ | █ | | | | █ | | | | | |
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ |
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | |
| portable binary | █ | █ | █ | | | █ | █ | | | █ | |
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | |
| android app | ╱ | | | █ | █ | | | | | | |
| iOS app | | | | █ | █ | | | | | | |
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ | █ |
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | | █ |
| good documentation | | | | █ | █ | █ | █ | | | █ | █ | ╱ |
| runs on iOS | ╱ | | | | | ╱ | | | | | | |
| runs on Android | █ | | | | | █ | | | | | | |
| runs on WinXP | █ | █ | | | | █ | | | | | | |
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ | █ |
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ |
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ |
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | | ╱ |
| android app | ╱ | | | █ | █ | | | | | | | |
| iOS app | ╱ | | | █ | █ | | | | | | | |

* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
* `a`/copyparty remarks:
  * no gui for server settings; only for client-side stuff
  * can theoretically run on iOS / iPads using [iSH](https://ish.app/), but only the iPad will offer sufficient multitasking i think
  * [android app](https://f-droid.org/en/packages/me.ocv.partyup/) is for uploading only
  * no iOS app but has [shortcuts](https://github.com/9001/copyparty#ios-shortcuts) for easy uploading
* `b`/hfs2 runs on linux through wine
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
* `h`/chibisafe has undocumented windows support
* `i`/sftpgo must be launched with a command


## file transfer

*the thing that copyparty is actually kinda good at*

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
| download folder as tar | █ | | | | | | | | | █ | |
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ |
| resumable uploads | █ | | | | | | | | █ | | █ |
| upload segmenting | █ | | | | | | | █ | █ | | █ |
| upload acceleration | █ | | | | | | | | █ | | █ |
| upload verification | █ | | | █ | █ | | | | █ | | |
| upload deduplication | █ | | | | █ | | | | █ | | |
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ |
| keep last-modified time | █ | | | █ | █ | █ | | | | | |
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ |
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | |
| ┗ max filesize | █ | | | | | | | █ | | | █ |
| ┗ max items in folder | █ | | | | | | | | | | |
| ┗ max file age | █ | | | | | | | | █ | | |
| ┗ max uploads over time | █ | | | | | | | | | | |
| ┗ compress before write | █ | | | | | | | | | | |
| ┗ randomize filename | █ | | | | | | | █ | █ | | |
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | |
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | |
| checksums provided | | | | █ | █ | | | | █ | ╱ | |
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ |
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ | █ |
| download folder as tar | █ | | | | | | | | | █ | | |
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ | |
| resumable uploads | █ | | | | | | | | █ | | █ | ╱ |
| upload segmenting | █ | | | | | | | █ | █ | | █ | ╱ |
| upload acceleration | █ | | | | | | | | █ | | █ | |
| upload verification | █ | | | █ | █ | | | | █ | | | |
| upload deduplication | █ | | | | █ | | | | █ | | | |
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | ╱ |
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ |
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ | ╱ |
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | | █ |
| ┗ max filesize | █ | | | | | | | █ | | | █ | █ |
| ┗ max items in folder | █ | | | | | | | | | | | ╱ |
| ┗ max file age | █ | | | | | | | | █ | | | |
| ┗ max uploads over time | █ | | | | | | | | | | | ╱ |
| ┗ compress before write | █ | | | | | | | | | | | |
| ┗ randomize filename | █ | | | | | | | █ | █ | | | |
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | | ╱ |
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | | ╱ |
| checksums provided | | | | █ | █ | | | | █ | ╱ | | |
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ | █ |

* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example

@@ -178,25 +187,29 @@ symbol legend,
  * can provide checksums for single files on request
  * can probably do extension/mimetype rejection similar to copyparty
* `k`/filegator download-as-zip is not streaming; it creates the full zipfile before download can start
* `l`/sftpgo:
  * resumable/segmented uploads only over SFTP, not over HTTP
  * upload rules are totals only, not over time
  * can probably do extension/mimetype rejection similar to copyparty


## protocols and client support

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | |
| serve ftp | █ | | | | | █ | | | | | |
| serve ftps | █ | | | | | █ | | | | | |
| serve sftp | | | | | | █ | | | | | |
| serve smb/cifs | ╱ | | | | | █ | | | | | |
| serve dlna | | | | | | █ | | | | | |
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ |
| zeroconf | █ | | | | | | | | | | |
| supports netscape 4 | ╱ | | | | | █ | | | | | • |
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • |
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | |
| undecodable filenames | █ | | | • | • | █ | | • | • | | |
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | | █ |
| serve ftp | █ | | | | | █ | | | | | | █ |
| serve ftps | █ | | | | | █ | | | | | | █ |
| serve sftp | | | | | | █ | | | | | | █ |
| serve smb/cifs | ╱ | | | | | █ | | | | | | |
| serve dlna | | | | | | █ | | | | | | |
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ | █ |
| zeroconf | █ | | | | | | | | | | | |
| supports netscape 4 | ╱ | | | | | █ | | | | | • | |
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • | |
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | | ╱ |
| undecodable filenames | █ | | | • | • | █ | | • | • | | | ╱ |

* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
@@ -206,57 +219,62 @@ symbol legend,
|
||||
* `a`/copyparty remarks:
|
||||
* extremely minimal samba/cifs server
|
||||
* netscape 4 / ie6 support is mostly listed as a joke altho some people have actually found it useful ([ie4 tho](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png))
|
||||
* `l`/sftpgo translates mojibake filenames into valid utf-8 (information loss)
|
||||
|
||||
|
||||
## server configuration
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| config from cmd args | █ | | | | | █ | █ | | | █ | |
|
||||
| config files | █ | █ | █ | ╱ | ╱ | █ | | █ | | █ | • |
|
||||
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ |
|
||||
| same-port http / https | █ | | | | | | | | | | |
|
||||
| listen multiple ports | █ | | | | | | | | | | |
|
||||
| virtual file system | █ | █ | █ | | | | █ | | | | |
|
||||
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • |
|
||||
| folder-rproxy ok | █ | | | | █ | █ | | • | • | • | • |
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| config from cmd args | █ | | | | | █ | █ | | | █ | | ╱ |
|
||||
| config files | █ | █ | █ | ╱ | ╱ | █ | | █ | | █ | • | ╱ |
|
||||
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ | |
|
||||
| same-port http / https | █ | | | | | | | | | | | |
|
||||
| listen multiple ports | █ | | | | | | | | | | | █ |
|
||||
| virtual file system | █ | █ | █ | | | | █ | | | | | █ |
|
||||
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • | █ |
|
||||
| folder-rproxy ok | █ | | | | █ | █ | | • | • | • | • | |
|
||||
|
||||
* `folder-rproxy` = reverse-proxying without dedicating an entire (sub)domain, using a subfolder instead
|
||||
* `l`/sftpgo:
|
||||
* config: users must be added through gui / api calls
|
||||
|
||||
|
||||
## server capabilities
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| single-sign-on | | | | █ | █ | | | | • | | |
| token auth | | | | █ | █ | | | █ | | | |
| per-volume permissions | █ | █ | █ | █ | █ | █ | █ | | █ | █ | ╱ |
| per-folder permissions | ╱ | | | █ | █ | | █ | | █ | █ | ╱ |
| per-file permissions | | | | █ | █ | | █ | | █ | | |
| per-file passwords | █ | | | █ | █ | | █ | | █ | | |
| unmap subfolders | █ | | | | | | █ | | | █ | ╱ |
| index.html blocks list | | | | | | | █ | | | • | |
| write-only folders | █ | | | | | | | | | | █ |
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ |
| file versioning | | | | █ | █ | | | | | | |
| file encryption | | | | █ | █ | █ | | | | | |
| file indexing | █ | | █ | █ | █ | | | █ | █ | █ | |
| ┗ per-volume db | █ | | • | • | • | | | • | • | | |
| ┗ db stored in folder | █ | | | | | | | • | • | █ | |
| ┗ db stored out-of-tree | █ | | █ | █ | █ | | | • | • | █ | |
| ┗ existing file tree | █ | | █ | | | | | | | █ | |
| file action event hooks | █ | | | | | | | | | █ | |
| one-way folder sync | █ | | | █ | █ | █ | | | | | |
| full sync | | | | █ | █ | | | | | | |
| speed throttle | | █ | █ | | | █ | | | █ | | |
| anti-bruteforce | █ | █ | █ | █ | █ | | | | • | | |
| dyndns updater | | █ | | | | | | | | | |
| self-updater | | | █ | | | | | | | | |
| log rotation | █ | | █ | █ | █ | | | • | █ | | |
| upload tracking / log | █ | █ | • | █ | █ | | | █ | █ | | |
| curl-friendly ls | █ | | | | | | | | | | |
| curl-friendly upload | █ | | | | | █ | █ | • | | | |

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| per-account chroot | | | | | | | | | | | | █ |
| single-sign-on | | | | █ | █ | | | | • | | | |
| token auth | | | | █ | █ | | | █ | | | | |
| 2fa | | | | █ | █ | | | | | | | █ |
| per-volume permissions | █ | █ | █ | █ | █ | █ | █ | | █ | █ | ╱ | █ |
| per-folder permissions | ╱ | | | █ | █ | | █ | | █ | █ | ╱ | █ |
| per-file permissions | | | | █ | █ | | █ | | █ | | | |
| per-file passwords | █ | | | █ | █ | | █ | | █ | | | |
| unmap subfolders | █ | | | | | | █ | | | █ | ╱ | • |
| index.html blocks list | | | | | | | █ | | | • | | |
| write-only folders | █ | | | | | | | | | | █ | █ |
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ | █ |
| file versioning | | | | █ | █ | | | | | | | |
| file encryption | | | | █ | █ | █ | | | | | | █ |
| file indexing | █ | | █ | █ | █ | | | █ | █ | █ | | |
| ┗ per-volume db | █ | | • | • | • | | | • | • | | | |
| ┗ db stored in folder | █ | | | | | | | • | • | █ | | |
| ┗ db stored out-of-tree | █ | | █ | █ | █ | | | • | • | █ | | |
| ┗ existing file tree | █ | | █ | | | | | | | █ | | |
| file action event hooks | █ | | | | | | | | | █ | | █ |
| one-way folder sync | █ | | | █ | █ | █ | | | | | | |
| full sync | | | | █ | █ | | | | | | | |
| speed throttle | | █ | █ | | | █ | | | █ | | | █ |
| anti-bruteforce | █ | █ | █ | █ | █ | | | | • | | | █ |
| dyndns updater | | █ | | | | | | | | | | |
| self-updater | | | █ | | | | | | | | | |
| log rotation | █ | | █ | █ | █ | | | • | █ | | | █ |
| upload tracking / log | █ | █ | • | █ | █ | | | █ | █ | | | ╱ |
| curl-friendly ls | █ | | | | | | | | | | | |
| curl-friendly upload | █ | | | | | █ | █ | • | | | | |

* `unmap subfolders` = "shadowing"; mounting a local folder in the middle of an existing filesystem tree in order to disable access below that path (a copyparty example follows after the remarks below)
* `files stored as-is` = uploaded files are trivially readable from the server HDD, not sliced into chunks or in weird folder structures or anything like that
@@ -269,7 +287,7 @@ symbol legend,
* `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled
* `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../`
* `a`/copyparty remarks:
  * one-way folder sync from local to server can be done efficiently with [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py), or with webdav and conventional rsync
  * one-way folder sync from local to server can be done efficiently with [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy), or with webdav and conventional rsync
  * can hot-reload config files (with just a few exceptions)
  * can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead
  * [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative
@@ -277,49 +295,52 @@ symbol legend,
* `k`/filegator remarks:
  * `per-* permissions` -- can limit a user to one folder and its subfolders
  * `unmap subfolders` -- can globally filter a list of paths
* `l`/sftpgo:
  * `file action event hooks` also include on-download triggers
  * `upload tracking / log` in main logfile
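
The `unmap subfolders` and `write-only folders` rows can be combined on a single copyparty command line; a rough sketch only, assuming the usual `-v SRC:DST:PERM` volume syntax from the copyparty docs (all paths here are made up):

```sh
# serve /mnt/nas read-only at the webroot,
# shadow its "private" subfolder by mounting an empty dir on top of it,
# and add an anonymous write-only dropbox
mkdir -p /tmp/empty
python3 copyparty-sfx.py \
  -v /mnt/nas::r \
  -v /tmp/empty:private:r \
  -v /mnt/nas/inbox:inbox:w
```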
## client features

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - |
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ |
| themes | █ | █ | | █ | | | | | █ | | |
| directory tree nav | █ | ╱ | | | █ | | | | █ | | ╱ |
| multi-column sorting | █ | | | | | | | | | | |
| thumbnails | █ | | | ╱ | ╱ | | | █ | █ | ╱ | |
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | |
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | |
| ┗ audio spectrograms | █ | | | | | | | | | | |
| audio player | █ | | | █ | █ | | | | █ | ╱ | |
| ┗ gapless playback | █ | | | | | | | | • | | |
| ┗ audio equalizer | █ | | | | | | | | | | |
| ┗ waveform seekbar | █ | | | | | | | | | | |
| ┗ OS integration | █ | | | | | | | | | | |
| ┗ transcode to lossy | █ | | | | | | | | | | |
| video player | █ | | | █ | █ | | | | █ | █ | |
| ┗ video transcoding | | | | | | | | | █ | | |
| audio BPM detector | █ | | | | | | | | | | |
| audio key detector | █ | | | | | | | | | | |
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
| search by date / size | █ | | | | █ | | | █ | █ | | |
| search by bpm / key | █ | | | | | | | | | | |
| search by custom tags | | | | | | | | █ | █ | | |
| search in file contents | | | | █ | █ | | | | █ | | |
| search by custom parser | █ | | | | | | | | | | |
| find local file | █ | | | | | | | | | | |
| undo recent uploads | █ | | | | | | | | | | |
| create directories | █ | | | █ | █ | ╱ | █ | █ | █ | █ | █ |
| image viewer | █ | | | █ | █ | | | | █ | █ | █ |
| markdown viewer | █ | | | | █ | | | | █ | ╱ | ╱ |
| markdown editor | █ | | | | █ | | | | █ | ╱ | ╱ |
| readme.md in listing | █ | | | █ | | | | | | | |
| rename files | █ | █ | █ | █ | █ | ╱ | █ | | █ | █ | █ |
| batch rename | █ | | | | | | | | █ | | |
| cut / paste files | █ | █ | | █ | █ | | | | █ | | |
| move files | █ | █ | | █ | █ | | █ | | █ | █ | █ |
| delete files | █ | █ | | █ | █ | ╱ | █ | █ | █ | █ | █ |
| copy files | | | | | █ | | | | █ | █ | █ |

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ | |
| themes | █ | █ | | █ | | | | | █ | | | |
| directory tree nav | █ | ╱ | | | █ | | | | █ | | ╱ | |
| multi-column sorting | █ | | | | | | | | | | | |
| thumbnails | █ | | | ╱ | ╱ | | | █ | █ | ╱ | | |
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | | |
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | | |
| ┗ audio spectrograms | █ | | | | | | | | | | | |
| audio player | █ | | | █ | █ | | | | █ | ╱ | | |
| ┗ gapless playback | █ | | | | | | | | • | | | |
| ┗ audio equalizer | █ | | | | | | | | | | | |
| ┗ waveform seekbar | █ | | | | | | | | | | | |
| ┗ OS integration | █ | | | | | | | | | | | |
| ┗ transcode to lossy | █ | | | | | | | | | | | |
| video player | █ | | | █ | █ | | | | █ | █ | | |
| ┗ video transcoding | | | | | | | | | █ | | | |
| audio BPM detector | █ | | | | | | | | | | | |
| audio key detector | █ | | | | | | | | | | | |
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ | |
| search by date / size | █ | | | | █ | | | █ | █ | | | |
| search by bpm / key | █ | | | | | | | | | | | |
| search by custom tags | | | | | | | | █ | █ | | | |
| search in file contents | | | | █ | █ | | | | █ | | | |
| search by custom parser | █ | | | | | | | | | | | |
| find local file | █ | | | | | | | | | | | |
| undo recent uploads | █ | | | | | | | | | | | |
| create directories | █ | | | █ | █ | ╱ | █ | █ | █ | █ | █ | █ |
| image viewer | █ | | | █ | █ | | | | █ | █ | █ | |
| markdown viewer | █ | | | | █ | | | | █ | ╱ | ╱ | |
| markdown editor | █ | | | | █ | | | | █ | ╱ | ╱ | |
| readme.md in listing | █ | | | █ | | | | | | | | |
| rename files | █ | █ | █ | █ | █ | ╱ | █ | | █ | █ | █ | █ |
| batch rename | █ | | | | | | | | █ | | | |
| cut / paste files | █ | █ | | █ | █ | | | | █ | | | |
| move files | █ | █ | | █ | █ | | █ | | █ | █ | █ | |
| delete files | █ | █ | | █ | █ | ╱ | █ | █ | █ | █ | █ | █ |
| copy files | | | | | █ | | | | █ | █ | █ | |

* `single-page app` = multitasking; possible to continue navigating while uploading
* `audio player » os-integration` = use the [lockscreen](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) or [media hotkeys](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) to play/pause, prev/next song
@@ -335,20 +356,21 @@ symbol legend,

## integration

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| OS alert on upload | █ | | | | | | | | | ╱ | |
| discord | █ | | | | | | | | | ╱ | |
| ┗ announce uploads | █ | | | | | | | | | | |
| ┗ custom embeds | | | | | | | | | | | |
| sharex | █ | | | █ | | █ | ╱ | █ | | | |
| flameshot | | | | | | █ | | | | | |

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| OS alert on upload | █ | | | | | | | | | ╱ | | ╱ |
| discord | █ | | | | | | | | | ╱ | | ╱ |
| ┗ announce uploads | █ | | | | | | | | | | | ╱ |
| ┗ custom embeds | | | | | | | | | | | | ╱ |
| sharex | █ | | | █ | | █ | ╱ | █ | | | | |
| flameshot | | | | | | █ | | | | | | |

* sharex `╱` = yes, but does not provide example sharex config
* `a`/copyparty remarks (a small hook-invocation sketch follows below):
  * `OS alert on upload` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
  * `discord » announce uploads` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* `j`/filebrowser can probably pull those off with command runners similar to copyparty
* `l`/sftpgo has nothing built-in but is very extensible
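
For the copyparty rows above, attaching one of the bundled hook scripts is roughly a one-liner; this is only a sketch, and the `--xau` ("execute after upload") flag plus its bare-path argument form are assumptions based on the hooks readme rather than something verified here:

```sh
# run the desktop-notification hook after each finished upload
python3 copyparty-sfx.py -v /mnt/inbox:inbox:w \
  --xau bin/hooks/notify.py
```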
## another matrix
@@ -366,6 +388,7 @@ symbol legend,
| kodbox | php | ░ gpl3 | 92 MB |
| filebrowser | go | █ apl2 | 20 MB |
| filegator | php | █ mit | • |
| sftpgo | go | ‼ agpl | 44 MB |
| updog | python | █ mit | 17 MB |
| goshs | go | █ mit | 11 MB |
| gimme-that | python | █ mit | 4.8 MB |
@@ -379,6 +402,7 @@ symbol legend,
# reviews

* ✅ are advantages over copyparty
* 💾 are what copyparty offers as an alternative
* 🔵 are similarities
* ⚠️ are disadvantages (something copyparty does "better")

@@ -412,10 +436,11 @@ symbol legend,
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ⚠️ AGPL licensed
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
  * copyparty: android upload-only app
  * 💾 android upload-only app + iPhone upload shortcut
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents
* ✅ webauthn passwordless authentication
@@ -430,10 +455,11 @@ symbol legend,
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ⚠️ AGPL licensed
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
  * copyparty: android upload-only app
  * 💾 android upload-only app + iPhone upload shortcut
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents

@@ -467,7 +493,7 @@ symbol legend,
* ✅ searchable image tags; delete by tag
* ✅ browser extension to upload files to the server
* ✅ reject uploads by file extension
  * copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
  * 💾 can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ token auth (api keys)

## [kodbox](https://github.com/kalcaddle/kodbox)
@@ -512,6 +538,30 @@ symbol legend,
* ⚠️ doesn't support crazy filenames
* ⚠️ limited file search

## [sftpgo](https://github.com/drakkan/sftpgo)
* go; cross-platform (windows, linux, mac)
* ⚠️ http uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* 🔵 sftp uploads are resumable
* ⚠️ web UI is very minimal + a bit slow
* ⚠️ no thumbnails / image viewer / audio player
* ⚠️ basic file manager (no cut/paste/move)
* ⚠️ no filesystem indexing / search
* ⚠️ doesn't run on phones, tablets
* ⚠️ no zeroconf (mdns/ssdp)
* ⚠️ AGPL licensed
* 🔵 ftp, ftps, webdav
* ✅ sftp server
* ✅ settings gui
* ✅ acme (automatic tls certs)
  * 💾 relies on caddy/certbot/acme.sh
* ✅ at-rest encryption
  * 💾 relies on LUKS/BitLocker
* ✅ can use S3/GCS as storage backend
  * 💾 relies on rclone-mount
* ✅ on-download event hook (otherwise same as copyparty)
* ✅ more extensive permissions control

## [updog](https://github.com/sc0tfree/updog)
* python; cross-platform
* basic directory listing with upload feature
@@ -526,7 +576,7 @@ symbol legend,
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ✅ cool clipboard widget
  * copyparty: the markdown editor is an ok substitute
  * 💾 the markdown editor is an ok substitute
* 🔵 read-only and upload-only modes (same as copyparty's write-only)
* 🔵 https, webdav, but no ftp

@@ -538,7 +588,7 @@ symbol legend,
* ⚠️ weird folder structure for uploads
* ✅ clamav antivirus check on upload! neat
* 🔵 optional max-filesize, os-notification on uploads
  * copyparty: os-notification available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
  * 💾 os-notification available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)

## [ass](https://github.com/tycrek/ass)
* nodejs; recommends docker
@@ -549,12 +599,13 @@ symbol legend,
* ⚠️ on cloudflare: max upload size 100 MiB
* ✅ token auth
* ✅ gps metadata stripping
  * copyparty: possible with [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py)
  * 💾 possible with [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py)
* ✅ discord integration (custom embeds, upload webhook)
  * copyparty: [upload webhook plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
  * 💾 [upload webhook plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* ✅ reject uploads by mimetype
  * copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that
  * 💾 can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ can use S3 as storage backend
  * 💾 relies on rclone-mount
* ✅ custom 404 pages

## [linx](https://github.com/ZizzyDizzyMC/linx-server/)
@@ -564,12 +615,13 @@ symbol legend,
* 🔵 some of its unique features have been added to copyparty as former linx users have migrated
  * file expiration timers, filename randomization
* ✅ password-protected files
  * copyparty: password-protected folders + filekeys to skip the folder password seem to cover most usecases
  * 💾 password-protected folders + filekeys to skip the folder password seem to cover most usecases
* ✅ file deletion keys
* ✅ download files as torrents
* ✅ remote uploads (send a link to the server and it downloads it)
  * copyparty: available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that
  * 💾 available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
* ✅ can use S3 as storage backend
  * 💾 relies on rclone-mount

## [h5ai](https://larsjung.de/h5ai/)
* ⚠️ read only; no upload/move/delete
@@ -581,7 +633,16 @@ symbol legend,
## [autoindex](https://github.com/nielsAD/autoindex)
* ⚠️ read only; no upload/move/delete
* ✅ directory cache for faster browsing of cloud storage
  * copyparty: local index/cache for recursive search (names/attrs/tags), but not for browsing
  * 💾 local index/cache for recursive search (names/attrs/tags), but not for browsing

## [miniserve](https://github.com/svenstaro/miniserve)
* rust; cross-platform (windows, linux, mac)
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no thumbnails / image viewer / audio player / file manager
* ⚠️ no filesystem indexing / search
* 🔵 upload, tar/zip download, qr-code
* ✅ faster at loading huge folders


# briefly considered

42 flake.lock generated Normal file
@@ -0,0 +1,42 @@
{
  "nodes": {
    "flake-utils": {
      "locked": {
        "lastModified": 1678901627,
        "narHash": "sha256-U02riOqrKKzwjsxc/400XnElV+UtPUQWpANPlyazjH0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "93a2b84fc4b70d9e089d029deacc3583435c2ed6",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1680334310,
        "narHash": "sha256-ISWz16oGxBhF7wqAxefMPwFag6SlsA9up8muV79V9ck=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "884e3b68be02ff9d61a042bc9bd9dd2a358f95da",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "ref": "nixos-22.11",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}

28 flake.nix Normal file
@@ -0,0 +1,28 @@
{
  inputs = {
    nixpkgs.url = "nixpkgs/nixos-22.11";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }:
    {
      nixosModules.default = ./contrib/nixos/modules/copyparty.nix;
      overlays.default = self: super: {
        copyparty =
          self.python3.pkgs.callPackage ./contrib/package/nix/copyparty {
            ffmpeg = self.ffmpeg-full;
          };
      };
    } // flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs {
          inherit system;
          overlays = [ self.overlays.default ];
        };
      in {
        packages = {
          inherit (pkgs) copyparty;
          default = self.packages.${system}.copyparty;
        };
      });
}
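
A quick way to consume the flake above, sketched from the outputs it declares (assumes a flakes-enabled nix, and that the nix package installs a `copyparty` executable):

```sh
# build the default package from a checkout of this repo
nix build .#copyparty && ./result/bin/copyparty --help

# or pull the NixOS module into a system flake:
#   inputs.copyparty.url = "github:9001/copyparty";
#   imports = [ inputs.copyparty.nixosModules.default ];
```
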
145 pyproject.toml Normal file
@@ -0,0 +1,145 @@
[project]
name = "copyparty"
description = """
Portable file server with accelerated resumable uploads, \
deduplication, WebDAV, FTP, zeroconf, media indexer, \
video thumbnails, audio transcoding, and write-only folders"""
readme = "README.md"
authors = [{ name = "ed", email = "copyparty@ocv.me" }]
license = { text = "MIT" }
requires-python = ">=3.3"
dependencies = ["Jinja2"]
dynamic = ["version"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: Jython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "Environment :: Console",
    "Environment :: No Input/Output (Daemon)",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: System Administrators",
    "Topic :: Communications :: File Sharing",
    "Topic :: Internet :: File Transfer Protocol (FTP)",
    "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
]

[project.urls]
"Source Code" = "https://github.com/9001/copyparty"
"Bug Tracker" = "https://github.com/9001/copyparty/issues"
"Demo Server" = "https://a.ocv.me/pub/demo/"

[project.optional-dependencies]
thumbnails = ["Pillow"]
thumbnails2 = ["pyvips"]
audiotags = ["mutagen"]
ftpd = ["pyftpdlib"]
ftps = ["pyftpdlib", "pyopenssl"]

[project.scripts]
copyparty = "copyparty.__main__:main"
"u2c" = "copyparty.web.a.u2c:main"
"partyfuse" = "copyparty.web.a.partyfuse:main"

# =====================================================================

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
# requires = ["hatchling"]
# build-backend = "hatchling.build"

[tool.hatch.version]
source = "code"
path = "copyparty/__version__.py"

[tool.setuptools.dynamic]
version = { attr = "copyparty.__version__.__version__" }

[tool.setuptools.packages.find]
include = ["copyparty*"]

[tool.setuptools.package-data]
copyparty = [
    "res/COPYING.txt",
    "res/insecure.pem",
    "web/*.gz",
    "web/*.js",
    "web/*.css",
    "web/*.html",
    "web/a/*.bat",
    "web/dd/*.png",
    "web/deps/*.gz",
    "web/deps/*.woff*",
]

# =====================================================================

[tool.black]
required-version = '21.12b0'
target-version = ['py27']

[tool.isort]
profile = "black"
include_trailing_comma = true

[tool.bandit]
skips = ["B104", "B110", "B112"]

# =====================================================================

[tool.pylint.MAIN]
py-version = "3.11"
jobs = 2

[tool.pylint."MESSAGES CONTROL"]
disable = [
    "missing-module-docstring",
    "missing-class-docstring",
    "missing-function-docstring",
    "import-outside-toplevel",
    "wrong-import-position",
    "raise-missing-from",
    "bare-except",
    "broad-exception-raised",
    "broad-exception-caught",
    "invalid-name",
    "line-too-long",
    "too-many-lines",
    "consider-using-f-string",
    "pointless-string-statement",
]

[tool.pylint.FORMAT]
expected-line-ending-format = "LF"

# =====================================================================

[tool.mypy]
python_version = "3.11"
files = ["copyparty"]
show_error_codes = true
show_column_numbers = true
pretty = true
strict = true
local_partial_types = true
strict_equality = true
warn_unreachable = true
ignore_missing_imports = true
follow_imports = "silent"

[[tool.mypy.overrides]]
no_implicit_reexport = false
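
For reference, a plain install from PyPI picks up the entry-points and extras defined above; a short sketch (extras names taken from `[project.optional-dependencies]`):

```sh
python3 -m pip install --user "copyparty[thumbnails,audiotags,ftpd]"
copyparty --help     # console script from [project.scripts]
u2c --help           # standalone uploader, same table
```
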
@@ -34,7 +34,7 @@ set -e
|
||||
# 4823 copyparty-extras/copyparty-repack.sh
|
||||
# `- source files from github
|
||||
#
|
||||
# 23663 copyparty-extras/up2k.py
|
||||
# 23663 copyparty-extras/u2c.py
|
||||
# `- standalone utility to upload or search for files
|
||||
#
|
||||
# 32280 copyparty-extras/partyfuse.py
|
||||
@@ -147,7 +147,7 @@ repack sfx-lite "re no-dd no-cm no-hl gz"
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/bin/up2k.py .
|
||||
mv copyparty-*/bin/u2c.py .
|
||||
mv copyparty-*/bin/partyfuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
|
||||
@@ -1,14 +1,22 @@
|
||||
# TODO easymde embeds codemirror on 3.17 due to new npm probably
|
||||
FROM alpine:3.16
|
||||
FROM alpine:3.18
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=4.2.5 \
|
||||
ver_marked=4.3.0 \
|
||||
ver_mde=2.18.0 \
|
||||
ver_codemirror=5.65.11 \
|
||||
ver_codemirror=5.65.12 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_prism=1.29.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
# versioncheck:
|
||||
# https://github.com/markedjs/marked/releases
|
||||
# https://github.com/Ionaru/easy-markdown-editor/tags
|
||||
# https://github.com/codemirror/codemirror5/releases
|
||||
# https://github.com/Daninet/hash-wasm/releases
|
||||
# https://github.com/openpgpjs/asmcrypto.js
|
||||
# https://github.com/google/zopfli/tags
|
||||
|
||||
|
||||
# download;
|
||||
# the scp url is regular latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
@@ -22,6 +30,7 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
|
||||
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
|
||||
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
|
||||
&& wget https://github.com/PrismJS/prism/archive/refs/tags/v$ver_prism.tar.gz -O prism.tgz \
|
||||
&& (mkdir hash-wasm \
|
||||
&& cd hash-wasm \
|
||||
&& unzip ../hash-wasm.zip) \
|
||||
@@ -39,14 +48,11 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& cd easy-markdown-editor* \
|
||||
&& npm install \
|
||||
&& npm i gulp-cli -g ) \
|
||||
&& tar -xf prism.tgz \
|
||||
&& unzip fontawesome.zip \
|
||||
&& tar -xf zopfli.tgz
|
||||
|
||||
|
||||
# todo
|
||||
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
|
||||
|
||||
|
||||
# build fonttools (which needs zopfli)
|
||||
RUN tar -xf zopfli.tgz \
|
||||
&& cd zopfli* \
|
||||
@@ -121,6 +127,12 @@ COPY shiftbase.py /z
|
||||
RUN /bin/ash /z/mini-fa.sh
|
||||
|
||||
|
||||
# build prismjs
|
||||
COPY genprism.py /z
|
||||
COPY genprism.sh /z
|
||||
RUN ./genprism.sh $ver_prism
|
||||
|
||||
|
||||
# compress
|
||||
COPY zopfli.makefile /z/dist/Makefile
|
||||
RUN cd /z/dist \
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
self := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
|
||||
vend := $(self)/../../copyparty/web/deps
|
||||
|
||||
# prefers podman-docker (optionally rootless) over actual docker/moby
|
||||
|
||||
all:
|
||||
-service docker start
|
||||
-systemctl start docker
|
||||
|
||||
docker build -t build-copyparty-deps .
|
||||
|
||||
rm -rf $(vend)
|
||||
@@ -14,6 +13,7 @@ all:
|
||||
docker run --rm -i build-copyparty-deps:latest | \
|
||||
tar -xvC $(vend) --strip-components=1
|
||||
|
||||
touch $(vend)/__init__.py
|
||||
chown -R `stat $(self) -c %u:%g` $(vend)
|
||||
|
||||
purge:
|
||||
|
||||
198 scripts/deps-docker/genprism.py Executable file
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# author: @chinponya
|
||||
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse, parse_qsl
|
||||
|
||||
|
||||
def read_json(path):
|
||||
return json.loads(path.read_text())
|
||||
|
||||
|
||||
def get_prism_version(prism_path):
|
||||
package_json_path = prism_path / "package.json"
|
||||
package_json = read_json(package_json_path)
|
||||
return package_json["version"]
|
||||
|
||||
|
||||
def get_prism_components(prism_path):
|
||||
components_json_path = prism_path / "components.json"
|
||||
components_json = read_json(components_json_path)
|
||||
return components_json
|
||||
|
||||
|
||||
def parse_prism_configuration(url_str):
|
||||
url = urlparse(url_str)
|
||||
# prism.com uses a non-standard query string-like encoding
|
||||
query = {k: v.split(" ") for k, v in parse_qsl(url.fragment)}
|
||||
return query
|
||||
|
||||
|
||||
def paths_of_component(prism_path, kind, components, name, minified):
|
||||
component = components[kind][name]
|
||||
meta = components[kind]["meta"]
|
||||
path_format = meta["path"]
|
||||
path_base = prism_path / path_format.replace("{id}", name)
|
||||
|
||||
if isinstance(component, str):
|
||||
# 'core' component has a different shape, so we convert it to be consistent
|
||||
component = {"title": component}
|
||||
|
||||
if meta.get("noCSS") or component.get("noCSS"):
|
||||
extensions = ["js"]
|
||||
elif kind == "themes":
|
||||
extensions = ["css"]
|
||||
else:
|
||||
extensions = ["js", "css"]
|
||||
|
||||
if path_base.is_dir():
|
||||
result = {ext: path_base / f"{name}.{ext}" for ext in extensions}
|
||||
elif path_base.suffix:
|
||||
ext = path_base.suffix.replace(".", "")
|
||||
result = {ext: path_base}
|
||||
else:
|
||||
result = {ext: path_base.with_suffix(f".{ext}") for ext in extensions}
|
||||
|
||||
if minified:
|
||||
result = {
|
||||
ext: path.with_suffix(".min" + path.suffix) for ext, path in result.items()
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def read_component_contents(kv_paths):
|
||||
return {k: path.read_text() for k, path in kv_paths.items()}
|
||||
|
||||
|
||||
def get_language_dependencies(components, name):
|
||||
dependencies = components["languages"][name].get("require")
|
||||
|
||||
if isinstance(dependencies, list):
|
||||
return dependencies
|
||||
elif isinstance(dependencies, str):
|
||||
return [dependencies]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
def make_header(prism_path, url):
|
||||
version = get_prism_version(prism_path)
|
||||
header = f"/* PrismJS {version}\n{url} */"
|
||||
return {"js": header, "css": header}
|
||||
|
||||
|
||||
def make_core(prism_path, components, minified):
|
||||
kv_paths = paths_of_component(prism_path, "core", components, "core", minified)
|
||||
return read_component_contents(kv_paths)
|
||||
|
||||
|
||||
def make_theme(prism_path, components, name, minified):
|
||||
kv_paths = paths_of_component(prism_path, "themes", components, name, minified)
|
||||
return read_component_contents(kv_paths)
|
||||
|
||||
|
||||
def make_language(prism_path, components, name, minified):
|
||||
kv_paths = paths_of_component(prism_path, "languages", components, name, minified)
|
||||
return read_component_contents(kv_paths)
|
||||
|
||||
|
||||
def make_languages(prism_path, components, names, minified):
|
||||
names_with_dependencies = sum(
|
||||
([*get_language_dependencies(components, name), name] for name in names), []
|
||||
)
|
||||
|
||||
seen = set()
|
||||
names_with_dependencies = [
|
||||
x for x in names_with_dependencies if not (x in seen or seen.add(x))
|
||||
]
|
||||
|
||||
kv_code = [
|
||||
make_language(prism_path, components, name, minified)
|
||||
for name in names_with_dependencies
|
||||
]
|
||||
|
||||
return kv_code
|
||||
|
||||
|
||||
def make_plugin(prism_path, components, name, minified):
|
||||
kv_paths = paths_of_component(prism_path, "plugins", components, name, minified)
|
||||
return read_component_contents(kv_paths)
|
||||
|
||||
|
||||
def make_plugins(prism_path, components, names, minified):
|
||||
kv_code = [make_plugin(prism_path, components, name, minified) for name in names]
|
||||
return kv_code
|
||||
|
||||
|
||||
def make_code(prism_path, url, minified):
|
||||
components = get_prism_components(prism_path)
|
||||
configuration = parse_prism_configuration(url)
|
||||
theme_name = configuration["themes"][0]
|
||||
code = [
|
||||
make_header(prism_path, url),
|
||||
make_core(prism_path, components, minified),
|
||||
make_theme(prism_path, components, theme_name, minified),
|
||||
]
|
||||
|
||||
if configuration.get("languages"):
|
||||
code.extend(
|
||||
make_languages(prism_path, components, configuration["languages"], minified)
|
||||
)
|
||||
|
||||
if configuration.get("plugins"):
|
||||
code.extend(
|
||||
make_plugins(prism_path, components, configuration["plugins"], minified)
|
||||
)
|
||||
|
||||
return code
|
||||
|
||||
|
||||
def join_code(kv_code):
|
||||
result = {"js": "", "css": ""}
|
||||
|
||||
for row in kv_code:
|
||||
for key, code in row.items():
|
||||
result[key] += code
|
||||
result[key] += "\n"
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def write_code(kv_code, js_out, css_out):
|
||||
code = join_code(kv_code)
|
||||
|
||||
with js_out.open("w") as f:
|
||||
f.write(code["js"])
|
||||
print(f"written {js_out}")
|
||||
|
||||
with css_out.open("w") as f:
|
||||
f.write(code["css"])
|
||||
print(f"written {css_out}")
|
||||
|
||||
|
||||
def parse_args():
|
||||
# fmt: off
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("url", help="configured prism download url")
|
||||
parser.add_argument("--dir", type=Path, default=Path("."), help="prism repo directory")
|
||||
parser.add_argument("--minify", default=True, action=argparse.BooleanOptionalAction, help="use minified files",)
|
||||
parser.add_argument("--js-out", type=Path, default=Path("prism.js"), help="JS output file path")
|
||||
parser.add_argument("--css-out", type=Path, default=Path("prism.css"), help="CSS output file path")
|
||||
# fmt: on
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
code = make_code(args.dir, args.url, args.minify)
|
||||
write_code(code, args.js_out, args.css_out)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
66 scripts/deps-docker/genprism.sh Executable file
@@ -0,0 +1,66 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
langs=(
|
||||
markup
|
||||
css
|
||||
clike
|
||||
javascript
|
||||
autohotkey
|
||||
bash
|
||||
basic
|
||||
batch
|
||||
c
|
||||
csharp
|
||||
cpp
|
||||
cmake
|
||||
diff
|
||||
docker
|
||||
elixir
|
||||
glsl
|
||||
go
|
||||
ini
|
||||
java
|
||||
json
|
||||
kotlin
|
||||
latex
|
||||
less
|
||||
lisp
|
||||
lua
|
||||
makefile
|
||||
matlab
|
||||
moonscript
|
||||
nim
|
||||
objectivec
|
||||
perl
|
||||
powershell
|
||||
python
|
||||
r
|
||||
jsx
|
||||
ruby
|
||||
rust
|
||||
sass
|
||||
scss
|
||||
sql
|
||||
swift
|
||||
systemd
|
||||
toml
|
||||
typescript
|
||||
vbnet
|
||||
verilog
|
||||
vhdl
|
||||
yaml
|
||||
zig
|
||||
)
|
||||
|
||||
slangs="${langs[*]}"
|
||||
slangs="${slangs// /+}"
|
||||
|
||||
for theme in prism-funky prism ; do
|
||||
u="https://prismjs.com/download.html#themes=$theme&languages=$slangs&plugins=line-highlight+line-numbers+autolinker"
|
||||
echo "$u"
|
||||
./genprism.py --dir prism-$1 --js-out prism.js --css-out $theme.css "$u"
|
||||
done
|
||||
|
||||
mv prism-funky.css prismd.css
|
||||
mv prismd.css prism.css prism.js /z/dist/
|
||||
@@ -1,5 +1,5 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
adds linetracking to marked.js v4.2.3;
|
||||
adds linetracking to marked.js v4.3.0;
|
||||
add data-ln="%d" to most tags, %d is the source markdown line
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -206,7 +206,6 @@ index a22a2bc..884ad66 100644
|
||||
// Run any renderer extensions
|
||||
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
index 7c36a75..aa1a53a 100644
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -11,6 +11,12 @@ export class Renderer {
|
||||
@@ -290,10 +289,9 @@ index 7c36a75..aa1a53a 100644
|
||||
if (title) {
|
||||
out += ` title="${title}"`;
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
index e8a69b6..2cc772b 100644
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -312,4 +312,7 @@ export class Tokenizer {
|
||||
@@ -333,4 +333,7 @@ export class Tokenizer {
|
||||
const l = list.items.length;
|
||||
|
||||
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
strip some features
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -7,5 +7,5 @@ import { repeatString } from './helpers.js';
|
||||
@@ -56,7 +57,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -352,14 +352,7 @@ export class Tokenizer {
|
||||
@@ -367,14 +367,7 @@ export class Tokenizer {
|
||||
type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
@@ -72,7 +73,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
- }
|
||||
return token;
|
||||
}
|
||||
@@ -502,15 +495,9 @@ export class Tokenizer {
|
||||
@@ -517,15 +510,9 @@ export class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
@@ -90,7 +91,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -699,10 +686,10 @@ export class Tokenizer {
|
||||
@@ -714,10 +701,10 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
@@ -103,7 +104,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -727,10 +714,10 @@ export class Tokenizer {
|
||||
@@ -742,10 +729,10 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
@@ -116,7 +117,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -764,12 +751,12 @@ export class Tokenizer {
|
||||
@@ -779,12 +766,12 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, smartypants) {
|
||||
@@ -135,8 +136,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -10,11 +10,7 @@ export function getDefaults() {
|
||||
highlight: null,
|
||||
@@ -11,11 +11,7 @@ export function getDefaults() {
|
||||
hooks: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
pedantic: false,
|
||||
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||
+export function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
href = resolveUrl(base, href);
|
||||
@@ -250,10 +237,4 @@ export function findClosingBracket(str, b) {
|
||||
@@ -233,10 +220,4 @@ export function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-export function checkSanitizeDeprecation(opt) {
|
||||
@@ -185,30 +186,25 @@ diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
+++ b/src/marked.js
|
||||
@@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
|
||||
import { Hooks } from './Hooks.js';
|
||||
import {
|
||||
merge,
|
||||
- checkSanitizeDeprecation,
|
||||
escape
|
||||
} from './helpers.js';
|
||||
@@ -35,5 +34,4 @@ export function marked(src, opt, callback) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -318,5 +316,4 @@ marked.parseInline = function(src, opt) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
try {
|
||||
@@ -327,5 +324,5 @@ marked.parseInline = function(src, opt) {
|
||||
return Parser.parseInline(tokens, opt);
|
||||
} catch (e) {
|
||||
@@ -18,5 +17,5 @@ import {
|
||||
function onError(silent, async, callback) {
|
||||
return (e) => {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
if (opt.silent) {
|
||||
return '<p>An error occurred:</p><pre>'
|
||||
|
||||
if (silent) {
|
||||
@@ -65,6 +64,4 @@ function parseMarkdown(lexer, parser) {
|
||||
}
|
||||
|
||||
- checkSanitizeDeprecation(opt);
|
||||
-
|
||||
if (opt.hooks) {
|
||||
opt.hooks.options = opt;
|
||||
diff --git a/test/bench.js b/test/bench.js
|
||||
--- a/test/bench.js
|
||||
+++ b/test/bench.js
|
||||
@@ -250,70 +246,70 @@ diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||
--- a/test/unit/Lexer-spec.js
|
||||
+++ b/test/unit/Lexer-spec.js
|
||||
@@ -712,5 +712,5 @@ paragraph
|
||||
@@ -794,5 +794,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('sanitize', () => {
|
||||
+ /*it('sanitize', () => {
|
||||
expectTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -730,5 +730,5 @@ paragraph
|
||||
@@ -812,5 +812,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -810,5 +810,5 @@ paragraph
|
||||
@@ -892,5 +892,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('html sanitize', () => {
|
||||
+ /*it('html sanitize', () => {
|
||||
expectInlineTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -818,5 +818,5 @@ paragraph
|
||||
@@ -900,5 +900,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('link', () => {
|
||||
@@ -1129,5 +1129,5 @@ paragraph
|
||||
@@ -1211,5 +1211,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('autolink mangle email', () => {
|
||||
+ /*it('autolink mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: '<test@example.com>',
|
||||
@@ -1149,5 +1149,5 @@ paragraph
|
||||
@@ -1231,5 +1231,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('url', () => {
|
||||
@@ -1186,5 +1186,5 @@ paragraph
|
||||
@@ -1268,5 +1268,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('url mangle email', () => {
|
||||
+ /*it('url mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: 'test@example.com',
|
||||
@@ -1206,5 +1206,5 @@ paragraph
|
||||
@@ -1288,5 +1288,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -1222,5 +1222,5 @@ paragraph
|
||||
@@ -1304,5 +1304,5 @@ paragraph
|
||||
});
|
||||
|
||||
- describe('smartypants', () => {
|
||||
+ /*describe('smartypants', () => {
|
||||
it('single quotes', () => {
|
||||
expectInlineTokens({
|
||||
@@ -1292,5 +1292,5 @@ paragraph
|
||||
@@ -1374,5 +1374,5 @@ paragraph
|
||||
});
|
||||
});
|
||||
- });
|
||||
|
||||
@@ -5,11 +5,13 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-ac" \
|
||||
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
RUN apk --no-cache add \
|
||||
RUN apk --no-cache add !pyc \
|
||||
wget \
|
||||
py3-pillow \
|
||||
ffmpeg \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
@@ -5,26 +5,27 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-dj" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
RUN apk add -U \
|
||||
RUN apk add -U !pyc \
|
||||
wget \
|
||||
py3-pillow py3-pip py3-cffi \
|
||||
ffmpeg \
|
||||
vips-jxl vips-heif vips-poppler vips-magick \
|
||||
py3-numpy fftw libsndfile \
|
||||
vamp-sdk vamp-sdk-libs \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& apk --no-cache add -t .bd \
|
||||
&& apk add -t .bd \
|
||||
bash wget gcc g++ make cmake patchelf \
|
||||
python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
|
||||
py3-wheel py3-numpy-dev \
|
||||
vamp-sdk-dev \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& apk del py3-pip .bd \
|
||||
&& rm -rf /var/cache/apk/* \
|
||||
&& rm -rf /var/cache/apk/* /tmp/pyc \
|
||||
&& chmod 777 /root \
|
||||
&& ln -s /root/vamp /root/.local / \
|
||||
&& mkdir /cfg /w \
|
||||
|
||||
@@ -5,10 +5,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-im" \
|
||||
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
RUN apk --no-cache add \
|
||||
RUN apk --no-cache add !pyc \
|
||||
wget \
|
||||
py3-pillow py3-mutagen \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
@@ -5,14 +5,19 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-iv" \
|
||||
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
RUN apk --no-cache add \
|
||||
RUN apk add -U !pyc \
|
||||
wget \
|
||||
py3-pillow py3-pip py3-cffi \
|
||||
ffmpeg \
|
||||
vips-jxl vips-heif vips-poppler vips-magick \
|
||||
&& apk add -t .bd \
|
||||
bash wget gcc g++ make cmake patchelf \
|
||||
python3-dev py3-wheel \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& apk del py3-pip \
|
||||
&& apk del py3-pip .bd \
|
||||
&& rm -rf /var/cache/apk/* /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
@@ -5,9 +5,11 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-min" \
|
||||
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
RUN apk --no-cache add \
|
||||
RUN apk --no-cache add !pyc \
|
||||
python3 \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
@@ -5,10 +5,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-min-pip" \
|
||||
org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
|
||||
RUN apk --no-cache add python3 py3-pip \
|
||||
RUN apk --no-cache add python3 py3-pip !pyc \
|
||||
&& python3 -m pip install copyparty \
|
||||
&& apk del py3-pip \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
@@ -14,6 +14,7 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
|
||||
* `/cfg` is an optional folder with zero or more config files (*.conf) to load
|
||||
* `copyparty/ac` is the recommended [image edition](#editions)
|
||||
* you can download the image from github instead by replacing `copyparty/ac` with `ghcr.io/9001/copyparty-ac`
|
||||
* if you are using rootless podman, remove `-u 1000`
|
||||
|
||||
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ filt=
|
||||
[ $(jobs -p | wc -l) -lt $(nproc) ] && break
|
||||
while [ -e .blk ]; do sleep 0.2; done
|
||||
done
|
||||
aa="$(printf '%7s' $a)"
|
||||
aa="$(printf '%11s' $a-$i)"
|
||||
|
||||
# arm takes forever so make it top priority
|
||||
[ ${a::3} == arm ] && nice= || nice=nice
|
||||
|
||||
@@ -73,14 +73,17 @@ pydir="$(
|
||||
}
|
||||
|
||||
function have() {
|
||||
python -c "import $1; $1; $1.__version__"
|
||||
python -c "import $1; $1; getattr($1,'__version__',0)"
|
||||
}
|
||||
|
||||
function load_env() {
|
||||
. buildenv/bin/activate
|
||||
have setuptools
|
||||
have wheel
|
||||
have build
|
||||
have twine
|
||||
have jinja2
|
||||
have strip_hints
|
||||
}
|
||||
|
||||
load_env || {
|
||||
@@ -88,19 +91,32 @@ load_env || {
|
||||
deactivate || true
|
||||
rm -rf buildenv
|
||||
python3 -m venv buildenv
|
||||
(. buildenv/bin/activate && pip install twine wheel)
|
||||
(. buildenv/bin/activate && pip install \
|
||||
setuptools wheel build twine jinja2 strip_hints )
|
||||
load_env
|
||||
}
|
||||
|
||||
# cleanup
|
||||
rm -rf unt build/pypi
|
||||
|
||||
# grab licenses
|
||||
scripts/genlic.sh copyparty/res/COPYING.txt
|
||||
|
||||
# remove type hints to support python < 3.9
|
||||
# clean-ish packaging env
|
||||
rm -rf build/pypi
|
||||
mkdir -p build/pypi
|
||||
cp -pR setup.py README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/
|
||||
cp -pR pyproject.toml README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/
|
||||
tar -c docs/lics.txt scripts/genlic.sh build/*.txt | tar -xC build/pypi/
|
||||
cd build/pypi
|
||||
|
||||
# delete junk
|
||||
find -name '*.pyc' -delete
|
||||
find -name __pycache__ -delete
|
||||
find -name py.typed -delete
|
||||
find -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
|
||||
find -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
|
||||
|
||||
# remove type hints to support python < 3.9
|
||||
f=../strip-hints-0.1.10.tar.gz
|
||||
[ -e $f ] ||
|
||||
(url=https://files.pythonhosted.org/packages/9c/d4/312ddce71ee10f7e0ab762afc027e07a918f1c0e1be5b0069db5b0e7542d/strip-hints-0.1.10.tar.gz;
|
||||
@@ -132,24 +148,13 @@ while IFS= read -r x; do
|
||||
done
|
||||
|
||||
rm -rf contrib
|
||||
[ $fast ] && sed -ri s/5730/10/ copyparty/web/Makefile
|
||||
[ $fast ] && sed -ri s/573/10/ copyparty/web/Makefile
|
||||
(cd copyparty/web && make -j$(nproc) && rm Makefile)
|
||||
|
||||
# build
|
||||
./setup.py clean2
|
||||
./setup.py sdist bdist_wheel --universal
|
||||
python3 -m build
|
||||
|
||||
[ "$mode" == t ] && twine upload -r pypitest dist/*
|
||||
[ "$mode" == u ] && twine upload -r pypi dist/*
|
||||
|
||||
cat <<EOF
|
||||
|
||||
|
||||
all done!
|
||||
|
||||
to clean up the source tree:
|
||||
|
||||
cd ~/dev/copyparty
|
||||
./setup.py clean2
|
||||
|
||||
EOF
|
||||
true
|
||||
|
||||
@@ -2,10 +2,14 @@
|
||||
set -e
|
||||
echo
|
||||
|
||||
berr() { p=$(head -c 72 </dev/zero | tr '\0' =); printf '\n%s\n\n' $p; cat; printf '\n%s\n\n' $p; }
|
||||
|
||||
help() { exec cat <<'EOF'
|
||||
|
||||
# optional args:
|
||||
#
|
||||
# `fast` builds faster, with cheaper js/css compression
|
||||
#
|
||||
# `clean` uses files from git (everything except web/deps),
|
||||
# so local changes won't affect the produced sfx
|
||||
#
|
||||
@@ -42,6 +46,13 @@ help() { exec cat <<'EOF'
|
||||
#
|
||||
# `no-dd` saves ~2k by removing the mouse cursor
|
||||
#
|
||||
# _____________________________________________________________________
|
||||
# build behavior:
|
||||
#
|
||||
# `dl-wd` automatically downloads webdeps if necessary
|
||||
#
|
||||
# `ign-wd` allows building an sfx without webdeps
|
||||
#
|
||||
# ---------------------------------------------------------------------
|
||||
#
|
||||
# if you are on windows, you can use msys2:
|
||||
@@ -109,6 +120,8 @@ while [ ! -z "$1" ]; do
|
||||
no-hl) no_hl=1 ; ;;
|
||||
no-dd) no_dd=1 ; ;;
|
||||
no-cm) no_cm=1 ; ;;
|
||||
dl-wd) dl_wd=1 ; ;;
|
||||
ign-wd) ign_wd=1 ; ;;
|
||||
fast) zopf= ; ;;
|
||||
ultra) ultra=1 ; ;;
|
||||
lang) shift;langs="$1"; ;;
|
||||
@@ -223,7 +236,7 @@ necho() {
|
||||
|
||||
# enable this to dynamically remove type hints at startup,
|
||||
# in case a future python version can use them for performance
|
||||
true || (
|
||||
true && (
|
||||
necho collecting strip-hints
|
||||
f=../build/strip-hints-0.1.10.tar.gz
|
||||
[ -e $f ] ||
|
||||
@@ -283,12 +296,56 @@ necho() {
|
||||
rm -f copyparty/stolen/*/README.md
|
||||
|
||||
# remove type hints before build instead
|
||||
(cd copyparty; "$pybin" ../../scripts/strip_hints/a.py; rm uh)
|
||||
(cd copyparty; PYTHONPATH="..:$PYTHONPATH" "$pybin" ../../scripts/strip_hints/a.py; rm uh)
|
||||
|
||||
licfile=$(realpath copyparty/res/COPYING.txt)
|
||||
(cd ../scripts; ./genlic.sh "$licfile")
|
||||
}
|
||||
|
||||
[ ! -e copyparty/web/deps/mini-fa.woff ] && [ $dl_wd ] && {
|
||||
echo "could not find webdeps; downloading..."
|
||||
url=https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
|
||||
wget -Ox.py "$url" || curl -L "$url" >x.py
|
||||
|
||||
echo "extracting webdeps..."
|
||||
wdsrc="$("$pybin" x.py --version 2>&1 | tee /dev/stderr | awk '/sfxdir:/{sub(/.*: /,"");print;exit}')"
|
||||
[ "$wdsrc" ] || {
|
||||
echo failed to discover tempdir of reference copyparty-sfx.py
|
||||
exit 1
|
||||
}
|
||||
rm -rf copyparty/web/deps
|
||||
cp -pvR "$wdsrc/copyparty/web/deps" copyparty/web/
|
||||
|
||||
# also copy it out into the source-tree for next time
|
||||
rm -rf ../copyparty/web/deps
|
||||
cp -pR copyparty/web/deps ../copyparty/web
|
||||
|
||||
rm x.py
|
||||
}
|
||||
|
||||
[ -e copyparty/web/deps/mini-fa.woff ] || [ $ign_wd ] || { berr <<'EOF'
|
||||
ERROR:
|
||||
could not find webdeps; the front-end will not be fully functional
|
||||
|
||||
please choose one of the following:
|
||||
|
||||
A) add the argument "dl-wd" to fix it automatically; this will
|
||||
download copyparty-sfx.py and extract the webdeps from there
|
||||
|
||||
B) build the webdeps from source: make -C scripts/deps-docker
|
||||
|
||||
C) add the argument "ign-wd" to continue building the sfx without webdeps
|
||||
|
||||
alternative A is a good choice if you are only intending to
|
||||
modify the copyparty source code (py/html/css/js) and do not
|
||||
plan to make any changes to the mostly-third-party webdeps
|
||||
|
||||
there may be additional hints in the devnotes:
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
ver=
|
||||
[ -z "$repack" ] &&
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
@@ -421,7 +478,7 @@ while IFS= read -r f; do
|
||||
done
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
|
||||
awk 'BEGIN{gensub(//,"",1)}' </dev/null 2>/dev/null &&
|
||||
echo entabbening &&
|
||||
find | grep -E '\.css$' | while IFS= read -r f; do
|
||||
awk '{
|
||||
@@ -435,7 +492,9 @@ find | grep -E '\.css$' | while IFS= read -r f; do
|
||||
1
|
||||
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
|
||||
tmv "$f"
|
||||
done
|
||||
done ||
|
||||
echo "WARNING: your awk does not have gensub, so the sfx will not have optimal compression"
|
||||
|
||||
unexpand -h 2>/dev/null &&
|
||||
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
|
||||
unexpand -t 4 --first-only <"$f" >t
|
||||
@@ -529,7 +588,7 @@ sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
|
||||
for n in {1..50}; do
|
||||
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | (shuf||gshuf) ) >list || true
|
||||
s=$( (sha1sum||shasum) < list | cut -c-16)
|
||||
grep -q $s "$zdir/h" && continue
|
||||
grep -q $s "$zdir/h" 2>/dev/null && continue
|
||||
echo $s >> "$zdir/h"
|
||||
break
|
||||
done
|
||||
|
||||
@@ -64,6 +64,8 @@ git archive hovudstraum | tar -xC "$rls_dir"
echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir"

scripts/genlic.sh "$rls_dir/copyparty/res/COPYING.txt"

cd "$rls_dir"
find -type d -exec chmod 755 '{}' \+
find -type f -exec chmod 644 '{}' \+
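The release script copies the untracked webdeps into the release tree with a tar pipe, which preserves permissions and timestamps, and now also regenerates COPYING.txt inside that tree via genlic.sh. The tar-to-tar idiom on its own, with a hypothetical destination:

    # hedged sketch: copy a directory into another root, keeping modes/mtimes
    src=copyparty/web/deps
    dst=/tmp/release-tree          # hypothetical destination
    mkdir -p "$dst"
    tar -c "$src" | tar -xC "$dst"
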
@@ -93,7 +95,8 @@ rm \
.gitattributes \
.gitignore

mv LICENSE LICENSE.txt
cp -pv LICENSE LICENSE.txt
mv setup.py{,.disabled}

# the regular cleanup memes
find -name '*.pyc' -delete

@@ -11,33 +11,21 @@ update_arch_pkgbuild() {
rm -rf x
mkdir x

(echo "$self/../dist/copyparty-sfx.py"
awk -v self="$self" '
/^\)/{o=0}
/^source=/{o=1;next}
{
sub(/..pkgname./,"copyparty");
sub(/.*pkgver./,self "/..");
sub(/^ +"/,"");sub(/"/,"")
}
o&&!/https/' PKGBUILD
) |
xargs sha256sum > x/sums
sha=$(sha256sum "$self/../dist/copyparty-$ver.tar.gz" | awk '{print$1}')

(awk -v ver=$ver '
awk -v ver=$ver -v sha=$sha '
/^pkgver=/{sub(/[0-9\.]+/,ver)};
/^sha256sums=/{exit};
1' PKGBUILD
echo -n 'sha256sums=('
p=; cat x/sums | while read s _; do
echo "$p\"$s\""
p=' '
done
awk '/^sha256sums=/{o=1} o&&/^\)/{o=2} o==2' PKGBUILD
) >a
/^sha256sums=/{sub(/[0-9a-f]{64}/,sha)};
1' PKGBUILD >a
mv a PKGBUILD

rm -rf x
}

update_nixos_pin() {
( cd $self/../contrib/package/nix/copyparty;
./update.py $self/../dist/copyparty-sfx.py )
}

update_arch_pkgbuild
update_nixos_pin

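update_arch_pkgbuild no longer re-hashes every source= entry of the PKGBUILD; it now hashes just the release tarball and patches pkgver= plus the single sha256sums= entry in one awk pass. A condensed sketch of that simpler flow (version number and tarball path are hypothetical):

    # hedged sketch: bump pkgver and the lone sha256 in a PKGBUILD
    ver=1.2.3                       # hypothetical version
    sha=$(sha256sum dist/copyparty-$ver.tar.gz | awk '{print$1}')
    awk -v ver=$ver -v sha=$sha '
        /^pkgver=/{sub(/[0-9\.]+/,ver)};
        /^sha256sums=/{sub(/[0-9a-f]{64}/,sha)};
        1' PKGBUILD >PKGBUILD.new && mv PKGBUILD.new PKGBUILD
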
@@ -1,13 +1,13 @@
d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
85a041cc95cf493f5e2ebc2ca406d2718735e43951988810dc448d29e9ee0bcdb1ca19e0c22243441f45633969af8027469f29f6288f6830c724a3fa38886e5c pyinstaller-5.8.0-py3-none-win32.whl
adf0d23a98da38056de25e07e68921739173efc70fb9bf3f68d8c7c3d0d092e09efa69d35c0c9ecc990bc3c5fa62038227ef480ed06ddfaf05353f6e468f5dca pyinstaller-5.8.0-py3-none-win_amd64.whl
01d7f8125966ed30389a879ba69d2c1fd3212bafad3fb485317580bcb9f489e8b901c4d325f6cb8a52986838ba6d44d3852e62b27c1f1d5a576899821cc0ae02 pyinstaller_hooks_contrib-2023.0-py2.py3-none-any.whl
2410f79f25b55829169fdd45611c04f51932f7701c0601df64ade0eb545c96ba950b7be186eb082482506bc689fcde5fe09c1f6f7cd77c2107028959b7e0d06f pyinstaller-5.12.0-py3-none-win32.whl
62f4f3dda0526ea88cfc5af1806c7b53094672f4237d64c088626c226ad2fbc7549f6c9c6bbe5b228b1f87faf1e5c343ec468c485e4c17fe6d79c6b1f570153a pyinstaller-5.12.0-py3-none-win_amd64.whl
2612c263f73a02eab41404ba96e0c7cf8be4475104668b47dfbae50fadf977b3621dd4102682b301264d82b6e130d95ea84a28bf2106a626a1a2845dac16df47 pyinstaller_hooks_contrib-2023.3-py2.py3-none-any.whl
132a5380f33a245f2e744413a0e1090bc42b7356376de5121397cec5976b04b79f7c9ebe28af222c9c7b01461f7d7920810d220e337694727e0d7cd9e91fa667 pywin32_ctypes-0.2.0-py2.py3-none-any.whl
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
4b6e9ae967a769fe32be8cf0bc0d5a213b138d1e0344e97656d08a3d15578d81c06c45b334c872009db2db8f39db0c77c94ff6c35168d5e13801917667c08678 upx-4.0.2-win32.zip
# up2k (win7)
# u2c (win7)
a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl
2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
@@ -24,7 +24,7 @@ c06b3295d1d0b0f0a6f9a6cd0be861b9b643b4a5ea37857f0bd41c45deaf27bb927b71922dab74e6
ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a25067512b8f75538c67c987cf3944bfa0229e3cb677e2fb81e763e zipp-3.10.0-py3-none-any.whl
# win10
00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl
b1db6f5a79fc15391547643e5973cf5946c0acfa6febb68bc90fc3f66369681100cc100f32dd04256dcefa510e7864c718515a436a4af3a10fe205c413c7e693 MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl
7f8f4daa4f4f2dbf24cdd534b2952ee3fba6334eb42b37465ccda3aa1cccc3d6204aa6bfffb8a83bf42ec59c702b5b5247d4c8ee0d4df906334ae53072ef8c4c MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
4a20aeb52d4fde6aabcba05ee261595eeb5482c72ee27332690f34dd6e7a49c0b3ba3813202ac15c9d21e29f1cd803f2e79ccc1c45ec314fcd0a937016bcbc56 mutagen-1.46.0-py3-none-any.whl
ea152624499966615ee74f2aefed27da528785e1215f46d61e79c5290bb8105fd98e9948938efbca9cd19e2f1dd48c9e712b4f30a4148a0ed5d1ff2dff77106e Pillow-9.4.0-cp311-cp311-win_amd64.whl
2b04b196f1115f42375e623a35edeb71565dfd090416b22510ec0270fefe86f7d397a98aabbe9ebfe3f6a355fe25c487a4875d4252027d0a61ccb64cacd7631d python-3.11.2-amd64.exe
78414808cb9a5fa74e7b23360b8f46147952530e3cc78a3ad4b80be3e26598080537ac691a1be1f35b7428a22c1f65a6adf45986da2752fbe9d9819d77a58bf8 Pillow-9.5.0-cp311-cp311-win_amd64.whl
a48ee8992eee60a0d620dced71b9f96596f5dd510e3024015aca55884cdb3f9e2405734bfc13f3f40b79106a77bc442cce02ac4c8f5d16207448052b368fd52a python-3.11.4-amd64.exe
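
These two hunks refresh the pinned-dependency list: pyinstaller and its hooks, MarkupSafe, Pillow and the Python installer move to newer builds, and the "up2k (win7)" section heading follows the client's rename to u2c. A list in this hash-plus-filename format can be checked after downloading with a small loop; a sketch, assuming the list is saved as deps.sha512 next to the files:

    # hedged sketch: verify downloads against the pinned sha512 list
    # (deps.sha512 is an assumed filename; comment/blank lines are skipped)
    while read -r want fn; do
        [ -z "$want" ] || [ "${want#\#}" != "$want" ] && continue
        have=$(sha512sum "$fn" | awk '{print $1}')
        [ "$have" = "$want" ] && echo "ok  $fn" || echo "BAD $fn"
    done < deps.sha512
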
@@ -13,7 +13,7 @@ https://pypi.org/project/MarkupSafe/#files
https://pypi.org/project/mutagen/#files
https://pypi.org/project/Pillow/#files

# up2k (win7) additionals
# u2c (win7) additionals
https://pypi.org/project/certifi/#files
https://pypi.org/project/charset-normalizer/#files # cp37-cp37m-win32.whl
https://pypi.org/project/idna/#files

@@ -17,15 +17,15 @@ uname -s | grep NT-10 && w10=1 || {
fns=(
altgraph-0.17.3-py2.py3-none-any.whl
pefile-2023.2.7-py3-none-any.whl
pyinstaller-5.8.0-py3-none-win_amd64.whl
pyinstaller_hooks_contrib-2023.0-py2.py3-none-any.whl
pyinstaller-5.10.1-py3-none-win_amd64.whl
pyinstaller_hooks_contrib-2023.2-py2.py3-none-any.whl
pywin32_ctypes-0.2.0-py2.py3-none-any.whl
upx-4.0.2-win32.zip
)
[ $w10 ] && fns+=(
mutagen-1.46.0-py3-none-any.whl
Pillow-9.4.0-cp311-cp311-win_amd64.whl
python-3.11.2-amd64.exe
python-3.11.3-amd64.exe
}
[ $w7 ] && fns+=(
certifi-2022.12.7-py3-none-any.whl
@@ -43,12 +43,12 @@ fns=(
)
[ $w7x64 ] && fns+=(
windows6.1-kb2533623-x64.msu
pyinstaller-5.8.0-py3-none-win_amd64.whl
pyinstaller-5.10.1-py3-none-win_amd64.whl
python-3.7.9-amd64.exe
)
[ $w7x32 ] && fns+=(
windows6.1-kb2533623-x86.msu
pyinstaller-5.8.0-py3-none-win32.whl
pyinstaller-5.10.1-py3-none-win32.whl
python-3.7.9.exe
)
dl() { curl -fkLOC- "$1" && return 0; echo "$1"; return 1; }

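The dl() helper at the end of this hunk is what fetches each pinned file: curl -f fails on HTTP errors, -k skips certificate checks (the files are pinned by hash above anyway), -L follows redirects, -O keeps the remote filename, and -C - resumes partial downloads; on failure it echoes the URL so it can be retried. A usage sketch over the fns array, with a hypothetical mirror URL:

    # hedged sketch: fetch every pinned file, remembering failures
    dl() { curl -fkLOC- "$1" && return 0; echo "$1"; return 1; }
    for fn in "${fns[@]}"; do
        dl "https://example.com/deps/$fn" || echo "$fn" >>failed.txt
    done
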
@@ -18,9 +18,9 @@ VSVersionInfo(
[StringStruct('CompanyName', 'ocv.me'),
StringStruct('FileDescription', 'copyparty uploader / filesearch command'),
StringStruct('FileVersion', '1.2.3'),
StringStruct('InternalName', 'up2k'),
StringStruct('InternalName', 'u2c'),
StringStruct('LegalCopyright', '2019, ed'),
StringStruct('OriginalFilename', 'up2k.exe'),
StringStruct('OriginalFilename', 'u2c.exe'),
StringStruct('ProductName', 'copyparty up2k client'),
StringStruct('ProductVersion', '1.2.3')])
]),

@@ -14,7 +14,7 @@ uname -s | grep -E 'WOW64|NT-10' && echo need win7-32 && exit 1
dl() { curl -fkLO "$1"; }
cd ~/Downloads

dl https://192.168.123.1:3923/cpp/bin/up2k.py
dl https://192.168.123.1:3923/cpp/bin/u2c.py
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.ico
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.rc
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.spec
@@ -37,12 +37,12 @@ grep -E '^from .ssl_ import' $APPDATA/python/python37/site-packages/urllib3/util
echo golfed
}

read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < up2k.py)
read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < u2c.py)
sed -r 's/1,2,3,0/'$a,$b,0,0'/;s/1\.2\.3/'$a.$b.0/ <up2k.rc >up2k.rc2

#python uncomment.py up2k.py
#python uncomment.py u2c.py
$APPDATA/python/python37/scripts/pyinstaller -y --clean --upx-dir=. up2k.spec

./dist/up2k.exe --version
./dist/u2c.exe --version

curl -fkT dist/up2k.exe -HPW:wark https://192.168.123.1:3923/
curl -fkT dist/u2c.exe -HPW:wark https://192.168.123.1:3923/

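These build-script hunks only track the up2k.py-to-u2c.py rename; the version-stamping trick around them is unchanged: awk pulls the two components of S_VERSION out of the client script, and sed writes them over the 1.2.3 / 1,2,3,0 placeholders in the .rc template before PyInstaller runs. The step in isolation, assuming a client file containing a line like S_VERSION = "1.7":

    # hedged sketch: turn  S_VERSION = "1.7"  into the rc version fields
    read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < u2c.py)
    sed -r "s/1,2,3,0/$a,$b,0,0/; s/1\.2\.3/$a.$b.0/" <up2k.rc >up2k.rc2
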
@@ -5,7 +5,7 @@ block_cipher = None


a = Analysis(
['up2k.py'],
['u2c.py'],
pathex=[],
binaries=[],
datas=[],
@@ -49,7 +49,7 @@ a = Analysis(

# this is the only change to the autogenerated specfile:
xdll = ["libcrypto-1_1.dll"]
a.binaries = TOC([x for x in a.binaries if x[0] not in xdll])
a.binaries = [x for x in a.binaries if x[0] not in xdll]

pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

@@ -60,7 +60,7 @@ exe = EXE(
a.zipfiles,
a.datas,
[],
name='up2k',
name='u2c',
debug=False,
bootloader_ignore_signals=False,
strip=False,

@@ -11,4 +11,4 @@ ex=(
encodings.{zlib_codec,base64_codec,bz2_codec,charmap,hex_codec,palmos,punycode,rot_13}
);
cex=(); for a in "${ex[@]}"; do cex+=(--exclude "$a"); done
$APPDATA/python/python37/scripts/pyi-makespec --version-file up2k.rc2 -i up2k.ico -n up2k -c -F up2k.py "${cex[@]}"
$APPDATA/python/python37/scripts/pyi-makespec --version-file up2k.rc2 -i up2k.ico -n u2c -c -F u2c.py "${cex[@]}"

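The excluded-modules list relies on bash brace expansion to enumerate the encodings submodules, and the loop turns every name into a --exclude flag for pyi-makespec so they are left out of the bundle; the only change here is pointing the spec at u2c.py. The expansion trick on its own, with a shortened module list and echo standing in for the real pyi-makespec call:

    # hedged sketch: brace-expand module names into --exclude flags
    ex=( encodings.{zlib_codec,bz2_codec,punycode} )
    cex=(); for m in "${ex[@]}"; do cex+=(--exclude "$m"); done
    echo pyi-makespec -F u2c.py "${cex[@]}"
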
Some files were not shown because too many files have changed in this diff