Compare commits

76 commits:

b69aace8d8  79097bb43c  806fac1742  4f97d7cf8d  42acc457af  c02920607f
452885c271  5c242a07b6  088899d59f  1faff2a37e  23c8d3d045  a033388d2b
82fe45ac56  bcb7fcda6b  726a98100b  2f021a0c2b  eb05cb6c6e  7530af95da
8399e95bda  3b4dfe326f  2e787a254e  f888bed1a6  d865e9f35a  fc7fe70f66
5aff39d2b2  d1be37a04a  b0fd8bf7d4  b9cf8f3973  4588f11613  1a618c3c97
d500a51d97  734e9d3874  bd5cfc2f1b  89f88ee78c  b2ae14695a  19d86b44d9
85be62e38b  80f3d90200  0249fa6e75  2d0696e048  ff32ec515e  a6935b0293
63eb08ba9f  e5b67d2b3a  9e10af6885  42bc9115d2  0a569ce413  9a16639a61
57953c68c6  088d08963f  7bc8196821  7715299dd3  b8ac9b7994  98e7d8f728
e7fd871ffe  14aab62f32  cb81fe962c  fc970d2dea  b0e203d1f9  37cef05b19
5886a42901  2fd99f807d  3d4cbd7d10  f10d03c238  f9a66ffb0e  777a50063d
0bb9154747  30c3f45072  0d5ca67f32  4a8bf6aebd  b11db090d8  189391fccd
86d4c43909  5994f40982  076d32dee5  16c8e38ecd
.gitignore (vendored), 4 changes

@@ -20,3 +20,7 @@ sfx/
 # derived
 copyparty/web/deps/
 srv/
+
+# state/logs
+up.*.txt
+.hist/
README.md, 63 changes

@@ -19,7 +19,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 ## readme toc
 
 * top
-* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
+* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
 * [on servers](#on-servers) - you may also want these, especially on servers
 * [on debian](#on-debian) - recommended additional steps on debian
 * [notes](#notes) - general notes

@@ -53,6 +53,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
 * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
 * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
+* [upload events](#upload-events) - trigger a script/program on each upload
 * [complete examples](#complete-examples)
 * [browser support](#browser-support) - TLDR: yes
 * [client examples](#client-examples) - interact with copyparty using non-browser clients

@@ -61,6 +62,9 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
 * [security](#security) - some notes on hardening
 * [gotchas](#gotchas) - behavior that might be unexpected
+* [recovering from crashes](#recovering-from-crashes)
+    * [client crashes](#client-crashes)
+        * [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
 * [dependencies](#dependencies) - mandatory deps
 * [optional dependencies](#optional-dependencies) - install these to enable bonus features
 * [install recommended deps](#install-recommended-deps)

@@ -433,7 +437,7 @@ and then theres the tabs below it,
 * plus up to 3 entries each from `[done]` and `[que]` for context
 * `[que]` is all the files that are still queued
 
-note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD)
+note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
 
 if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
 

@@ -592,12 +596,14 @@ note:
 * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
 * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
 
-to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
+to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences:
 * initial indexing is way faster, especially when the volume is on a network disk
 * makes it impossible to [file-search](#file-search)
 * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
 
-if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
+similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`
+
+if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
 
 
 ## upload rules

@@ -696,6 +702,25 @@ copyparty can invoke external programs to collect additional metadata for files
 * `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
 
 
+## upload events
+
+trigger a script/program on each upload like so:
+
+```
+-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
+```
+
+so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
+
+that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
+
+note that it will only trigger on new unique files, not dupes
+
+and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--no-mtag-mt`
+
+if this becomes popular maybe there should be a less janky way to do it actually
+
+
 ## complete examples
 
 * read-only music server with bpm and key scanning

@@ -766,6 +791,14 @@ interact with copyparty using non-browser clients
 * `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
   `chunk <movie.mkv`
 
+* bash: when curl and wget is not available or too boring
+    * `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
+    * `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
+
+* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+    * file uploads, file-search, autoresume of aborted/broken uploads
+    * see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
+
 * FUSE: mount a copyparty server as a local filesystem
     * cross-platform python client available in [./bin/](bin/)
     * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

@@ -820,7 +853,7 @@ below are some tweaks roughly ordered by usefulness:
 * `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
 * `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
 * `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
-* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
+* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
 * `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
     * huge amount of short-lived connections
     * really heavy traffic (downloads/uploads)

@@ -851,6 +884,26 @@ behavior that might be unexpected
 * users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
 
 
+# recovering from crashes
+
+## client crashes
+
+### frefox wsod
+
+firefox 87 can crash during uploads -- the entire browser goes, including all other browser tabs, everything turns white
+
+however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads:
+
+* get a complete list of all uploads, organized by statuts (ok / no-good / busy / queued):
+  `var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs`
+
+* list of filenames which failed:
+  `var ng = []; for (var a of up2k.ui.tab) if (a.in != 'ok') ng.push(a.hn.split('<a href=\"').slice(-1)[0].split('\">')[0]); ng`
+
+* send the list of filenames to copyparty for safekeeping:
+  `await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
+
+
 # dependencies
 
 mandatory deps:
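The hook contract described in the new "upload events" section is simple enough to sketch; here is a minimal hypothetical handler (a stand-in for `/usr/bin/notify-send`, not part of this changeset) which follows the argv/stdout contract the README describes: the uploaded file's path arrives as the only argument, and whatever is printed becomes the value of the bound tag (`a1`):

```python
#!/usr/bin/env python3
# hypothetical upload-event hook; argv[1] is the uploaded file's path,
# stdout becomes the value of the tag this parser is bound to (a1)
import os
import sys

path = sys.argv[1]
size = os.path.getsize(path)

# append to a log so uploads can be audited later (hypothetical location)
with open("/tmp/uploads.log", "a") as f:
    f.write("{} {}\n".format(size, path))

print("logged")  # stored as the a1 tag value
```

It would be registered the same way as the README example, e.g. `-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,~/bin/log-upload.py`.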
bin/README.md

@@ -1,3 +1,11 @@
+# [`up2k.py`](up2k.py)
+* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+* file uploads, file-search, autoresume of aborted/broken uploads
+* faster than browsers
+* early beta, if something breaks just restart it
+
+
+
 # [`copyparty-fuse.py`](copyparty-fuse.py)
 * mount a copyparty server as a local filesystem (read-only)
 * **supports Windows!** -- expect `194 MiB/s` sequential read

@@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
 * copyparty can Popen programs like these during file indexing to collect additional metadata
 
 
+
 # [`dbtool.py`](dbtool.py)
 upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
 

@@ -63,6 +72,7 @@ cd /mnt/nas/music/.hist
 ```
 
 
+
 # [`prisonparty.sh`](prisonparty.sh)
 * run copyparty in a chroot, preventing any accidental file access
 * creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
bin/copyparty-fuse.py

@@ -71,7 +71,7 @@ except:
     elif MACOS:
         libfuse = "install https://osxfuse.github.io/"
     else:
-        libfuse = "apt install libfuse\n modprobe fuse"
+        libfuse = "apt install libfuse3-3\n modprobe fuse"
 
     print(
         "\n could not import fuse; these may help:"

@@ -393,15 +393,16 @@ class Gateway(object):
 
         rsp = json.loads(rsp.decode("utf-8"))
         ret = []
-        for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
+        for statfun, nodes in [
+            [self.stat_dir, rsp["dirs"]],
+            [self.stat_file, rsp["files"]],
+        ]:
             for n in nodes:
-                fname = unquote(n["href"]).rstrip(b"/")
-                fname = fname.decode("wtf-8")
+                fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
                 if bad_good:
                     fname = enwin(fname)
 
-                fun = self.stat_dir if is_dir else self.stat_file
-                ret.append([fname, fun(n["ts"], n["sz"]), 0])
+                ret.append([fname, statfun(n["ts"], n["sz"]), 0])
 
         return ret
 
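A side note on the `split("?")[0]` added to the listing parser above: hrefs in directory listings can now carry query strings, and the old code would have leaked them into filenames. A quick illustration, with a hypothetical listing entry:

```python
from urllib.parse import unquote_to_bytes as unquote

href = "movie.mkv?th=w"  # hypothetical href carrying a query string

# old behavior: the query string leaks into the filename
print(unquote(href).rstrip(b"/"))                # b'movie.mkv?th=w'

# new behavior: strip the query string before unquoting
print(unquote(href.split("?")[0]).rstrip(b"/"))  # b'movie.mkv'
```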
bin/mtag/README.md

@@ -10,6 +10,7 @@ some of these rely on libraries which are not MIT-compatible
 
 these do not have any problematic dependencies:
 
+* [cksum.py](./cksum.py) computes various checksums
 * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
 * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
 
bin/mtag/cksum.py (new executable file, 89 lines)

@@ -0,0 +1,89 @@
#!/usr/bin/env python3

import sys
import json
import zlib
import struct
import base64
import hashlib

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


"""
calculates various checksums for uploads,
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
"""


def main():
    config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
    # b suffix = base64 encoded
    # slash = truncate to n bits

    known = {
        "md5": hashlib.md5,
        "sha1": hashlib.sha1,
        "sha256": hashlib.sha256,
        "sha512": hashlib.sha512,
    }
    config = config.split()
    hashers = {
        k: v()
        for k, v in known.items()
        if k in [x.split("/")[0].rstrip("b") for x in config]
    }
    crc32 = 0 if "crc32" in config else None

    with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
        while True:
            buf = f.read(64 * 1024)
            if not buf:
                break

            for x in hashers.values():
                x.update(buf)

            if crc32 is not None:
                crc32 = zlib.crc32(buf, crc32)

    ret = {}
    for s in config:
        alg = s.split("/")[0]
        b64 = alg.endswith("b")
        alg = alg.rstrip("b")
        if alg in hashers:
            v = hashers[alg].digest()
        elif alg == "crc32":
            v = crc32
            if v < 0:
                v &= 2 ** 32 - 1
            v = struct.pack(">L", v)
        else:
            raise Exception("what is {}".format(s))

        if "/" in s:
            v = v[: int(int(s.split("/")[1]) / 8)]

        if b64:
            v = base64.b64encode(v).decode("ascii").rstrip("=")
        else:
            try:
                v = v.hex()
            except:
                import binascii

                v = binascii.hexlify(v)

        ret[s] = v

    print(json.dumps(ret, indent=4))


if __name__ == "__main__":
    main()
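The `/240` suffix in the default config truncates a digest to 240 bits before encoding; a quick sketch of that arithmetic:

```python
import hashlib

# the "/240" suffix in cksum.py's config keeps 240 bits (30 bytes)
# of the sha512 digest before hex- or base64-encoding it
v = hashlib.sha512(b"hello").digest()
v = v[: int(240 / 8)]  # truncate to 30 bytes
print(len(v.hex()), v.hex())  # 60 hex characters
```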
bin/up2k.py (new executable file, 798 lines)

@@ -0,0 +1,798 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals

"""
up2k.py: upload to copyparty
2021-10-12, v0.9, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py

- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10

- almost zero error-handling
- but if something breaks just try again and it'll autoresume
"""

import os
import sys
import stat
import math
import time
import atexit
import signal
import base64
import hashlib
import argparse
import platform
import threading
import requests
import datetime


# from copyparty/__init__.py
PY2 = sys.version_info[0] == 2
if PY2:
    from Queue import Queue
    from urllib import unquote
    from urllib import quote

    sys.dont_write_bytecode = True
    bytes = str
else:
    from queue import Queue
    from urllib.parse import unquote_to_bytes as unquote
    from urllib.parse import quote_from_bytes as quote

    unicode = str

VT100 = platform.system() != "Windows"


req_ses = requests.Session()

class File(object):
    """an up2k upload task; represents a single file"""

    def __init__(self, top, rel, size, lmod):
        self.top = top  # type: bytes
        self.rel = rel.replace(b"\\", b"/")  # type: bytes
        self.size = size  # type: int
        self.lmod = lmod  # type: float

        self.abs = os.path.join(top, rel)  # type: bytes
        self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace")  # type: str

        # set by get_hashlist
        self.cids = []  # type: list[tuple[str, int, int]]  # [ hash, ofs, sz ]
        self.kchunks = {}  # type: dict[str, tuple[int, int]]  # hash: [ ofs, sz ]

        # set by handshake
        self.ucids = []  # type: list[str]  # chunks which need to be uploaded
        self.wark = None  # type: str
        self.url = None  # type: str

        # set by upload
        self.up_b = 0  # type: int
        self.up_c = 0  # type: int

        # m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
        # eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))


class FileSlice(object):
    """file-like object providing a fixed window into a file"""

    def __init__(self, file, cid):
        # type: (File, str) -> FileSlice

        self.car, self.len = file.kchunks[cid]
        self.cdr = self.car + self.len
        self.ofs = 0  # type: int
        self.f = open(file.abs, "rb", 512 * 1024)
        self.f.seek(self.car)

        # https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
        # IOBase, RawIOBase, BufferedIOBase
        funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
        try:
            for fun in funs.split():
                setattr(self, fun, getattr(self.f, fun))
        except:
            pass  # py27 probably

    def tell(self):
        return self.ofs

    def seek(self, ofs, wh=0):
        if wh == 1:
            ofs = self.ofs + ofs
        elif wh == 2:
            ofs = self.len + ofs  # provided ofs is negative

        if ofs < 0:
            ofs = 0
        elif ofs >= self.len:
            ofs = self.len - 1

        self.ofs = ofs
        self.f.seek(self.car + ofs)

    def read(self, sz):
        sz = min(sz, self.len - self.ofs)
        ret = self.f.read(sz)
        self.ofs += len(ret)
        return ret


_print = print


def eprint(*a, **ka):
    ka["file"] = sys.stderr
    ka["end"] = ""
    if not PY2:
        ka["flush"] = True

    _print(*a, **ka)
    if PY2 or not VT100:
        sys.stderr.flush()


def flushing_print(*a, **ka):
    _print(*a, **ka)
    if "flush" not in ka:
        sys.stdout.flush()


if not VT100:
    print = flushing_print


def termsize():
    import os

    env = os.environ

    def ioctl_GWINSZ(fd):
        try:
            import fcntl, termios, struct, os

            cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
        except:
            return
        return cr

    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except:
            pass
    if not cr:
        try:
            cr = (env["LINES"], env["COLUMNS"])
        except:
            cr = (25, 80)
    return int(cr[1]), int(cr[0])


class CTermsize(object):
    def __init__(self):
        self.ev = False
        self.margin = None
        self.g = None
        self.w, self.h = termsize()

        try:
            signal.signal(signal.SIGWINCH, self.ev_sig)
        except:
            return

        thr = threading.Thread(target=self.worker)
        thr.daemon = True
        thr.start()

    def worker(self):
        while True:
            time.sleep(0.5)
            if not self.ev:
                continue

            self.ev = False
            self.w, self.h = termsize()

            if self.margin is not None:
                self.scroll_region(self.margin)

    def ev_sig(self, *a, **ka):
        self.ev = True

    def scroll_region(self, margin):
        self.margin = margin
        if margin is None:
            self.g = None
            eprint("\033[s\033[r\033[u")
        else:
            self.g = 1 + self.h - margin
            m = "{0}\033[{1}A".format("\n" * margin, margin)
            eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))


ss = CTermsize()

def statdir(top):
    """non-recursive listing of directory contents, along with stat() info"""
    if hasattr(os, "scandir"):
        with os.scandir(top) as dh:
            for fh in dh:
                yield [os.path.join(top, fh.name), fh.stat()]
    else:
        for name in os.listdir(top):
            abspath = os.path.join(top, name)
            yield [abspath, os.stat(abspath)]


def walkdir(top):
    """recursive statdir"""
    for ap, inf in sorted(statdir(top)):
        if stat.S_ISDIR(inf.st_mode):
            for x in walkdir(ap):
                yield x
        else:
            yield ap, inf


def walkdirs(tops):
    """recursive statdir for a list of tops, yields [top, relpath, stat]"""
    sep = "{0}".format(os.sep).encode("ascii")
    for top in tops:
        stop = top
        if top[-1:] == sep:
            stop = os.path.dirname(top.rstrip(sep))

        if os.path.isdir(top):
            for ap, inf in walkdir(top):
                yield stop, ap[len(stop) :].lstrip(sep), inf
        else:
            d, n = top.rsplit(sep, 1)
            yield d, n, os.stat(top)


# mostly from copyparty/util.py
def quotep(btxt):
    quot1 = quote(btxt, safe=b"/")
    if not PY2:
        quot1 = quot1.encode("ascii")

    return quot1.replace(b" ", b"+")


# from copyparty/util.py
def humansize(sz, terse=False):
    """picks a sensible unit for the given extent"""
    for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
        if sz < 1024:
            break

        sz /= 1024.0

    ret = " ".join([str(sz)[:4].rstrip("."), unit])

    if not terse:
        return ret

    return ret.replace("iB", "").replace(" ", "")


# from copyparty/up2k.py
def up2k_chunksize(filesize):
    """gives The correct chunksize for up2k hashing"""
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
                return chunksize

            chunksize += stepsize
            stepsize *= mul

# mostly from copyparty/up2k.py
def get_hashlist(file, pcb):
    # type: (File, any) -> None
    """generates the up2k hashlist from file contents, inserts it into `file`"""

    chunk_sz = up2k_chunksize(file.size)
    file_rem = file.size
    file_ofs = 0
    ret = []
    with open(file.abs, "rb", 512 * 1024) as f:
        while file_rem > 0:
            hashobj = hashlib.sha512()
            chunk_sz = chunk_rem = min(chunk_sz, file_rem)
            while chunk_rem > 0:
                buf = f.read(min(chunk_rem, 64 * 1024))
                if not buf:
                    raise Exception("EOF at " + str(f.tell()))

                hashobj.update(buf)
                chunk_rem -= len(buf)

            digest = hashobj.digest()[:33]
            digest = base64.urlsafe_b64encode(digest).decode("utf-8")

            ret.append([digest, file_ofs, chunk_sz])
            file_ofs += chunk_sz
            file_rem -= chunk_sz

            if pcb:
                pcb(file, file_ofs)

    file.cids = ret
    file.kchunks = {}
    for k, v1, v2 in ret:
        file.kchunks[k] = [v1, v2]

def handshake(req_ses, url, file, pw, search):
    # type: (requests.Session, str, File, any, bool) -> List[str]
    """
    performs a handshake with the server; reply is:
      if search, a list of search results
      otherwise, a list of chunks to upload
    """

    req = {
        "hash": [x[0] for x in file.cids],
        "name": file.name,
        "lmod": file.lmod,
        "size": file.size,
    }
    if search:
        req["srch"] = 1

    headers = {"Content-Type": "text/plain"}  # wtf ed
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])

    if file.url:
        url = file.url
    elif b"/" in file.rel:
        url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")

    while True:
        try:
            r = req_ses.post(url, headers=headers, json=req)
            break
        except:
            eprint("handshake failed, retry...\n")
            time.sleep(1)

    try:
        r = r.json()
    except:
        raise Exception(r.text)

    if search:
        return r["hits"]

    try:
        pre, url = url.split("://")
        pre += "://"
    except:
        pre = ""

    file.url = pre + url.split("/")[0] + r["purl"]
    file.name = r["name"]
    file.wark = r["wark"]

    return r["hash"]


def upload(req_ses, file, cid, pw):
    # type: (requests.Session, File, str, any) -> None
    """upload one specific chunk, `cid` (a chunk-hash)"""

    headers = {
        "X-Up2k-Hash": cid,
        "X-Up2k-Wark": file.wark,
        "Content-Type": "application/octet-stream",
    }
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])

    f = FileSlice(file, cid)
    try:
        r = req_ses.post(file.url, headers=headers, data=f)
        if not r:
            raise Exception(repr(r))

        _ = r.content
    finally:
        f.f.close()

class Daemon(threading.Thread):
    def __init__(self, *a, **ka):
        threading.Thread.__init__(self, *a, **ka)
        self.daemon = True


class Ctl(object):
    """
    this will be the coordinator which runs everything in parallel
    (hashing, handshakes, uploads) but right now it's p dumb
    """

    def __init__(self, ar):
        self.ar = ar
        ar.files = [
            os.path.abspath(os.path.realpath(x.encode("utf-8")))
            + (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
            for x in ar.files
        ]
        ar.url = ar.url.rstrip("/") + "/"
        if "://" not in ar.url:
            ar.url = "http://" + ar.url

        eprint("\nscanning {0} locations\n".format(len(ar.files)))

        nfiles = 0
        nbytes = 0
        for _, _, inf in walkdirs(ar.files):
            nfiles += 1
            nbytes += inf.st_size

        eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
        self.nfiles = nfiles
        self.nbytes = nbytes

        if ar.td:
            req_ses.verify = False
        if ar.te:
            req_ses.verify = ar.te

        self.filegen = walkdirs(ar.files)
        if ar.safe:
            self.safe()
        else:
            self.fancy()

    def safe(self):
        """minimal basic slow boring fallback codepath"""
        search = self.ar.s
        for nf, (top, rel, inf) in enumerate(self.filegen):
            file = File(top, rel, inf.st_size, inf.st_mtime)
            upath = file.abs.decode("utf-8", "replace")

            print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
            get_hashlist(file, None)

            burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
            while True:
                print(" hs...")
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
                if search:
                    if hs:
                        for hit in hs:
                            print(" found: {0}{1}".format(burl, hit["rp"]))
                    else:
                        print(" NOT found")
                    break

                file.ucids = hs
                if not hs:
                    break

                print("{0} {1}".format(self.nfiles - nf, upath))
                ncs = len(hs)
                for nc, cid in enumerate(hs):
                    print(" {0} up {1}".format(ncs - nc, cid))
                    upload(req_ses, file, cid, self.ar.a)

            print(" ok!")

    def fancy(self):
        self.hash_f = 0
        self.hash_c = 0
        self.hash_b = 0
        self.up_f = 0
        self.up_c = 0
        self.up_b = 0
        self.up_br = 0
        self.hasher_busy = 1
        self.handshaker_busy = 0
        self.uploader_busy = 0

        self.t0 = time.time()
        self.t0_up = None
        self.spd = None

        self.mutex = threading.Lock()
        self.q_handshake = Queue()  # type: Queue[File]
        self.q_recheck = Queue()  # type: Queue[File]  # partial upload exists [...]
        self.q_upload = Queue()  # type: Queue[tuple[File, str]]

        self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"]  # type: tuple[File, int]
        if VT100:
            atexit.register(self.cleanup_vt100)
            ss.scroll_region(3)

        Daemon(target=self.hasher).start()
        for _ in range(self.ar.j):
            Daemon(target=self.handshaker).start()
            Daemon(target=self.uploader).start()

        idles = 0
        while idles < 3:
            time.sleep(0.07)
            with self.mutex:
                if (
                    self.q_handshake.empty()
                    and self.q_upload.empty()
                    and not self.hasher_busy
                    and not self.handshaker_busy
                    and not self.uploader_busy
                ):
                    idles += 1
                else:
                    idles = 0

            if VT100:
                maxlen = ss.w - len(str(self.nfiles)) - 14
                txt = "\033[s\033[{0}H".format(ss.g)
                for y, k, st, f in [
                    [0, "hash", self.st_hash, self.hash_f],
                    [1, "send", self.st_up, self.up_f],
                ]:
                    txt += "\033[{0}H{1}:".format(ss.g + y, k)
                    file, arg = st
                    if not file:
                        txt += " {0}\033[K".format(arg)
                    else:
                        if y:
                            p = 100 * file.up_b / file.size
                        else:
                            p = 100 * arg / file.size

                        name = file.abs.decode("utf-8", "replace")[-maxlen:]
                        if "/" in name:
                            name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))

                        m = "{0:6.1f}% {1} {2}\033[K"
                        txt += m.format(p, self.nfiles - f, name)

                txt += "\033[{0}H ".format(ss.g + 2)
            else:
                txt = " "

            if not self.up_br:
                spd = self.hash_b / (time.time() - self.t0)
                eta = (self.nbytes - self.hash_b) / (spd + 1)
            else:
                spd = self.up_br / (time.time() - self.t0_up)
                spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
                eta = (self.nbytes - self.up_b) / (spd + 1)

            spd = humansize(spd)
            eta = str(datetime.timedelta(seconds=int(eta)))
            left = humansize(self.nbytes - self.up_b)
            tail = "\033[K\033[u" if VT100 else "\r"

            m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
            eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))

    def cleanup_vt100(self):
        ss.scroll_region(None)
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
        self.st_hash = [file, ofs]

    def hasher(self):
        prd = None
        ls = {}
        for top, rel, inf in self.filegen:
            if self.ar.z:
                rd = os.path.dirname(rel)
                if prd != rd:
                    prd = rd
                    headers = {}
                    if self.ar.a:
                        headers["Cookie"] = "=".join(["cppwd", self.ar.a])

                    ls = {}
                    try:
                        print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
                        r = req_ses.get(
                            self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
                            headers=headers,
                        )
                        for f in r.json()["files"]:
                            rfn = f["href"].split("?")[0].encode("utf-8", "replace")
                            ls[unquote(rfn)] = f
                    except:
                        print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))

                rf = ls.get(os.path.basename(rel), None)
                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
                    self.nfiles -= 1
                    self.nbytes -= inf.st_size
                    continue

            file = File(top, rel, inf.st_size, inf.st_mtime)
            while True:
                with self.mutex:
                    if (
                        self.hash_b - self.up_b < 1024 * 1024 * 128
                        and self.hash_c - self.up_c < 64
                        and (
                            not self.ar.nh
                            or (
                                self.q_upload.empty()
                                and self.q_handshake.empty()
                                and not self.uploader_busy
                            )
                        )
                    ):
                        break

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)
                self.hash_b += file.size

            self.q_handshake.put(file)

        self.hasher_busy = 0
        self.st_hash = [None, "(finished)"]

    def handshaker(self):
        search = self.ar.s
        q = self.q_handshake
        burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
        while True:
            file = q.get()
            if not file:
                if q == self.q_handshake:
                    q = self.q_recheck
                    q.put(None)
                    continue

                self.q_upload.put(None)
                break

            with self.mutex:
                self.handshaker_busy += 1

            upath = file.abs.decode("utf-8", "replace")

            try:
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
            except Exception as ex:
                if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                    self.q_recheck.put(file)
                    hs = []
                else:
                    raise

            if search:
                if hs:
                    for hit in hs:
                        m = "found: {0}\n {1}{2}\n"
                        print(m.format(upath, burl, hit["rp"]), end="")
                else:
                    print("NOT found: {0}\n".format(upath), end="")

                with self.mutex:
                    self.up_f += 1
                    self.up_c += len(file.cids)
                    self.up_b += file.size
                    self.handshaker_busy -= 1

                continue

            with self.mutex:
                if not hs:
                    # all chunks done
                    self.up_f += 1
                    self.up_c += len(file.cids) - file.up_c
                    self.up_b += file.size - file.up_b

                if hs and file.up_c:
                    # some chunks failed
                    self.up_c -= len(hs)
                    file.up_c -= len(hs)
                    for cid in hs:
                        sz = file.kchunks[cid][1]
                        self.up_b -= sz
                        file.up_b -= sz

                file.ucids = hs
                self.handshaker_busy -= 1

            if not hs:
                kw = "uploaded" if file.up_b else " found"
                print("{0} {1}".format(kw, upath))
            for cid in hs:
                self.q_upload.put([file, cid])

    def uploader(self):
        while True:
            task = self.q_upload.get()
            if not task:
                self.st_up = [None, "(finished)"]
                break

            with self.mutex:
                self.uploader_busy += 1
                self.t0_up = self.t0_up or time.time()

            file, cid = task
            try:
                upload(req_ses, file, cid, self.ar.a)
            except:
                eprint("upload failed, retry...\n")
                pass  # handshake will fix it

            with self.mutex:
                sz = file.kchunks[cid][1]
                file.ucids = [x for x in file.ucids if x != cid]
                if not file.ucids:
                    self.q_handshake.put(file)

                self.st_up = [file, cid]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1


class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    pass


def main():
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
  "foo" uploads the entire folder to URL/foo/
  "foo/" uploads the CONTENTS of the folder into URL/
""")

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())


if __name__ == "__main__":
    main()
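For a feel of the chunking behavior, the `up2k_chunksize` helper above runs standalone; files stay at or under 256 chunks until the 32 MiB chunk-size cap kicks in:

```python
import math

# standalone copy of up2k_chunksize from bin/up2k.py above
def up2k_chunksize(filesize):
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
                return chunksize

            chunksize += stepsize
            stepsize *= mul

for sz in [100 * 1024 ** 2, 1024 ** 3, 100 * 1024 ** 3]:
    print(sz, up2k_chunksize(sz))
# 100 MiB -> 1 MiB chunks; 1 GiB -> 4 MiB chunks; 100 GiB -> 32 MiB (the cap)
```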
bin/up2k.sh (mode changed from executable to normal file), 12 changes

@@ -8,7 +8,7 @@ set -e
 ##
 ## config
 
-datalen=$((2*1024*1024*1024))
+datalen=$((128*1024*1024))
 target=127.0.0.1
 posturl=/inc
 passwd=wark

@@ -37,10 +37,10 @@ gendata() {
 # pipe a chunk, get the base64 checksum
 gethash() {
 	printf $(
-	sha512sum | cut -c-64 |
+	sha512sum | cut -c-66 |
 	sed -r 's/ .*//;s/(..)/\\x\1/g'
 	) |
-	base64 -w0 | cut -c-43 |
+	base64 -w0 | cut -c-44 |
 	tr '+/' '-_'
 }
 

@@ -123,7 +123,7 @@ printf '\033[36m'
 {
 {
 cat <<EOF
-POST $posturl/handshake.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Close
 Cookie: cppwd=$passwd
 Content-Type: text/plain;charset=UTF-8

@@ -145,6 +145,7 @@ printf '\033[0m\nwark: %s\n' $wark
 ##
 ## wait for signal to continue
 
+true || {
 w8=/dev/shm/$salt.w8
 touch $w8
 

@@ -153,6 +154,7 @@ echo "ready; rm -f $w8"
 while [ -e $w8 ]; do
 	sleep 0.2
 done
+}
 
 
 ##

@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
 
 {
 cat <<EOF
-POST $posturl/chunkpit.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Keep-Alive
 Cookie: cppwd=$passwd
 Content-Type: application/octet-stream
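The two width changes in `gethash()` track the chunk-hash format used by `get_hashlist()` in bin/up2k.py: the kept digest is 33 bytes of sha512, which is 66 hex characters out of `sha512sum` and exactly 44 base64 characters. A sketch of the same arithmetic:

```python
import base64
import hashlib

# the up2k chunk-hash format (see get_hashlist in bin/up2k.py):
# sha512, truncated to 33 bytes, then urlsafe-base64
digest = hashlib.sha512(b"some chunk").digest()[:33]

print(len(digest.hex()))  # 66 -> why gethash() now does `cut -c-66`
b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
print(len(b64))           # 44 -> why it now does `base64 -w0 | cut -c-44`
```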
copyparty/__main__.py

@@ -276,7 +276,8 @@ def run_argparse(argv, formatter):
 \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
 \033[36md2t\033[35m disables metadata collection, overrides -e2t*
 \033[36md2d\033[35m disables all database stuff, overrides -e2*
-\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
+\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
+\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
 \033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
 \033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
 

@@ -344,6 +345,9 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
     ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
+    ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
+    ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
+    ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
 
     ap2 = ap.add_argument_group('network options')
     ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")

@@ -375,6 +379,11 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
     ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
     ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
+    ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
+
+    ap2 = ap.add_argument_group('yolo options')
+    ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
+    ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
 
     ap2 = ap.add_argument_group('logging options')
     ap2.add_argument("-q", action="store_true", help="quiet")

@@ -408,7 +417,8 @@ def run_argparse(argv, formatter):
     ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
     ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
     ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
-    ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
+    ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
+    ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
     ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
    ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
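Taken together with the README changes above, the old boolean `--no-hash` becomes a regex: for example `--no-hash '\.iso$'` (or per-volume `:c,nohash=\.iso$`) skips hashing isos, while `--no-idx '\.iso$'` (or `:c,noidx=\.iso$`) keeps them out of the index entirely; the shell quoting shown here is an assumption, use whatever your shell needs to pass the pattern through verbatim.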
copyparty/__init__.py

@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 0, 5)
+VERSION = (1, 0, 11)
 CODENAME = "sufficient"
-BUILD_DT = (2021, 9, 19)
+BUILD_DT = (2021, 10, 18)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py

@@ -356,7 +356,7 @@ class VFS(object):
         if not dbv:
             return self, vrem
 
-        vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
+        vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
         vrem = "/".join([x for x in vrem if x])
         return dbv, vrem
 

@@ -726,6 +726,7 @@ class AuthSrv(object):
                     axs = getattr(vol.axs, axs_key)
                     if usr in axs or "*" in axs:
                         umap[usr].append(mp)
+                umap[usr].sort()
             setattr(vfs, "a" + perm, umap)
 
         all_users = {}

@@ -865,9 +866,14 @@ class AuthSrv(object):
             if self.args.e2d or "e2ds" in vol.flags:
                 vol.flags["e2d"] = True
 
-            if self.args.no_hash:
-                if "ehash" not in vol.flags:
-                    vol.flags["dhash"] = True
+            for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
+                if vf in vol.flags:
+                    ptn = vol.flags.pop(vf)
+                else:
+                    ptn = getattr(self.args, ga)
+
+                if ptn:
+                    vol.flags[vf] = re.compile(ptn)
 
             for k in ["e2t", "e2ts", "e2tsr"]:
                 if getattr(self.args, k):

@@ -880,6 +886,10 @@ class AuthSrv(object):
             # default tag cfgs if unset
             if "mte" not in vol.flags:
                 vol.flags["mte"] = self.args.mte
+            elif vol.flags["mte"].startswith("+"):
+                vol.flags["mte"] = ",".join(
+                    x for x in [self.args.mte, vol.flags["mte"][1:]] if x
+                )
             if "mth" not in vol.flags:
                 vol.flags["mth"] = self.args.mth
 
|
|||||||
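The new `elif` gives the `mte` volflag an additive form: a value starting with `+` is appended to the global tag list instead of replacing it. A sketch of the resulting merge -- the values are plausible examples, not the shipped defaults:

args_mte = "artist,title,album"   # hypothetical global -mte
volflag = "+bpm,key"              # per-volume mte volflag

if volflag.startswith("+"):
    merged = ",".join(x for x in [args_mte, volflag[1:]] if x)

print(merged)  # artist,title,album,bpm,key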
@@ -25,14 +25,14 @@ def lstat(p):
 def makedirs(name, mode=0o755, exist_ok=True):
     bname = fsenc(name)
     try:
-        os.makedirs(bname, mode=mode)
+        os.makedirs(bname, mode)
     except:
         if not exist_ok or not os.path.isdir(bname):
             raise


 def mkdir(p, mode=0o755):
-    return os.mkdir(fsenc(p), mode=mode)
+    return os.mkdir(fsenc(p), mode)


 def rename(src, dst):
@@ -10,7 +10,6 @@ import json
 import base64
 import string
 import socket
-import ctypes
 from datetime import datetime
 from operator import itemgetter
 import calendar

@@ -20,6 +19,11 @@ try:
 except:
     pass

+try:
+    import ctypes
+except:
+    pass
+
 from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
 from .util import *  # noqa # pylint: disable=unused-wildcard-import
 from .bos import bos

@@ -39,6 +43,7 @@ class HttpCli(object):
     def __init__(self, conn):
         self.t0 = time.time()
         self.conn = conn
+        self.mutex = conn.mutex
         self.s = conn.s  # type: socket
         self.sr = conn.sr  # type: Unrecv
         self.ip = conn.addr[0]

@@ -47,13 +52,14 @@ class HttpCli(object):
         self.asrv = conn.asrv  # type: AuthSrv
         self.ico = conn.ico
         self.thumbcli = conn.thumbcli
+        self.u2fh = conn.u2fh
         self.log_func = conn.log_func
         self.log_src = conn.log_src
         self.tls = hasattr(self.s, "cipher")

         self.bufsz = 1024 * 32
         self.hint = None
-        self.absolute_urls = False
+        self.trailing_slash = True
         self.out_headers = {
             "Access-Control-Allow-Origin": "*",
             "Cache-Control": "no-store; max-age=0",

@@ -92,6 +98,7 @@ class HttpCli(object):
     def run(self):
         """returns true if connection can be reused"""
         self.keepalive = False
+        self.is_https = False
         self.headers = {}
         self.hint = None
         try:

@@ -129,6 +136,7 @@ class HttpCli(object):

         v = self.headers.get("connection", "").lower()
         self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
+        self.is_https = (self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls)

         n = self.args.rproxy
         if n:

@@ -146,6 +154,8 @@ class HttpCli(object):

         self.log_src = self.conn.set_rproxy(self.ip)

+        self.dip = self.ip.replace(":", ".")
+
         if self.args.ihead:
             keys = self.args.ihead
             if "*" in keys:

@@ -162,15 +172,11 @@ class HttpCli(object):
         # split req into vpath + uparam
         uparam = {}
         if "?" not in self.req:
-            if not self.req.endswith("/"):
-                self.absolute_urls = True
-
+            self.trailing_slash = self.req.endswith("/")
             vpath = undot(self.req)
         else:
             vpath, arglist = self.req.split("?", 1)
-            if not vpath.endswith("/"):
-                self.absolute_urls = True
-
+            self.trailing_slash = vpath.endswith("/")
             vpath = undot(vpath)
             for k in arglist.split("&"):
                 if "=" in k:
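`absolute_urls` becomes the more descriptive `trailing_slash`, and the stash handler below uses it to decide whether a raw PUT names a directory or a file: `PUT /folder/` keeps the old behavior (server-generated `put-...` filename), while `PUT /folder/name.bin` now writes to `name.bin` itself. A client-side illustration, assuming a copyparty instance on localhost:3923 with a writable `/inc` volume:

import requests

data = b"hello"

# trailing slash: upload into the folder, server picks a "put-<ts>-<ip>" name
requests.put("http://127.0.0.1:3923/inc/", data=data)

# no trailing slash: upload lands as /inc/greeting.txt
requests.put("http://127.0.0.1:3923/inc/greeting.txt", data=data)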
@@ -387,7 +393,7 @@ class HttpCli(object):
         if not self.can_read and not self.can_write and not self.can_get:
             if self.vpath:
                 self.log("inaccessible: [{}]".format(self.vpath))
-                return self.tx_404()
+                return self.tx_404(True)

             self.uparam["h"] = False

@@ -464,13 +470,13 @@ class HttpCli(object):
         except:
             raise Pebkac(400, "client d/c before 100 continue")

+        if "raw" in self.uparam:
+            return self.handle_stash()
+
         ctype = self.headers.get("content-type", "").lower()
         if not ctype:
             raise Pebkac(400, "you can't post without a content-type header")

-        if "raw" in self.uparam:
-            return self.handle_stash()
-
         if "multipart/form-data" in ctype:
             return self.handle_post_multipart()

@@ -531,17 +537,16 @@ class HttpCli(object):
         fdir = os.path.join(vfs.realpath, rem)
         if lim:
             fdir, rem = lim.all(self.ip, rem, remains, fdir)

+        fn = None
+        if rem and not self.trailing_slash and not bos.path.isdir(fdir):
+            fdir, fn = os.path.split(fdir)
+            rem, _ = vsplit(rem)
+
         bos.makedirs(fdir)

-        addr = self.ip.replace(":", ".")
-        fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
-        path = os.path.join(fdir, fn)
-        if self.args.nw:
-            path = os.devnull
-
-        open_f = open
-        open_a = [fsenc(path), "wb", 512 * 1024]
-        open_ka = {}
+        open_ka = {"fun": open}
+        open_a = ["wb", 512 * 1024]

         # user-request || config-force
         if ("gz" in vfs.flags or "xz" in vfs.flags) and (

@@ -582,16 +587,28 @@ class HttpCli(object):

             self.log("compressing with {} level {}".format(alg, lv.get(alg)))
             if alg == "gz":
-                open_f = gzip.GzipFile
-                open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600]  # 2021-01-01
+                open_ka["fun"] = gzip.GzipFile
+                open_a = ["wb", lv[alg], None, 0x5FEE6600]  # 2021-01-01
             elif alg == "xz":
-                open_f = lzma.open
-                open_a = [fsenc(path), "wb"]
-                open_ka = {"preset": lv[alg]}
+                open_ka = {"fun": lzma.open, "preset": lv[alg]}
+                open_a = ["wb"]
             else:
                 self.log("fallthrough? thats a bug", 1)

-        with open_f(*open_a, **open_ka) as f:
+        suffix = "-{:.6f}-{}".format(time.time(), self.dip)
+        params = {"suffix": suffix, "fdir": fdir}
+        if self.args.nw:
+            params = {}
+            fn = os.devnull
+
+        params.update(open_ka)
+
+        if not fn:
+            fn = "put" + suffix
+
+        with ren_open(fn, *open_a, **params) as f:
+            f, fn = f["orz"]
+            path = os.path.join(fdir, fn)
             post_sz, _, sha_b64 = hashcopy(reader, f)

         if lim:

@@ -835,7 +852,18 @@ class HttpCli(object):

         reader = read_socket(self.sr, remains)

-        with open(fsenc(path), "rb+", 512 * 1024) as f:
+        f = None
+        fpool = not self.args.no_fpool
+        if fpool:
+            with self.mutex:
+                try:
+                    f = self.u2fh.pop(path)
+                except:
+                    pass
+
+        f = f or open(fsenc(path), "rb+", 512 * 1024)
+
+        try:
             f.seek(cstart[0])
             post_sz, _, sha_b64 = hashcopy(reader, f)

@@ -865,22 +893,36 @@ class HttpCli(object):
                     ofs += len(buf)

                 self.log("clone {} done".format(cstart[0]))
+        finally:
+            if not fpool:
+                f.close()
+            else:
+                with self.mutex:
+                    self.u2fh.put(path, f)

         x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
         x = x.get()
         try:
-            num_left, path = x
+            num_left, fin_path = x
         except:
             self.loud_reply(x, status=500)
             return False

-        if not ANYWIN and num_left == 0:
+        if not num_left and fpool:
+            with self.mutex:
+                self.u2fh.close(path)
+
+        # windows cant rename open files
+        if ANYWIN and path != fin_path and not self.args.nw:
+            self.conn.hsrv.broker.put(True, "up2k.finish_upload", ptop, wark).get()
+
+        if not ANYWIN and not num_left:
             times = (int(time.time()), int(lastmod))
             self.log("no more chunks, setting times {}".format(times))
             try:
-                bos.utime(path, times)
+                bos.utime(fin_path, times)
             except:
-                self.log("failed to utime ({}, {})".format(path, times))
+                self.log("failed to utime ({}, {})".format(fin_path, times))

         spd = self._spd(post_sz)
         self.log("{} thank".format(spd))
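The per-chunk `open()`/`close()` is replaced by a shared pool of write handles (`u2fh`, an `FHC` instance defined in util.py further down this diff): a worker pops a cached handle for the target path if one exists, writes its chunk, puts the handle back, and only when the last chunk lands is everything for that path closed. A standalone sketch of the round-trip; `write_chunk` and its arguments are stand-ins, the pool calls are the ones in the hunk above:

import threading

mutex = threading.Lock()
u2fh = FHC()  # the file-handle cache from util.py, see below

def write_chunk(path, offset, chunk, last_chunk):
    with mutex:
        try:
            f = u2fh.pop(path)      # reuse a cached write handle
        except:
            f = None

    f = f or open(path, "rb+", 512 * 1024)
    try:
        f.seek(offset)
        f.write(chunk)
    finally:
        with mutex:
            u2fh.put(path, f)       # hand it back for the next chunk

    if last_chunk:
        with mutex:
            u2fh.close(path)        # flush and close every pooled handle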
@@ -1003,7 +1045,7 @@ class HttpCli(object):
         if not bos.path.isdir(fdir):
             raise Pebkac(404, "that folder does not exist")

-        suffix = ".{:.6f}-{}".format(time.time(), self.ip)
+        suffix = "-{:.6f}-{}".format(time.time(), self.dip)
         open_args = {"fdir": fdir, "suffix": suffix}
     else:
         open_args = {}

@@ -1102,7 +1144,7 @@ class HttpCli(object):
             # using SHA-512/224, optionally SHA-512/256 = :64
             jpart = {
                 "url": "{}://{}/{}".format(
-                    "https" if self.tls else "http",
+                    "https" if self.is_https else "http",
                     self.headers.get("host", "copyparty"),
                     vpath + vsuf,
                 ),

@@ -1538,7 +1580,7 @@ class HttpCli(object):

         if not self.can_write:
             if "edit" in self.uparam or "edit2" in self.uparam:
-                return self.tx_404()
+                return self.tx_404(True)

         tpl = "mde" if "edit2" in self.uparam else "md"
         html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))

@@ -1640,8 +1682,14 @@ class HttpCli(object):
         self.reply(html.encode("utf-8"))
         return True

-    def tx_404(self):
-        m = '<h1>404 not found ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
+    def tx_404(self, is_403=False):
+        if self.args.vague_403:
+            m = '<h1>404 not found ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
+        elif is_403:
+            m = '<h1>403 forbiddena ~┻━┻</h1><p>you\'ll have to log in or <a href="/?h">go home</a></p>'
+        else:
+            m = '<h1>404 not found ┐( ´ -`)┌</h1><p><a href="/?h">go home</a></p>'
+
         html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
         self.reply(html.encode("utf-8"), status=404)
         return True

@@ -1755,7 +1803,7 @@ class HttpCli(object):
             if filt and filt not in vp:
                 continue

-            ret.append({"vp": vp, "sz": sz, "at": at})
+            ret.append({"vp": quotep(vp), "sz": sz, "at": at})
             if len(ret) > 3000:
                 ret.sort(key=lambda x: x["at"], reverse=True)
                 ret = ret[:2000]
@@ -1868,7 +1916,7 @@ class HttpCli(object):
             return self.tx_file(abspath)

         elif is_dir and not self.can_read and not self.can_write:
-            return self.tx_404()
+            return self.tx_404(True)

         srv_info = []

@@ -1882,11 +1930,14 @@ class HttpCli(object):
         # some fuses misbehave
         if not self.args.nid:
             if WINDOWS:
+                try:
                     bfree = ctypes.c_ulonglong(0)
                     ctypes.windll.kernel32.GetDiskFreeSpaceExW(
                         ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
                     )
                     srv_info.append(humansize(bfree.value) + " free")
+                except:
+                    pass
             else:
                 sv = os.statvfs(fsenc(abspath))
                 free = humansize(sv.f_frsize * sv.f_bfree, True)

@@ -1973,7 +2024,7 @@ class HttpCli(object):
             return True

         if not stat.S_ISDIR(st.st_mode):
-            return self.tx_404()
+            return self.tx_404(True)

         if "zip" in self.uparam or "tar" in self.uparam:
             raise Pebkac(403)

@@ -2030,7 +2081,7 @@ class HttpCli(object):
         for fn in vfs_ls:
             base = ""
             href = fn
-            if not is_ls and self.absolute_urls and vpath:
+            if not is_ls and not self.trailing_slash and vpath:
                 base = "/" + vpath + "/"
                 href = base + fn
@@ -32,9 +32,11 @@ class HttpConn(object):
         self.addr = addr
         self.hsrv = hsrv

+        self.mutex = hsrv.mutex
         self.args = hsrv.args
         self.asrv = hsrv.asrv
         self.cert_path = hsrv.cert_path
+        self.u2fh = hsrv.u2fh

         enth = HAVE_PIL and not self.args.no_thumb
         self.thumbcli = ThumbCli(hsrv.broker) if enth else None

@@ -27,7 +27,7 @@ except ImportError:
     sys.exit(1)

 from .__init__ import E, PY2, MACOS
-from .util import spack, min_ex, start_stackmon, start_log_thrs
+from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
 from .bos import bos
 from .httpconn import HttpConn

@@ -50,7 +50,10 @@ class HttpSrv(object):
         self.log = broker.log
         self.asrv = broker.asrv

-        self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
+        nsuf = "-{}".format(nid) if nid else ""
+        nsuf2 = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
+
+        self.name = "hsrv" + nsuf2
         self.mutex = threading.Lock()
         self.stopping = False

@@ -59,6 +62,7 @@ class HttpSrv(object):
         self.tp_time = None  # latest worker collect
         self.tp_q = None if self.args.no_htp else queue.LifoQueue()

+        self.u2fh = FHC()
         self.srvs = []
         self.ncli = 0  # exact
         self.clients = {}  # laggy

@@ -82,11 +86,6 @@ class HttpSrv(object):
         if self.tp_q:
             self.start_threads(4)

-            name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
-            t = threading.Thread(target=self.thr_scaler, name=name)
-            t.daemon = True
-            t.start()
-
         if nid:
             if self.args.stackmon:
                 start_stackmon(self.args.stackmon, nid)

@@ -94,6 +93,10 @@ class HttpSrv(object):
             if self.args.log_thrs:
                 start_log_thrs(self.log, self.args.log_thrs, nid)

+        t = threading.Thread(target=self.periodic, name="hsrv-pt" + nsuf)
+        t.daemon = True
+        t.start()
+
     def start_threads(self, n):
         self.tp_nthr += n
         if self.args.log_htp:

@@ -115,10 +118,12 @@ class HttpSrv(object):
         for _ in range(n):
             self.tp_q.put(None)

-    def thr_scaler(self):
+    def periodic(self):
         while True:
-            time.sleep(2 if self.tp_ncli else 30)
+            time.sleep(2 if self.tp_ncli else 10)
             with self.mutex:
+                self.u2fh.clean()
+                if self.tp_q:
                     self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
                     if self.tp_nthr > self.tp_ncli + 8:
                         self.stop_threads(4)
@@ -471,7 +471,10 @@ class MTag(object):
         ret = {}
         for tagname, mp in parsers.items():
             try:
-                cmd = [sys.executable, mp.bin, abspath]
+                cmd = [mp.bin, abspath]
+                if mp.bin.endswith(".py"):
+                    cmd = [sys.executable] + cmd
+
                 args = {"env": env, "timeout": mp.timeout}

                 if WINDOWS:
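Parser plugins were previously always launched through `sys.executable`, which broke non-python parsers; now only `.py` files get the interpreter prefix, so a compiled binary or shell script registered as a tag parser runs directly. An illustration of the two resulting command lines (the paths are hypothetical):

import sys

def build_cmd(parser_bin, abspath):
    # mirrors the dispatch in MTag.get_bin above
    cmd = [parser_bin, abspath]
    if parser_bin.endswith(".py"):
        cmd = [sys.executable] + cmd
    return cmd

print(build_cmd("/srv/parsers/key.py", "/music/a.flac"))      # [python, key.py, file]
print(build_cmd("/srv/parsers/bpm-detect", "/music/a.flac"))  # [bpm-detect, file]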
@@ -38,6 +38,7 @@ class SvcHub(object):
         self.stop_req = False
         self.stopping = False
         self.stop_cond = threading.Condition()
+        self.retcode = 0
         self.httpsrv_up = 0

         self.log_mutex = threading.Lock()

@@ -53,6 +54,17 @@ class SvcHub(object):
         if args.log_thrs:
             start_log_thrs(self.log, args.log_thrs, 0)

+        if not ANYWIN and not args.use_fpool:
+            args.no_fpool = True
+
+        if not args.no_fpool and args.j != 1:
+            m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
+            if ANYWIN:
+                m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
+                args.no_fpool = True
+
+            self.log("root", m, c=3)
+
         # initiate all services to manage
         self.asrv = AuthSrv(self.args, self.log)
         if args.ls:

@@ -87,14 +99,23 @@ class SvcHub(object):

     def thr_httpsrv_up(self):
         time.sleep(5)
-        failed = self.broker.num_workers - self.httpsrv_up
+        expected = self.broker.num_workers * self.tcpsrv.nsrv
+        failed = expected - self.httpsrv_up
         if not failed:
             return

         m = "{}/{} workers failed to start"
-        m = m.format(failed, self.broker.num_workers)
+        m = m.format(failed, expected)
         self.log("root", m, 1)
-        os._exit(1)
+
+        if self.args.ign_ebind_all:
+            return
+
+        if self.args.ign_ebind and self.tcpsrv.srv:
+            return
+
+        self.retcode = 1
+        os.kill(os.getpid(), signal.SIGTERM)

     def cb_httpsrv_up(self):
         self.httpsrv_up += 1

@@ -231,7 +252,7 @@ class SvcHub(object):
                     print("waiting for thumbsrv (10sec)...")

             print("nailed it", end="")
-            ret = 0
+            ret = self.retcode
         finally:
             print("\033[0m")
             if self.logf:
@@ -42,9 +42,21 @@ class TcpSrv(object):
             self.log("tcpsrv", m)

         self.srv = []
+        self.nsrv = 0
         for ip in self.args.i:
             for port in self.args.p:
-                self.srv.append(self._listen(ip, port))
+                self.nsrv += 1
+                try:
+                    self._listen(ip, port)
+                except Exception as ex:
+                    if self.args.ign_ebind or self.args.ign_ebind_all:
+                        m = "could not listen on {}:{}: {}"
+                        self.log("tcpsrv", m.format(ip, port, ex), c=1)
+                    else:
+                        raise
+
+        if not self.srv and not self.args.ign_ebind_all:
+            raise Exception("could not listen on any of the given interfaces")

     def _listen(self, ip, port):
         srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

@@ -52,7 +64,7 @@ class TcpSrv(object):
         srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
         try:
             srv.bind((ip, port))
-            return srv
+            self.srv.append(srv)
         except (OSError, socket.error) as ex:
             if ex.errno in [98, 48]:
                 e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
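This is the server half of the new `--ign-ebind` flags from the argparse hunk at the top: each bind failure is either logged-and-tolerated or re-raised, and startup only aborts when every endpoint failed (unless `--ign-ebind-all`). The pattern in isolation, with a plain socket list standing in for the TcpSrv state:

import socket

srvs, endpoints = [], [("127.0.0.1", 3923), ("192.168.1.9", 3923)]
ign_ebind = True  # tolerate endpoints that cannot be bound

for ip, port in endpoints:
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((ip, port))
        srvs.append(s)
    except OSError as ex:
        if not ign_ebind:
            raise
        print("could not listen on {}:{}: {}".format(ip, port, ex))

if not srvs:
    raise Exception("could not listen on any of the given interfaces")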
@@ -6,9 +6,10 @@ import os
 import time
 import threading
 from datetime import datetime
+from operator import itemgetter

 from .__init__ import ANYWIN, unicode
-from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey
+from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
 from .bos import bos
 from .up2k import up2k_wark_from_hashlist

@@ -253,21 +254,23 @@ class U2idx(object):
             if rd.startswith("//") or fn.startswith("//"):
                 rd, fn = s3dec(rd, fn)

-            if fk:
+            if not fk:
+                suf = ""
+            else:
                 try:
                     ap = absreal(os.path.join(ptop, rd, fn))
                     inf = bos.stat(ap)
                 except:
                     continue

-                fn += (
+                suf = (
                     "?k="
                     + gen_filekey(
                         self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
                     )[:fk]
                 )

-            rp = "/".join([x for x in [vtop, rd, fn] if x])
+            rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
             sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

         for hit in sret:

@@ -290,9 +293,13 @@ class U2idx(object):
         # undupe hits from multiple metadata keys
         if len(ret) > 1:
             ret = [ret[0]] + [
-                y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
+                y
+                for x, y in zip(ret[:-1], ret[1:])
+                if x["rp"].split("?")[0] != y["rp"].split("?")[0]
             ]

+        ret.sort(key=itemgetter("rp"))
+
         return ret, list(taglist.keys())

     def terminator(self, identifier, done_flag):
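Since the filekey now rides along as a `?k=...` query suffix instead of being glued onto the filename, the adjacent-pairs dedupe has to ignore everything after the `?`; a `ret.sort` is also added so the final ordering is stable. A compact check of the comparison, with made-up paths (the standalone sketch sorts first so the `zip` over neighbors can catch the duplicates):

from operator import itemgetter

hits = [
    {"rp": "music/a.flac?k=xK9"},
    {"rp": "music/a.flac?k=p2Q"},  # same file, different filekey slice
    {"rp": "music/b.flac?k=xK9"},
]
hits.sort(key=itemgetter("rp"))

deduped = [hits[0]] + [
    y
    for x, y in zip(hits[:-1], hits[1:])
    if x["rp"].split("?")[0] != y["rp"].split("?")[0]
]
print([h["rp"].split("?")[0] for h in deduped])  # ['music/a.flac', 'music/b.flac']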
@@ -27,7 +27,10 @@ from .util import (
     sanitize_fn,
     ren_open,
     atomic_move,
+    quotep,
     vsplit,
+    w8b64enc,
+    w8b64dec,
     s3enc,
     s3dec,
     rmdirs,

@@ -66,6 +69,7 @@ class Up2k(object):
         self.n_tagq = 0
         self.volstate = {}
         self.need_rescan = {}
+        self.dupesched = {}
         self.registry = {}
         self.entags = {}
         self.flags = {}

@@ -462,7 +466,8 @@ class Up2k(object):
     def _build_file_index(self, vol, all_vols):
         do_vac = False
         top = vol.realpath
-        nohash = "dhash" in vol.flags
+        rei = vol.flags.get("noidx")
+        reh = vol.flags.get("nohash")
         with self.mutex:
             cur, _ = self.register_vpath(top, vol.flags)

@@ -477,37 +482,54 @@ class Up2k(object):
             if WINDOWS:
                 excl = [x.replace("/", "\\") for x in excl]

-            n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
-            n_rm = self._drop_lost(dbw[0], top)
+            n_add = n_rm = 0
+            try:
+                n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
+                n_rm = self._drop_lost(dbw[0], top)
+            except:
+                m = "failed to index volume [{}]:\n{}"
+                self.log(m.format(top, min_ex()), c=1)
+
             if dbw[1]:
                 self.log("commit {} new files".format(dbw[1]))

             dbw[0].connection.commit()

             return True, n_add or n_rm or do_vac

-    def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
+    def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
         rcdir = absreal(cdir)  # a bit expensive but worth
         if rcdir in seen:
             m = "bailing from symlink loop,\n  prev: {}\n  curr: {}\n  from: {}"
             self.log(m.format(seen[-1], rcdir, cdir), 3)
             return 0

-        seen = seen + [cdir]
+        seen = seen + [rcdir]
         self.pp.msg = "a{} {}".format(self.pp.n, cdir)
         histpath = self.asrv.vfs.histtab[top]
         ret = 0
+        seen_files = {}
         g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
         for iname, inf in sorted(g):
             abspath = os.path.join(cdir, iname)
+            if rei and rei.search(abspath):
+                continue
+
+            nohash = reh.search(abspath) if reh else False
             lmod = int(inf.st_mtime)
             sz = inf.st_size
             if stat.S_ISDIR(inf.st_mode):
                 if abspath in excl or abspath == histpath:
                     continue
                 # self.log(" dir: {}".format(abspath))
-                ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
+                try:
+                    ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
+                except:
+                    m = "failed to index subdir [{}]:\n{}"
+                    self.log(m.format(abspath, min_ex()), c=1)
             else:
                 # self.log("file: {}".format(abspath))
+                seen_files[iname] = 1
                 rp = abspath[len(top) + 1 :]
                 if WINDOWS:
                     rp = rp.replace("\\", "/").strip("/")

@@ -566,34 +588,65 @@ class Up2k(object):
                 dbw[0].connection.commit()
                 dbw[1] = 0
                 dbw[2] = time.time()

+        # drop missing files
+        rd = cdir[len(top) + 1 :].strip("/")
+        if WINDOWS:
+            rd = rd.replace("\\", "/").strip("/")
+
+        q = "select fn from up where rd = ?"
+        try:
+            c = dbw[0].execute(q, (rd,))
+        except:
+            c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
+
+        hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
+        rm_files = [x for x in hits if x not in seen_files]
+        n_rm = len(rm_files)
+        for fn in rm_files:
+            self.db_rm(dbw[0], rd, fn)
+
+        if n_rm:
+            self.log("forgot {} deleted files".format(n_rm))
+
         return ret

     def _drop_lost(self, cur, top):
         rm = []
+        n_rm = 0
         nchecked = 0
-        nfiles = next(cur.execute("select count(w) from up"))[0]
-        c = cur.execute("select rd, fn from up")
-        for drd, dfn in c:
+        # `_build_dir` did all the files, now do dirs
+        ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
+        c = cur.execute("select distinct rd from up order by rd desc")
+        for (drd,) in c:
             nchecked += 1
-            if drd.startswith("//") or dfn.startswith("//"):
-                drd, dfn = s3dec(drd, dfn)
+            if drd.startswith("//"):
+                rd = w8b64dec(drd[2:])
+            else:
+                rd = drd

-            abspath = os.path.join(top, drd, dfn)
-            # almost zero overhead dw
-            self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
+            abspath = os.path.join(top, rd)
+            self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
             try:
-                if not bos.path.exists(abspath):
-                    rm.append([drd, dfn])
-            except Exception as ex:
-                self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
+                if os.path.isdir(abspath):
+                    continue
+            except:
+                pass

-        if rm:
-            self.log("forgetting {} deleted files".format(len(rm)))
-            for rd, fn in rm:
-                # self.log("{} / {}".format(rd, fn))
-                self.db_rm(cur, rd, fn)
+            rm.append(drd)

-        return len(rm)
+        if not rm:
+            return 0
+
+        q = "select count(w) from up where rd = ?"
+        for rd in rm:
+            n_rm += next(cur.execute(q, (rd,)))[0]
+
+        self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
+        for rd in rm:
+            cur.execute("delete from up where rd = ?", (rd,))
+
+        return n_rm

     def _build_tags_index(self, vol):
         ptop = vol.realpath
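Garbage-collecting the index used to stat every file row; since `_build_dir` now reconciles files per-directory during the scan (the `seen_files` bookkeeping above), `_drop_lost` only needs to stat each distinct directory and bulk-delete the rows of directories that vanished -- one stat and one `DELETE` per dead folder instead of a stat per file. The shape of that cleanup against the same `up` table, as a sqlite3 sketch (db path, volume root, and the reduced schema are assumptions):

import os, sqlite3

cur = sqlite3.connect("up2k.db").cursor()  # assumed: table up(rd, fn, w, ...)

dead = []
for (rd,) in cur.execute("select distinct rd from up order by rd desc"):
    if not os.path.isdir(os.path.join("/srv/vol", rd)):
        dead.append(rd)

for rd in dead:
    cur.execute("delete from up where rd = ?", (rd,))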
@@ -940,7 +993,12 @@ class Up2k(object):

     def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
         if tags is None:
+            try:
                 tags = self.mtag.get(abspath)
+            except Exception as ex:
+                msg = "failed to read tags from {}:\n{}"
+                self.log(msg.format(abspath, ex), c=3)
+                return

         if entags:
             tags = {k: v for k, v in tags.items() if k in entags}

@@ -1112,9 +1170,18 @@ class Up2k(object):
             if dp_dir.startswith("//") or dp_fn.startswith("//"):
                 dp_dir, dp_fn = s3dec(dp_dir, dp_fn)

+            if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
+                continue
+
             dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
-            # relying on path.exists to return false on broken symlinks
-            if bos.path.exists(dp_abs):
+            # relying on this to fail on broken symlinks
+            try:
+                sz = bos.path.getsize(dp_abs)
+            except:
+                sz = 0
+
+            if sz:
+                # self.log("--- " + wark + "  " + dp_abs + " found file", 4)
                 job = {
                     "name": dp_fn,
                     "prel": dp_dir,

@@ -1127,9 +1194,9 @@ class Up2k(object):
                     "hash": [],
                     "need": [],
                 }
-                break

         if job and wark in reg:
+            # self.log("pop " + wark + "  " + job["name"] + " handle_json db", 4)
             del reg[wark]

         if job or wark in reg:

@@ -1157,11 +1224,20 @@ class Up2k(object):
             if job["need"]:
                 self.log("unfinished:\n  {0}\n  {1}".format(src, dst))
                 err = "partial upload exists at a different location; please resume uploading here instead:\n"
-                err += "/" + vsrc + " "
+                err += "/" + quotep(vsrc) + " "
+
+                dupe = [cj["prel"], cj["name"]]
+                try:
+                    self.dupesched[src].append(dupe)
+                except:
+                    self.dupesched[src] = [dupe]
+
                 raise Pebkac(400, err)

             elif "nodupe" in self.flags[job["ptop"]]:
                 self.log("dupe-reject:\n  {0}\n  {1}".format(src, dst))
-                err = "upload rejected, file already exists:\n/" + vsrc + " "
+                err = "upload rejected, file already exists:\n"
+                err += "/" + quotep(vsrc) + " "
                 raise Pebkac(400, err)
             else:
                 # symlink to the client-provided name,
@@ -1242,7 +1318,7 @@ class Up2k(object):

         # TODO broker which avoid this race and
         # provides a new filename if taken (same as bup)
-        suffix = ".{:.6f}-{}".format(ts, ip)
+        suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
         with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
             return f["orz"][1]

@@ -1254,6 +1330,9 @@ class Up2k(object):
             return

         try:
+            if self.args.no_symlink:
+                raise Exception("disabled in config")
+
             lsrc = src
             ldst = dst
             fs1 = bos.stat(os.path.dirname(src)).st_dev

@@ -1334,6 +1413,26 @@ class Up2k(object):
                 # del self.registry[ptop][wark]
                 return ret, dst

+        # windows cant rename open files
+        if not ANYWIN or src == dst:
+            self._finish_upload(ptop, wark)
+
+        return ret, dst
+
+    def finish_upload(self, ptop, wark):
+        with self.mutex:
+            self._finish_upload(ptop, wark)
+
+    def _finish_upload(self, ptop, wark):
+        try:
+            job = self.registry[ptop][wark]
+            pdir = os.path.join(job["ptop"], job["prel"])
+            src = os.path.join(pdir, job["tnam"])
+            dst = os.path.join(pdir, job["name"])
+        except Exception as ex:
+            return "finish_upload, wark, " + repr(ex)
+
+        # self.log("--- " + wark + "  " + dst + " finish_upload atomic " + dst, 4)
         atomic_move(src, dst)

         if ANYWIN:

@@ -1343,10 +1442,27 @@ class Up2k(object):
         a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
         a += [job.get("at") or time.time()]
         if self.idx_wark(*a):
+            # self.log("pop " + wark + "  " + dst + " finish_upload idx_wark", 4)
             del self.registry[ptop][wark]
             # in-memory registry is reserved for unfinished uploads

-        return ret, dst
+        dupes = self.dupesched.pop(dst, [])
+        if not dupes:
+            return
+
+        cur = self.cur.get(ptop)
+        for rd, fn in dupes:
+            d2 = os.path.join(ptop, rd, fn)
+            if os.path.exists(d2):
+                continue
+
+            self._symlink(dst, d2)
+            if cur:
+                self.db_rm(cur, rd, fn)
+                self.db_add(cur, wark, rd, fn, *a[-4:])
+
+        if cur:
+            cur.connection.commit()

     def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
         cur = self.cur.get(ptop)
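`dupesched` ties the two halves together: `handle_json` (previous hunk) records `[prel, name]` pairs for clients whose identical upload target is still mid-transfer, and `_finish_upload` drains that list once the source file lands, symlinking each queued location and registering it in the db. The structure itself is just a dict of lists keyed on the source path; a minimal sketch of its lifecycle with invented paths:

dupesched = {}

# an identical upload arrives while /vol/a/song.flac is still unfinished:
src, dupe = "/vol/a/song.flac", ["b", "song.flac"]  # [prel, name]
try:
    dupesched[src].append(dupe)
except KeyError:
    dupesched[src] = [dupe]

# later, when src finishes uploading:
for rd, fn in dupesched.pop(src, []):
    print("would symlink", src, "->", "/vol/{}/{}".format(rd, fn))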
@@ -1402,6 +1518,7 @@ class Up2k(object):
         try:
             permsets = [[True, False, False, True]]
             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
+            vn, rem = vn.get_dbv(rem)
             unpost = False
         except:
             # unpost with missing permissions? try read+write and verify with db

@@ -1411,6 +1528,7 @@ class Up2k(object):
             unpost = True
             permsets = [[True, True]]
             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
+            vn, rem = vn.get_dbv(rem)
             _, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)

         m = "you cannot delete this: "

@@ -1623,7 +1741,7 @@ class Up2k(object):
         wark = [
             x
             for x, y in reg.items()
-            if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
+            if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
         ]

         if wark and wark in reg:

@@ -1706,7 +1824,13 @@ class Up2k(object):
         except:
             cj["lmod"] = int(time.time())

-        wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        if cj["hash"]:
+            wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        else:
+            wark = up2k_wark_from_metadata(
+                self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
+            )
+
         return wark

     def _hashlist_from_file(self, path):
@@ -1749,9 +1873,12 @@ class Up2k(object):

         if self.args.nw:
             job["tnam"] = tnam
+            if not job["hash"]:
+                del self.registry[job["ptop"]][job["wark"]]
             return

-        suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
+        dip = job["addr"].replace(":", ".")
+        suffix = "-{:.6f}-{}".format(job["t0"], dip)
         with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
             f, job["tnam"] = f["orz"]
             if (

@@ -1765,9 +1892,13 @@ class Up2k(object):
                 except:
                     self.log("could not sparse [{}]".format(fp), 3)

+            if job["hash"]:
                 f.seek(job["size"] - 1)
                 f.write(b"e")

+        if not job["hash"]:
+            self._finish_upload(job["ptop"], job["wark"])
+
     def _lastmodder(self):
         while True:
             ready = []

@@ -1864,11 +1995,16 @@ class Up2k(object):

                 # self.log("\n  " + repr([ptop, rd, fn]))
                 abspath = os.path.join(ptop, rd, fn)
+                try:
                     tags = self.mtag.get(abspath)
                     ntags1 = len(tags)
                     parsers = self._get_parsers(ptop, tags, abspath)
                     if parsers:
                         tags.update(self.mtag.get_bin(parsers, abspath))
+                except Exception as ex:
+                    msg = "failed to read tags from {}:\n{}"
+                    self.log(msg.format(abspath, ex), c=3)
+                    continue

                 with self.mutex:
                     cur = self.cur[ptop]
@@ -251,6 +251,55 @@ class _LUnrecv(object):
 Unrecv = _Unrecv


+class FHC(object):
+    class CE(object):
+        def __init__(self, fh):
+            self.ts = 0
+            self.fhs = [fh]
+
+    def __init__(self):
+        self.cache = {}
+
+    def close(self, path):
+        try:
+            ce = self.cache[path]
+        except:
+            return
+
+        for fh in ce.fhs:
+            fh.close()
+
+        del self.cache[path]
+
+    def clean(self):
+        if not self.cache:
+            return
+
+        keep = {}
+        now = time.time()
+        for path, ce in self.cache.items():
+            if now < ce.ts + 5:
+                keep[path] = ce
+            else:
+                for fh in ce.fhs:
+                    fh.close()
+
+        self.cache = keep
+
+    def pop(self, path):
+        return self.cache[path].fhs.pop()
+
+    def put(self, path, fh):
+        try:
+            ce = self.cache[path]
+            ce.fhs.append(fh)
+        except:
+            ce = self.CE(fh)
+            self.cache[path] = ce
+
+        ce.ts = time.time()
+
+
 class ProgressPrinter(threading.Thread):
     """
     periodically print progress info without linefeeds
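`FHC` is the file-handle cache behind the `u2fh` pool used by the chunk writer earlier in this diff: `put` timestamps an entry per path, `pop` lends a handle back out, `clean` (called from `HttpSrv.periodic`) closes anything idle for 5+ seconds, and `close` force-flushes one path when its upload completes. Standalone usage, assuming the class as defined above:

import time

fhc = FHC()

f = open("/tmp/demo.bin", "wb+")
fhc.put("/tmp/demo.bin", f)       # cache it

f = fhc.pop("/tmp/demo.bin")      # borrow it back for the next chunk
f.write(b"chunk")
fhc.put("/tmp/demo.bin", f)

time.sleep(6)
fhc.clean()                       # idle >5s: handle is closed and evicted
assert not fhc.cache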
@@ -410,6 +459,10 @@ def log_thrs(log, ival, name):
 def vol_san(vols, txt):
     for vol in vols:
         txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
+        txt = txt.replace(
+            vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
+            vol.vpath.encode("utf-8"),
+        )

     return txt

@@ -425,11 +478,12 @@ def min_ex():

 @contextlib.contextmanager
 def ren_open(fname, *args, **kwargs):
+    fun = kwargs.pop("fun", open)
     fdir = kwargs.pop("fdir", None)
     suffix = kwargs.pop("suffix", None)

     if fname == os.devnull:
-        with open(fname, *args, **kwargs) as f:
+        with fun(fname, *args, **kwargs) as f:
             yield {"orz": [f, fname]}
             return

@@ -463,7 +517,7 @@ def ren_open(fname, *args, **kwargs):
         fname += suffix
         ext += suffix

-    with open(fsenc(fpath), *args, **kwargs) as f:
+    with fun(fsenc(fpath), *args, **kwargs) as f:
         if b64:
             fp2 = "fn-trunc.{}.txt".format(b64)
             fp2 = os.path.join(fdir, fp2)
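The new `fun` kwarg is what lets the stash handler route its gz/xz output through `ren_open`: the rename-on-collision machinery stays the same while the opener is swapped for `gzip.GzipFile` or `lzma.open`. A sketch of the call shape (constants as in the stash hunk, `ren_open` as defined here; the target dir and names are made up):

import gzip

# plain write:
with ren_open("put-123.bin", "wb", 512 * 1024, fdir="/srv/inc", suffix="-x") as f:
    f, final_name = f["orz"]
    f.write(b"data")

# same, but the opener is gzip.GzipFile (mtime pinned like the diff does):
ka = {"fdir": "/srv/inc", "suffix": "-x", "fun": gzip.GzipFile}
with ren_open("put-123.bin.gz", "wb", 9, None, 0x5FEE6600, **ka) as f:
    f, final_name = f["orz"]
    f.write(b"data")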
@@ -708,7 +762,7 @@ class MultipartParser(object):
 def get_boundary(headers):
     # boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
     # (whitespace allowed except as the last char)
-    ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
+    ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
     ct = headers["content-type"]
     m = re.match(ptn, ct, re.IGNORECASE)
     if not m:
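The only change is an optional ` *` before the semicolon, accepting clients that pad the content-type with whitespace ahead of the parameter list. A quick before/after check:

import re

ptn_old = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
ptn_new = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
ct = "multipart/form-data ; boundary=----x"  # note the space before ';'

print(bool(re.match(ptn_old, ct, re.IGNORECASE)))  # False
print(bool(re.match(ptn_new, ct, re.IGNORECASE)))  # True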
@@ -1137,6 +1191,9 @@ def sendfile_kern(lower, upper, f, s):


 def statdir(logger, scandir, lstat, top):
+    if lstat and ANYWIN:
+        lstat = False
+
     if lstat and not os.supports_follow_symlinks:
         scandir = False
@@ -165,6 +165,7 @@ a, #files tbody div a:last-child {
 .logue {
 	padding: .2em 1.5em;
 }
+.logue.hidden,
 .logue:empty {
 	display: none;
 }

@@ -288,40 +289,6 @@ html.light #ggrid>a.sel {
 #files tr:focus+tr td {
 	border-top: 1px solid transparent;
 }
-#blocked {
-	position: fixed;
-	top: 0;
-	left: 0;
-	width: 100%;
-	height: 100%;
-	background: #333;
-	font-size: 2.5em;
-	z-index: 99;
-}
-#blk_play,
-#blk_abrt {
-	position: fixed;
-	display: table;
-	width: 80%;
-}
-#blk_play {
-	height: 60%;
-	left: 10%;
-	top: 5%;
-}
-#blk_abrt {
-	height: 25%;
-	left: 10%;
-	bottom: 5%;
-}
-#blk_play a,
-#blk_abrt a {
-	display: table-cell;
-	vertical-align: middle;
-	text-align: center;
-	background: #444;
-	border-radius: 2em;
-}
 #widget {
 	position: fixed;
 	font-size: 1.4em;

@@ -628,6 +595,9 @@ input.eq_gain {
 	margin-top: .5em;
 	padding: 1.3em .3em;
 }
+#ico1 {
+	cursor: pointer;
+}

@@ -870,8 +840,8 @@ html.light #tree.nowrap #treeul a+a:hover {
 .opwide>div {
 	display: inline-block;
 	vertical-align: top;
-	border-left: .2em solid #4c4c4c;
-	margin-left: .5em;
+	border-left: .4em solid #4c4c4c;
+	margin: .7em 0 .7em .5em;
 	padding-left: .5em;
 }
 .opwide>div.fill {

@@ -880,6 +850,10 @@ html.light #tree.nowrap #treeul a+a:hover {
 .opwide>div>div>a {
 	line-height: 2em;
 }
+.opwide>div>h3 {
+	margin: 0 .4em;
+	padding: 0;
+}
 #op_cfg>div>div>span {
 	display: inline-block;
 	padding: .2em .4em;
@@ -1071,7 +1045,8 @@ a.btn,
 #rui label,
 #modal-ok,
 #modal-ng,
-#ops {
+#ops,
+#ico1 {
 	-webkit-user-select: none;
 	-moz-user-select: none;
 	-ms-user-select: none;

@@ -1231,14 +1206,6 @@ html.light tr.play a {
 html.light #files th:hover .cfg {
 	background: #ccc;
 }
-html.light #blocked {
-	background: #eee;
-}
-html.light #blk_play a,
-html.light #blk_abrt a {
-	background: #fff;
-	box-shadow: 0 .2em .4em #ddd;
-}
 html.light #widget a {
 	color: #06a;
 }

@@ -1602,7 +1569,7 @@ html.light #bbox-overlay figcaption a {
 	border-radius: .5em;
 	border-width: 1vw;
 	color: #fff;
-	transition: all 0.2s;
+	transition: all 0.12s;
 }
 #drops .dropdesc.hl.ok {
 	border-color: #fff;

@@ -1623,6 +1590,16 @@ html.light #bbox-overlay figcaption a {
 	vertical-align: middle;
 	text-align: center;
 }
+#drops .dropdesc>div>div {
+	position: absolute;
+	top: 40%;
+	top: calc(50% - .5em);
+	left: -.8em;
+}
+#drops .dropdesc>div>div+div {
+	left: auto;
+	right: -.8em;
+}
 #drops .dropzone {
 	z-index: 80386;
 	height: 50%;

@@ -1964,7 +1941,8 @@ html.light #u2foot .warn span {
 	background: #900;
 	border-color: #d06;
 }
-#u2tab a>span {
+#u2tab a>span,
+#unpost a>span {
 	font-weight: bold;
 	font-style: italic;
 	color: #fff;
@@ -135,6 +135,8 @@
|
|||||||
have_unpost = {{ have_unpost|tojson }},
|
have_unpost = {{ have_unpost|tojson }},
|
||||||
have_zip = {{ have_zip|tojson }},
|
have_zip = {{ have_zip|tojson }},
|
||||||
readme = {{ readme|tojson }};
|
readme = {{ readme|tojson }};
|
||||||
|
|
||||||
|
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||||
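The new inline `<script>` in the template above runs before any stylesheet-dependent render, so a dark-mode user never sees a white flash on page load. A minimal sketch of the same trick, hardened with a try/catch since localStorage access can throw in private browsing (the guard is my addition, not part of the commit):

```js
// pick the theme class before first paint;
// localStorage can throw (private browsing, storage disabled), so guard it
var drk = true;
try { drk = localStorage.lightmode != 1; } catch (e) { }
document.documentElement.setAttribute("class", drk ? "dark" : "light");
```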

@@ -70,10 +70,6 @@ ebi('op_up2k').innerHTML = (
 ' <input type="checkbox" id="ask_up" />\n' +
 ' <label for="ask_up" tt="ask for confirmation before upload starts">💭</label>\n' +
 ' </td>\n' +
-' <td rowspan="2">\n' +
-' <input type="checkbox" id="flag_en" />\n' +
-' <label for="flag_en" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>\n' +
-' </td>\n' +
 (have_up2k_idx ? (
 ' <td data-perm="read" rowspan="2">\n' +
 ' <input type="checkbox" id="fsearch" />\n' +
@@ -137,8 +133,8 @@ ebi('op_up2k').innerHTML = (
 var o = mknod('div');
 o.innerHTML = (
 '<div id="drops">\n' +
-' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span></div></div>\n' +
-' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span></div></div>\n' +
+' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span><div>🚀</div><div>🚀</div></div></div>\n' +
+' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span><div>🔎</div><div>🔎</div></div></div>\n' +
 ' <div class="dropzone" id="up_dz" v="up_zd"></div>\n' +
 ' <div class="dropzone" id="srch_dz" v="srch_zd"></div>\n' +
 '</div>'
@@ -168,6 +164,17 @@ ebi('op_cfg').innerHTML = (
 ' <div>\n' +
 ' <a id="u2turbo" class="tgl btn ttb" href="#" tt="the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>"does this have the same filesize on the server?"</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then "upload" the same files again to let the client verify them">turbo</a>\n' +
 ' <a id="u2tdate" class="tgl btn ttb" href="#" tt="has no effect unless the turbo button is enabled$N$Nreduces the yolo factor by a tiny amount; checks whether the file timestamps on the server matches yours$N$Nshould <em>theoretically</em> catch most unfinished/corrupted uploads, but is not a substitute for doing a verification pass with turbo disabled afterwards">date-chk</a>\n' +
+' <a id="flag_en" class="tgl btn" href="#" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</a>\n' +
+' </td>\n' +
+' </div>\n' +
+'</div>\n' +
+'<div>\n' +
+' <h3>favicon <span id="ico1">🎉</span></h3>\n' +
+' <div>\n' +
+' <input type="text" id="icot" style="width:1.3em" value="" tt="favicon text (blank and refresh to disable)" />' +
+' <input type="text" id="icof" style="width:2em" value="" tt="foreground color" />' +
+' <input type="text" id="icob" style="width:2em" value="" tt="background color" />' +
+' </td>\n' +
 ' </div>\n' +
 '</div>\n' +
 '<div><h3>key notation</h3><div id="key_notation"></div></div>\n' +
@@ -733,6 +740,12 @@ var pbar = (function () {
 for (var p = 1, mins = adur / 60; p <= mins; p++)
 pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h);
 
+pctx.font = '.5em sans-serif';
+pctx.fillStyle = light ? 'rgba(0,64,0,0.9)' : 'rgba(192,255,96,1)';
+for (var p = 1, mins = adur / 60; p <= mins; p++) {
+pctx.fillText(p, Math.floor(sm * p * 60 + 3), pc.h / 3);
+}
+
 pctx.fillStyle = light ? 'rgba(0,0,0,1)' : 'rgba(255,255,255,1)';
 for (var p = 1, mins = adur / 600; p <= mins; p++)
 pctx.fillRect(Math.floor(sm * p * 600), 0, 2, pc.h);
@@ -1349,7 +1362,7 @@ function play(tid, is_ev, seek, call_depth) {
 mp.au = mp.au_ogvjs = new OGVPlayer();
 }
 catch (ex) {
-return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + ex +
+return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + basenames(ex) +
 '\n\n<a href="#" onclick="new OGVPlayer();">click here</a> for a full crash report');
 }
 attempt_play = is_ev;
@@ -1426,12 +1439,7 @@ function play(tid, is_ev, seek, call_depth) {
 if (!seek) {
 var o = ebi(oid);
 o.setAttribute('id', 'thx_js');
-if (window.history && history.replaceState) {
-hist_replace(document.location.pathname + '#' + oid);
-}
-else {
-document.location.hash = oid;
-}
+sethash(oid);
 o.setAttribute('id', oid);
 }
 
@@ -1441,7 +1449,7 @@ function play(tid, is_ev, seek, call_depth) {
 return true;
 }
 catch (ex) {
-toast.err(0, esc('playback failed: ' + ex));
+toast.err(0, esc('playback failed: ' + basenames(ex)));
 }
 setclass(oid, 'play');
 setTimeout(next_song, 500);
@@ -1475,48 +1483,18 @@ function evau_error(e) {
 
 err += '\n\nFile: «' + uricom_dec(eplaya.src.split('/').slice(-1)[0])[0] + '»';
 
-toast.warn(15, esc(err + ''));
-}
-
-
-// show a fullscreen message
-function show_modal(html) {
-var body = document.body || document.getElementsByTagName('body')[0],
-div = mknod('div');
-
-div.setAttribute('id', 'blocked');
-div.innerHTML = html;
-unblocked();
-body.appendChild(div);
-}
-
-
-// hide fullscreen message
-function unblocked(e) {
-ev(e);
-var dom = ebi('blocked');
-if (dom)
-dom.parentNode.removeChild(dom);
+toast.warn(15, esc(basenames(err)));
 }
 
 
 // show ui to manually start playback of a linked song
 function autoplay_blocked(seek) {
-show_modal(
-'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
-'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
-
-var go = ebi('blk_go'),
-na = ebi('blk_na'),
-tid = mp.au.tid,
+var tid = mp.au.tid,
 fn = mp.tracks[tid].split(/\//).pop();
 
 fn = uricom_dec(fn.replace(/\+/g, ' '))[0];
 
-go.textContent = 'Play "' + fn + '"';
-go.onclick = function (e) {
-unblocked(e);
-toast.hide();
+modal.confirm('<h6>play this audio file?</h6>\n«' + esc(fn) + '»', function () {
 if (mp.au !== mp.au_ogvjs)
 // chrome 91 may permanently taint on a failed play()
 // depending on win10 settings or something? idk
@@ -1529,14 +1507,16 @@ function autoplay_blocked(seek) {
 
 play(tid, true, seek);
 mp.fade_in();
-};
-na.onclick = unblocked;
+}, null);
 }
 
 
-function play_linked() {
+function eval_hash() {
 var v = location.hash;
-if (v && v.indexOf('#af-') === 0) {
+if (!v)
+return;
+
+if (v.indexOf('#af-') === 0) {
 var id = v.slice(2).split('&');
 if (id[0].length != 10)
 return;
@@ -1550,6 +1530,13 @@ function play_linked() {
 
 return play(id[0], false, parseInt(m[1] || 0) * 60 + parseInt(m[2] || 0));
 }
+
+if (v.indexOf('#q=') === 0) {
+goto('search');
+var i = ebi('q_raw');
+i.value = uricom_dec(v.slice(3))[0];
+return i.oninput();
+}
 };
 
 
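The `play_linked` → `eval_hash` rename above turns the hash handler into a small dispatcher: `#af-<id>` (optionally with a timestamp) resumes audio playback, and the new `#q=<urlencoded query>` branch deep-links a file search. A rough sketch of that dispatch, using `decodeURIComponent` in place of copyparty's `uricom_dec` (the classifier shape is mine, for illustration):

```js
// classify a location.hash the way eval_hash does; the '#af-' and '#q='
// prefixes are the ones visible in the diff above
function classify_hash(v) {
    if (v.indexOf('#af-') === 0)
        return ['play', v.slice(2).split('&')[0]];        // track id
    if (v.indexOf('#q=') === 0)
        return ['search', decodeURIComponent(v.slice(3))]; // raw query
    return null;
}
console.log(classify_hash('#q=key%3E%3D1A'));  // ["search", "key>=1A"]
```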
@@ -1561,6 +1548,9 @@ function play_linked() {
 
 
 function sortfiles(nodes) {
+if (!nodes.length)
+return nodes;
+
 var sopts = jread('fsort', [["href", 1, ""]]);
 
 try {
@@ -2740,28 +2730,28 @@ document.onkeydown = function (e) {
 (function () {
 var sconf = [
 ["size",
-["szl", "sz_min", "minimum MiB", ""],
-["szu", "sz_max", "maximum MiB", ""]
+["szl", "sz_min", "minimum MiB", "16"],
+["szu", "sz_max", "maximum MiB", "16"]
 ],
 ["date",
-["dtl", "dt_min", "min. iso8601", ""],
-["dtu", "dt_max", "max. iso8601", ""]
+["dtl", "dt_min", "min. iso8601", "16"],
+["dtu", "dt_max", "max. iso8601", "16"]
 ],
 ["path",
-["path", "path", "path contains (space-separated)", "46"]
+["path", "path", "path contains (space-separated)", "34"]
 ],
 ["name",
-["name", "name", "name contains (negate with -nope)", "46"]
+["name", "name", "name contains (negate with -nope)", "34"]
 ]
 ];
 var oldcfg = [];
 
 if (QS('#srch_form.tags')) {
 sconf.push(["tags",
-["tags", "tags", "tags contains (^=start, end=$)", "46"]
+["tags", "tags", "tags contains (^=start, end=$)", "34"]
 ]);
 sconf.push(["adv.",
-["adv", "adv", "key>=1A key<=2B .bpm>165", "46"]
+["adv", "adv", "key>=1A key<=2B .bpm>165", "34"]
 ]);
 }
 
@@ -2778,8 +2768,8 @@ document.onkeydown = function (e) {
 html.push(
 '<td colspan="' + csp + '"><input id="' + hn + 'c" type="checkbox">\n' +
 '<label for="' + hn + 'c">' + sconf[a][b][2] + '</label>\n' +
-'<br /><input id="' + hn + 'v" type="text" size="' + sconf[a][b][3] +
-'" name="' + sconf[a][b][1] + '" /></td>');
+'<br /><input id="' + hn + 'v" type="text" style="width:' + sconf[a][b][3] +
+'em" name="' + sconf[a][b][1] + '" /></td>');
 if (csp == 2)
 break;
 }
@@ -2951,7 +2941,7 @@ document.onkeydown = function (e) {
 var r = res.hits[a],
 ts = parseInt(r.ts),
 sz = esc(r.sz + ''),
-rp = esc(r.rp + ''),
+rp = esc(uricom_dec(r.rp + '')[0]),
 ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
 links = linksplit(r.rp + '');
 
@@ -2991,6 +2981,7 @@ document.onkeydown = function (e) {
 reload_browser();
 filecols.set_style(['File Name']);
 
+sethash('q=' + uricom_enc(this.q_raw));
 ebi('unsearch').onclick = unsearch;
 }
 
@@ -3000,6 +2991,7 @@ document.onkeydown = function (e) {
 ebi('files').innerHTML = orig_html;
 ebi('files').removeAttribute('q_raw');
 orig_html = null;
+sethash('');
 reload_browser();
 }
 })();
@@ -3147,7 +3139,7 @@ var treectl = (function () {
 
 treectl.goto = function (url, push) {
 get_tree("", url, true);
-reqls(url, push);
+reqls(url, push, true);
 }
 
 function get_tree(top, dst, rst) {
@@ -3227,12 +3219,12 @@ var treectl = (function () {
 }
 
 function reload_tree() {
-var cdir = get_evpath(),
+var cdir = get_vpath(),
 links = QSA('#treeul a+a'),
 nowrap = QS('#tree.nowrap') && QS('#hovertree.on');
 
 for (var a = 0, aa = links.length; a < aa; a++) {
-var href = links[a].getAttribute('href');
+var href = uricom_dec(links[a].getAttribute('href'))[0];
 links[a].setAttribute('class', href == cdir ? 'hl' : '');
 links[a].onclick = treego;
 links[a].onmouseenter = nowrap ? menter : null;
@@ -3275,7 +3267,7 @@ var treectl = (function () {
 reqls(this.getAttribute('href'), true);
 }
 
-function reqls(url, hpush) {
+function reqls(url, hpush, no_tree) {
 var xhr = new XMLHttpRequest();
 xhr.top = url;
 xhr.hpush = hpush;
@@ -3283,7 +3275,7 @@ var treectl = (function () {
 xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true);
 xhr.onreadystatechange = recvls;
 xhr.send();
-if (hpush)
+if (hpush && !no_tree)
 get_tree('.', xhr.top);
 
 enspin(thegrid.en ? '#gfiles' : '#files');
@@ -3612,7 +3604,7 @@ var filecols = (function () {
 "pixfmt": "subsampling / pixel structure",
 "resw": "horizontal resolution",
 "resh": "veritcal resolution",
-"acs": "audio channels",
+"chs": "audio channels",
 "hz": "sample rate"
 };
 
@@ -4078,6 +4070,106 @@ var msel = (function () {
 })();
 
 
+(function () {
+if (!window.FormData)
+return;
+
+var form = QS('#op_mkdir>form'),
+tb = QS('#op_mkdir input[name="name"]'),
+sf = mknod('div');
+
+clmod(sf, 'msg', 1);
+form.parentNode.appendChild(sf);
+
+form.onsubmit = function (e) {
+ev(e);
+clmod(sf, 'vis', 1);
+sf.textContent = 'creating "' + tb.value + '"...';
+
+var fd = new FormData();
+fd.append("act", "mkdir");
+fd.append("name", tb.value);
+
+var xhr = new XMLHttpRequest();
+xhr.vp = get_evpath();
+xhr.dn = tb.value;
+xhr.open('POST', xhr.vp, true);
+xhr.onreadystatechange = cb;
+xhr.responseType = 'text';
+xhr.send(fd);
+
+return false;
+};
+
+function cb() {
+if (this.readyState != XMLHttpRequest.DONE)
+return;
+
+if (this.vp !== get_evpath()) {
+sf.textContent = 'aborted due to location change';
+return;
+}
+
+if (this.status !== 200) {
+sf.textContent = 'error: ' + this.responseText;
+return;
+}
+
+tb.value = '';
+clmod(sf, 'vis');
+sf.textContent = '';
+treectl.goto(this.vp + uricom_enc(this.dn) + '/', true);
+}
+})();
+
+
+(function () {
+var form = QS('#op_msg>form'),
+tb = QS('#op_msg input[name="msg"]'),
+sf = mknod('div');
+
+clmod(sf, 'msg', 1);
+form.parentNode.appendChild(sf);
+
+form.onsubmit = function (e) {
+ev(e);
+clmod(sf, 'vis', 1);
+sf.textContent = 'sending...';
+
+var xhr = new XMLHttpRequest(),
+ct = 'application/x-www-form-urlencoded;charset=UTF-8';
+
+xhr.msg = tb.value;
+xhr.open('POST', get_evpath(), true);
+xhr.responseType = 'text';
+xhr.onreadystatechange = cb;
+xhr.setRequestHeader('Content-Type', ct);
+if (xhr.overrideMimeType)
+xhr.overrideMimeType('Content-Type', ct);
+
+xhr.send('msg=' + uricom_enc(xhr.msg));
+return false;
+};
+
+function cb() {
+if (this.readyState != XMLHttpRequest.DONE)
+return;
+
+if (this.status !== 200) {
+sf.textContent = 'error: ' + this.responseText;
+return;
+}
+
+tb.value = '';
+clmod(sf, 'vis');
+sf.textContent = 'sent: "' + this.msg + '"';
+setTimeout(function () {
+treectl.goto(get_evpath());
+}, 100);
+}
+})();
+
+
 function show_readme(md, url, depth) {
 if (!treectl.ireadme)
 return;
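Both new IIFEs above upgrade plain HTML forms to XHR. Judging from the code alone, the server-side contract is: directory creation is a multipart POST of `act=mkdir` plus `name` to the current directory URL, and a message is a urlencoded `msg=...` POST to the same place. A fetch-based sketch of the mkdir call (the promise-style error handling is mine, not copyparty's):

```js
// create a folder the same way the new #op_mkdir handler does:
// multipart form fields act=mkdir and name, POSTed to the directory itself
function mkdir(dir_url, name) {
    var fd = new FormData();
    fd.append("act", "mkdir");
    fd.append("name", name);
    return fetch(dir_url, { method: "POST", body: fd }).then(function (r) {
        if (!r.ok)
            return r.text().then(function (t) { throw new Error(t); });
    });
}
// mkdir(location.pathname, "new-folder");
```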
@@ -4245,7 +4337,6 @@ var unpost = (function () {
 }
 
 ct.onclick = function (e) {
-ev(e);
 var tgt = e.target.closest('a[me]');
 if (!tgt)
 return;
@@ -4253,6 +4344,7 @@ var unpost = (function () {
 if (!tgt.getAttribute('href'))
 return;
 
+ev(e);
 var ame = tgt.getAttribute('me');
 if (ame != r.me)
 return toast.err(0, 'something broke, please try a refresh');
@@ -4263,7 +4355,7 @@ var unpost = (function () {
 
 for (var a = n; a < n2; a++)
 if (QS('#op_unpost a.n' + a))
-req.push(r.files[a].vp);
+req.push(uricom_dec(r.files[a].vp)[0]);
 
 var links = QSA('#op_unpost a.n' + n);
 for (var a = 0, aa = links.length; a < aa; a++) {
@@ -4351,6 +4443,9 @@ function reload_browser(not_mp) {
 makeSortable(ebi('files'), mp.read_order.bind(mp));
 }
 
+for (var a = 0; a < 2; a++)
+clmod(ebi(a ? 'pro' : 'epi'), 'hidden', ebi('unsearch'));
+
 if (window['up2k'])
 up2k.set_fsearch();
 
@@ -4359,4 +4454,4 @@ function reload_browser(not_mp) {
 }
 reload_browser(true);
 mukey.render();
-play_linked();
+setTimeout(eval_hash, 1);

@@ -135,13 +135,13 @@ var md_opt = {
 
 (function () {
 var l = localStorage,
-drk = l.getItem('lightmode') != 1,
+drk = l.lightmode != 1,
 btn = document.getElementById("lightswitch"),
 f = function (e) {
 if (e) { e.preventDefault(); drk = !drk; }
 document.documentElement.setAttribute("class", drk? "dark":"light");
 btn.innerHTML = "go " + (drk ? "light":"dark");
-l.setItem('lightmode', drk? 0:1);
+l.lightmode = drk? 0:1;
 };
 
 btn.onclick = f;

@@ -33,11 +33,11 @@ var md_opt = {
 
 var lightswitch = (function () {
 var l = localStorage,
-drk = l.getItem('lightmode') != 1,
+drk = l.lightmode != 1,
 f = function (e) {
 if (e) drk = !drk;
 document.documentElement.setAttribute("class", drk? "dark":"light");
-l.setItem('lightmode', drk? 0:1);
+l.lightmode = drk? 0:1;
 };
 f();
 return f;

@@ -80,7 +80,7 @@
 <a href="#" id="repl">π</a>
 <script>
 
-if (localStorage.getItem('lightmode') != 1)
+if (localStorage.lightmode != 1)
 document.documentElement.setAttribute("class", "dark");
 
 </script>

@@ -258,6 +258,16 @@ html.light #pctl *:focus,
 html.light .btn:focus {
 box-shadow: 0 .1em .2em #037 inset;
 }
+input[type="text"]:focus,
+input:not([type]):focus,
+textarea:focus {
+box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
+}
+html.light input[type="text"]:focus,
+html.light input:not([type]):focus,
+html.light textarea:focus {
+box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
+}
 
 
 

@@ -512,9 +512,13 @@ function up2k_init(subtle) {
 // chrome<37 firefox<34 edge<12 opera<24 safari<7
 shame = 'your browser is impressively ancient';
 
-var got_deps = false;
+function got_deps() {
+return subtle || window.asmCrypto || window.hashwasm;
+}
+
+var loading_deps = false;
 function init_deps() {
-if (!got_deps && !subtle && !window.asmCrypto) {
+if (!loading_deps && !got_deps()) {
 var fn = 'sha512.' + sha_js + '.js';
 showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
 import_js('/.cpr/deps/' + fn, unmodal);
@@ -525,7 +529,7 @@ function up2k_init(subtle) {
 ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
 (sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
 }
-got_deps = true;
+loading_deps = true;
 }
 
 if (perms.length && !has(perms, 'read') && has(perms, 'write'))
@@ -578,7 +582,7 @@ function up2k_init(subtle) {
 
 bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
 bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
-bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg, false);
+bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
 bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
 bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false);
 bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false);
@@ -744,11 +748,14 @@ function up2k_init(subtle) {
 
 more_one_file();
 var bad_files = [],
+nil_files = [],
 good_files = [],
 dirs = [];
 
 for (var a = 0; a < files.length; a++) {
-var fobj = files[a];
+var fobj = files[a],
+dst = good_files;
+
 if (is_itemlist) {
 if (fobj.kind !== 'file')
 continue;
@@ -765,16 +772,15 @@ function up2k_init(subtle) {
 }
 try {
 if (fobj.size < 1)
-throw 1;
+dst = nil_files;
 }
 catch (ex) {
-bad_files.push(fobj.name);
-continue;
+dst = bad_files;
 }
-good_files.push([fobj, fobj.name]);
+dst.push([fobj, fobj.name]);
 }
 if (dirs) {
-return read_dirs(null, [], dirs, good_files, bad_files);
+return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
 }
 }
 
@@ -788,7 +794,7 @@ function up2k_init(subtle) {
 }
 
 var rd_missing_ref = [];
-function read_dirs(rd, pf, dirs, good, bad, spins) {
+function read_dirs(rd, pf, dirs, good, nil, bad, spins) {
 spins = spins || 0;
 if (++spins == 5)
 rd_missing_ref = rd_flatten(pf, dirs);
@@ -809,7 +815,7 @@ function up2k_init(subtle) {
 msg.push('<li>' + esc(missing[a]) + '</li>');
 
 return modal.alert(msg.join('') + '</ul>', function () {
-read_dirs(rd, [], [], good, bad, spins);
+read_dirs(rd, [], [], good, nil, bad, spins);
 });
 }
 spins = 0;
@@ -817,11 +823,11 @@ function up2k_init(subtle) {
 
 if (!dirs.length) {
 if (!pf.length)
-return gotallfiles(good, bad);
+return gotallfiles(good, nil, bad);
 
 console.log("retry pf, " + pf.length);
 setTimeout(function () {
-read_dirs(rd, pf, dirs, good, bad, spins);
+read_dirs(rd, pf, dirs, good, nil, bad, spins);
 }, 50);
 return;
 }
@@ -843,14 +849,15 @@ function up2k_init(subtle) {
 pf.push(name);
 dn.file(function (fobj) {
 apop(pf, name);
+var dst = good;
 try {
-if (fobj.size > 0) {
-good.push([fobj, name]);
-return;
+if (fobj.size < 1)
+dst = nil;
 }
+catch (ex) {
+dst = bad;
 }
-catch (ex) { }
-bad.push(name);
+dst.push([fobj, name]);
 });
 }
 ngot += 1;
@@ -859,23 +866,33 @@ function up2k_init(subtle) {
 dirs.shift();
 rd = null;
 }
-return read_dirs(rd, pf, dirs, good, bad, spins);
+return read_dirs(rd, pf, dirs, good, nil, bad, spins);
 });
 }
 
-function gotallfiles(good_files, bad_files) {
+function gotallfiles(good_files, nil_files, bad_files) {
+var ntot = good_files.concat(nil_files, bad_files).length;
 if (bad_files.length) {
-var ntot = bad_files.length + good_files.length,
-msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
+var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot);
 
 for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
-msg += '-- ' + bad_files[a] + '\n';
+msg += '-- ' + bad_files[a][1] + '\n';
 
-if (good_files.length - bad_files.length <= 1 && ANDROID)
-msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
+msg += '\nMaybe it works better if you select just one file';
 return modal.alert(msg, function () {
-gotallfiles(good_files, []);
+gotallfiles(good_files, nil_files, []);
+});
+}
+
+if (nil_files.length) {
+var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot);
+for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
+msg += '-- ' + nil_files[a][1] + '\n';
+
+msg += '\nMaybe it works better if you select just one file';
+return modal.confirm(msg, function () {
+gotallfiles(good_files.concat(nil_files), [], []);
+}, function () {
+gotallfiles(good_files, [], []);
 });
 }
 
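The skip/confirm messages above rely on `'...{0}... {1}...'.format(a, b)`, which is not standard JavaScript; copyparty's util presumably provides a numbered-placeholder helper. A minimal hypothetical stand-in with the same observable behavior:

```js
// hypothetical polyfill matching how .format() is used above:
// {0}, {1}, ... are replaced by the corresponding argument
if (!String.prototype.format)
    String.prototype.format = function () {
        var args = arguments;
        return this.replace(/\{(\d+)\}/g, function (m, n) {
            return args[n] === undefined ? m : args[n];
        });
    };
console.log('These {0} files (of {1} total)'.format(3, 7));
// -> "These 3 files (of 7 total)"
```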
@@ -921,7 +938,7 @@ function up2k_init(subtle) {
 "t0": now,
 "fobj": fobj,
 "name": name,
-"size": fobj.size,
+"size": fobj.size || 0,
 "lmod": lmod / 1000,
 "purl": fdir,
 "done": false,
@@ -939,14 +956,16 @@ function up2k_init(subtle) {
 
 pvis.addfile([
 uc.fsearch ? esc(entry.name) : linksplit(
-uricom_dec(entry.purl)[0] + entry.name).join(' '),
+entry.purl + uricom_enc(entry.name)).join(' '),
 '📐 hash',
 ''
 ], fobj.size, draw_each);
 
 st.bytes.total += fobj.size;
 st.files.push(entry);
-if (uc.turbo)
+if (!entry.size)
+push_t(st.todo.handshake, entry);
+else if (uc.turbo)
 push_t(st.todo.head, entry);
 else
 push_t(st.todo.hash, entry);
@@ -1081,11 +1100,6 @@ function up2k_init(subtle) {
 st.busy.handshake.length)
 return false;
 
-if (st.busy.handshake.length)
-for (var n = t.n - 1; n >= t.n - parallel_uploads && n >= 0; n--)
-if (st.files[n].t_uploading)
-return false;
-
 if ((uc.multitask ? 1 : 0) <
 st.todo.upload.length +
 st.busy.upload.length)
@@ -1122,7 +1136,7 @@ function up2k_init(subtle) {
 if (running)
 return;
 
-if (crashed)
+if (crashed || !got_deps())
 return defer();
 
 running = true;
@@ -1138,6 +1152,23 @@ function up2k_init(subtle) {
 st.busy.handshake.length +
 st.busy.upload.length;
 
+if (was_busy && !is_busy) {
+for (var a = 0; a < st.files.length; a++) {
+var t = st.files[a];
+if (t.want_recheck) {
+t.rechecks++;
+t.want_recheck = false;
+push_t(st.todo.handshake, t);
+}
+}
+is_busy = st.todo.handshake.length;
+try {
+if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
+treectl.goto(get_evpath());
+}
+catch (ex) { }
+}
+
 if (was_busy != is_busy) {
 was_busy = is_busy;
 
@@ -1172,6 +1203,8 @@ function up2k_init(subtle) {
 ebi('u2etas').style.textAlign = 'left';
 }
 etafun();
+if (pvis.act == 'bz')
+pvis.changecard('bz');
 }
 
 if (flag) {
@@ -1370,7 +1403,7 @@ function up2k_init(subtle) {
 pvis.move(t.n, 'ng');
 apop(st.busy.hash, t);
 st.bytes.finished += t.size;
-return tasker();
+return;
 }
 
 toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
@@ -1446,7 +1479,6 @@ function up2k_init(subtle) {
 console.log('head onerror, retrying', t);
 apop(st.busy.head, t);
 st.todo.head.unshift(t);
-tasker();
 };
 function orz(e) {
 var ok = false;
@@ -1468,6 +1500,7 @@ function up2k_init(subtle) {
 }
 
 t.done = true;
+t.fobj = null;
 st.bytes.hashed += t.size;
 st.bytes.finished += t.size;
 pvis.move(t.n, 'bz');
@@ -1511,7 +1544,6 @@ function up2k_init(subtle) {
 apop(st.busy.handshake, t);
 st.todo.handshake.unshift(t);
 t.keepalive = keepalive;
-tasker();
 };
 function orz(e) {
 if (t.t_busied != me) {
@@ -1537,15 +1569,18 @@ function up2k_init(subtle) {
 }
 else {
 smsg = 'found';
-var hit = response.hits[0],
-msg = linksplit(hit.rp).join(''),
+var msg = [];
+for (var a = 0, aa = Math.min(20, response.hits.length); a < aa; a++) {
+var hit = response.hits[a],
 tr = unix2iso(hit.ts),
 tu = unix2iso(t.lmod),
 diff = parseInt(t.lmod) - parseInt(hit.ts),
 cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
 sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
 
-msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>';
+msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
+}
+msg = msg.join('<br />\n');
 }
 pvis.seth(t.n, 2, msg);
 pvis.seth(t.n, 1, smsg);
@@ -1553,6 +1588,7 @@ function up2k_init(subtle) {
 apop(st.busy.handshake, t);
 st.bytes.finished += t.size;
 t.done = true;
+t.fobj = null;
 tasker();
 return;
 }
@@ -1563,7 +1599,7 @@ function up2k_init(subtle) {
 console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
 t.purl = rsp_purl;
 t.name = response.name;
-pvis.seth(t.n, 0, linksplit(uricom_dec(t.purl)[0] + t.name).join(' '));
+pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
 }
 
 var chunksize = get_chunksize(t.size),
@@ -1619,6 +1655,7 @@ function up2k_init(subtle) {
 
 if (done) {
 t.done = true;
+t.fobj = null;
 st.bytes.finished += t.size - t.bytes_uploaded;
 var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.),
 spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
@@ -1653,13 +1690,19 @@ function up2k_init(subtle) {
 }
 
 st.bytes.finished += t.size;
-if (rsp.indexOf('partial upload exists') !== -1 ||
-rsp.indexOf('file already exists') !== -1) {
+var err_pend = rsp.indexOf('partial upload exists') + 1,
+err_dupe = rsp.indexOf('file already exists') + 1;
+
+if (err_pend || err_dupe) {
 err = rsp;
 ofs = err.indexOf('\n/');
 if (ofs !== -1) {
 err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
 }
+if (!t.rechecks && err_pend) {
+t.rechecks = 0;
+t.want_recheck = true;
+}
 }
 if (err != "") {
 pvis.seth(t.n, 1, "ERROR");
@@ -1705,7 +1748,8 @@ function up2k_init(subtle) {
 st.busy.upload.push(upt);
 
 var npart = upt.npart,
-t = st.files[upt.nfile];
+t = st.files[upt.nfile],
+tries = 0;
 
 if (!t.t_uploading)
 t.t_uploading = Date.now();
@@ -1756,8 +1800,9 @@ function up2k_init(subtle) {
 if (crashed)
 return;
 
-console.log('chunkpit onerror, retrying', t);
-do_send();
+toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
+console.log('chunkpit onerror,', ++tries, t);
+setTimeout(do_send, 10 * 1000);
 };
 xhr.open('POST', t.purl, true);
 xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
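The chunkpit change above replaces the immediate `do_send()` retry with a counted attempt, a visible toast, and a 10-second delay, so a flaky connection backs off instead of hot-looping. The shape of that pattern, reduced to a standalone sketch (`url` and `body` are placeholder names):

```js
// on network error: count the attempt, tell the user, retry after 10s
function post_with_retry(url, body, tries) {
    tries = tries || 0;
    var xhr = new XMLHttpRequest();
    xhr.onerror = function () {
        console.log('post failed,', tries + 1, 'tries; retrying in 10sec');
        setTimeout(function () {
            post_with_retry(url, body, tries + 1);
        }, 10 * 1000);
    };
    xhr.open('POST', url, true);
    xhr.send(body);
}
```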
@@ -1920,8 +1965,8 @@ function up2k_init(subtle) {
 flag = up2k_flagbus();
 }
 catch (ex) {
-toast.err(5, "not supported on your browser:\n" + ex);
-tgl_flag_en();
+toast.err(5, "not supported on your browser:\n" + esc(basenames(ex)));
+bcfg_set('flag_en', false);
 }
 }
 else if (!uc.flag_en && flag) {
@@ -1972,6 +2017,15 @@ function warn_uploader_busy(e) {
 
 
 tt.init();
+favico.init();
+ebi('ico1').onclick = function () {
+var a = favico.txt == this.textContent;
+swrite('icot', a ? 'c' : this.textContent);
+swrite('icof', a ? null : '000');
+swrite('icob', a ? null : '');
+favico.init();
+};
+
 
 if (QS('#op_up2k.act'))
 goto_up2k();

@@ -29,18 +29,24 @@ function esc(txt) {
 }[c];
 });
 }
+function basenames(txt) {
+return (txt + '').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js');
+}
+if ((document.location + '').indexOf(',rej,') + 1)
 window.onunhandledrejection = function (e) {
 var err = e.reason;
 try {
 err += '\n' + e.reason.stack;
 }
 catch (e) { }
+err = basenames(err);
 console.log("REJ: " + err);
 try {
 toast.warn(30, err);
 }
 catch (e) { }
 };
 
 try {
 console.hist = [];
 var hook = function (t) {
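The new `basenames()` helper makes error text origin-agnostic: it collapses `https://host/` prefixes to `/` and strips the four-letter `?_=` cachebuster from script names, so stack traces read the same regardless of which server they came from. Its effect, using the exact regexes from the diff (the hostname is a made-up example):

```js
var s = 'TypeError at https://example.com/.cpr/browser.js?_=aBcD:1:2';
console.log(s.replace(/https?:\/\/[^ \/]+\//g, '/')
             .replace(/js\?_=[a-zA-Z]{4}/g, 'js'));
// -> "TypeError at /.cpr/browser.js:1:2"
```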
@@ -151,7 +157,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
 );
 document.head.appendChild(s);
 }
-exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
+exbox.innerHTML = basenames(html.join('\n')).replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
 exbox.style.display = 'block';
 }
 catch (e) {
@@ -241,7 +247,9 @@ function import_js(url, cb) {
 script.src = url;
 script.onload = cb;
 script.onerror = function () {
-toast.err(0, 'Failed to load module:\n' + url);
+var m = 'Failed to load module:\n' + url;
+console.log(m);
+toast.err(0, m);
 };
 head.appendChild(script);
 }
@@ -400,19 +408,17 @@ function linksplit(rp) {
 link = rp.slice(0, ofs + 1);
 rp = rp.slice(ofs + 1);
 }
-var vlink = esc(link),
-elink = uricom_enc(link);
+var vlink = esc(uricom_dec(link)[0]);
 
 if (link.indexOf('/') !== -1) {
 vlink = vlink.slice(0, -1) + '<span>/</span>';
-elink = elink.slice(0, -3) + '/';
 }
 
 if (!rp && q)
-elink += q;
+link += q;
 
-ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
-apath += elink;
+ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
+apath += link;
 }
 return ret;
 }
@@ -577,15 +583,23 @@ function jcp(obj) {
 
 
 function sread(key) {
+try {
 return localStorage.getItem(key);
 }
+catch (e) {
+return null;
+}
+}
 
 function swrite(key, val) {
+try {
 if (val === undefined || val === null)
 localStorage.removeItem(key);
 else
 localStorage.setItem(key, val);
 }
+catch (e) { }
+}
 
 function jread(key, fb) {
 var str = sread(key);
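The try/catch added around sread/swrite matters because localStorage throws in some private-browsing modes and when storage is disabled; with the guards, reads degrade to `null` and writes become a no-op instead of crashing the page. Usage of the helpers defined above is unchanged:

```js
// sread/swrite as defined above: safe even where localStorage throws
swrite('icot', 'c');    // persisted if possible, silently dropped otherwise
var v = sread('icot');  // 'c', or null if unavailable / never set
swrite('icot', null);   // null/undefined removes the key
```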
@@ -607,9 +621,9 @@ function icfg_get(name, defval) {
 }
 
 function fcfg_get(name, defval) {
-var o = ebi(name);
+var o = ebi(name),
+val = parseFloat(sread(name));
 
-var val = parseFloat(sread(name));
 if (isNaN(val))
 return parseFloat(o ? o.value : defval);
 
@@ -619,6 +633,19 @@ function fcfg_get(name, defval) {
 return val;
 }
 
+function scfg_get(name, defval) {
+var o = ebi(name),
+val = sread(name);
+
+if (val === null)
+val = defval;
+
+if (o)
+o.value = val;
+
+return val;
+}
+
 function bcfg_get(name, defval) {
 var o = ebi(name);
 if (!o)
@@ -670,6 +697,21 @@ function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
 return v;
 }
 
+function scfg_bind(obj, oname, cname, defval, cb) {
+var v = scfg_get(cname, defval),
+el = ebi(cname);
+
+obj[oname] = v;
+if (el)
+el.oninput = function (e) {
+swrite(cname, obj[oname] = this.value);
+if (cb)
+cb(obj[oname]);
+};
+
+return v;
+}
+
 
 function hist_push(url) {
 console.log("h-push " + url);
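`scfg_get`/`scfg_bind` round out the cfg-helper family (bcfg_* for booleans, fcfg_* for floats) with string settings: bind an `<input>` by element id to a persisted value, mirror it into an object property, and fire an optional callback on edits. A usage sketch (`'icot'` is the favicon-text field which the favico module further down binds exactly this way):

```js
// keep cfg.txt in sync with the #icot text input and localStorage
var cfg = {};
scfg_bind(cfg, 'txt', 'icot', '', function (v) {
    console.log('favicon text is now:', v);
});
// typing into #icot now updates cfg.txt, persists via swrite(),
// and invokes the callback with the new value
```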
@@ -681,6 +723,15 @@ function hist_replace(url) {
 history.replaceState(url, url, url);
 }
 
+function sethash(hv) {
+if (window.history && history.replaceState) {
+hist_replace(document.location.pathname + '#' + hv);
+}
+else {
+document.location.hash = hv;
+}
+}
+
 
 var timer = (function () {
 var r = {};
@@ -835,16 +886,7 @@ var tt = (function () {
 }
 
 r.init = function () {
-var ttb = ebi('tooltips');
-if (ttb) {
-ttb.onclick = function (e) {
-ev(e);
-r.en = !r.en;
-bcfg_set('tooltips', r.en);
-r.init();
-};
-r.en = bcfg_get('tooltips', true)
-}
+bcfg_bind(r, 'en', 'tooltips', r.en, r.init);
 r.att(document);
 };
 
@@ -907,6 +949,9 @@ var toast = (function () {
 if (sec)
 te = setTimeout(r.hide, sec * 1000);
 
+if (txt.indexOf('<body>') + 1)
+txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
+
 obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
 obj.className = cl;
 sec += obj.offsetWidth;
@@ -1048,7 +1093,7 @@ var modal = (function () {
 }
 function _confirm(html, cok, cng, fun) {
 cb_ok = cok;
-cb_ng = cng === undefined ? cok : null;
+cb_ng = cng === undefined ? cok : cng;
 cb_up = fun;
 html += '<div id="modalb">' + ok_cancel + '</div>';
 r.show(html);
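The `cb_ng` fix above is what makes a real cancel callback possible: previously, any explicitly-passed third argument was discarded (`cb_ng` became null), and the new empty-file prompt in `gotallfiles` depends on both branches firing. The calling shape it enables:

```js
// ok and cancel are now both honored; before the fix, passing a cancel
// callback silently disabled the cancel action
modal.confirm('upload 3 empty files anyways?', function () {
    console.log('ok: include them');
}, function () {
    console.log('cancel: skip them');
});
```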
@@ -1164,3 +1209,54 @@ function repl(e) {
 }
 if (ebi('repl'))
 ebi('repl').onclick = repl;
+
+
+var favico = (function () {
+var r = {};
+r.en = true;
+
+function gx(txt) {
+return (
+'<?xml version="1.0" encoding="UTF-8"?>\n' +
+'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg"><g>\n' +
+(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
+'<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' +
+' font-family="sans-serif" font-weight="bold" font-size="64px"' +
+' fill="#' + r.fg + '">' + txt + '</text></g></svg>'
+);
+}
+
+r.upd = function () {
+var i = QS('link[rel="icon"]'), b64;
+if (!r.txt)
+return;
+
+try {
+b64 = btoa(gx(r.txt));
+}
+catch (ex) {
+b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
+function x(m, v) { return String.fromCharCode('0x' + v); });
+
+b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
+}
+
+if (!i) {
+i = mknod('link');
+i.rel = 'icon';
+document.head.appendChild(i);
+}
+i.href = 'data:image/svg+xml;base64,' + b64;
+};
+
+r.init = function () {
+clearTimeout(r.to);
+scfg_bind(r, 'txt', 'icot', '', r.upd);
+scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
+scfg_bind(r, 'bg', 'icob', '333', r.upd);
+r.upd();
+};
+
+r.to = setTimeout(r.init, 100);
+return r;
+})();
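The favico module boils down to one browser trick: render the icon as an inline SVG and assign it to `<link rel="icon">` as a base64 `data:` URI, so no image asset or server round-trip is needed. A stripped-down sketch (`set_favicon` is a made-up name; note that `btoa` throws on non-latin1 text such as emoji, which is why the module above falls back to the `unescape(encodeURIComponent(...))` dance):

```js
function set_favicon(txt, fg, bg) {
    var svg = '<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">' +
        '<rect width="100%" height="100%" rx="16" fill="#' + bg + '"/>' +
        '<text x="50%" y="55%" dominant-baseline="middle"' +
        ' text-anchor="middle" font-size="64px" fill="#' + fg + '">' +
        txt + '</text></svg>';
    var link = document.querySelector('link[rel="icon"]');
    if (!link) {
        link = document.createElement('link');
        link.rel = 'icon';
        document.head.appendChild(link);
    }
    link.href = 'data:image/svg+xml;base64,' + btoa(svg);
}
set_favicon('c', 'fc5', '333');
```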

@@ -162,7 +162,7 @@ brew install python@2
 pip install virtualenv
 
 # readme toc
-cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
+cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
 
 # fix firefox phantom breakpoints,
 # suggestions from bugtracker, doesnt work (debugger is not attachable)

@@ -238,7 +238,7 @@ rm have
 rm -rf copyparty/web/dd
 f=copyparty/web/browser.css
 gzip -d "$f.gz" || true
-sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
+sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
 tmv "$f"
 }
 
@@ -271,7 +271,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
 }
 !/\}$/ {printf "%s",$0;next}
 1
-' <$f | sed 's/;\}$/}/' >t
+' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
 tmv "$f"
 done
 unexpand -h 2>/dev/null &&

@@ -9,7 +9,7 @@ import subprocess as sp
 to edit this file, use HxD or "vim -b"
 (there is compressed stuff at the end)
 
-run me with any version of python, i will unpack and run copyparty
+run me with python 2.7 or 3.3+ to unpack and run copyparty
 
 there's zero binaries! just plaintext python scripts all the way down
 so you can easily unpack the archive and inspect it for shady stuff

2 setup.py
@@ -114,7 +114,7 @@ args = {
 "install_requires": ["jinja2"],
 "extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
 "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
-"scripts": ["bin/copyparty-fuse.py"],
+"scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
 "cmdclass": {"clean2": clean2},
 }
 

@@ -48,7 +48,8 @@ class Cfg(Namespace):
 mte="a",
 mth="",
 hist=None,
-no_hash=False,
+no_idx=None,
+no_hash=None,
 css_browser=None,
 **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
 )

@@ -23,7 +23,8 @@ class Cfg(Namespace):
 "mte": "a",
 "mth": "",
 "hist": None,
-"no_hash": False,
+"no_idx": None,
+"no_hash": None,
 "css_browser": None,
 "no_voldump": True,
 "no_logues": False,

@@ -3,6 +3,7 @@ import sys
 import time
 import shutil
 import jinja2
+import threading
 import tempfile
 import platform
 import subprocess as sp
@@ -28,7 +29,7 @@ if MACOS:
 # 25% faster; until any tests do symlink stuff
 
 
-from copyparty.util import Unrecv
+from copyparty.util import Unrecv, FHC
 
 
 def runcmd(argv):
@@ -132,6 +133,8 @@ class VHttpConn(object):
 self.log_src = "a"
 self.lf_url = None
 self.hsrv = VHttpSrv()
+self.u2fh = FHC()
+self.mutex = threading.Lock()
 self.nreq = 0
 self.nbyte = 0
 self.ico = None