Compare commits
276 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c5d822c70a | ||
|
|
9c09b4061a | ||
|
|
c26fb43ced | ||
|
|
deb8f20db6 | ||
|
|
50e18ed8ff | ||
|
|
31f3895f40 | ||
|
|
615929268a | ||
|
|
b8b15814cf | ||
|
|
7766fffe83 | ||
|
|
2a16c150d1 | ||
|
|
418c2166cc | ||
|
|
a4dd44f648 | ||
|
|
5352f7cda7 | ||
|
|
5533b47099 | ||
|
|
e9b14464ee | ||
|
|
4e986e5cd1 | ||
|
|
8a59b40c53 | ||
|
|
391caca043 | ||
|
|
171ce348d6 | ||
|
|
c2cc729135 | ||
|
|
e7e71b76f0 | ||
|
|
a2af61cf6f | ||
|
|
e111edd5e4 | ||
|
|
3375377371 | ||
|
|
0ced020c67 | ||
|
|
c0d7aa9e4a | ||
|
|
e5b3d2a312 | ||
|
|
7b4a794981 | ||
|
|
86a859de17 | ||
|
|
b3aaa7bd0f | ||
|
|
a90586e6a8 | ||
|
|
807f272895 | ||
|
|
f050647b43 | ||
|
|
73baebbd16 | ||
|
|
f327f698b9 | ||
|
|
8164910fe8 | ||
|
|
3498644055 | ||
|
|
d31116b54c | ||
|
|
aced110cdf | ||
|
|
e9ab6aec77 | ||
|
|
15b261c861 | ||
|
|
970badce66 | ||
|
|
64304a9d65 | ||
|
|
d1983553d2 | ||
|
|
6b15df3bcd | ||
|
|
730b1fff71 | ||
|
|
c3add751e5 | ||
|
|
9da2dbdc1c | ||
|
|
977f09c470 | ||
|
|
4d0c6a8802 | ||
|
|
5345565037 | ||
|
|
be38c27c64 | ||
|
|
82a0401099 | ||
|
|
33bea1b663 | ||
|
|
f083acd46d | ||
|
|
5aacd15272 | ||
|
|
cb7674b091 | ||
|
|
3899c7ad56 | ||
|
|
d2debced09 | ||
|
|
b86c0ddc48 | ||
|
|
ba36f33bd8 | ||
|
|
49368a10ba | ||
|
|
ac1568cacf | ||
|
|
862ca3439d | ||
|
|
fdd4f9f2aa | ||
|
|
aa2dc49ebe | ||
|
|
cc23b7ee74 | ||
|
|
f6f9fc5a45 | ||
|
|
26c8589399 | ||
|
|
c2469935cb | ||
|
|
5e7c20955e | ||
|
|
967fa38108 | ||
|
|
280fe8e36b | ||
|
|
03ca96ccc3 | ||
|
|
b5b8a2c9d5 | ||
|
|
0008832730 | ||
|
|
c9b385db4b | ||
|
|
c951b66ae0 | ||
|
|
de735f3a45 | ||
|
|
19161425f3 | ||
|
|
c69e8d5bf4 | ||
|
|
3d3bce2788 | ||
|
|
1cb0dc7f8e | ||
|
|
cd5c56e601 | ||
|
|
8c979905e4 | ||
|
|
4d69f15f48 | ||
|
|
083f6572f7 | ||
|
|
4e7dd75266 | ||
|
|
3eb83f449b | ||
|
|
d31f69117b | ||
|
|
f5f9e3ac97 | ||
|
|
598d6c598c | ||
|
|
744727087a | ||
|
|
f93212a665 | ||
|
|
6dade82d2c | ||
|
|
6b737bf1d7 | ||
|
|
94dbd70677 | ||
|
|
527ae0348e | ||
|
|
79629c430a | ||
|
|
908dd61be5 | ||
|
|
88f77b8cca | ||
|
|
1e846657d1 | ||
|
|
ce70f62a88 | ||
|
|
bca0cdbb62 | ||
|
|
1ee11e04e6 | ||
|
|
6eef44f212 | ||
|
|
8bd94f4a1c | ||
|
|
4bc4701372 | ||
|
|
dfd89b503a | ||
|
|
060dc54832 | ||
|
|
f7a4ea5793 | ||
|
|
71b478e6e2 | ||
|
|
ed8fff8c52 | ||
|
|
95dc78db10 | ||
|
|
addeac64c7 | ||
|
|
d77ec22007 | ||
|
|
20030c91b7 | ||
|
|
8b366e255c | ||
|
|
6da366fcb0 | ||
|
|
2fa35f851e | ||
|
|
e4ca4260bb | ||
|
|
b69aace8d8 | ||
|
|
79097bb43c | ||
|
|
806fac1742 | ||
|
|
4f97d7cf8d | ||
|
|
42acc457af | ||
|
|
c02920607f | ||
|
|
452885c271 | ||
|
|
5c242a07b6 | ||
|
|
088899d59f | ||
|
|
1faff2a37e | ||
|
|
23c8d3d045 | ||
|
|
a033388d2b | ||
|
|
82fe45ac56 | ||
|
|
bcb7fcda6b | ||
|
|
726a98100b | ||
|
|
2f021a0c2b | ||
|
|
eb05cb6c6e | ||
|
|
7530af95da | ||
|
|
8399e95bda | ||
|
|
3b4dfe326f | ||
|
|
2e787a254e | ||
|
|
f888bed1a6 | ||
|
|
d865e9f35a | ||
|
|
fc7fe70f66 | ||
|
|
5aff39d2b2 | ||
|
|
d1be37a04a | ||
|
|
b0fd8bf7d4 | ||
|
|
b9cf8f3973 | ||
|
|
4588f11613 | ||
|
|
1a618c3c97 | ||
|
|
d500a51d97 | ||
|
|
734e9d3874 | ||
|
|
bd5cfc2f1b | ||
|
|
89f88ee78c | ||
|
|
b2ae14695a | ||
|
|
19d86b44d9 | ||
|
|
85be62e38b | ||
|
|
80f3d90200 | ||
|
|
0249fa6e75 | ||
|
|
2d0696e048 | ||
|
|
ff32ec515e | ||
|
|
a6935b0293 | ||
|
|
63eb08ba9f | ||
|
|
e5b67d2b3a | ||
|
|
9e10af6885 | ||
|
|
42bc9115d2 | ||
|
|
0a569ce413 | ||
|
|
9a16639a61 | ||
|
|
57953c68c6 | ||
|
|
088d08963f | ||
|
|
7bc8196821 | ||
|
|
7715299dd3 | ||
|
|
b8ac9b7994 | ||
|
|
98e7d8f728 | ||
|
|
e7fd871ffe | ||
|
|
14aab62f32 | ||
|
|
cb81fe962c | ||
|
|
fc970d2dea | ||
|
|
b0e203d1f9 | ||
|
|
37cef05b19 | ||
|
|
5886a42901 | ||
|
|
2fd99f807d | ||
|
|
3d4cbd7d10 | ||
|
|
f10d03c238 | ||
|
|
f9a66ffb0e | ||
|
|
777a50063d | ||
|
|
0bb9154747 | ||
|
|
30c3f45072 | ||
|
|
0d5ca67f32 | ||
|
|
4a8bf6aebd | ||
|
|
b11db090d8 | ||
|
|
189391fccd | ||
|
|
86d4c43909 | ||
|
|
5994f40982 | ||
|
|
076d32dee5 | ||
|
|
16c8e38ecd | ||
|
|
eacbcda8e5 | ||
|
|
59be76cd44 | ||
|
|
5bb0e7e8b3 | ||
|
|
b78d207121 | ||
|
|
0fcbcdd08c | ||
|
|
ed6c683922 | ||
|
|
9fe1edb02b | ||
|
|
fb3811a708 | ||
|
|
18f8658eec | ||
|
|
3ead4676b0 | ||
|
|
d30001d23d | ||
|
|
06bbf0d656 | ||
|
|
6ddd952e04 | ||
|
|
027ad0c3ee | ||
|
|
3abad2b87b | ||
|
|
32a1c7c5d5 | ||
|
|
f06e165bd4 | ||
|
|
1c843b24f7 | ||
|
|
2ace9ed380 | ||
|
|
5f30c0ae03 | ||
|
|
ef60adf7e2 | ||
|
|
7354b462e8 | ||
|
|
da904d6be8 | ||
|
|
c5fbbbbb5c | ||
|
|
5010387d8a | ||
|
|
f00c54a7fb | ||
|
|
9f52c169d0 | ||
|
|
bf18339404 | ||
|
|
2ad12b074b | ||
|
|
a6788ffe8d | ||
|
|
0e884df486 | ||
|
|
ef1c55286f | ||
|
|
abc0424c26 | ||
|
|
44e5c82e6d | ||
|
|
5849c446ed | ||
|
|
12b7317831 | ||
|
|
fe323f59af | ||
|
|
a00e56f219 | ||
|
|
1a7852794f | ||
|
|
22b1373a57 | ||
|
|
17d78b1469 | ||
|
|
4d8b32b249 | ||
|
|
b65bea2550 | ||
|
|
0b52ccd200 | ||
|
|
3006a07059 | ||
|
|
801dbc7a9a | ||
|
|
4f4e895fb7 | ||
|
|
cc57c3b655 | ||
|
|
ca6ec9c5c7 | ||
|
|
633b1f0a78 | ||
|
|
6136b9bf9c | ||
|
|
524a3ba566 | ||
|
|
58580320f9 | ||
|
|
759b0a994d | ||
|
|
d2800473e4 | ||
|
|
f5b1a2065e | ||
|
|
5e62532295 | ||
|
|
c1bee96c40 | ||
|
|
f273253a2b | ||
|
|
012bbcf770 | ||
|
|
b54cb47b2e | ||
|
|
1b15f43745 | ||
|
|
96771bf1bd | ||
|
|
580078bddb | ||
|
|
c5c7080ec6 | ||
|
|
408339b51d | ||
|
|
02e3d44998 | ||
|
|
156f13ded1 | ||
|
|
d288467cb7 | ||
|
|
21662c9f3f | ||
|
|
9149fe6cdd | ||
|
|
9a146192b7 | ||
|
|
3a9d3b7b61 | ||
|
|
f03f0973ab | ||
|
|
7ec0881e8c | ||
|
|
59e1ab42ff | ||
|
|
722216b901 | ||
|
|
bd8f3dc368 | ||
|
|
33cd94a141 |
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
title: ''
|
||||||
|
labels: bug
|
||||||
|
assignees: '9001'
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
NOTE:
|
||||||
|
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
|
||||||
|
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
a description of what the bug is
|
||||||
|
|
||||||
|
**To Reproduce**
|
||||||
|
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
|
||||||
|
|
||||||
|
**Expected behavior**
|
||||||
|
a description of what you expected to happen
|
||||||
|
|
||||||
|
**Screenshots**
|
||||||
|
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
|
||||||
|
|
||||||
|
**Server details**
|
||||||
|
if the issue is possibly on the server-side, then mention some of the following:
|
||||||
|
* server OS / version:
|
||||||
|
* python version:
|
||||||
|
* copyparty arguments:
|
||||||
|
* filesystem (`lsblk -f` on linux):
|
||||||
|
|
||||||
|
**Client details**
|
||||||
|
if the issue is possibly on the client-side, then mention some of the following:
|
||||||
|
* the device type and model:
|
||||||
|
* OS version:
|
||||||
|
* browser version:
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
any other context about the problem here
|
||||||
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
title: ''
|
||||||
|
labels: enhancement
|
||||||
|
assignees: '9001'
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
all of the below are optional, consider them as inspiration, delete and rewrite at will
|
||||||
|
|
||||||
|
**is your feature request related to a problem? Please describe.**
|
||||||
|
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`
|
||||||
|
|
||||||
|
**Describe the idea / solution you'd like**
|
||||||
|
a description of what you want to happen
|
||||||
|
|
||||||
|
**Describe any alternatives you've considered**
|
||||||
|
a description of any alternative solutions or features you've considered
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
add any other context or screenshots about the feature request here
|
||||||
10
.github/ISSUE_TEMPLATE/something-else.md
vendored
Normal file
10
.github/ISSUE_TEMPLATE/something-else.md
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
---
|
||||||
|
name: Something else
|
||||||
|
about: "┐(゚∀゚)┌"
|
||||||
|
title: ''
|
||||||
|
labels: ''
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
7
.github/branch-rename.md
vendored
Normal file
7
.github/branch-rename.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
modernize your local checkout of the repo like so,
|
||||||
|
```sh
|
||||||
|
git branch -m master hovudstraum
|
||||||
|
git fetch origin
|
||||||
|
git branch -u origin/hovudstraum hovudstraum
|
||||||
|
git remote set-head origin -a
|
||||||
|
```
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -9,6 +9,7 @@ buildenv/
|
|||||||
build/
|
build/
|
||||||
dist/
|
dist/
|
||||||
sfx/
|
sfx/
|
||||||
|
py2/
|
||||||
.venv/
|
.venv/
|
||||||
|
|
||||||
# ide
|
# ide
|
||||||
@@ -20,3 +21,7 @@ sfx/
|
|||||||
# derived
|
# derived
|
||||||
copyparty/web/deps/
|
copyparty/web/deps/
|
||||||
srv/
|
srv/
|
||||||
|
|
||||||
|
# state/logs
|
||||||
|
up.*.txt
|
||||||
|
.hist/
|
||||||
2
.vscode/launch.json
vendored
2
.vscode/launch.json
vendored
@@ -17,7 +17,7 @@
|
|||||||
"-mtp",
|
"-mtp",
|
||||||
".bpm=f,bin/mtag/audio-bpm.py",
|
".bpm=f,bin/mtag/audio-bpm.py",
|
||||||
"-aed:wark",
|
"-aed:wark",
|
||||||
"-vsrv::r:aed:cnodupe",
|
"-vsrv::r:rw,ed:c,dupe",
|
||||||
"-vdist:dist:r"
|
"-vdist:dist:r"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|||||||
1
.vscode/settings.json
vendored
1
.vscode/settings.json
vendored
@@ -55,4 +55,5 @@
|
|||||||
"py27"
|
"py27"
|
||||||
],
|
],
|
||||||
"python.linting.enabled": true,
|
"python.linting.enabled": true,
|
||||||
|
"python.pythonPath": "/usr/bin/python3"
|
||||||
}
|
}
|
||||||
24
CODE_OF_CONDUCT.md
Normal file
24
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
in the words of Abraham Lincoln:
|
||||||
|
|
||||||
|
> Be excellent to each other... and... PARTY ON, DUDES!
|
||||||
|
|
||||||
|
more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy
|
||||||
|
|
||||||
|
## Examples of unacceptable behavior
|
||||||
|
* intimidation, harassment, trolling
|
||||||
|
* insulting, derogatory, harmful or prejudicial comments
|
||||||
|
* posting private information without permission
|
||||||
|
* political or personal attacks
|
||||||
|
|
||||||
|
## Examples of expected behavior
|
||||||
|
* being nice, friendly, welcoming, inclusive, mindful and empathetic
|
||||||
|
* acting considerate, modest, respectful
|
||||||
|
* using polite and inclusive language
|
||||||
|
* criticize constructively and accept constructive criticism
|
||||||
|
* respect different points of view
|
||||||
|
|
||||||
|
## finally and even more specifically,
|
||||||
|
* parse opinions and feedback objectively without prejudice
|
||||||
|
* it's the message that matters, not who said it
|
||||||
|
|
||||||
|
aaand that's how you say `be nice` in a way that fills half a floppy w
|
||||||
3
CONTRIBUTING.md
Normal file
3
CONTRIBUTING.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
* do something cool
|
||||||
|
|
||||||
|
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
|
||||||
317
README.md
317
README.md
@@ -19,14 +19,18 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
|||||||
## readme toc
|
## readme toc
|
||||||
|
|
||||||
* top
|
* top
|
||||||
* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
|
* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
|
||||||
|
* [on servers](#on-servers) - you may also want these, especially on servers
|
||||||
* [on debian](#on-debian) - recommended additional steps on debian
|
* [on debian](#on-debian) - recommended additional steps on debian
|
||||||
* [notes](#notes) - general notes
|
* [notes](#notes) - general notes
|
||||||
* [status](#status) - feature summary
|
* [status](#status) - feature summary
|
||||||
* [testimonials](#testimonials) - small collection of user feedback
|
* [testimonials](#testimonials) - small collection of user feedback
|
||||||
|
* [motivations](#motivations) - project goals / philosophy
|
||||||
|
* [future plans](#future-plans) - some improvement ideas
|
||||||
* [bugs](#bugs)
|
* [bugs](#bugs)
|
||||||
* [general bugs](#general-bugs)
|
* [general bugs](#general-bugs)
|
||||||
* [not my bugs](#not-my-bugs)
|
* [not my bugs](#not-my-bugs)
|
||||||
|
* [FAQ](#FAQ) - "frequently" asked questions
|
||||||
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
|
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
|
||||||
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
|
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
|
||||||
* [tabs](#tabs) - the main tabs in the ui
|
* [tabs](#tabs) - the main tabs in the ui
|
||||||
@@ -34,27 +38,39 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
|||||||
* [navpane](#navpane) - switching between breadcrumbs or navpane
|
* [navpane](#navpane) - switching between breadcrumbs or navpane
|
||||||
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
|
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
|
||||||
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
|
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
|
||||||
* [uploading](#uploading) - web-browsers can upload using `bup` and `up2k`
|
* [uploading](#uploading) - drag files/folders into the web-browser to upload
|
||||||
* [file-search](#file-search) - drop files/folders into up2k to see if they exist on the server
|
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
|
||||||
* [unpost](#unpost) - undo/delete accidental uploads
|
* [unpost](#unpost) - undo/delete accidental uploads
|
||||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||||
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
||||||
* [other tricks](#other-tricks)
|
* [other tricks](#other-tricks)
|
||||||
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
|
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
|
||||||
* [server config](#server-config)
|
* [server config](#server-config) - using arguments or config files, or a mix of both
|
||||||
* [file indexing](#file-indexing)
|
* [file indexing](#file-indexing)
|
||||||
* [upload rules](#upload-rules) - set upload rules using volume flags
|
* [upload rules](#upload-rules) - set upload rules using volume flags
|
||||||
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
|
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
|
||||||
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
|
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
|
||||||
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
|
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
|
||||||
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
||||||
|
* [upload events](#upload-events) - trigger a script/program on each upload
|
||||||
* [complete examples](#complete-examples)
|
* [complete examples](#complete-examples)
|
||||||
* [browser support](#browser-support) - TLDR: yes
|
* [browser support](#browser-support) - TLDR: yes
|
||||||
* [client examples](#client-examples) - interact with copyparty using non-browser clients
|
* [client examples](#client-examples) - interact with copyparty using non-browser clients
|
||||||
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
|
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
|
||||||
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
|
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
|
||||||
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||||
|
* [client-side](#client-side) - when uploading files
|
||||||
|
* [security](#security) - some notes on hardening
|
||||||
|
* [gotchas](#gotchas) - behavior that might be unexpected
|
||||||
|
* [recovering from crashes](#recovering-from-crashes)
|
||||||
|
* [client crashes](#client-crashes)
|
||||||
|
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
|
||||||
|
* [HTTP API](#HTTP-API)
|
||||||
|
* [read](#read)
|
||||||
|
* [write](#write)
|
||||||
|
* [admin](#admin)
|
||||||
|
* [general](#general)
|
||||||
* [dependencies](#dependencies) - mandatory deps
|
* [dependencies](#dependencies) - mandatory deps
|
||||||
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
||||||
* [install recommended deps](#install-recommended-deps)
|
* [install recommended deps](#install-recommended-deps)
|
||||||
@@ -81,11 +97,16 @@ some recommended options:
|
|||||||
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
|
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
|
||||||
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
|
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
|
||||||
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
|
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
|
||||||
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other access levels (`r`ead, `w`rite, `m`ove, `d`elete)
|
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
|
||||||
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
|
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
|
||||||
|
|
||||||
|
|
||||||
|
### on servers
|
||||||
|
|
||||||
you may also want these, especially on servers:
|
you may also want these, especially on servers:
|
||||||
|
|
||||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||||
|
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
|
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
|
||||||
|
|
||||||
|
|
||||||
@@ -138,7 +159,7 @@ feature summary
|
|||||||
* browser
|
* browser
|
||||||
* ☑ [navpane](#navpane) (directory tree sidebar)
|
* ☑ [navpane](#navpane) (directory tree sidebar)
|
||||||
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
|
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
|
||||||
* ☑ audio player (with OS media controls)
|
* ☑ audio player (with OS media controls and opus transcoding)
|
||||||
* ☑ image gallery with webm player
|
* ☑ image gallery with webm player
|
||||||
* ☑ [thumbnails](#thumbnails)
|
* ☑ [thumbnails](#thumbnails)
|
||||||
* ☑ ...of images using Pillow
|
* ☑ ...of images using Pillow
|
||||||
@@ -162,12 +183,48 @@ small collection of user feedback
|
|||||||
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
|
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
|
||||||
|
|
||||||
|
|
||||||
|
# motivations
|
||||||
|
|
||||||
|
project goals / philosophy
|
||||||
|
|
||||||
|
* inverse linux philosophy -- do all the things, and do an *okay* job
|
||||||
|
* quick drop-in service to get a lot of features in a pinch
|
||||||
|
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
|
||||||
|
* run anywhere, support everything
|
||||||
|
* as many web-browsers and python versions as possible
|
||||||
|
* every browser should at least be able to browse, download, upload files
|
||||||
|
* be a good emergency solution for transferring stuff between ancient boxes
|
||||||
|
* minimal dependencies
|
||||||
|
* but optional dependencies adding bonus-features are ok
|
||||||
|
* everything being plaintext makes it possible to proofread for malicious code
|
||||||
|
* no preparations / setup necessary, just run the sfx (which is also plaintext)
|
||||||
|
* adaptable, malleable, hackable
|
||||||
|
* no build steps; modify the js/python without needing node.js or anything like that
|
||||||
|
|
||||||
|
|
||||||
|
## future plans
|
||||||
|
|
||||||
|
some improvement ideas
|
||||||
|
|
||||||
|
* the JS is a mess -- a preact rewrite would be nice
|
||||||
|
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
|
||||||
|
* good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
|
||||||
|
* the UX is a mess -- a proper design would be nice
|
||||||
|
* very organic (much like the python/js), everything was an afterthought
|
||||||
|
* true for both the layout and the visual flair
|
||||||
|
* something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
|
||||||
|
* some of the python files are way too big
|
||||||
|
* `up2k.py` ended up doing all the file indexing / db management
|
||||||
|
* `httpcli.py` should be separated into modules in general
|
||||||
|
|
||||||
|
|
||||||
# bugs
|
# bugs
|
||||||
|
|
||||||
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
|
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
|
||||||
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
|
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
|
||||||
* Windows: python 2.7 cannot handle filenames with mojibake
|
* Windows: python 2.7 cannot handle filenames with mojibake
|
||||||
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
||||||
|
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
|
||||||
|
|
||||||
## general bugs
|
## general bugs
|
||||||
|
|
||||||
@@ -186,9 +243,23 @@ small collection of user feedback
|
|||||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
|
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
|
||||||
|
|
||||||
|
|
||||||
|
# FAQ
|
||||||
|
|
||||||
|
"frequently" asked questions
|
||||||
|
|
||||||
|
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
|
||||||
|
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
|
||||||
|
|
||||||
|
* can I make copyparty download a file to my server if I give it a URL?
|
||||||
|
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
|
||||||
|
|
||||||
|
|
||||||
# accounts and volumes
|
# accounts and volumes
|
||||||
|
|
||||||
per-folder, per-user permissions
|
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
|
||||||
|
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
|
||||||
|
|
||||||
|
configuring accounts/volumes with arguments:
|
||||||
* `-a usr:pwd` adds account `usr` with password `pwd`
|
* `-a usr:pwd` adds account `usr` with password `pwd`
|
||||||
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
|
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
|
||||||
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
|
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
|
||||||
@@ -200,6 +271,7 @@ permissions:
|
|||||||
* `w` (write): upload files, move files *into* this folder
|
* `w` (write): upload files, move files *into* this folder
|
||||||
* `m` (move): move files/folders *from* this folder
|
* `m` (move): move files/folders *from* this folder
|
||||||
* `d` (delete): delete files/folders
|
* `d` (delete): delete files/folders
|
||||||
|
* `g` (get): only download files, cannot see folder contents or zip/tar
|
||||||
|
|
||||||
examples:
|
examples:
|
||||||
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
|
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
|
||||||
@@ -210,6 +282,10 @@ examples:
|
|||||||
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
|
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
|
||||||
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
|
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
|
||||||
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
|
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
|
||||||
|
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable accesskeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
|
||||||
|
* `c,fk=4` sets the `fk` volume-flag to 4, meaning each file gets a 4-character accesskey
|
||||||
|
* `u1` can upload files, browse the folder, and see the generated accesskeys
|
||||||
|
* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
|
||||||
|
|
||||||
|
|
||||||
# the browser
|
# the browser
|
||||||
@@ -238,6 +314,7 @@ the browser has the following hotkeys (always qwerty)
|
|||||||
* `B` toggle breadcrumbs / [navpane](#navpane)
|
* `B` toggle breadcrumbs / [navpane](#navpane)
|
||||||
* `I/K` prev/next folder
|
* `I/K` prev/next folder
|
||||||
* `M` parent folder (or unexpand current)
|
* `M` parent folder (or unexpand current)
|
||||||
|
* `V` toggle folders / textfiles in the navpane
|
||||||
* `G` toggle list / [grid view](#thumbnails)
|
* `G` toggle list / [grid view](#thumbnails)
|
||||||
* `T` toggle thumbnails / icons
|
* `T` toggle thumbnails / icons
|
||||||
* `ctrl-X` cut selected files/folders
|
* `ctrl-X` cut selected files/folders
|
||||||
@@ -249,6 +326,10 @@ the browser has the following hotkeys (always qwerty)
|
|||||||
* ctrl+`Up/Down` move cursor and scroll viewport
|
* ctrl+`Up/Down` move cursor and scroll viewport
|
||||||
* `Space` toggle file selection
|
* `Space` toggle file selection
|
||||||
* `Ctrl-A` toggle select all
|
* `Ctrl-A` toggle select all
|
||||||
|
* when a textfile is open:
|
||||||
|
* `I/K` prev/next textfile
|
||||||
|
* `S` toggle selection of open file
|
||||||
|
* `M` close textfile
|
||||||
* when playing audio:
|
* when playing audio:
|
||||||
* `J/L` prev/next song
|
* `J/L` prev/next song
|
||||||
* `U/O` skip 10sec back/forward
|
* `U/O` skip 10sec back/forward
|
||||||
@@ -300,6 +381,8 @@ press `g` to toggle grid-view instead of the file listing, and `t` toggles icon
|
|||||||
|
|
||||||
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
|
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
|
||||||
|
|
||||||
|
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||||
|
|
||||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||||
|
|
||||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||||
@@ -331,11 +414,13 @@ you can also zip a selection of files or folders by clicking them in the browser
|
|||||||
|
|
||||||
## uploading
|
## uploading
|
||||||
|
|
||||||
web-browsers can upload using `bup` and `up2k`:
|
drag files/folders into the web-browser to upload
|
||||||
|
|
||||||
|
this initiates an upload using `up2k`; there are two uploaders available:
|
||||||
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
|
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
|
||||||
* `[🚀] up2k`, the fancy one
|
* `[🚀] up2k`, the fancy one
|
||||||
|
|
||||||
you can undo/delete uploads using `[🧯]` [unpost](#unpost)
|
you can also undo/delete uploads by using `[🧯]` [unpost](#unpost)
|
||||||
|
|
||||||
up2k has several advantages:
|
up2k has several advantages:
|
||||||
* you can drop folders into the browser (files are added recursively)
|
* you can drop folders into the browser (files are added recursively)
|
||||||
@@ -353,12 +438,15 @@ see [up2k](#up2k) for details on how it works
|
|||||||
|
|
||||||
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||||
|
|
||||||
|
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
|
||||||
|
|
||||||
the up2k UI is the epitome of polished intuitive experiences:
|
the up2k UI is the epitome of polished intuitive experiences:
|
||||||
* "parallel uploads" specifies how many chunks to upload at the same time
|
* "parallel uploads" specifies how many chunks to upload at the same time
|
||||||
* `[🏃]` analysis of other files should continue while one is uploading
|
* `[🏃]` analysis of other files should continue while one is uploading
|
||||||
* `[💭]` ask for confirmation before files are added to the queue
|
* `[💭]` ask for confirmation before files are added to the queue
|
||||||
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
|
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
|
||||||
* `[🔎]` switch between upload and [file-search](#file-search) mode
|
* `[🔎]` switch between upload and [file-search](#file-search) mode
|
||||||
|
* ignore `[🔎]` if you add files by dragging them into the browser
|
||||||
|
|
||||||
and then there's the tabs below it,
|
and then there's the tabs below it,
|
||||||
* `[ok]` is the files which completed successfully
|
* `[ok]` is the files which completed successfully
|
||||||
@@ -368,24 +456,26 @@ and then theres the tabs below it,
|
|||||||
* plus up to 3 entries each from `[done]` and `[que]` for context
|
* plus up to 3 entries each from `[done]` and `[que]` for context
|
||||||
* `[que]` is all the files that are still queued
|
* `[que]` is all the files that are still queued
|
||||||
|
|
||||||
note that since up2k has to read each file twice, `[🎈 bup]` can be up to 2x faster in some extreme cases (huge files combined with internet connection faster than the read-speed of your HDD)
|
note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
|
||||||
|
|
||||||
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
|
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
|
||||||
|
|
||||||
|
|
||||||
### file-search
|
### file-search
|
||||||
|
|
||||||
drop files/folders into up2k to see if they exist on the server
|
dropping files into the browser also lets you see if they exist on the server
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere
|
when you drag/drop files into the browser, you will see two dropzones: `Upload` and `Search`
|
||||||
|
|
||||||
|
> on a phone? toggle the `[🔎]` switch green before tapping the big yellow Search button to select your files
|
||||||
|
|
||||||
|
the files will be hashed on the client-side, and each hash is sent to the server, which checks if that file exists somewhere
|
||||||
|
|
||||||
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
|
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
|
||||||
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
|
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
|
||||||
|
|
||||||
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
|
|
||||||
|
|
||||||
|
|
||||||
### unpost
|
### unpost
|
||||||
|
|
||||||
@@ -473,6 +563,8 @@ and there are *two* editors
|
|||||||
|
|
||||||
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
|
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
|
||||||
|
|
||||||
|
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
|
||||||
|
|
||||||
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
|
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
|
||||||
|
|
||||||
* click the bottom-left `π` to open a javascript prompt for debugging
|
* click the bottom-left `π` to open a javascript prompt for debugging
|
||||||
@@ -501,6 +593,12 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings
|
|||||||
|
|
||||||
# server config
|
# server config
|
||||||
|
|
||||||
|
using arguments or config files, or a mix of both:
|
||||||
|
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
|
||||||
|
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
|
||||||
|
* or click the `[reload cfg]` button in the control-panel when logged in as admin
|
||||||
|
|
||||||
|
|
||||||
## file indexing
|
## file indexing
|
||||||
|
|
||||||
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
|
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
|
||||||
@@ -514,28 +612,30 @@ through arguments:
|
|||||||
* `-e2tsr` also deletes all existing tags, doing a full reindex
|
* `-e2tsr` also deletes all existing tags, doing a full reindex
|
||||||
|
|
||||||
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
|
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
|
||||||
* `-v ~/music::r:c,e2dsa:c,e2tsr` does a full reindex of everything on startup
|
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
|
||||||
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
|
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
|
||||||
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
|
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
|
||||||
|
|
||||||
note:
|
note:
|
||||||
* the parser currently can't handle `c,e2dsa,e2tsr` so you have to `c,e2dsa:c,e2tsr`
|
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
|
||||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||||
|
|
||||||
to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
|
to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences:
|
||||||
* initial indexing is way faster, especially when the volume is on a network disk
|
* initial indexing is way faster, especially when the volume is on a network disk
|
||||||
* makes it impossible to [file-search](#file-search)
|
* makes it impossible to [file-search](#file-search)
|
||||||
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
|
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
|
||||||
|
|
||||||
if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
|
similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$`
|
||||||
|
|
||||||
|
if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
|
||||||
|
|
||||||
|
|
||||||
## upload rules
|
## upload rules
|
||||||
|
|
||||||
set upload rules using volume flags, some examples:
|
set upload rules using volume flags, some examples:
|
||||||
|
|
||||||
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
|
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
|
||||||
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
|
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
|
||||||
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
|
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
|
||||||
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
|
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
|
||||||
@@ -601,7 +701,7 @@ but instead of using `-mte`, `-mth` is a better way to hide tags in the browser:
|
|||||||
|
|
||||||
tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value
|
tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value
|
||||||
|
|
||||||
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
|
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
|
||||||
|
|
||||||
`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
|
`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
|
||||||
* is about 20x slower than Mutagen
|
* is about 20x slower than Mutagen
|
||||||
@@ -627,6 +727,25 @@ copyparty can invoke external programs to collect additional metadata for files
|
|||||||
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
|
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
|
||||||
|
|
||||||
|
|
||||||
|
## upload events
|
||||||
|
|
||||||
|
trigger a script/program on each upload like so:
|
||||||
|
|
||||||
|
```
|
||||||
|
-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
|
||||||
|
```
|
||||||
|
|
||||||
|
so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
|
||||||
|
|
||||||
|
that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
|
||||||
|
|
||||||
|
note that it will only trigger on new unique files, not dupes
|
||||||
|
|
||||||
|
and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
|
||||||
|
|
||||||
|
if this becomes popular maybe there should be a less janky way to do it actually
|
||||||
|
|
||||||
|
|
||||||
## complete examples
|
## complete examples
|
||||||
|
|
||||||
* read-only music server with bpm and key scanning
|
* read-only music server with bpm and key scanning
|
||||||
@@ -653,11 +772,11 @@ TLDR: yes
|
|||||||
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
|
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| file rename | - | yep | yep | yep | yep | yep | yep | yep |
|
| file rename | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
|
| file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| navpane | - | `*2` | yep | yep | yep | yep | yep | yep |
|
| navpane | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
|
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| video player | - | yep | yep | yep | yep | yep | yep | yep |
|
| video player | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
|
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
|
||||||
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
|
| markdown viewer | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
|
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
|
||||||
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
||||||
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
||||||
@@ -665,21 +784,18 @@ TLDR: yes
|
|||||||
* internet explorer 6 to 8 behave the same
|
* internet explorer 6 to 8 behave the same
|
||||||
* firefox 52 and chrome 49 are the final winxp versions
|
* firefox 52 and chrome 49 are the final winxp versions
|
||||||
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
||||||
* `*2` causes a full-page refresh on each navigation
|
|
||||||
* `*3` using a wasm decoder which consumes a bit more power
|
* `*3` using a wasm decoder which consumes a bit more power
|
||||||
|
|
||||||
quick summary of more eccentric web-browsers trying to view a directory index:
|
quick summary of more eccentric web-browsers trying to view a directory index:
|
||||||
|
|
||||||
| browser | will it blend |
|
| browser | will it blend |
|
||||||
| ------- | ------------- |
|
| ------- | ------------- |
|
||||||
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
|
|
||||||
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
|
|
||||||
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
|
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
|
||||||
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
|
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
|
||||||
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
|
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
|
||||||
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
|
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
|
||||||
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
|
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
|
||||||
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
|
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u` |
|
||||||
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
|
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
|
||||||
|
|
||||||
|
|
||||||
@@ -688,8 +804,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
|
|||||||
interact with copyparty using non-browser clients
|
interact with copyparty using non-browser clients
|
||||||
|
|
||||||
* javascript: dump some state into a file (two separate examples)
|
* javascript: dump some state into a file (two separate examples)
|
||||||
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
|
* `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
|
||||||
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
|
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
|
||||||
|
|
||||||
* curl/wget: upload some files (post=file, chunk=stdin)
|
* curl/wget: upload some files (post=file, chunk=stdin)
|
||||||
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
|
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
|
||||||
@@ -699,6 +815,14 @@ interact with copyparty using non-browser clients
|
|||||||
* `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
|
* `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
|
||||||
`chunk <movie.mkv`
|
`chunk <movie.mkv`
|
||||||
|
|
||||||
|
* bash: when curl and wget is not available or too boring
|
||||||
|
* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
|
||||||
|
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
|
||||||
|
|
||||||
|
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||||
|
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||||
|
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
|
||||||
|
|
||||||
* FUSE: mount a copyparty server as a local filesystem
|
* FUSE: mount a copyparty server as a local filesystem
|
||||||
* cross-platform python client available in [./bin/](bin/)
|
* cross-platform python client available in [./bin/](bin/)
|
||||||
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
||||||
@@ -746,14 +870,12 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x
|
|||||||
|
|
||||||
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||||
|
|
||||||
you can ignore the `cannot efficiently use multiple CPU cores` message, very unlikely to be a problem
|
|
||||||
|
|
||||||
below are some tweaks roughly ordered by usefulness:
|
below are some tweaks roughly ordered by usefulness:
|
||||||
|
|
||||||
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
||||||
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
|
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
|
||||||
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
||||||
* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
||||||
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
|
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
|
||||||
* huge amount of short-lived connections
|
* huge amount of short-lived connections
|
||||||
* really heavy traffic (downloads/uploads)
|
* really heavy traffic (downloads/uploads)
|
||||||
@@ -761,6 +883,21 @@ below are some tweaks roughly ordered by usefulness:
|
|||||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||||
|
|
||||||
|
|
||||||
|
## client-side
|
||||||
|
|
||||||
|
when uploading files,
|
||||||
|
|
||||||
|
* chrome is recommended, at least compared to firefox:
|
||||||
|
* up to 90% faster when hashing, especially on SSDs
|
||||||
|
* up to 40% faster when uploading over extremely fast internets
|
||||||
|
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again
|
||||||
|
|
||||||
|
* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
|
||||||
|
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
|
||||||
|
* switching to another browser-tab also works, the favicon will update every 10 seconds in that case
|
||||||
|
* unlikely to be a problem, but can happen when uploading many small files, or your internet is too fast, or PC too slow
|
||||||
|
|
||||||
|
|
||||||
# security
|
# security
|
||||||
|
|
||||||
some notes on hardening
|
some notes on hardening
|
||||||
@@ -771,6 +908,11 @@ on public copyparty instances with anonymous upload enabled:
|
|||||||
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
|
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
|
||||||
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
|
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
|
||||||
|
|
||||||
|
other misc:
|
||||||
|
|
||||||
|
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
|
||||||
|
* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||||
|
|
||||||
|
|
||||||
## gotchas
|
## gotchas
|
||||||
|
|
||||||
@@ -779,6 +921,103 @@ behavior that might be unexpected
|
|||||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||||
|
|
||||||
|
|
||||||
|
# recovering from crashes
|
||||||
|
|
||||||
|
## client crashes
|
||||||
|
|
||||||
|
### firefox wsod
|
||||||
|
|
||||||
|
firefox 87 can crash during uploads -- the entire browser goes, including all other browser tabs, everything turns white
|
||||||
|
|
||||||
|
however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads:
|
||||||
|
|
||||||
|
* get a complete list of all uploads, organized by status (ok / no-good / busy / queued):
|
||||||
|
`var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs`
|
||||||
|
|
||||||
|
* list of filenames which failed:
|
||||||
|
`var ng = []; for (var a of up2k.ui.tab) if (a.in != 'ok') ng.push(a.hn.split('<a href=\"').slice(-1)[0].split('\">')[0]); ng`
|
||||||
|
|
||||||
|
* send the list of filenames to copyparty for safekeeping:
|
||||||
|
`await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
|
||||||
|
|
||||||
|
|
||||||
|
# HTTP API
|
||||||
|
|
||||||
|
* table-column `params` = URL parameters; `?foo=bar&qux=...`
|
||||||
|
* table-column `body` = POST payload
|
||||||
|
* method `jPOST` = json post
|
||||||
|
* method `mPOST` = multipart post
|
||||||
|
* method `uPOST` = url-encoded post
|
||||||
|
* `FILE` = conventional HTTP file upload entry (rfc1867 et al, filename in `Content-Disposition`)
|
||||||
|
|
||||||
|
authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||||
|
|
||||||
|
## read
|
||||||
|
|
||||||
|
| method | params | result |
|
||||||
|
|--|--|--|
|
||||||
|
| GET | `?ls` | list files/folders at URL as JSON |
|
||||||
|
| GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
|
||||||
|
| GET | `?ls=t` | list files/folders at URL as plaintext |
|
||||||
|
| GET | `?ls=v` | list files/folders at URL, terminal-formatted |
|
||||||
|
| GET | `?b` | list files/folders at URL as simplified HTML |
|
||||||
|
| GET | `?tree=.` | list one level of subdirectories inside URL |
|
||||||
|
| GET | `?tree` | list one level of subdirectories for each level until URL |
|
||||||
|
| GET | `?tar` | download everything below URL as a tar file |
|
||||||
|
| GET | `?zip=utf-8` | download everything below URL as a zip file |
|
||||||
|
| GET | `?ups` | show recent uploads from your IP |
|
||||||
|
| GET | `?ups&filter=f` | ...where URL contains `f` |
|
||||||
|
| GET | `?mime=foo` | specify return mimetype `foo` |
|
||||||
|
| GET | `?raw` | get markdown file at URL as plaintext |
|
||||||
|
| GET | `?txt` | get file at URL as plaintext |
|
||||||
|
| GET | `?txt=iso-8859-1` | ...with specific charset |
|
||||||
|
| GET | `?th` | get image/video at URL as thumbnail |
|
||||||
|
| GET | `?th=opus` | convert audio file to 128kbps opus |
|
||||||
|
|
||||||
|
| method | body | result |
|
||||||
|
|--|--|--|
|
||||||
|
| jPOST | `{"q":"foo"}` | do a server-wide search; see the `[🔎]` search tab `raw` field for syntax |
|
||||||
|
|
||||||
|
| method | params | body | result |
|
||||||
|
|--|--|--|--|
|
||||||
|
| jPOST | `?tar` | `["foo","bar"]` | download folders `foo` and `bar` inside URL as a tar file |
|
||||||
|
|
||||||
|
## write
|
||||||
|
|
||||||
|
| method | params | result |
|
||||||
|
|--|--|--|
|
||||||
|
| GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
|
||||||
|
|
||||||
|
| method | params | body | result |
|
||||||
|
|--|--|--|--|
|
||||||
|
| PUT | | (binary data) | upload into file at URL |
|
||||||
|
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
|
||||||
|
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
|
||||||
|
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
|
||||||
|
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
|
||||||
|
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
|
||||||
|
| GET | `?delete` | | delete URL recursively |
|
||||||
|
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
|
||||||
|
| uPOST | | `msg=foo` | send message `foo` into server log |
|
||||||
|
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
|
||||||
|
|
||||||
|
server behavior of `msg` can be reconfigured with `--urlform`
|
||||||
|
|
||||||
|
## admin
|
||||||
|
|
||||||
|
| method | params | result |
|
||||||
|
|--|--|--|
|
||||||
|
| GET | `?reload=cfg` | reload config files and rescan volumes |
|
||||||
|
| GET | `?scan` | initiate a rescan of the volume which provides URL |
|
||||||
|
| GET | `?stack` | show a stacktrace of all threads |
|
||||||
|
|
||||||
|
## general
|
||||||
|
|
||||||
|
| method | params | result |
|
||||||
|
|--|--|--|
|
||||||
|
| GET | `?pw=x` | logout |
|
||||||
|
|
||||||
|
|
||||||
# dependencies
|
# dependencies
|
||||||
|
|
||||||
mandatory deps:
|
mandatory deps:
|
||||||
@@ -795,7 +1034,7 @@ enable music tags:
|
|||||||
|
|
||||||
enable [thumbnails](#thumbnails) of...
|
enable [thumbnails](#thumbnails) of...
|
||||||
* **images:** `Pillow` (requires py2.7 or py3.5+)
|
* **images:** `Pillow` (requires py2.7 or py3.5+)
|
||||||
* **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
|
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
|
||||||
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
|
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
|
||||||
* **AVIF pictures:** `pillow-avif-plugin`
|
* **AVIF pictures:** `pillow-avif-plugin`
|
||||||
|
|
||||||
@@ -829,13 +1068,15 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
|
|||||||
reduce the size of an sfx by removing features
|
reduce the size of an sfx by removing features
|
||||||
|
|
||||||
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
|
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
|
||||||
* `525k` size of original sfx.py as of v0.11.30
|
* `584k` size of original sfx.py as of v1.1.0
|
||||||
* `315k` after `./scripts/make-sfx.sh re no-ogv`
|
* `392k` after `./scripts/make-sfx.sh re no-ogv`
|
||||||
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
|
* `310k` after `./scripts/make-sfx.sh re no-ogv no-cm`
|
||||||
|
* `269k` after `./scripts/make-sfx.sh re no-ogv no-cm no-hl`
|
||||||
|
|
||||||
the features you can opt to drop are
|
the features you can opt to drop are
|
||||||
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
|
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
|
||||||
* `cm`/easymde, the "fancy" markdown editor, saves ~92k
|
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
|
||||||
|
* `hl`, prism, the syntax hilighter, saves ~41k
|
||||||
* `fnt`, source-code-pro, the monospace font, saves ~9k
|
* `fnt`, source-code-pro, the monospace font, saves ~9k
|
||||||
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
|
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
|
||||||
|
|
||||||
@@ -908,7 +1149,7 @@ in the `scripts` folder:
|
|||||||
|
|
||||||
roughly sorted by priority
|
roughly sorted by priority
|
||||||
|
|
||||||
* hls framework for Someone Else to drop code into :^)
|
* nothing! currently
|
||||||
|
|
||||||
|
|
||||||
## discarded ideas
|
## discarded ideas
|
||||||
@@ -936,3 +1177,5 @@ roughly sorted by priority
|
|||||||
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
|
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
|
||||||
* blank hashlist when up-ok to skip handshake
|
* blank hashlist when up-ok to skip handshake
|
||||||
* too many confusing side-effects
|
* too many confusing side-effects
|
||||||
|
* hls framework for Someone Else to drop code into :^)
|
||||||
|
* probably not, too much stuff to consider -- seeking, start at offset, task stitching (probably np-hard), conditional passthru, rate-control (especially multi-consumer), session keepalive, cache mgmt...
|
||||||
|
|||||||
@@ -1,3 +1,11 @@
|
|||||||
|
# [`up2k.py`](up2k.py)
|
||||||
|
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||||
|
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||||
|
* faster than browsers
|
||||||
|
* if something breaks just restart it
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# [`copyparty-fuse.py`](copyparty-fuse.py)
|
# [`copyparty-fuse.py`](copyparty-fuse.py)
|
||||||
* mount a copyparty server as a local filesystem (read-only)
|
* mount a copyparty server as a local filesystem (read-only)
|
||||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||||
@@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
|
|||||||
* copyparty can Popen programs like these during file indexing to collect additional metadata
|
* copyparty can Popen programs like these during file indexing to collect additional metadata
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# [`dbtool.py`](dbtool.py)
|
# [`dbtool.py`](dbtool.py)
|
||||||
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
|
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
|
||||||
|
|
||||||
@@ -63,6 +72,7 @@ cd /mnt/nas/music/.hist
|
|||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# [`prisonparty.sh`](prisonparty.sh)
|
# [`prisonparty.sh`](prisonparty.sh)
|
||||||
* run copyparty in a chroot, preventing any accidental file access
|
* run copyparty in a chroot, preventing any accidental file access
|
||||||
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ dependencies:
|
|||||||
|
|
||||||
note:
|
note:
|
||||||
you probably want to run this on windows clients:
|
you probably want to run this on windows clients:
|
||||||
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
|
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg
|
||||||
|
|
||||||
get server cert:
|
get server cert:
|
||||||
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
||||||
@@ -71,7 +71,7 @@ except:
|
|||||||
elif MACOS:
|
elif MACOS:
|
||||||
libfuse = "install https://osxfuse.github.io/"
|
libfuse = "install https://osxfuse.github.io/"
|
||||||
else:
|
else:
|
||||||
libfuse = "apt install libfuse\n modprobe fuse"
|
libfuse = "apt install libfuse3-3\n modprobe fuse"
|
||||||
|
|
||||||
print(
|
print(
|
||||||
"\n could not import fuse; these may help:"
|
"\n could not import fuse; these may help:"
|
||||||
@@ -393,15 +393,16 @@ class Gateway(object):
|
|||||||
|
|
||||||
rsp = json.loads(rsp.decode("utf-8"))
|
rsp = json.loads(rsp.decode("utf-8"))
|
||||||
ret = []
|
ret = []
|
||||||
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
|
for statfun, nodes in [
|
||||||
|
[self.stat_dir, rsp["dirs"]],
|
||||||
|
[self.stat_file, rsp["files"]],
|
||||||
|
]:
|
||||||
for n in nodes:
|
for n in nodes:
|
||||||
fname = unquote(n["href"]).rstrip(b"/")
|
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||||
fname = fname.decode("wtf-8")
|
|
||||||
if bad_good:
|
if bad_good:
|
||||||
fname = enwin(fname)
|
fname = enwin(fname)
|
||||||
|
|
||||||
fun = self.stat_dir if is_dir else self.stat_file
|
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||||
ret.append([fname, fun(n["ts"], n["sz"]), 0])
|
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,19 @@
|
|||||||
standalone programs which take an audio file as argument
|
standalone programs which take an audio file as argument
|
||||||
|
|
||||||
|
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
|
||||||
|
|
||||||
some of these rely on libraries which are not MIT-compatible
|
some of these rely on libraries which are not MIT-compatible
|
||||||
|
|
||||||
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
|
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
|
||||||
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
|
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
|
||||||
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
|
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
|
||||||
|
|
||||||
|
these do not have any problematic dependencies:
|
||||||
|
|
||||||
|
* [cksum.py](./cksum.py) computes various checksums
|
||||||
|
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
|
||||||
|
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
|
||||||
|
|
||||||
|
|
||||||
# dependencies
|
# dependencies
|
||||||
|
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ def det(tf):
|
|||||||
"-v", "fatal",
|
"-v", "fatal",
|
||||||
"-ss", "13",
|
"-ss", "13",
|
||||||
"-y", "-i", fsenc(sys.argv[1]),
|
"-y", "-i", fsenc(sys.argv[1]),
|
||||||
|
"-map", "0:a:0",
|
||||||
"-ac", "1",
|
"-ac", "1",
|
||||||
"-ar", "22050",
|
"-ar", "22050",
|
||||||
"-t", "300",
|
"-t", "300",
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ def det(tf):
|
|||||||
"-hide_banner",
|
"-hide_banner",
|
||||||
"-v", "fatal",
|
"-v", "fatal",
|
||||||
"-y", "-i", fsenc(sys.argv[1]),
|
"-y", "-i", fsenc(sys.argv[1]),
|
||||||
|
"-map", "0:a:0",
|
||||||
"-t", "300",
|
"-t", "300",
|
||||||
"-sample_fmt", "s16",
|
"-sample_fmt", "s16",
|
||||||
tf
|
tf
|
||||||
|
|||||||
89
bin/mtag/cksum.py
Executable file
89
bin/mtag/cksum.py
Executable file
@@ -0,0 +1,89 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import zlib
|
||||||
|
import struct
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
try:
|
||||||
|
from copyparty.util import fsenc
|
||||||
|
except:
|
||||||
|
|
||||||
|
def fsenc(p):
|
||||||
|
return p
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
calculates various checksums for uploads,
|
||||||
|
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
|
||||||
|
# b suffix = base64 encoded
|
||||||
|
# slash = truncate to n bits
|
||||||
|
|
||||||
|
known = {
|
||||||
|
"md5": hashlib.md5,
|
||||||
|
"sha1": hashlib.sha1,
|
||||||
|
"sha256": hashlib.sha256,
|
||||||
|
"sha512": hashlib.sha512,
|
||||||
|
}
|
||||||
|
config = config.split()
|
||||||
|
hashers = {
|
||||||
|
k: v()
|
||||||
|
for k, v in known.items()
|
||||||
|
if k in [x.split("/")[0].rstrip("b") for x in known]
|
||||||
|
}
|
||||||
|
crc32 = 0 if "crc32" in config else None
|
||||||
|
|
||||||
|
with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
|
||||||
|
while True:
|
||||||
|
buf = f.read(64 * 1024)
|
||||||
|
if not buf:
|
||||||
|
break
|
||||||
|
|
||||||
|
for x in hashers.values():
|
||||||
|
x.update(buf)
|
||||||
|
|
||||||
|
if crc32 is not None:
|
||||||
|
crc32 = zlib.crc32(buf, crc32)
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
for s in config:
|
||||||
|
alg = s.split("/")[0]
|
||||||
|
b64 = alg.endswith("b")
|
||||||
|
alg = alg.rstrip("b")
|
||||||
|
if alg in hashers:
|
||||||
|
v = hashers[alg].digest()
|
||||||
|
elif alg == "crc32":
|
||||||
|
v = crc32
|
||||||
|
if v < 0:
|
||||||
|
v &= 2 ** 32 - 1
|
||||||
|
v = struct.pack(">L", v)
|
||||||
|
else:
|
||||||
|
raise Exception("what is {}".format(s))
|
||||||
|
|
||||||
|
if "/" in s:
|
||||||
|
v = v[: int(int(s.split("/")[1]) / 8)]
|
||||||
|
|
||||||
|
if b64:
|
||||||
|
v = base64.b64encode(v).decode("ascii").rstrip("=")
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
v = v.hex()
|
||||||
|
except:
|
||||||
|
import binascii
|
||||||
|
|
||||||
|
v = binascii.hexlify(v)
|
||||||
|
|
||||||
|
ret[s] = v
|
||||||
|
|
||||||
|
print(json.dumps(ret, indent=4))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -4,7 +4,8 @@ set -e
|
|||||||
|
|
||||||
# install dependencies for audio-*.py
|
# install dependencies for audio-*.py
|
||||||
#
|
#
|
||||||
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
|
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
|
||||||
|
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||||
# win64: requires msys2-mingw64 environment
|
# win64: requires msys2-mingw64 environment
|
||||||
# macos: requires macports
|
# macos: requires macports
|
||||||
#
|
#
|
||||||
|
|||||||
85
bin/mtag/wget.py
Normal file
85
bin/mtag/wget.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""
|
||||||
|
use copyparty as a file downloader by POSTing URLs as
|
||||||
|
application/x-www-form-urlencoded (for example using the
|
||||||
|
message/pager function on the website)
|
||||||
|
|
||||||
|
example copyparty config to use this:
|
||||||
|
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py
|
||||||
|
|
||||||
|
explained:
|
||||||
|
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
|
||||||
|
enable file analysis on upload (e2ts),
|
||||||
|
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
|
||||||
|
do this on all uploads with the file extension "bin",
|
||||||
|
t300 = 300 seconds timeout for each dwonload,
|
||||||
|
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||||
|
|
||||||
|
PS: this requires e2ts to be functional,
|
||||||
|
meaning you need to do at least one of these:
|
||||||
|
* apt install ffmpeg
|
||||||
|
* pip3 install mutagen
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import subprocess as sp
|
||||||
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
fp = os.path.abspath(sys.argv[1])
|
||||||
|
fdir = os.path.dirname(fp)
|
||||||
|
fname = os.path.basename(fp)
|
||||||
|
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||||
|
raise Exception("not a post file")
|
||||||
|
|
||||||
|
buf = b""
|
||||||
|
with open(fp, "rb") as f:
|
||||||
|
while True:
|
||||||
|
b = f.read(4096)
|
||||||
|
buf += b
|
||||||
|
if len(buf) > 4096:
|
||||||
|
raise Exception("too big")
|
||||||
|
|
||||||
|
if not b:
|
||||||
|
break
|
||||||
|
|
||||||
|
if not buf:
|
||||||
|
raise Exception("file is empty")
|
||||||
|
|
||||||
|
buf = unquote(buf.replace(b"+", b" "))
|
||||||
|
url = buf.decode("utf-8")
|
||||||
|
|
||||||
|
if not url.startswith("msg="):
|
||||||
|
raise Exception("does not start with msg=")
|
||||||
|
|
||||||
|
url = url[4:]
|
||||||
|
if "://" not in url:
|
||||||
|
url = "https://" + url
|
||||||
|
|
||||||
|
os.chdir(fdir)
|
||||||
|
|
||||||
|
name = url.split("?")[0].split("/")[-1]
|
||||||
|
tfn = "-- DOWNLOADING " + name
|
||||||
|
open(tfn, "wb").close()
|
||||||
|
|
||||||
|
cmd = ["wget", "--trust-server-names", "--", url]
|
||||||
|
|
||||||
|
try:
|
||||||
|
sp.check_call(cmd)
|
||||||
|
|
||||||
|
# OPTIONAL:
|
||||||
|
# on success, delete the .bin file which contains the URL
|
||||||
|
os.unlink(fp)
|
||||||
|
except:
|
||||||
|
open("-- FAILED TO DONWLOAD " + name, "wb").close()
|
||||||
|
|
||||||
|
os.unlink(tfn)
|
||||||
|
print(url)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -17,7 +17,7 @@ it's probably best to use this through a config file; see res/yt-ipr.conf
|
|||||||
|
|
||||||
but if you want to use plain arguments instead then:
|
but if you want to use plain arguments instead then:
|
||||||
-v srv/ytm:ytm:w:rw,ed
|
-v srv/ytm:ytm:w:rw,ed
|
||||||
:c,e2ts:c,e2dsa
|
:c,e2ts,e2dsa
|
||||||
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
|
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
|
||||||
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
|
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
|
||||||
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
|
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
|
||||||
|
|||||||
800
bin/up2k.py
Executable file
800
bin/up2k.py
Executable file
@@ -0,0 +1,800 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
"""
|
||||||
|
up2k.py: upload to copyparty
|
||||||
|
2021-10-31, v0.11, ed <irc.rizon.net>, MIT-Licensed
|
||||||
|
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||||
|
|
||||||
|
- dependencies: requests
|
||||||
|
- supports python 2.6, 2.7, and 3.3 through 3.10
|
||||||
|
|
||||||
|
- almost zero error-handling
|
||||||
|
- but if something breaks just try again and it'll autoresume
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import stat
|
||||||
|
import math
|
||||||
|
import time
|
||||||
|
import atexit
|
||||||
|
import signal
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import argparse
|
||||||
|
import platform
|
||||||
|
import threading
|
||||||
|
import requests
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
# from copyparty/__init__.py
|
||||||
|
PY2 = sys.version_info[0] == 2
|
||||||
|
if PY2:
|
||||||
|
from Queue import Queue
|
||||||
|
from urllib import unquote
|
||||||
|
from urllib import quote
|
||||||
|
|
||||||
|
sys.dont_write_bytecode = True
|
||||||
|
bytes = str
|
||||||
|
else:
|
||||||
|
from queue import Queue
|
||||||
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
from urllib.parse import quote_from_bytes as quote
|
||||||
|
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
VT100 = platform.system() != "Windows"
|
||||||
|
|
||||||
|
|
||||||
|
req_ses = requests.Session()
|
||||||
|
|
||||||
|
|
||||||
|
class File(object):
|
||||||
|
"""an up2k upload task; represents a single file"""
|
||||||
|
|
||||||
|
def __init__(self, top, rel, size, lmod):
|
||||||
|
self.top = top # type: bytes
|
||||||
|
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||||
|
self.size = size # type: int
|
||||||
|
self.lmod = lmod # type: float
|
||||||
|
|
||||||
|
self.abs = os.path.join(top, rel) # type: bytes
|
||||||
|
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||||
|
|
||||||
|
# set by get_hashlist
|
||||||
|
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||||
|
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||||
|
|
||||||
|
# set by handshake
|
||||||
|
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||||
|
self.wark = None # type: str
|
||||||
|
self.url = None # type: str
|
||||||
|
|
||||||
|
# set by upload
|
||||||
|
self.up_b = 0 # type: int
|
||||||
|
self.up_c = 0 # type: int
|
||||||
|
|
||||||
|
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||||
|
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||||
|
|
||||||
|
|
||||||
|
class FileSlice(object):
|
||||||
|
"""file-like object providing a fixed window into a file"""
|
||||||
|
|
||||||
|
def __init__(self, file, cid):
|
||||||
|
# type: (File, str) -> FileSlice
|
||||||
|
|
||||||
|
self.car, self.len = file.kchunks[cid]
|
||||||
|
self.cdr = self.car + self.len
|
||||||
|
self.ofs = 0 # type: int
|
||||||
|
self.f = open(file.abs, "rb", 512 * 1024)
|
||||||
|
self.f.seek(self.car)
|
||||||
|
|
||||||
|
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||||
|
# IOBase, RawIOBase, BufferedIOBase
|
||||||
|
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||||
|
try:
|
||||||
|
for fun in funs.split():
|
||||||
|
setattr(self, fun, getattr(self.f, fun))
|
||||||
|
except:
|
||||||
|
pass # py27 probably
|
||||||
|
|
||||||
|
def tell(self):
|
||||||
|
return self.ofs
|
||||||
|
|
||||||
|
def seek(self, ofs, wh=0):
|
||||||
|
if wh == 1:
|
||||||
|
ofs = self.ofs + ofs
|
||||||
|
elif wh == 2:
|
||||||
|
ofs = self.len + ofs # provided ofs is negative
|
||||||
|
|
||||||
|
if ofs < 0:
|
||||||
|
ofs = 0
|
||||||
|
elif ofs >= self.len:
|
||||||
|
ofs = self.len - 1
|
||||||
|
|
||||||
|
self.ofs = ofs
|
||||||
|
self.f.seek(self.car + ofs)
|
||||||
|
|
||||||
|
def read(self, sz):
|
||||||
|
sz = min(sz, self.len - self.ofs)
|
||||||
|
ret = self.f.read(sz)
|
||||||
|
self.ofs += len(ret)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
_print = print
|
||||||
|
|
||||||
|
|
||||||
|
def eprint(*a, **ka):
|
||||||
|
ka["file"] = sys.stderr
|
||||||
|
ka["end"] = ""
|
||||||
|
if not PY2:
|
||||||
|
ka["flush"] = True
|
||||||
|
|
||||||
|
_print(*a, **ka)
|
||||||
|
if PY2 or not VT100:
|
||||||
|
sys.stderr.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def flushing_print(*a, **ka):
|
||||||
|
_print(*a, **ka)
|
||||||
|
if "flush" not in ka:
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
|
|
||||||
|
if not VT100:
|
||||||
|
print = flushing_print
|
||||||
|
|
||||||
|
|
||||||
|
def termsize():
|
||||||
|
import os
|
||||||
|
|
||||||
|
env = os.environ
|
||||||
|
|
||||||
|
def ioctl_GWINSZ(fd):
|
||||||
|
try:
|
||||||
|
import fcntl, termios, struct, os
|
||||||
|
|
||||||
|
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
return cr
|
||||||
|
|
||||||
|
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||||
|
if not cr:
|
||||||
|
try:
|
||||||
|
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||||
|
cr = ioctl_GWINSZ(fd)
|
||||||
|
os.close(fd)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
if not cr:
|
||||||
|
try:
|
||||||
|
cr = (env["LINES"], env["COLUMNS"])
|
||||||
|
except:
|
||||||
|
cr = (25, 80)
|
||||||
|
return int(cr[1]), int(cr[0])
|
||||||
|
|
||||||
|
|
||||||
|
class CTermsize(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.ev = False
|
||||||
|
self.margin = None
|
||||||
|
self.g = None
|
||||||
|
self.w, self.h = termsize()
|
||||||
|
|
||||||
|
try:
|
||||||
|
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
thr = threading.Thread(target=self.worker)
|
||||||
|
thr.daemon = True
|
||||||
|
thr.start()
|
||||||
|
|
||||||
|
def worker(self):
|
||||||
|
while True:
|
||||||
|
time.sleep(0.5)
|
||||||
|
if not self.ev:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.ev = False
|
||||||
|
self.w, self.h = termsize()
|
||||||
|
|
||||||
|
if self.margin is not None:
|
||||||
|
self.scroll_region(self.margin)
|
||||||
|
|
||||||
|
def ev_sig(self, *a, **ka):
|
||||||
|
self.ev = True
|
||||||
|
|
||||||
|
def scroll_region(self, margin):
|
||||||
|
self.margin = margin
|
||||||
|
if margin is None:
|
||||||
|
self.g = None
|
||||||
|
eprint("\033[s\033[r\033[u")
|
||||||
|
else:
|
||||||
|
self.g = 1 + self.h - margin
|
||||||
|
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||||
|
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||||
|
|
||||||
|
|
||||||
|
ss = CTermsize()
|
||||||
|
|
||||||
|
|
||||||
|
def statdir(top):
|
||||||
|
"""non-recursive listing of directory contents, along with stat() info"""
|
||||||
|
if hasattr(os, "scandir"):
|
||||||
|
with os.scandir(top) as dh:
|
||||||
|
for fh in dh:
|
||||||
|
yield [os.path.join(top, fh.name), fh.stat()]
|
||||||
|
else:
|
||||||
|
for name in os.listdir(top):
|
||||||
|
abspath = os.path.join(top, name)
|
||||||
|
yield [abspath, os.stat(abspath)]
|
||||||
|
|
||||||
|
|
||||||
|
def walkdir(top):
|
||||||
|
"""recursive statdir"""
|
||||||
|
for ap, inf in sorted(statdir(top)):
|
||||||
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
|
for x in walkdir(ap):
|
||||||
|
yield x
|
||||||
|
else:
|
||||||
|
yield ap, inf
|
||||||
|
|
||||||
|
|
||||||
|
def walkdirs(tops):
|
||||||
|
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||||
|
sep = "{0}".format(os.sep).encode("ascii")
|
||||||
|
for top in tops:
|
||||||
|
if top[-1:] == sep:
|
||||||
|
stop = top.rstrip(sep)
|
||||||
|
else:
|
||||||
|
stop = os.path.dirname(top)
|
||||||
|
|
||||||
|
if os.path.isdir(top):
|
||||||
|
for ap, inf in walkdir(top):
|
||||||
|
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||||
|
else:
|
||||||
|
d, n = top.rsplit(sep, 1)
|
||||||
|
yield d, n, os.stat(top)
|
||||||
|
|
||||||
|
|
||||||
|
# mostly from copyparty/util.py
|
||||||
|
def quotep(btxt):
|
||||||
|
quot1 = quote(btxt, safe=b"/")
|
||||||
|
if not PY2:
|
||||||
|
quot1 = quot1.encode("ascii")
|
||||||
|
|
||||||
|
return quot1.replace(b" ", b"+")
|
||||||
|
|
||||||
|
|
||||||
|
# from copyparty/util.py
|
||||||
|
def humansize(sz, terse=False):
|
||||||
|
"""picks a sensible unit for the given extent"""
|
||||||
|
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||||
|
if sz < 1024:
|
||||||
|
break
|
||||||
|
|
||||||
|
sz /= 1024.0
|
||||||
|
|
||||||
|
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||||
|
|
||||||
|
if not terse:
|
||||||
|
return ret
|
||||||
|
|
||||||
|
return ret.replace("iB", "").replace(" ", "")
|
||||||
|
|
||||||
|
|
||||||
|
# from copyparty/up2k.py
|
||||||
|
def up2k_chunksize(filesize):
|
||||||
|
"""gives The correct chunksize for up2k hashing"""
|
||||||
|
chunksize = 1024 * 1024
|
||||||
|
stepsize = 512 * 1024
|
||||||
|
while True:
|
||||||
|
for mul in [1, 2]:
|
||||||
|
nchunks = math.ceil(filesize * 1.0 / chunksize)
|
||||||
|
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
|
||||||
|
return chunksize
|
||||||
|
|
||||||
|
chunksize += stepsize
|
||||||
|
stepsize *= mul
|
||||||
|
|
||||||
|
|
||||||
|
# mostly from copyparty/up2k.py
|
||||||
|
def get_hashlist(file, pcb):
|
||||||
|
# type: (File, any) -> None
|
||||||
|
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||||
|
|
||||||
|
chunk_sz = up2k_chunksize(file.size)
|
||||||
|
file_rem = file.size
|
||||||
|
file_ofs = 0
|
||||||
|
ret = []
|
||||||
|
with open(file.abs, "rb", 512 * 1024) as f:
|
||||||
|
while file_rem > 0:
|
||||||
|
hashobj = hashlib.sha512()
|
||||||
|
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||||
|
while chunk_rem > 0:
|
||||||
|
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||||
|
if not buf:
|
||||||
|
raise Exception("EOF at " + str(f.tell()))
|
||||||
|
|
||||||
|
hashobj.update(buf)
|
||||||
|
chunk_rem -= len(buf)
|
||||||
|
|
||||||
|
digest = hashobj.digest()[:33]
|
||||||
|
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||||
|
|
||||||
|
ret.append([digest, file_ofs, chunk_sz])
|
||||||
|
file_ofs += chunk_sz
|
||||||
|
file_rem -= chunk_sz
|
||||||
|
|
||||||
|
if pcb:
|
||||||
|
pcb(file, file_ofs)
|
||||||
|
|
||||||
|
file.cids = ret
|
||||||
|
file.kchunks = {}
|
||||||
|
for k, v1, v2 in ret:
|
||||||
|
file.kchunks[k] = [v1, v2]
|
||||||
|
|
||||||
|
|
||||||
|
def handshake(req_ses, url, file, pw, search):
|
||||||
|
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||||
|
"""
|
||||||
|
performs a handshake with the server; reply is:
|
||||||
|
if search, a list of search results
|
||||||
|
otherwise, a list of chunks to upload
|
||||||
|
"""
|
||||||
|
|
||||||
|
req = {
|
||||||
|
"hash": [x[0] for x in file.cids],
|
||||||
|
"name": file.name,
|
||||||
|
"lmod": file.lmod,
|
||||||
|
"size": file.size,
|
||||||
|
}
|
||||||
|
if search:
|
||||||
|
req["srch"] = 1
|
||||||
|
|
||||||
|
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||||
|
if pw:
|
||||||
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
|
if file.url:
|
||||||
|
url = file.url
|
||||||
|
elif b"/" in file.rel:
|
||||||
|
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
r = req_ses.post(url, headers=headers, json=req)
|
||||||
|
break
|
||||||
|
except:
|
||||||
|
eprint("handshake failed, retry...\n")
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
try:
|
||||||
|
r = r.json()
|
||||||
|
except:
|
||||||
|
raise Exception(r.text)
|
||||||
|
|
||||||
|
if search:
|
||||||
|
return r["hits"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
pre, url = url.split("://")
|
||||||
|
pre += "://"
|
||||||
|
except:
|
||||||
|
pre = ""
|
||||||
|
|
||||||
|
file.url = pre + url.split("/")[0] + r["purl"]
|
||||||
|
file.name = r["name"]
|
||||||
|
file.wark = r["wark"]
|
||||||
|
|
||||||
|
return r["hash"]
|
||||||
|
|
||||||
|
|
||||||
|
def upload(req_ses, file, cid, pw):
|
||||||
|
# type: (requests.Session, File, str, any) -> None
|
||||||
|
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"X-Up2k-Hash": cid,
|
||||||
|
"X-Up2k-Wark": file.wark,
|
||||||
|
"Content-Type": "application/octet-stream",
|
||||||
|
}
|
||||||
|
if pw:
|
||||||
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
|
f = FileSlice(file, cid)
|
||||||
|
try:
|
||||||
|
r = req_ses.post(file.url, headers=headers, data=f)
|
||||||
|
if not r:
|
||||||
|
raise Exception(repr(r))
|
||||||
|
|
||||||
|
_ = r.content
|
||||||
|
finally:
|
||||||
|
f.f.close()
|
||||||
|
|
||||||
|
|
||||||
|
class Daemon(threading.Thread):
|
||||||
|
def __init__(self, *a, **ka):
|
||||||
|
threading.Thread.__init__(self, *a, **ka)
|
||||||
|
self.daemon = True
|
||||||
|
|
||||||
|
|
||||||
|
class Ctl(object):
|
||||||
|
"""
|
||||||
|
this will be the coordinator which runs everything in parallel
|
||||||
|
(hashing, handshakes, uploads) but right now it's p dumb
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, ar):
|
||||||
|
self.ar = ar
|
||||||
|
ar.files = [
|
||||||
|
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||||
|
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||||
|
for x in ar.files
|
||||||
|
]
|
||||||
|
ar.url = ar.url.rstrip("/") + "/"
|
||||||
|
if "://" not in ar.url:
|
||||||
|
ar.url = "http://" + ar.url
|
||||||
|
|
||||||
|
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||||
|
|
||||||
|
nfiles = 0
|
||||||
|
nbytes = 0
|
||||||
|
for _, _, inf in walkdirs(ar.files):
|
||||||
|
nfiles += 1
|
||||||
|
nbytes += inf.st_size
|
||||||
|
|
||||||
|
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||||
|
self.nfiles = nfiles
|
||||||
|
self.nbytes = nbytes
|
||||||
|
|
||||||
|
if ar.td:
|
||||||
|
requests.packages.urllib3.disable_warnings()
|
||||||
|
req_ses.verify = False
|
||||||
|
if ar.te:
|
||||||
|
req_ses.verify = ar.te
|
||||||
|
|
||||||
|
self.filegen = walkdirs(ar.files)
|
||||||
|
if ar.safe:
|
||||||
|
self.safe()
|
||||||
|
else:
|
||||||
|
self.fancy()
|
||||||
|
|
||||||
|
def safe(self):
|
||||||
|
"""minimal basic slow boring fallback codepath"""
|
||||||
|
search = self.ar.s
|
||||||
|
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||||
|
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||||
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
|
|
||||||
|
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||||
|
get_hashlist(file, None)
|
||||||
|
|
||||||
|
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||||
|
while True:
|
||||||
|
print(" hs...")
|
||||||
|
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||||
|
if search:
|
||||||
|
if hs:
|
||||||
|
for hit in hs:
|
||||||
|
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||||
|
else:
|
||||||
|
print(" NOT found")
|
||||||
|
break
|
||||||
|
|
||||||
|
file.ucids = hs
|
||||||
|
if not hs:
|
||||||
|
break
|
||||||
|
|
||||||
|
print("{0} {1}".format(self.nfiles - nf, upath))
|
||||||
|
ncs = len(hs)
|
||||||
|
for nc, cid in enumerate(hs):
|
||||||
|
print(" {0} up {1}".format(ncs - nc, cid))
|
||||||
|
upload(req_ses, file, cid, self.ar.a)
|
||||||
|
|
||||||
|
print(" ok!")
|
||||||
|
|
||||||
|
def fancy(self):
|
||||||
|
self.hash_f = 0
|
||||||
|
self.hash_c = 0
|
||||||
|
self.hash_b = 0
|
||||||
|
self.up_f = 0
|
||||||
|
self.up_c = 0
|
||||||
|
self.up_b = 0
|
||||||
|
self.up_br = 0
|
||||||
|
self.hasher_busy = 1
|
||||||
|
self.handshaker_busy = 0
|
||||||
|
self.uploader_busy = 0
|
||||||
|
|
||||||
|
self.t0 = time.time()
|
||||||
|
self.t0_up = None
|
||||||
|
self.spd = None
|
||||||
|
|
||||||
|
self.mutex = threading.Lock()
|
||||||
|
self.q_handshake = Queue() # type: Queue[File]
|
||||||
|
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||||
|
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||||
|
|
||||||
|
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
|
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
|
if VT100:
|
||||||
|
atexit.register(self.cleanup_vt100)
|
||||||
|
ss.scroll_region(3)
|
||||||
|
|
||||||
|
Daemon(target=self.hasher).start()
|
||||||
|
for _ in range(self.ar.j):
|
||||||
|
Daemon(target=self.handshaker).start()
|
||||||
|
Daemon(target=self.uploader).start()
|
||||||
|
|
||||||
|
idles = 0
|
||||||
|
while idles < 3:
|
||||||
|
time.sleep(0.07)
|
||||||
|
with self.mutex:
|
||||||
|
if (
|
||||||
|
self.q_handshake.empty()
|
||||||
|
and self.q_upload.empty()
|
||||||
|
and not self.hasher_busy
|
||||||
|
and not self.handshaker_busy
|
||||||
|
and not self.uploader_busy
|
||||||
|
):
|
||||||
|
idles += 1
|
||||||
|
else:
|
||||||
|
idles = 0
|
||||||
|
|
||||||
|
if VT100:
|
||||||
|
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||||
|
txt = "\033[s\033[{0}H".format(ss.g)
|
||||||
|
for y, k, st, f in [
|
||||||
|
[0, "hash", self.st_hash, self.hash_f],
|
||||||
|
[1, "send", self.st_up, self.up_f],
|
||||||
|
]:
|
||||||
|
txt += "\033[{0}H{1}:".format(ss.g + y, k)
|
||||||
|
file, arg = st
|
||||||
|
if not file:
|
||||||
|
txt += " {0}\033[K".format(arg)
|
||||||
|
else:
|
||||||
|
if y:
|
||||||
|
p = 100 * file.up_b / file.size
|
||||||
|
else:
|
||||||
|
p = 100 * arg / file.size
|
||||||
|
|
||||||
|
name = file.abs.decode("utf-8", "replace")[-maxlen:]
|
||||||
|
if "/" in name:
|
||||||
|
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
|
||||||
|
|
||||||
|
m = "{0:6.1f}% {1} {2}\033[K"
|
||||||
|
txt += m.format(p, self.nfiles - f, name)
|
||||||
|
|
||||||
|
txt += "\033[{0}H ".format(ss.g + 2)
|
||||||
|
else:
|
||||||
|
txt = " "
|
||||||
|
|
||||||
|
if not self.up_br:
|
||||||
|
spd = self.hash_b / (time.time() - self.t0)
|
||||||
|
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||||
|
else:
|
||||||
|
spd = self.up_br / (time.time() - self.t0_up)
|
||||||
|
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||||
|
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||||
|
|
||||||
|
spd = humansize(spd)
|
||||||
|
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||||
|
left = humansize(self.nbytes - self.up_b)
|
||||||
|
tail = "\033[K\033[u" if VT100 else "\r"
|
||||||
|
|
||||||
|
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
|
||||||
|
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
|
||||||
|
|
||||||
|
def cleanup_vt100(self):
|
||||||
|
ss.scroll_region(None)
|
||||||
|
eprint("\033[J\033]0;\033\\")
|
||||||
|
|
||||||
|
def cb_hasher(self, file, ofs):
|
||||||
|
self.st_hash = [file, ofs]
|
||||||
|
|
||||||
|
def hasher(self):
|
||||||
|
prd = None
|
||||||
|
ls = {}
|
||||||
|
for top, rel, inf in self.filegen:
|
||||||
|
if self.ar.z:
|
||||||
|
rd = os.path.dirname(rel)
|
||||||
|
if prd != rd:
|
||||||
|
prd = rd
|
||||||
|
headers = {}
|
||||||
|
if self.ar.a:
|
||||||
|
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
|
||||||
|
|
||||||
|
ls = {}
|
||||||
|
try:
|
||||||
|
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||||
|
r = req_ses.get(
|
||||||
|
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||||
|
headers=headers,
|
||||||
|
)
|
||||||
|
for f in r.json()["files"]:
|
||||||
|
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||||
|
ls[unquote(rfn)] = f
|
||||||
|
except:
|
||||||
|
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||||
|
|
||||||
|
rf = ls.get(os.path.basename(rel), None)
|
||||||
|
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||||
|
self.nfiles -= 1
|
||||||
|
self.nbytes -= inf.st_size
|
||||||
|
continue
|
||||||
|
|
||||||
|
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||||
|
while True:
|
||||||
|
with self.mutex:
|
||||||
|
if (
|
||||||
|
self.hash_b - self.up_b < 1024 * 1024 * 128
|
||||||
|
and self.hash_c - self.up_c < 64
|
||||||
|
and (
|
||||||
|
not self.ar.nh
|
||||||
|
or (
|
||||||
|
self.q_upload.empty()
|
||||||
|
and self.q_handshake.empty()
|
||||||
|
and not self.uploader_busy
|
||||||
|
)
|
||||||
|
)
|
||||||
|
):
|
||||||
|
break
|
||||||
|
|
||||||
|
time.sleep(0.05)
|
||||||
|
|
||||||
|
get_hashlist(file, self.cb_hasher)
|
||||||
|
with self.mutex:
|
||||||
|
self.hash_f += 1
|
||||||
|
self.hash_c += len(file.cids)
|
||||||
|
self.hash_b += file.size
|
||||||
|
|
||||||
|
self.q_handshake.put(file)
|
||||||
|
|
||||||
|
self.hasher_busy = 0
|
||||||
|
self.st_hash = [None, "(finished)"]
|
||||||
|
|
||||||
|
def handshaker(self):
|
||||||
|
search = self.ar.s
|
||||||
|
q = self.q_handshake
|
||||||
|
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||||
|
while True:
|
||||||
|
file = q.get()
|
||||||
|
if not file:
|
||||||
|
if q == self.q_handshake:
|
||||||
|
q = self.q_recheck
|
||||||
|
q.put(None)
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.q_upload.put(None)
|
||||||
|
break
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.handshaker_busy += 1
|
||||||
|
|
||||||
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
|
|
||||||
|
try:
|
||||||
|
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||||
|
except Exception as ex:
|
||||||
|
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
|
||||||
|
self.q_recheck.put(file)
|
||||||
|
hs = []
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if search:
|
||||||
|
if hs:
|
||||||
|
for hit in hs:
|
||||||
|
m = "found: {0}\n {1}{2}\n"
|
||||||
|
print(m.format(upath, burl, hit["rp"]), end="")
|
||||||
|
else:
|
||||||
|
print("NOT found: {0}\n".format(upath), end="")
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.up_f += 1
|
||||||
|
self.up_c += len(file.cids)
|
||||||
|
self.up_b += file.size
|
||||||
|
self.handshaker_busy -= 1
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
if not hs:
|
||||||
|
# all chunks done
|
||||||
|
self.up_f += 1
|
||||||
|
self.up_c += len(file.cids) - file.up_c
|
||||||
|
self.up_b += file.size - file.up_b
|
||||||
|
|
||||||
|
if hs and file.up_c:
|
||||||
|
# some chunks failed
|
||||||
|
self.up_c -= len(hs)
|
||||||
|
file.up_c -= len(hs)
|
||||||
|
for cid in hs:
|
||||||
|
sz = file.kchunks[cid][1]
|
||||||
|
self.up_b -= sz
|
||||||
|
file.up_b -= sz
|
||||||
|
|
||||||
|
file.ucids = hs
|
||||||
|
self.handshaker_busy -= 1
|
||||||
|
|
||||||
|
if not hs:
|
||||||
|
kw = "uploaded" if file.up_b else " found"
|
||||||
|
print("{0} {1}".format(kw, upath))
|
||||||
|
for cid in hs:
|
||||||
|
self.q_upload.put([file, cid])
|
||||||
|
|
||||||
|
def uploader(self):
|
||||||
|
while True:
|
||||||
|
task = self.q_upload.get()
|
||||||
|
if not task:
|
||||||
|
self.st_up = [None, "(finished)"]
|
||||||
|
break
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.uploader_busy += 1
|
||||||
|
self.t0_up = self.t0_up or time.time()
|
||||||
|
|
||||||
|
file, cid = task
|
||||||
|
try:
|
||||||
|
upload(req_ses, file, cid, self.ar.a)
|
||||||
|
except:
|
||||||
|
eprint("upload failed, retry...\n")
|
||||||
|
pass # handshake will fix it
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
sz = file.kchunks[cid][1]
|
||||||
|
file.ucids = [x for x in file.ucids if x != cid]
|
||||||
|
if not file.ucids:
|
||||||
|
self.q_handshake.put(file)
|
||||||
|
|
||||||
|
self.st_up = [file, cid]
|
||||||
|
file.up_b += sz
|
||||||
|
self.up_b += sz
|
||||||
|
self.up_br += sz
|
||||||
|
file.up_c += 1
|
||||||
|
self.up_c += 1
|
||||||
|
self.uploader_busy -= 1
|
||||||
|
|
||||||
|
|
||||||
|
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||||
|
if not VT100:
|
||||||
|
os.system("rem") # enables colors
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||||
|
NOTE:
|
||||||
|
source file/folder selection uses rsync syntax, meaning that:
|
||||||
|
"foo" uploads the entire folder to URL/foo/
|
||||||
|
"foo/" uploads the CONTENTS of the folder into URL/
|
||||||
|
""")
|
||||||
|
|
||||||
|
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||||
|
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||||
|
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||||
|
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||||
|
ap = app.add_argument_group("performance tweaks")
|
||||||
|
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||||
|
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||||
|
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||||
|
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||||
|
ap = app.add_argument_group("tls")
|
||||||
|
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||||
|
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||||
|
# fmt: on
|
||||||
|
|
||||||
|
Ctl(app.parse_args())
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
24
bin/up2k.sh
Executable file → Normal file
24
bin/up2k.sh
Executable file → Normal file
@@ -8,7 +8,7 @@ set -e
|
|||||||
##
|
##
|
||||||
## config
|
## config
|
||||||
|
|
||||||
datalen=$((2*1024*1024*1024))
|
datalen=$((128*1024*1024))
|
||||||
target=127.0.0.1
|
target=127.0.0.1
|
||||||
posturl=/inc
|
posturl=/inc
|
||||||
passwd=wark
|
passwd=wark
|
||||||
@@ -37,10 +37,10 @@ gendata() {
|
|||||||
# pipe a chunk, get the base64 checksum
|
# pipe a chunk, get the base64 checksum
|
||||||
gethash() {
|
gethash() {
|
||||||
printf $(
|
printf $(
|
||||||
sha512sum | cut -c-64 |
|
sha512sum | cut -c-66 |
|
||||||
sed -r 's/ .*//;s/(..)/\\x\1/g'
|
sed -r 's/ .*//;s/(..)/\\x\1/g'
|
||||||
) |
|
) |
|
||||||
base64 -w0 | cut -c-43 |
|
base64 -w0 | cut -c-44 |
|
||||||
tr '+/' '-_'
|
tr '+/' '-_'
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -123,7 +123,7 @@ printf '\033[36m'
|
|||||||
{
|
{
|
||||||
{
|
{
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
POST $posturl/handshake.php HTTP/1.1
|
POST $posturl/ HTTP/1.1
|
||||||
Connection: Close
|
Connection: Close
|
||||||
Cookie: cppwd=$passwd
|
Cookie: cppwd=$passwd
|
||||||
Content-Type: text/plain;charset=UTF-8
|
Content-Type: text/plain;charset=UTF-8
|
||||||
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
|
|||||||
##
|
##
|
||||||
## wait for signal to continue
|
## wait for signal to continue
|
||||||
|
|
||||||
w8=/dev/shm/$salt.w8
|
true || {
|
||||||
touch $w8
|
w8=/dev/shm/$salt.w8
|
||||||
|
touch $w8
|
||||||
|
|
||||||
echo "ready; rm -f $w8"
|
echo "ready; rm -f $w8"
|
||||||
|
|
||||||
while [ -e $w8 ]; do
|
while [ -e $w8 ]; do
|
||||||
sleep 0.2
|
sleep 0.2
|
||||||
done
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
##
|
##
|
||||||
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
|
|||||||
|
|
||||||
{
|
{
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
POST $posturl/chunkpit.php HTTP/1.1
|
POST $posturl/ HTTP/1.1
|
||||||
Connection: Keep-Alive
|
Connection: Keep-Alive
|
||||||
Cookie: cppwd=$passwd
|
Cookie: cppwd=$passwd
|
||||||
Content-Type: application/octet-stream
|
Content-Type: application/octet-stream
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
|||||||
# OS integration
|
# OS integration
|
||||||
init-scripts to start copyparty as a service
|
init-scripts to start copyparty as a service
|
||||||
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
|
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
|
||||||
|
* [`rc/copyparty`](rc/copyparty) runs sfx normally on freebsd, create a `copyparty` user
|
||||||
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
|
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
|
||||||
* [`openrc/copyparty`](openrc/copyparty)
|
* [`openrc/copyparty`](openrc/copyparty)
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,14 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
# ca-name and server-name
|
# ca-name and server-fqdn
|
||||||
ca_name="$1"
|
ca_name="$1"
|
||||||
srv_name="$2"
|
srv_fqdn="$2"
|
||||||
|
|
||||||
[ -z "$srv_name" ] && {
|
[ -z "$srv_fqdn" ] && {
|
||||||
echo "need arg 1: ca name"
|
echo "need arg 1: ca name"
|
||||||
echo "need arg 2: server name"
|
echo "need arg 2: server fqdn"
|
||||||
|
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -31,15 +32,15 @@ EOF
|
|||||||
gen_srv() {
|
gen_srv() {
|
||||||
(tee /dev/stderr <<EOF
|
(tee /dev/stderr <<EOF
|
||||||
{"key": {"algo":"rsa", "size":4096},
|
{"key": {"algo":"rsa", "size":4096},
|
||||||
"names": [{"O":"$ca_name - $srv_name"}]}
|
"names": [{"O":"$ca_name - $srv_fqdn"}]}
|
||||||
EOF
|
EOF
|
||||||
)|
|
)|
|
||||||
cfssl gencert -ca ca.pem -ca-key ca.key \
|
cfssl gencert -ca ca.pem -ca-key ca.key \
|
||||||
-profile=www -hostname="$srv_name.$ca_name" - |
|
-profile=www -hostname="$srv_fqdn" - |
|
||||||
cfssljson -bare "$srv_name"
|
cfssljson -bare "$srv_fqdn"
|
||||||
|
|
||||||
mv "$srv_name-key.pem" "$srv_name.key"
|
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
|
||||||
rm "$srv_name.csr"
|
rm "$srv_fqdn.csr"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -57,13 +58,13 @@ show() {
|
|||||||
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
||||||
}
|
}
|
||||||
show ca.pem
|
show ca.pem
|
||||||
show "$srv_name.pem"
|
show "$srv_fqdn.pem"
|
||||||
|
|
||||||
|
|
||||||
# write cert into copyparty config
|
# write cert into copyparty config
|
||||||
[ -z "$3" ] || {
|
[ -z "$3" ] || {
|
||||||
mkdir -p ~/.config/copyparty
|
mkdir -p ~/.config/copyparty
|
||||||
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
31
contrib/rc/copyparty
Normal file
31
contrib/rc/copyparty
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
#
|
||||||
|
# PROVIDE: copyparty
|
||||||
|
# REQUIRE: networking
|
||||||
|
# KEYWORD:
|
||||||
|
|
||||||
|
. /etc/rc.subr
|
||||||
|
|
||||||
|
name="copyparty"
|
||||||
|
rcvar="copyparty_enable"
|
||||||
|
copyparty_user="copyparty"
|
||||||
|
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||||
|
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||||
|
pidfile="/var/run/copyparty/${name}.pid"
|
||||||
|
command="/usr/sbin/daemon"
|
||||||
|
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||||
|
|
||||||
|
stop_postcmd="copyparty_shutdown"
|
||||||
|
|
||||||
|
copyparty_shutdown()
|
||||||
|
{
|
||||||
|
if [ -e "${pidfile}" ]; then
|
||||||
|
echo "Stopping supervising daemon."
|
||||||
|
kill -s TERM `cat ${pidfile}`
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
load_rc_config $name
|
||||||
|
: ${copyparty_enable:=no}
|
||||||
|
|
||||||
|
run_rc_command "$1"
|
||||||
@@ -3,10 +3,15 @@
|
|||||||
#
|
#
|
||||||
# installation:
|
# installation:
|
||||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||||
|
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||||
|
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
|
||||||
|
# firewall-cmd --reload
|
||||||
#
|
#
|
||||||
# you may want to:
|
# you may want to:
|
||||||
# change '/usr/bin/python' to another interpreter
|
# change '/usr/bin/python3' to another interpreter
|
||||||
# change '/mnt::rw' to another location or permission-set
|
# change '/mnt::rw' to another location or permission-set
|
||||||
|
# remove '-p 80,443,3923' to only listen on port 3923
|
||||||
|
# add '-i 127.0.0.1' to only allow local connections
|
||||||
#
|
#
|
||||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||||
# accept connections; correctly delaying units depending on copyparty.
|
# accept connections; correctly delaying units depending on copyparty.
|
||||||
@@ -14,11 +19,8 @@
|
|||||||
# python disabling line-buffering, so messages are out-of-order:
|
# python disabling line-buffering, so messages are out-of-order:
|
||||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||||
#
|
#
|
||||||
# enable line-buffering for realtime logging (slight performance cost):
|
# if you remove -q to enable logging, you may also want to remove the
|
||||||
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
|
# following line to enable buffering (slightly better performance):
|
||||||
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
|
|
||||||
# but some systemd versions require this instead (higher performance cost):
|
|
||||||
# inside the [Service] block, add the following line:
|
|
||||||
# Environment=PYTHONUNBUFFERED=x
|
# Environment=PYTHONUNBUFFERED=x
|
||||||
|
|
||||||
[Unit]
|
[Unit]
|
||||||
@@ -27,8 +29,10 @@ Description=copyparty file server
|
|||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=notify
|
||||||
SyslogIdentifier=copyparty
|
SyslogIdentifier=copyparty
|
||||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
Environment=PYTHONUNBUFFERED=x
|
||||||
|
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||||
|
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
@@ -20,7 +20,7 @@ import threading
|
|||||||
import traceback
|
import traceback
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
from .__init__ import E, WINDOWS, VT100, PY2, unicode
|
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
|
||||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
|
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
|
||||||
@@ -104,7 +104,7 @@ def ensure_cert():
|
|||||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||||
if not os.path.exists(cert_cfg):
|
if not os.path.exists(cert_cfg):
|
||||||
shutil.copy2(cert_insec, cert_cfg)
|
shutil.copy(cert_insec, cert_cfg)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if filecmp.cmp(cert_cfg, cert_insec):
|
if filecmp.cmp(cert_cfg, cert_insec):
|
||||||
@@ -186,6 +186,32 @@ def configure_ssl_ciphers(al):
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
|
def args_from_cfg(cfg_path):
|
||||||
|
ret = []
|
||||||
|
skip = False
|
||||||
|
with open(cfg_path, "rb") as f:
|
||||||
|
for ln in [x.decode("utf-8").strip() for x in f]:
|
||||||
|
if not ln:
|
||||||
|
skip = False
|
||||||
|
continue
|
||||||
|
|
||||||
|
if ln.startswith("#"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not ln.startswith("-"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if skip:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
ret.extend(ln.split(" ", 1))
|
||||||
|
except:
|
||||||
|
ret.append(ln)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def sighandler(sig=None, frame=None):
|
def sighandler(sig=None, frame=None):
|
||||||
msg = [""] * 5
|
msg = [""] * 5
|
||||||
for th in threading.enumerate():
|
for th in threading.enumerate():
|
||||||
@@ -203,6 +229,13 @@ def run_argparse(argv, formatter):
|
|||||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||||
|
except:
|
||||||
|
fk_salt = "hunter2"
|
||||||
|
|
||||||
|
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||||
|
|
||||||
sects = [
|
sects = [
|
||||||
[
|
[
|
||||||
"accounts",
|
"accounts",
|
||||||
@@ -211,14 +244,15 @@ def run_argparse(argv, formatter):
|
|||||||
"""
|
"""
|
||||||
-a takes username:password,
|
-a takes username:password,
|
||||||
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
|
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
|
||||||
where "perm" is "accesslevels,username1,username2,..."
|
where "perm" is "permissions,username1,username2,..."
|
||||||
and "volflag" is config flags to set on this volume
|
and "volflag" is config flags to set on this volume
|
||||||
|
|
||||||
list of accesslevels:
|
list of permissions:
|
||||||
"r" (read): list folder contents, download files
|
"r" (read): list folder contents, download files
|
||||||
"w" (write): upload files; need "r" to see the uploads
|
"w" (write): upload files; need "r" to see the uploads
|
||||||
"m" (move): move files and folders; need "w" at destination
|
"m" (move): move files and folders; need "w" at destination
|
||||||
"d" (delete): permanently delete files and folders
|
"d" (delete): permanently delete files and folders
|
||||||
|
"g" (get): download files, but cannot see folder contents
|
||||||
|
|
||||||
too many volflags to list here, see the other sections
|
too many volflags to list here, see the other sections
|
||||||
|
|
||||||
@@ -270,7 +304,8 @@ def run_argparse(argv, formatter):
|
|||||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||||
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
|
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||||
|
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||||
|
|
||||||
@@ -279,6 +314,10 @@ def run_argparse(argv, formatter):
|
|||||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||||
|
|
||||||
|
\033[0mothers:
|
||||||
|
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||||
|
which will then be required at the "g" permission
|
||||||
\033[0m"""
|
\033[0m"""
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
@@ -322,7 +361,7 @@ def run_argparse(argv, formatter):
|
|||||||
ap2 = ap.add_argument_group('general options')
|
ap2 = ap.add_argument_group('general options')
|
||||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
||||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
||||||
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||||
@@ -334,12 +373,17 @@ def run_argparse(argv, formatter):
|
|||||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||||
|
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
|
||||||
|
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
|
||||||
|
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('network options')
|
ap2 = ap.add_argument_group('network options')
|
||||||
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
|
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
|
||||||
|
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||||
|
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||||
@@ -360,10 +404,16 @@ def run_argparse(argv, formatter):
|
|||||||
ap2 = ap.add_argument_group('safety options')
|
ap2 = ap.add_argument_group('safety options')
|
||||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
||||||
|
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
|
||||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||||
|
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||||
|
|
||||||
|
ap2 = ap.add_argument_group('yolo options')
|
||||||
|
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||||
|
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('logging options')
|
ap2 = ap.add_argument_group('logging options')
|
||||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||||
@@ -375,30 +425,37 @@ def run_argparse(argv, formatter):
|
|||||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('admin panel options')
|
ap2 = ap.add_argument_group('admin panel options')
|
||||||
|
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('thumbnail options')
|
ap2 = ap.add_argument_group('thumbnail options')
|
||||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||||
|
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
|
||||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all")
|
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
|
||||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
|
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
|
||||||
|
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
|
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
|
||||||
|
|
||||||
|
ap2 = ap.add_argument_group('transcoding options')
|
||||||
|
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||||
|
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('general db options')
|
ap2 = ap.add_argument_group('general db options')
|
||||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||||
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
|
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||||
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
|
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||||
|
|
||||||
@@ -407,8 +464,8 @@ def run_argparse(argv, formatter):
|
|||||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
||||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
|
||||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
||||||
|
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
|
||||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
|
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
|
||||||
@@ -416,8 +473,10 @@ def run_argparse(argv, formatter):
|
|||||||
default=".vq,.aq,vc,ac,res,.fps")
|
default=".vq,.aq,vc,ac,res,.fps")
|
||||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('appearance options')
|
ap2 = ap.add_argument_group('ui options')
|
||||||
|
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||||
|
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group('debug options')
|
ap2 = ap.add_argument_group('debug options')
|
||||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||||
@@ -460,7 +519,12 @@ def main(argv=None):
|
|||||||
if HAVE_SSL:
|
if HAVE_SSL:
|
||||||
ensure_cert()
|
ensure_cert()
|
||||||
|
|
||||||
deprecated = [["-e2s", "-e2ds"]]
|
for k, v in zip(argv, argv[1:]):
|
||||||
|
if k == "-c":
|
||||||
|
supp = args_from_cfg(v)
|
||||||
|
argv.extend(supp)
|
||||||
|
|
||||||
|
deprecated = []
|
||||||
for dk, nk in deprecated:
|
for dk, nk in deprecated:
|
||||||
try:
|
try:
|
||||||
idx = argv.index(dk)
|
idx = argv.index(dk)
|
||||||
@@ -472,6 +536,12 @@ def main(argv=None):
|
|||||||
argv[idx] = nk
|
argv[idx] = nk
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
|
||||||
|
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
al = run_argparse(argv, RiceFormatter)
|
al = run_argparse(argv, RiceFormatter)
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
@@ -493,7 +563,7 @@ def main(argv=None):
|
|||||||
if re.match("c[^,]", opt):
|
if re.match("c[^,]", opt):
|
||||||
mod = True
|
mod = True
|
||||||
na.append("c," + opt[1:])
|
na.append("c," + opt[1:])
|
||||||
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
|
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
|
||||||
mod = True
|
mod = True
|
||||||
perm = opt[0]
|
perm = opt[0]
|
||||||
if perm == "a":
|
if perm == "a":
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
VERSION = (0, 13, 14)
|
VERSION = (1, 1, 2)
|
||||||
CODENAME = "future-proof"
|
CODENAME = "opus"
|
||||||
BUILD_DT = (2021, 9, 6)
|
BUILD_DT = (2021, 11, 12)
|
||||||
|
|
||||||
S_VERSION = ".".join(map(str, VERSION))
|
S_VERSION = ".".join(map(str, VERSION))
|
||||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||||
|
|||||||
@@ -29,17 +29,18 @@ LEELOO_DALLAS = "leeloo_dallas"
|
|||||||
|
|
||||||
|
|
||||||
class AXS(object):
|
class AXS(object):
|
||||||
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
|
def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
|
||||||
self.uread = {} if uread is None else {k: 1 for k in uread}
|
self.uread = {} if uread is None else {k: 1 for k in uread}
|
||||||
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
|
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
|
||||||
self.umove = {} if umove is None else {k: 1 for k in umove}
|
self.umove = {} if umove is None else {k: 1 for k in umove}
|
||||||
self.udel = {} if udel is None else {k: 1 for k in udel}
|
self.udel = {} if udel is None else {k: 1 for k in udel}
|
||||||
|
self.uget = {} if uget is None else {k: 1 for k in uget}
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "AXS({})".format(
|
return "AXS({})".format(
|
||||||
", ".join(
|
", ".join(
|
||||||
"{}={!r}".format(k, self.__dict__[k])
|
"{}={!r}".format(k, self.__dict__[k])
|
||||||
for k in "uread uwrite umove udel".split()
|
for k in "uread uwrite umove udel uget".split()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -215,6 +216,7 @@ class VFS(object):
|
|||||||
self.awrite = {}
|
self.awrite = {}
|
||||||
self.amove = {}
|
self.amove = {}
|
||||||
self.adel = {}
|
self.adel = {}
|
||||||
|
self.aget = {}
|
||||||
else:
|
else:
|
||||||
self.histpath = None
|
self.histpath = None
|
||||||
self.all_vols = None
|
self.all_vols = None
|
||||||
@@ -222,6 +224,7 @@ class VFS(object):
|
|||||||
self.awrite = None
|
self.awrite = None
|
||||||
self.amove = None
|
self.amove = None
|
||||||
self.adel = None
|
self.adel = None
|
||||||
|
self.aget = None
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "VFS({})".format(
|
return "VFS({})".format(
|
||||||
@@ -308,7 +311,7 @@ class VFS(object):
|
|||||||
|
|
||||||
def can_access(self, vpath, uname):
|
def can_access(self, vpath, uname):
|
||||||
# type: (str, str) -> tuple[bool, bool, bool, bool]
|
# type: (str, str) -> tuple[bool, bool, bool, bool]
|
||||||
"""can Read,Write,Move,Delete"""
|
"""can Read,Write,Move,Delete,Get"""
|
||||||
vn, _ = self._find(vpath)
|
vn, _ = self._find(vpath)
|
||||||
c = vn.axs
|
c = vn.axs
|
||||||
return [
|
return [
|
||||||
@@ -316,10 +319,20 @@ class VFS(object):
|
|||||||
uname in c.uwrite or "*" in c.uwrite,
|
uname in c.uwrite or "*" in c.uwrite,
|
||||||
uname in c.umove or "*" in c.umove,
|
uname in c.umove or "*" in c.umove,
|
||||||
uname in c.udel or "*" in c.udel,
|
uname in c.udel or "*" in c.udel,
|
||||||
|
uname in c.uget or "*" in c.uget,
|
||||||
]
|
]
|
||||||
|
|
||||||
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
|
def get(
|
||||||
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
|
self,
|
||||||
|
vpath,
|
||||||
|
uname,
|
||||||
|
will_read,
|
||||||
|
will_write,
|
||||||
|
will_move=False,
|
||||||
|
will_del=False,
|
||||||
|
will_get=False,
|
||||||
|
):
|
||||||
|
# type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
|
||||||
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
||||||
vn, rem = self._find(vpath)
|
vn, rem = self._find(vpath)
|
||||||
c = vn.axs
|
c = vn.axs
|
||||||
@@ -329,6 +342,7 @@ class VFS(object):
|
|||||||
[will_write, c.uwrite, "write"],
|
[will_write, c.uwrite, "write"],
|
||||||
[will_move, c.umove, "move"],
|
[will_move, c.umove, "move"],
|
||||||
[will_del, c.udel, "delete"],
|
[will_del, c.udel, "delete"],
|
||||||
|
[will_get, c.uget, "get"],
|
||||||
]:
|
]:
|
||||||
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
||||||
m = "you don't have {}-access for this location"
|
m = "you don't have {}-access for this location"
|
||||||
@@ -342,7 +356,7 @@ class VFS(object):
|
|||||||
if not dbv:
|
if not dbv:
|
||||||
return self, vrem
|
return self, vrem
|
||||||
|
|
||||||
vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
|
vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
|
||||||
vrem = "/".join([x for x in vrem if x])
|
vrem = "/".join([x for x in vrem if x])
|
||||||
return dbv, vrem
|
return dbv, vrem
|
||||||
|
|
||||||
@@ -368,7 +382,7 @@ class VFS(object):
|
|||||||
for name, vn2 in sorted(self.nodes.items()):
|
for name, vn2 in sorted(self.nodes.items()):
|
||||||
ok = False
|
ok = False
|
||||||
axs = vn2.axs
|
axs = vn2.axs
|
||||||
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
|
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
|
||||||
for pset in permsets:
|
for pset in permsets:
|
||||||
ok = True
|
ok = True
|
||||||
for req, lst in zip(pset, axs):
|
for req, lst in zip(pset, axs):
|
||||||
@@ -434,7 +448,11 @@ class VFS(object):
|
|||||||
f2a = os.sep + "dir.txt"
|
f2a = os.sep + "dir.txt"
|
||||||
f2b = "{0}.hist{0}".format(os.sep)
|
f2b = "{0}.hist{0}".format(os.sep)
|
||||||
|
|
||||||
g = self.walk("", vrem, [], uname, [[True]], dots, scandir, False)
|
# if multiselect: add all items to archive root
|
||||||
|
# if single folder: the folder itself is the top-level item
|
||||||
|
folder = "" if flt else (vrem.split("/")[-1] or "top")
|
||||||
|
|
||||||
|
g = self.walk(folder, vrem, [], uname, [[True]], dots, scandir, False)
|
||||||
for _, _, vpath, apath, files, rd, vd in g:
|
for _, _, vpath, apath, files, rd, vd in g:
|
||||||
if flt:
|
if flt:
|
||||||
files = [x for x in files if x[0] in flt]
|
files = [x for x in files if x[0] in flt]
|
||||||
@@ -508,8 +526,27 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
yield prev, True
|
yield prev, True
|
||||||
|
|
||||||
|
def _map_volume(self, src, dst, mount, daxs, mflags):
|
||||||
|
if dst in mount:
|
||||||
|
m = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
|
||||||
|
self.log(m.format(dst, mount[dst], src), c=1)
|
||||||
|
raise Exception("invalid config")
|
||||||
|
|
||||||
|
if src in mount.values():
|
||||||
|
m = "warning: filesystem-path [{}] mounted in multiple locations:"
|
||||||
|
m = m.format(src)
|
||||||
|
for v in [k for k, v in mount.items() if v == src] + [dst]:
|
||||||
|
m += "\n /{}".format(v)
|
||||||
|
|
||||||
|
self.log(m, c=3)
|
||||||
|
|
||||||
|
mount[dst] = src
|
||||||
|
daxs[dst] = AXS()
|
||||||
|
mflags[dst] = {}
|
||||||
|
|
||||||
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
|
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
|
||||||
# type: (any, str, dict[str, AXS], any, str) -> None
|
# type: (any, str, dict[str, AXS], any, str) -> None
|
||||||
|
skip = False
|
||||||
vol_src = None
|
vol_src = None
|
||||||
vol_dst = None
|
vol_dst = None
|
||||||
self.line_ctr = 0
|
self.line_ctr = 0
|
||||||
@@ -519,6 +556,11 @@ class AuthSrv(object):
|
|||||||
vol_src = None
|
vol_src = None
|
||||||
vol_dst = None
|
vol_dst = None
|
||||||
|
|
||||||
|
if skip:
|
||||||
|
if not ln:
|
||||||
|
skip = False
|
||||||
|
continue
|
||||||
|
|
||||||
if not ln or ln.startswith("#"):
|
if not ln or ln.startswith("#"):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -526,6 +568,8 @@ class AuthSrv(object):
|
|||||||
if ln.startswith("u "):
|
if ln.startswith("u "):
|
||||||
u, p = ln[2:].split(":", 1)
|
u, p = ln[2:].split(":", 1)
|
||||||
acct[u] = p
|
acct[u] = p
|
||||||
|
elif ln.startswith("-"):
|
||||||
|
skip = True # argv
|
||||||
else:
|
else:
|
||||||
vol_src = ln
|
vol_src = ln
|
||||||
continue
|
continue
|
||||||
@@ -538,9 +582,7 @@ class AuthSrv(object):
|
|||||||
# cfg files override arguments and previous files
|
# cfg files override arguments and previous files
|
||||||
vol_src = bos.path.abspath(vol_src)
|
vol_src = bos.path.abspath(vol_src)
|
||||||
vol_dst = vol_dst.strip("/")
|
vol_dst = vol_dst.strip("/")
|
||||||
mount[vol_dst] = vol_src
|
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
|
||||||
daxs[vol_dst] = AXS()
|
|
||||||
mflags[vol_dst] = {}
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -557,13 +599,21 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
def _read_vol_str(self, lvl, uname, axs, flags):
|
def _read_vol_str(self, lvl, uname, axs, flags):
|
||||||
# type: (str, str, AXS, any) -> None
|
# type: (str, str, AXS, any) -> None
|
||||||
if lvl.strip("crwmd"):
|
if lvl.strip("crwmdg"):
|
||||||
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
|
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
|
||||||
|
|
||||||
if lvl == "c":
|
if lvl == "c":
|
||||||
cval = True
|
try:
|
||||||
if "=" in uname:
|
# volume flag with arguments, possibly with a preceding list of bools
|
||||||
uname, cval = uname.split("=", 1)
|
uname, cval = uname.split("=", 1)
|
||||||
|
except:
|
||||||
|
# just one or more bools
|
||||||
|
cval = True
|
||||||
|
|
||||||
|
while "," in uname:
|
||||||
|
# one or more bools before the final flag; eat them
|
||||||
|
n1, uname = uname.split(",", 1)
|
||||||
|
self._read_volflag(flags, n1, True, False)
|
||||||
|
|
||||||
self._read_volflag(flags, uname, cval, False)
|
self._read_volflag(flags, uname, cval, False)
|
||||||
return
|
return
|
||||||
@@ -571,7 +621,7 @@ class AuthSrv(object):
|
|||||||
if uname == "":
|
if uname == "":
|
||||||
uname = "*"
|
uname = "*"
|
||||||
|
|
||||||
for un in uname.split(","):
|
for un in uname.replace(",", " ").strip().split():
|
||||||
if "r" in lvl:
|
if "r" in lvl:
|
||||||
axs.uread[un] = 1
|
axs.uread[un] = 1
|
||||||
|
|
||||||
@@ -584,6 +634,9 @@ class AuthSrv(object):
|
|||||||
if "d" in lvl:
|
if "d" in lvl:
|
||||||
axs.udel[un] = 1
|
axs.udel[un] = 1
|
||||||
|
|
||||||
|
if "g" in lvl:
|
||||||
|
axs.uget[un] = 1
|
||||||
|
|
||||||
def _read_volflag(self, flags, name, value, is_list):
|
def _read_volflag(self, flags, name, value, is_list):
|
||||||
if name not in ["mtp"]:
|
if name not in ["mtp"]:
|
||||||
flags[name] = value
|
flags[name] = value
|
||||||
@@ -621,7 +674,7 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
if self.args.v:
|
if self.args.v:
|
||||||
# list of src:dst:permset:permset:...
|
# list of src:dst:permset:permset:...
|
||||||
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
|
# permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
|
||||||
for v_str in self.args.v:
|
for v_str in self.args.v:
|
||||||
m = re_vol.match(v_str)
|
m = re_vol.match(v_str)
|
||||||
if not m:
|
if not m:
|
||||||
@@ -634,9 +687,7 @@ class AuthSrv(object):
|
|||||||
# print("\n".join([src, dst, perms]))
|
# print("\n".join([src, dst, perms]))
|
||||||
src = bos.path.abspath(src)
|
src = bos.path.abspath(src)
|
||||||
dst = dst.strip("/")
|
dst = dst.strip("/")
|
||||||
mount[dst] = src
|
self._map_volume(src, dst, mount, daxs, mflags)
|
||||||
daxs[dst] = AXS()
|
|
||||||
mflags[dst] = {}
|
|
||||||
|
|
||||||
for x in perms.split(":"):
|
for x in perms.split(":"):
|
||||||
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
|
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
|
||||||
@@ -688,20 +739,22 @@ class AuthSrv(object):
|
|||||||
vfs.all_vols = {}
|
vfs.all_vols = {}
|
||||||
vfs.get_all_vols(vfs.all_vols)
|
vfs.get_all_vols(vfs.all_vols)
|
||||||
|
|
||||||
for perm in "read write move del".split():
|
for perm in "read write move del get".split():
|
||||||
axs_key = "u" + perm
|
axs_key = "u" + perm
|
||||||
unames = ["*"] + list(acct.keys())
|
unames = ["*"] + list(acct.keys())
|
||||||
umap = {x: [] for x in unames}
|
umap = {x: [] for x in unames}
|
||||||
for usr in unames:
|
for usr in unames:
|
||||||
for mp, vol in vfs.all_vols.items():
|
for mp, vol in vfs.all_vols.items():
|
||||||
if usr in getattr(vol.axs, axs_key):
|
axs = getattr(vol.axs, axs_key)
|
||||||
|
if usr in axs or "*" in axs:
|
||||||
umap[usr].append(mp)
|
umap[usr].append(mp)
|
||||||
|
umap[usr].sort()
|
||||||
setattr(vfs, "a" + perm, umap)
|
setattr(vfs, "a" + perm, umap)
|
||||||
|
|
||||||
all_users = {}
|
all_users = {}
|
||||||
missing_users = {}
|
missing_users = {}
|
||||||
for axs in daxs.values():
|
for axs in daxs.values():
|
||||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
|
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
|
||||||
for usr in d.keys():
|
for usr in d.keys():
|
||||||
all_users[usr] = 1
|
all_users[usr] = 1
|
||||||
if usr != "*" and usr not in acct:
|
if usr != "*" and usr not in acct:
|
||||||
@@ -812,6 +865,11 @@ class AuthSrv(object):
|
|||||||
if use:
|
if use:
|
||||||
vol.lim = lim
|
vol.lim = lim
|
||||||
|
|
||||||
|
for vol in vfs.all_vols.values():
|
||||||
|
fk = vol.flags.get("fk")
|
||||||
|
if fk:
|
||||||
|
vol.flags["fk"] = int(fk) if fk is not True else 8
|
||||||
|
|
||||||
for vol in vfs.all_vols.values():
|
for vol in vfs.all_vols.values():
|
||||||
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
|
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
|
||||||
vol.flags["gz"] = False # def.pk
|
vol.flags["gz"] = False # def.pk
|
||||||
@@ -830,9 +888,14 @@ class AuthSrv(object):
|
|||||||
if self.args.e2d or "e2ds" in vol.flags:
|
if self.args.e2d or "e2ds" in vol.flags:
|
||||||
vol.flags["e2d"] = True
|
vol.flags["e2d"] = True
|
||||||
|
|
||||||
if self.args.no_hash:
|
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||||
if "ehash" not in vol.flags:
|
if vf in vol.flags:
|
||||||
vol.flags["dhash"] = True
|
ptn = vol.flags.pop(vf)
|
||||||
|
else:
|
||||||
|
ptn = getattr(self.args, ga)
|
||||||
|
|
||||||
|
if ptn:
|
||||||
|
vol.flags[vf] = re.compile(ptn)
|
||||||
|
|
||||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||||
if getattr(self.args, k):
|
if getattr(self.args, k):
|
||||||
@@ -845,6 +908,10 @@ class AuthSrv(object):
|
|||||||
# default tag cfgs if unset
|
# default tag cfgs if unset
|
||||||
if "mte" not in vol.flags:
|
if "mte" not in vol.flags:
|
||||||
vol.flags["mte"] = self.args.mte
|
vol.flags["mte"] = self.args.mte
|
||||||
|
elif vol.flags["mte"].startswith("+"):
|
||||||
|
vol.flags["mte"] = ",".join(
|
||||||
|
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
|
||||||
|
)
|
||||||
if "mth" not in vol.flags:
|
if "mth" not in vol.flags:
|
||||||
vol.flags["mth"] = self.args.mth
|
vol.flags["mth"] = self.args.mth
|
||||||
|
|
||||||
@@ -926,6 +993,7 @@ class AuthSrv(object):
|
|||||||
[" write", "uwrite"],
|
[" write", "uwrite"],
|
||||||
[" move", "umove"],
|
[" move", "umove"],
|
||||||
["delete", "udel"],
|
["delete", "udel"],
|
||||||
|
[" get", "uget"],
|
||||||
]:
|
]:
|
||||||
u = list(sorted(getattr(v.axs, attr).keys()))
|
u = list(sorted(getattr(v.axs, attr).keys()))
|
||||||
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
||||||
@@ -940,7 +1008,7 @@ class AuthSrv(object):
|
|||||||
v, _ = vfs.get("/", "*", False, True)
|
v, _ = vfs.get("/", "*", False, True)
|
||||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||||
self.warn_anonwrite = False
|
self.warn_anonwrite = False
|
||||||
msg = "anyone can read/write the current directory: {}"
|
msg = "anyone can read/write the current directory: {}\n"
|
||||||
self.log(msg.format(v.realpath), c=1)
|
self.log(msg.format(v.realpath), c=1)
|
||||||
except Pebkac:
|
except Pebkac:
|
||||||
self.warn_anonwrite = True
|
self.warn_anonwrite = True
|
||||||
@@ -993,10 +1061,10 @@ class AuthSrv(object):
|
|||||||
raise Exception("volume not found: " + v)
|
raise Exception("volume not found: " + v)
|
||||||
|
|
||||||
self.log({"users": users, "vols": vols, "flags": flags})
|
self.log({"users": users, "vols": vols, "flags": flags})
|
||||||
m = "/{}: read({}) write({}) move({}) del({})"
|
m = "/{}: read({}) write({}) move({}) del({}) get({})"
|
||||||
for k, v in self.vfs.all_vols.items():
|
for k, v in self.vfs.all_vols.items():
|
||||||
vc = v.axs
|
vc = v.axs
|
||||||
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
|
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))
|
||||||
|
|
||||||
flag_v = "v" in flags
|
flag_v = "v" in flags
|
||||||
flag_ln = "ln" in flags
|
flag_ln = "ln" in flags
|
||||||
@@ -1010,7 +1078,7 @@ class AuthSrv(object):
|
|||||||
for u in users:
|
for u in users:
|
||||||
self.log("checking /{} as {}".format(v, u))
|
self.log("checking /{} as {}".format(v, u))
|
||||||
try:
|
try:
|
||||||
vn, _ = self.vfs.get(v, u, True, False, False, False)
|
vn, _ = self.vfs.get(v, u, True, False, False, False, False)
|
||||||
except:
|
except:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|||||||
@@ -25,14 +25,14 @@ def lstat(p):
|
|||||||
def makedirs(name, mode=0o755, exist_ok=True):
|
def makedirs(name, mode=0o755, exist_ok=True):
|
||||||
bname = fsenc(name)
|
bname = fsenc(name)
|
||||||
try:
|
try:
|
||||||
os.makedirs(bname, mode=mode)
|
os.makedirs(bname, mode)
|
||||||
except:
|
except:
|
||||||
if not exist_ok or not os.path.isdir(bname):
|
if not exist_ok or not os.path.isdir(bname):
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
def mkdir(p, mode=0o755):
|
def mkdir(p, mode=0o755):
|
||||||
return os.mkdir(fsenc(p), mode=mode)
|
return os.mkdir(fsenc(p), mode)
|
||||||
|
|
||||||
|
|
||||||
def rename(src, dst):
|
def rename(src, dst):
|
||||||
|
|||||||
@@ -21,6 +21,10 @@ def getsize(p):
|
|||||||
return os.path.getsize(fsenc(p))
|
return os.path.getsize(fsenc(p))
|
||||||
|
|
||||||
|
|
||||||
|
def isfile(p):
|
||||||
|
return os.path.isfile(fsenc(p))
|
||||||
|
|
||||||
|
|
||||||
def isdir(p):
|
def isdir(p):
|
||||||
return os.path.isdir(fsenc(p))
|
return os.path.isdir(fsenc(p))
|
||||||
|
|
||||||
|
|||||||
@@ -62,6 +62,11 @@ class BrokerMp(object):
|
|||||||
|
|
||||||
procs.pop()
|
procs.pop()
|
||||||
|
|
||||||
|
def reload(self):
|
||||||
|
self.log("broker", "reloading")
|
||||||
|
for _, proc in enumerate(self.procs):
|
||||||
|
proc.q_pend.put([0, "reload", []])
|
||||||
|
|
||||||
def collector(self, proc):
|
def collector(self, proc):
|
||||||
"""receive message from hub in other process"""
|
"""receive message from hub in other process"""
|
||||||
while True:
|
while True:
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ class MpWorker(object):
|
|||||||
# we inherited signal_handler from parent,
|
# we inherited signal_handler from parent,
|
||||||
# replace it with something harmless
|
# replace it with something harmless
|
||||||
if not FAKE_MP:
|
if not FAKE_MP:
|
||||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]:
|
||||||
signal.signal(sig, self.signal_handler)
|
signal.signal(sig, self.signal_handler)
|
||||||
|
|
||||||
# starting to look like a good idea
|
# starting to look like a good idea
|
||||||
@@ -69,6 +69,11 @@ class MpWorker(object):
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
elif dest == "reload":
|
||||||
|
self.logw("mpw.asrv reloading")
|
||||||
|
self.asrv.reload()
|
||||||
|
self.logw("mpw.asrv reloaded")
|
||||||
|
|
||||||
elif dest == "listen":
|
elif dest == "listen":
|
||||||
self.httpsrv.listen(args[0], args[1])
|
self.httpsrv.listen(args[0], args[1])
|
||||||
|
|
||||||
|
|||||||
@@ -21,10 +21,13 @@ class BrokerThr(object):
|
|||||||
|
|
||||||
# instantiate all services here (TODO: inheritance?)
|
# instantiate all services here (TODO: inheritance?)
|
||||||
self.httpsrv = HttpSrv(self, None)
|
self.httpsrv = HttpSrv(self, None)
|
||||||
|
self.reload = self.noop
|
||||||
|
|
||||||
def shutdown(self):
|
def shutdown(self):
|
||||||
# self.log("broker", "shutting down")
|
# self.log("broker", "shutting down")
|
||||||
self.httpsrv.shutdown()
|
self.httpsrv.shutdown()
|
||||||
|
|
||||||
|
def noop(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def put(self, want_retval, dest, *args):
|
def put(self, want_retval, dest, *args):
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ import json
|
|||||||
import base64
|
import base64
|
||||||
import string
|
import string
|
||||||
import socket
|
import socket
|
||||||
import ctypes
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from operator import itemgetter
|
||||||
import calendar
|
import calendar
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -19,16 +19,20 @@ try:
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ctypes
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
|
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
|
||||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .authsrv import AuthSrv, Lim
|
from .authsrv import AuthSrv
|
||||||
from .szip import StreamZip
|
from .szip import StreamZip
|
||||||
from .star import StreamTar
|
from .star import StreamTar
|
||||||
|
|
||||||
|
|
||||||
NO_CACHE = {"Cache-Control": "no-cache"}
|
NO_CACHE = {"Cache-Control": "no-cache"}
|
||||||
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
|
|
||||||
|
|
||||||
|
|
||||||
class HttpCli(object):
|
class HttpCli(object):
|
||||||
@@ -39,6 +43,7 @@ class HttpCli(object):
|
|||||||
def __init__(self, conn):
|
def __init__(self, conn):
|
||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
self.conn = conn
|
self.conn = conn
|
||||||
|
self.mutex = conn.mutex
|
||||||
self.s = conn.s # type: socket
|
self.s = conn.s # type: socket
|
||||||
self.sr = conn.sr # type: Unrecv
|
self.sr = conn.sr # type: Unrecv
|
||||||
self.ip = conn.addr[0]
|
self.ip = conn.addr[0]
|
||||||
@@ -47,14 +52,18 @@ class HttpCli(object):
|
|||||||
self.asrv = conn.asrv # type: AuthSrv
|
self.asrv = conn.asrv # type: AuthSrv
|
||||||
self.ico = conn.ico
|
self.ico = conn.ico
|
||||||
self.thumbcli = conn.thumbcli
|
self.thumbcli = conn.thumbcli
|
||||||
|
self.u2fh = conn.u2fh
|
||||||
self.log_func = conn.log_func
|
self.log_func = conn.log_func
|
||||||
self.log_src = conn.log_src
|
self.log_src = conn.log_src
|
||||||
self.tls = hasattr(self.s, "cipher")
|
self.tls = hasattr(self.s, "cipher")
|
||||||
|
|
||||||
self.bufsz = 1024 * 32
|
self.bufsz = 1024 * 32
|
||||||
self.hint = None
|
self.hint = None
|
||||||
self.absolute_urls = False
|
self.trailing_slash = True
|
||||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
self.out_headers = {
|
||||||
|
"Access-Control-Allow-Origin": "*",
|
||||||
|
"Cache-Control": "no-store; max-age=0",
|
||||||
|
}
|
||||||
|
|
||||||
def log(self, msg, c=0):
|
def log(self, msg, c=0):
|
||||||
ptn = self.asrv.re_pwd
|
ptn = self.asrv.re_pwd
|
||||||
@@ -89,6 +98,7 @@ class HttpCli(object):
|
|||||||
def run(self):
|
def run(self):
|
||||||
"""returns true if connection can be reused"""
|
"""returns true if connection can be reused"""
|
||||||
self.keepalive = False
|
self.keepalive = False
|
||||||
|
self.is_https = False
|
||||||
self.headers = {}
|
self.headers = {}
|
||||||
self.hint = None
|
self.hint = None
|
||||||
try:
|
try:
|
||||||
@@ -126,6 +136,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
v = self.headers.get("connection", "").lower()
|
v = self.headers.get("connection", "").lower()
|
||||||
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
|
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
|
||||||
|
self.is_https = (self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls)
|
||||||
|
|
||||||
n = self.args.rproxy
|
n = self.args.rproxy
|
||||||
if n:
|
if n:
|
||||||
@@ -143,6 +154,8 @@ class HttpCli(object):
|
|||||||
|
|
||||||
self.log_src = self.conn.set_rproxy(self.ip)
|
self.log_src = self.conn.set_rproxy(self.ip)
|
||||||
|
|
||||||
|
self.dip = self.ip.replace(":", ".")
|
||||||
|
|
||||||
if self.args.ihead:
|
if self.args.ihead:
|
||||||
keys = self.args.ihead
|
keys = self.args.ihead
|
||||||
if "*" in keys:
|
if "*" in keys:
|
||||||
@@ -159,15 +172,11 @@ class HttpCli(object):
|
|||||||
# split req into vpath + uparam
|
# split req into vpath + uparam
|
||||||
uparam = {}
|
uparam = {}
|
||||||
if "?" not in self.req:
|
if "?" not in self.req:
|
||||||
if not self.req.endswith("/"):
|
self.trailing_slash = self.req.endswith("/")
|
||||||
self.absolute_urls = True
|
|
||||||
|
|
||||||
vpath = undot(self.req)
|
vpath = undot(self.req)
|
||||||
else:
|
else:
|
||||||
vpath, arglist = self.req.split("?", 1)
|
vpath, arglist = self.req.split("?", 1)
|
||||||
if not vpath.endswith("/"):
|
self.trailing_slash = vpath.endswith("/")
|
||||||
self.absolute_urls = True
|
|
||||||
|
|
||||||
vpath = undot(vpath)
|
vpath = undot(vpath)
|
||||||
for k in arglist.split("&"):
|
for k in arglist.split("&"):
|
||||||
if "=" in k:
|
if "=" in k:
|
||||||
@@ -213,6 +222,7 @@ class HttpCli(object):
|
|||||||
self.wvol = self.asrv.vfs.awrite[self.uname]
|
self.wvol = self.asrv.vfs.awrite[self.uname]
|
||||||
self.mvol = self.asrv.vfs.amove[self.uname]
|
self.mvol = self.asrv.vfs.amove[self.uname]
|
||||||
self.dvol = self.asrv.vfs.adel[self.uname]
|
self.dvol = self.asrv.vfs.adel[self.uname]
|
||||||
|
self.gvol = self.asrv.vfs.aget[self.uname]
|
||||||
|
|
||||||
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
|
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
|
||||||
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
|
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
|
||||||
@@ -227,6 +237,9 @@ class HttpCli(object):
|
|||||||
|
|
||||||
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
|
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
|
||||||
|
|
||||||
|
x = self.asrv.vfs.can_access(self.vpath, self.uname)
|
||||||
|
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if self.mode in ["GET", "HEAD"]:
|
if self.mode in ["GET", "HEAD"]:
|
||||||
return self.handle_get() and self.keepalive
|
return self.handle_get() and self.keepalive
|
||||||
@@ -261,6 +274,15 @@ class HttpCli(object):
|
|||||||
except Pebkac:
|
except Pebkac:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def permit_caching(self):
|
||||||
|
cache = self.uparam.get("cache")
|
||||||
|
if cache is None:
|
||||||
|
self.out_headers.update(NO_CACHE)
|
||||||
|
return
|
||||||
|
|
||||||
|
n = "604800" if cache == "i" else cache or "69"
|
||||||
|
self.out_headers["Cache-Control"] = "max-age=" + n
|
||||||
|
|
||||||
def send_headers(self, length, status=200, mime=None, headers=None):
|
def send_headers(self, length, status=200, mime=None, headers=None):
|
||||||
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
|
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
|
||||||
|
|
||||||
@@ -276,7 +298,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
# default to utf8 html if no content-type is set
|
# default to utf8 html if no content-type is set
|
||||||
if not mime:
|
if not mime:
|
||||||
mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8")
|
mime = self.out_headers.get("Content-Type", "text/html; charset=utf-8")
|
||||||
|
|
||||||
self.out_headers["Content-Type"] = mime
|
self.out_headers["Content-Type"] = mime
|
||||||
|
|
||||||
@@ -351,8 +373,7 @@ class HttpCli(object):
|
|||||||
).encode("utf-8", "replace")
|
).encode("utf-8", "replace")
|
||||||
|
|
||||||
if use302:
|
if use302:
|
||||||
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
|
self.reply(html, status=302, headers={"Location": "/" + vpath})
|
||||||
self.reply(html, status=302, headers=h)
|
|
||||||
else:
|
else:
|
||||||
self.reply(html, status=status)
|
self.reply(html, status=status)
|
||||||
|
|
||||||
@@ -378,12 +399,10 @@ class HttpCli(object):
|
|||||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||||
return self.tx_file(static_path)
|
return self.tx_file(static_path)
|
||||||
|
|
||||||
x = self.asrv.vfs.can_access(self.vpath, self.uname)
|
if not self.can_read and not self.can_write and not self.can_get:
|
||||||
self.can_read, self.can_write, self.can_move, self.can_delete = x
|
|
||||||
if not self.can_read and not self.can_write:
|
|
||||||
if self.vpath:
|
if self.vpath:
|
||||||
self.log("inaccessible: [{}]".format(self.vpath))
|
self.log("inaccessible: [{}]".format(self.vpath))
|
||||||
raise Pebkac(404)
|
return self.tx_404(True)
|
||||||
|
|
||||||
self.uparam["h"] = False
|
self.uparam["h"] = False
|
||||||
|
|
||||||
@@ -400,6 +419,9 @@ class HttpCli(object):
|
|||||||
return self.scanvol()
|
return self.scanvol()
|
||||||
|
|
||||||
if not self.vpath:
|
if not self.vpath:
|
||||||
|
if "reload" in self.uparam:
|
||||||
|
return self.handle_reload()
|
||||||
|
|
||||||
if "stack" in self.uparam:
|
if "stack" in self.uparam:
|
||||||
return self.tx_stack()
|
return self.tx_stack()
|
||||||
|
|
||||||
@@ -460,13 +482,13 @@ class HttpCli(object):
|
|||||||
except:
|
except:
|
||||||
raise Pebkac(400, "client d/c before 100 continue")
|
raise Pebkac(400, "client d/c before 100 continue")
|
||||||
|
|
||||||
|
if "raw" in self.uparam:
|
||||||
|
return self.handle_stash()
|
||||||
|
|
||||||
ctype = self.headers.get("content-type", "").lower()
|
ctype = self.headers.get("content-type", "").lower()
|
||||||
if not ctype:
|
if not ctype:
|
||||||
raise Pebkac(400, "you can't post without a content-type header")
|
raise Pebkac(400, "you can't post without a content-type header")
|
||||||
|
|
||||||
if "raw" in self.uparam:
|
|
||||||
return self.handle_stash()
|
|
||||||
|
|
||||||
if "multipart/form-data" in ctype:
|
if "multipart/form-data" in ctype:
|
||||||
return self.handle_post_multipart()
|
return self.handle_post_multipart()
|
||||||
|
|
||||||
@@ -505,16 +527,16 @@ class HttpCli(object):
|
|||||||
if "get" in opt:
|
if "get" in opt:
|
||||||
return self.handle_get()
|
return self.handle_get()
|
||||||
|
|
||||||
raise Pebkac(405, "POST({}) is disabled".format(ctype))
|
raise Pebkac(405, "POST({}) is disabled in server config".format(ctype))
|
||||||
|
|
||||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||||
|
|
||||||
def get_body_reader(self):
|
def get_body_reader(self):
|
||||||
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
|
if "chunked" in self.headers.get("transfer-encoding", "").lower():
|
||||||
|
return read_socket_chunked(self.sr), -1
|
||||||
|
|
||||||
remains = int(self.headers.get("content-length", -1))
|
remains = int(self.headers.get("content-length", -1))
|
||||||
if chunked:
|
if remains == -1:
|
||||||
return read_socket_chunked(self.sr), remains
|
|
||||||
elif remains == -1:
|
|
||||||
self.keepalive = False
|
self.keepalive = False
|
||||||
return read_socket_unbounded(self.sr), remains
|
return read_socket_unbounded(self.sr), remains
|
||||||
else:
|
else:
|
||||||
@@ -527,17 +549,16 @@ class HttpCli(object):
|
|||||||
fdir = os.path.join(vfs.realpath, rem)
|
fdir = os.path.join(vfs.realpath, rem)
|
||||||
if lim:
|
if lim:
|
||||||
fdir, rem = lim.all(self.ip, rem, remains, fdir)
|
fdir, rem = lim.all(self.ip, rem, remains, fdir)
|
||||||
bos.makedirs(fdir)
|
|
||||||
|
|
||||||
addr = self.ip.replace(":", ".")
|
fn = None
|
||||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
if rem and not self.trailing_slash and not bos.path.isdir(fdir):
|
||||||
path = os.path.join(fdir, fn)
|
fdir, fn = os.path.split(fdir)
|
||||||
if self.args.nw:
|
rem, _ = vsplit(rem)
|
||||||
path = os.devnull
|
|
||||||
|
|
||||||
open_f = open
|
bos.makedirs(fdir)
|
||||||
open_a = [fsenc(path), "wb", 512 * 1024]
|
|
||||||
open_ka = {}
|
open_ka = {"fun": open}
|
||||||
|
open_a = ["wb", 512 * 1024]
|
||||||
|
|
||||||
# user-request || config-force
|
# user-request || config-force
|
||||||
if ("gz" in vfs.flags or "xz" in vfs.flags) and (
|
if ("gz" in vfs.flags or "xz" in vfs.flags) and (
|
||||||
@@ -578,16 +599,28 @@ class HttpCli(object):
|
|||||||
|
|
||||||
self.log("compressing with {} level {}".format(alg, lv.get(alg)))
|
self.log("compressing with {} level {}".format(alg, lv.get(alg)))
|
||||||
if alg == "gz":
|
if alg == "gz":
|
||||||
open_f = gzip.GzipFile
|
open_ka["fun"] = gzip.GzipFile
|
||||||
open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
|
open_a = ["wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
|
||||||
elif alg == "xz":
|
elif alg == "xz":
|
||||||
open_f = lzma.open
|
open_ka = {"fun": lzma.open, "preset": lv[alg]}
|
||||||
open_a = [fsenc(path), "wb"]
|
open_a = ["wb"]
|
||||||
open_ka = {"preset": lv[alg]}
|
|
||||||
else:
|
else:
|
||||||
self.log("fallthrough? thats a bug", 1)
|
self.log("fallthrough? thats a bug", 1)
|
||||||
|
|
||||||
with open_f(*open_a, **open_ka) as f:
|
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
|
||||||
|
params = {"suffix": suffix, "fdir": fdir}
|
||||||
|
if self.args.nw:
|
||||||
|
params = {}
|
||||||
|
fn = os.devnull
|
||||||
|
|
||||||
|
params.update(open_ka)
|
||||||
|
|
||||||
|
if not fn:
|
||||||
|
fn = "put" + suffix
|
||||||
|
|
||||||
|
with ren_open(fn, *open_a, **params) as f:
|
||||||
|
f, fn = f["orz"]
|
||||||
|
path = os.path.join(fdir, fn)
|
||||||
post_sz, _, sha_b64 = hashcopy(reader, f)
|
post_sz, _, sha_b64 = hashcopy(reader, f)
|
||||||
|
|
||||||
if lim:
|
if lim:
|
||||||
@@ -831,7 +864,18 @@ class HttpCli(object):
|
|||||||
|
|
||||||
reader = read_socket(self.sr, remains)
|
reader = read_socket(self.sr, remains)
|
||||||
|
|
||||||
with open(fsenc(path), "rb+", 512 * 1024) as f:
|
f = None
|
||||||
|
fpool = not self.args.no_fpool
|
||||||
|
if fpool:
|
||||||
|
with self.mutex:
|
||||||
|
try:
|
||||||
|
f = self.u2fh.pop(path)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
f = f or open(fsenc(path), "rb+", 512 * 1024)
|
||||||
|
|
||||||
|
try:
|
||||||
f.seek(cstart[0])
|
f.seek(cstart[0])
|
||||||
post_sz, _, sha_b64 = hashcopy(reader, f)
|
post_sz, _, sha_b64 = hashcopy(reader, f)
|
||||||
|
|
||||||
@@ -861,22 +905,36 @@ class HttpCli(object):
|
|||||||
ofs += len(buf)
|
ofs += len(buf)
|
||||||
|
|
||||||
self.log("clone {} done".format(cstart[0]))
|
self.log("clone {} done".format(cstart[0]))
|
||||||
|
finally:
|
||||||
|
if not fpool:
|
||||||
|
f.close()
|
||||||
|
else:
|
||||||
|
with self.mutex:
|
||||||
|
self.u2fh.put(path, f)
|
||||||
|
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||||
x = x.get()
|
x = x.get()
|
||||||
try:
|
try:
|
||||||
num_left, path = x
|
num_left, fin_path = x
|
||||||
except:
|
except:
|
||||||
self.loud_reply(x, status=500)
|
self.loud_reply(x, status=500)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not ANYWIN and num_left == 0:
|
if not num_left and fpool:
|
||||||
|
with self.mutex:
|
||||||
|
self.u2fh.close(path)
|
||||||
|
|
||||||
|
# windows cant rename open files
|
||||||
|
if ANYWIN and path != fin_path and not self.args.nw:
|
||||||
|
self.conn.hsrv.broker.put(True, "up2k.finish_upload", ptop, wark).get()
|
||||||
|
|
||||||
|
if not ANYWIN and not num_left:
|
||||||
times = (int(time.time()), int(lastmod))
|
times = (int(time.time()), int(lastmod))
|
||||||
self.log("no more chunks, setting times {}".format(times))
|
self.log("no more chunks, setting times {}".format(times))
|
||||||
try:
|
try:
|
||||||
bos.utime(path, times)
|
bos.utime(fin_path, times)
|
||||||
except:
|
except:
|
||||||
self.log("failed to utime ({}, {})".format(path, times))
|
self.log("failed to utime ({}, {})".format(fin_path, times))
|
||||||
|
|
||||||
spd = self._spd(post_sz)
|
spd = self._spd(post_sz)
|
||||||
self.log("{} thank".format(spd))
|
self.log("{} thank".format(spd))
|
||||||
@@ -887,8 +945,12 @@ class HttpCli(object):
|
|||||||
pwd = self.parser.require("cppwd", 64)
|
pwd = self.parser.require("cppwd", 64)
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
|
dst = "/?h"
|
||||||
|
if self.vpath:
|
||||||
|
dst = "/" + quotep(self.vpath)
|
||||||
|
|
||||||
ck, msg = self.get_pwd_cookie(pwd)
|
ck, msg = self.get_pwd_cookie(pwd)
|
||||||
html = self.j2("msg", h1=msg, h2='<a href="/?h">ack</a>', redir="/?h")
|
html = self.j2("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
|
||||||
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -995,7 +1057,7 @@ class HttpCli(object):
|
|||||||
if not bos.path.isdir(fdir):
|
if not bos.path.isdir(fdir):
|
||||||
raise Pebkac(404, "that folder does not exist")
|
raise Pebkac(404, "that folder does not exist")
|
||||||
|
|
||||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
suffix = "-{:.6f}-{}".format(time.time(), self.dip)
|
||||||
open_args = {"fdir": fdir, "suffix": suffix}
|
open_args = {"fdir": fdir, "suffix": suffix}
|
||||||
else:
|
else:
|
||||||
open_args = {}
|
open_args = {}
|
||||||
@@ -1024,7 +1086,7 @@ class HttpCli(object):
|
|||||||
bos.unlink(abspath)
|
bos.unlink(abspath)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
files.append([sz, sha512_hex, p_file, fname])
|
files.append([sz, sha512_hex, p_file, fname, abspath])
|
||||||
dbv, vrem = vfs.get_dbv(rem)
|
dbv, vrem = vfs.get_dbv(rem)
|
||||||
self.conn.hsrv.broker.put(
|
self.conn.hsrv.broker.put(
|
||||||
False,
|
False,
|
||||||
@@ -1076,24 +1138,33 @@ class HttpCli(object):
|
|||||||
jmsg["error"] = errmsg
|
jmsg["error"] = errmsg
|
||||||
errmsg = "ERROR: " + errmsg
|
errmsg = "ERROR: " + errmsg
|
||||||
|
|
||||||
for sz, sha512, ofn, lfn in files:
|
for sz, sha512, ofn, lfn, ap in files:
|
||||||
|
vsuf = ""
|
||||||
|
if self.can_read and "fk" in vfs.flags:
|
||||||
|
vsuf = "?k=" + gen_filekey(
|
||||||
|
self.args.fk_salt,
|
||||||
|
abspath,
|
||||||
|
sz,
|
||||||
|
0 if ANYWIN or not ap else bos.stat(ap).st_ino,
|
||||||
|
)[: vfs.flags["fk"]]
|
||||||
|
|
||||||
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
|
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
|
||||||
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
|
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
|
||||||
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
|
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
|
||||||
)
|
)
|
||||||
# truncated SHA-512 prevents length extension attacks;
|
# truncated SHA-512 prevents length extension attacks;
|
||||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||||
jpart = {
|
jpart = {
|
||||||
"url": "{}://{}/{}".format(
|
"url": "{}://{}/{}".format(
|
||||||
"https" if self.tls else "http",
|
"https" if self.is_https else "http",
|
||||||
self.headers.get("host", "copyparty"),
|
self.headers.get("host", "copyparty"),
|
||||||
vpath,
|
vpath + vsuf,
|
||||||
),
|
),
|
||||||
"sha512": sha512[:56],
|
"sha512": sha512[:56],
|
||||||
"sz": sz,
|
"sz": sz,
|
||||||
"fn": lfn,
|
"fn": lfn,
|
||||||
"fn_orig": ofn,
|
"fn_orig": ofn,
|
||||||
"path": vpath,
|
"path": vpath + vsuf,
|
||||||
}
|
}
|
||||||
jmsg["files"].append(jpart)
|
jmsg["files"].append(jpart)
|
||||||
|
|
||||||
@@ -1277,7 +1348,7 @@ class HttpCli(object):
|
|||||||
break
|
break
|
||||||
|
|
||||||
if not editions:
|
if not editions:
|
||||||
raise Pebkac(404)
|
return self.tx_404()
|
||||||
|
|
||||||
#
|
#
|
||||||
# if-modified
|
# if-modified
|
||||||
@@ -1388,15 +1459,20 @@ class HttpCli(object):
|
|||||||
#
|
#
|
||||||
# send reply
|
# send reply
|
||||||
|
|
||||||
if not is_compressed and "cache" not in self.uparam:
|
if is_compressed:
|
||||||
self.out_headers.update(NO_CACHE)
|
self.out_headers["Cache-Control"] = "max-age=573"
|
||||||
|
else:
|
||||||
|
self.permit_caching()
|
||||||
|
|
||||||
|
if "txt" in self.uparam:
|
||||||
|
mime = "text/plain; charset={}".format(self.uparam["txt"] or "utf-8")
|
||||||
|
elif "mime" in self.uparam:
|
||||||
|
mime = self.uparam.get("mime")
|
||||||
|
else:
|
||||||
|
mime = guess_mime(req_path)
|
||||||
|
|
||||||
self.out_headers["Accept-Ranges"] = "bytes"
|
self.out_headers["Accept-Ranges"] = "bytes"
|
||||||
self.send_headers(
|
self.send_headers(length=upper - lower, status=status, mime=mime)
|
||||||
length=upper - lower,
|
|
||||||
status=status,
|
|
||||||
mime=guess_mime(req_path),
|
|
||||||
)
|
|
||||||
|
|
||||||
logmsg += unicode(status) + logtail
|
logmsg += unicode(status) + logtail
|
||||||
|
|
||||||
@@ -1408,10 +1484,10 @@ class HttpCli(object):
|
|||||||
|
|
||||||
ret = True
|
ret = True
|
||||||
with open_func(*open_args) as f:
|
with open_func(*open_args) as f:
|
||||||
if use_sendfile:
|
sendfun = sendfile_kern if use_sendfile else sendfile_py
|
||||||
remains = sendfile_kern(lower, upper, f, self.s)
|
remains = sendfun(
|
||||||
else:
|
lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
|
||||||
remains = sendfile_py(lower, upper, f, self.s)
|
)
|
||||||
|
|
||||||
if remains > 0:
|
if remains > 0:
|
||||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||||
@@ -1485,6 +1561,7 @@ class HttpCli(object):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def tx_ico(self, ext, exact=False):
|
def tx_ico(self, ext, exact=False):
|
||||||
|
self.permit_caching()
|
||||||
if ext.endswith("/"):
|
if ext.endswith("/"):
|
||||||
ext = "folder"
|
ext = "folder"
|
||||||
exact = True
|
exact = True
|
||||||
@@ -1515,6 +1592,10 @@ class HttpCli(object):
|
|||||||
def tx_md(self, fs_path):
|
def tx_md(self, fs_path):
|
||||||
logmsg = "{:4} {} ".format("", self.req)
|
logmsg = "{:4} {} ".format("", self.req)
|
||||||
|
|
||||||
|
if not self.can_write:
|
||||||
|
if "edit" in self.uparam or "edit2" in self.uparam:
|
||||||
|
return self.tx_404(True)
|
||||||
|
|
||||||
tpl = "mde" if "edit2" in self.uparam else "md"
|
tpl = "mde" if "edit2" in self.uparam else "md"
|
||||||
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
|
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
|
||||||
template = self.j2(tpl)
|
template = self.j2(tpl)
|
||||||
@@ -1537,6 +1618,10 @@ class HttpCli(object):
|
|||||||
self.out_headers.update(NO_CACHE)
|
self.out_headers.update(NO_CACHE)
|
||||||
status = 200 if do_send else 304
|
status = 200 if do_send else 304
|
||||||
|
|
||||||
|
arg_base = "?"
|
||||||
|
if "k" in self.uparam:
|
||||||
|
arg_base = "?k={}&".format(self.uparam["k"])
|
||||||
|
|
||||||
boundary = "\roll\tide"
|
boundary = "\roll\tide"
|
||||||
targs = {
|
targs = {
|
||||||
"edit": "edit" in self.uparam,
|
"edit": "edit" in self.uparam,
|
||||||
@@ -1546,6 +1631,7 @@ class HttpCli(object):
|
|||||||
"md_chk_rate": self.args.mcr,
|
"md_chk_rate": self.args.mcr,
|
||||||
"md": boundary,
|
"md": boundary,
|
||||||
"ts": self.conn.hsrv.cachebuster(),
|
"ts": self.conn.hsrv.cachebuster(),
|
||||||
|
"arg_base": arg_base,
|
||||||
}
|
}
|
||||||
html = template.render(**targs).encode("utf-8", "replace")
|
html = template.render(**targs).encode("utf-8", "replace")
|
||||||
html = html.split(boundary.encode("utf-8"))
|
html = html.split(boundary.encode("utf-8"))
|
||||||
@@ -1596,6 +1682,7 @@ class HttpCli(object):
|
|||||||
html = self.j2(
|
html = self.j2(
|
||||||
"splash",
|
"splash",
|
||||||
this=self,
|
this=self,
|
||||||
|
qvpath=quotep(self.vpath),
|
||||||
rvol=rvol,
|
rvol=rvol,
|
||||||
wvol=wvol,
|
wvol=wvol,
|
||||||
avol=avol,
|
avol=avol,
|
||||||
@@ -1606,7 +1693,19 @@ class HttpCli(object):
|
|||||||
mtpq=vs["mtpq"],
|
mtpq=vs["mtpq"],
|
||||||
url_suf=suf,
|
url_suf=suf,
|
||||||
)
|
)
|
||||||
self.reply(html.encode("utf-8"), headers=NO_STORE)
|
self.reply(html.encode("utf-8"))
|
||||||
|
return True
|
||||||
|
|
||||||
|
def tx_404(self, is_403=False):
|
||||||
|
if self.args.vague_403:
|
||||||
|
m = '<h1>404 not found ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
|
||||||
|
elif is_403:
|
||||||
|
m = '<h1>403 forbiddena ~┻━┻</h1><p>you\'ll have to log in or <a href="/?h">go home</a></p>'
|
||||||
|
else:
|
||||||
|
m = '<h1>404 not found ┐( ´ -`)┌</h1><p><a href="/?h">go home</a></p>'
|
||||||
|
|
||||||
|
html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
|
||||||
|
self.reply(html.encode("utf-8"), status=404)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def scanvol(self):
|
def scanvol(self):
|
||||||
@@ -1614,11 +1713,11 @@ class HttpCli(object):
|
|||||||
raise Pebkac(403, "not allowed for user " + self.uname)
|
raise Pebkac(403, "not allowed for user " + self.uname)
|
||||||
|
|
||||||
if self.args.no_rescan:
|
if self.args.no_rescan:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "the rescan feature is disabled in server config")
|
||||||
|
|
||||||
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
|
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
|
||||||
|
|
||||||
args = [self.asrv.vfs.all_vols, [vn.vpath]]
|
args = [self.asrv.vfs.all_vols, [vn.vpath], False]
|
||||||
|
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
||||||
x = x.get()
|
x = x.get()
|
||||||
@@ -1628,12 +1727,26 @@ class HttpCli(object):
|
|||||||
|
|
||||||
raise Pebkac(500, x)
|
raise Pebkac(500, x)
|
||||||
|
|
||||||
|
def handle_reload(self):
|
||||||
|
act = self.uparam.get("reload")
|
||||||
|
if act != "cfg":
|
||||||
|
raise Pebkac(400, "only config files ('cfg') can be reloaded rn")
|
||||||
|
|
||||||
|
if not [x for x in self.wvol if x in self.rvol]:
|
||||||
|
raise Pebkac(403, "not allowed for user " + self.uname)
|
||||||
|
|
||||||
|
if self.args.no_reload:
|
||||||
|
raise Pebkac(403, "the reload feature is disabled in server config")
|
||||||
|
|
||||||
|
x = self.conn.hsrv.broker.put(True, "reload")
|
||||||
|
return self.redirect("", "?h", x.get(), "return to", False)
|
||||||
|
|
||||||
def tx_stack(self):
|
def tx_stack(self):
|
||||||
if not [x for x in self.wvol if x in self.rvol]:
|
if not [x for x in self.wvol if x in self.rvol]:
|
||||||
raise Pebkac(403, "not allowed for user " + self.uname)
|
raise Pebkac(403, "not allowed for user " + self.uname)
|
||||||
|
|
||||||
if self.args.no_stack:
|
if self.args.no_stack:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "the stackdump feature is disabled in server config")
|
||||||
|
|
||||||
ret = "<pre>{}\n{}".format(time.time(), alltrace())
|
ret = "<pre>{}\n{}".format(time.time(), alltrace())
|
||||||
self.reply(ret.encode("utf-8"))
|
self.reply(ret.encode("utf-8"))
|
||||||
@@ -1697,7 +1810,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def tx_ups(self):
|
def tx_ups(self):
|
||||||
if not self.args.unpost:
|
if not self.args.unpost:
|
||||||
raise Pebkac(400, "the unpost feature was disabled by server config")
|
raise Pebkac(400, "the unpost feature is disabled in server config")
|
||||||
|
|
||||||
filt = self.uparam.get("filter")
|
filt = self.uparam.get("filter")
|
||||||
lm = "ups [{}]".format(filt)
|
lm = "ups [{}]".format(filt)
|
||||||
@@ -1718,7 +1831,7 @@ class HttpCli(object):
|
|||||||
if filt and filt not in vp:
|
if filt and filt not in vp:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
ret.append({"vp": vp, "sz": sz, "at": at})
|
ret.append({"vp": quotep(vp), "sz": sz, "at": at})
|
||||||
if len(ret) > 3000:
|
if len(ret) > 3000:
|
||||||
ret.sort(key=lambda x: x["at"], reverse=True)
|
ret.sort(key=lambda x: x["at"], reverse=True)
|
||||||
ret = ret[:2000]
|
ret = ret[:2000]
|
||||||
@@ -1735,7 +1848,7 @@ class HttpCli(object):
|
|||||||
raise Pebkac(403, "not allowed for user " + self.uname)
|
raise Pebkac(403, "not allowed for user " + self.uname)
|
||||||
|
|
||||||
if self.args.no_del:
|
if self.args.no_del:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "the delete feature is disabled in server config")
|
||||||
|
|
||||||
if not req:
|
if not req:
|
||||||
req = [self.vpath]
|
req = [self.vpath]
|
||||||
@@ -1748,7 +1861,7 @@ class HttpCli(object):
|
|||||||
raise Pebkac(403, "not allowed for user " + self.uname)
|
raise Pebkac(403, "not allowed for user " + self.uname)
|
||||||
|
|
||||||
if self.args.no_mv:
|
if self.args.no_mv:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "the rename/move feature is disabled in server config")
|
||||||
|
|
||||||
# full path of new loc (incl filename)
|
# full path of new loc (incl filename)
|
||||||
dst = self.uparam.get("move")
|
dst = self.uparam.get("move")
|
||||||
@@ -1763,6 +1876,64 @@ class HttpCli(object):
|
|||||||
)
|
)
|
||||||
self.loud_reply(x.get())
|
self.loud_reply(x.get())
|
||||||
|
|
||||||
|
def tx_ls(self, ls):
|
||||||
|
dirs = ls["dirs"]
|
||||||
|
files = ls["files"]
|
||||||
|
arg = self.uparam["ls"]
|
||||||
|
if arg in ["v", "t", "txt"]:
|
||||||
|
try:
|
||||||
|
biggest = max(ls["files"] + ls["dirs"], key=itemgetter("sz"))["sz"]
|
||||||
|
except:
|
||||||
|
biggest = 0
|
||||||
|
|
||||||
|
if arg == "v":
|
||||||
|
fmt = "\033[0;7;36m{{}} {{:>{}}}\033[0m {{}}"
|
||||||
|
nfmt = "{}"
|
||||||
|
biggest = 0
|
||||||
|
f2 = "".join(
|
||||||
|
"{}{{}}".format(x)
|
||||||
|
for x in [
|
||||||
|
"\033[7m",
|
||||||
|
"\033[27m",
|
||||||
|
"",
|
||||||
|
"\033[0;1m",
|
||||||
|
"\033[0;36m",
|
||||||
|
"\033[0m",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
ctab = {"B": 6, "K": 5, "M": 1, "G": 3}
|
||||||
|
for lst in [dirs, files]:
|
||||||
|
for x in lst:
|
||||||
|
a = x["dt"].replace("-", " ").replace(":", " ").split(" ")
|
||||||
|
x["dt"] = f2.format(*list(a))
|
||||||
|
sz = humansize(x["sz"], True)
|
||||||
|
x["sz"] = "\033[0;3{}m{:>5}".format(ctab.get(sz[-1:], 0), sz)
|
||||||
|
else:
|
||||||
|
fmt = "{{}} {{:{},}} {{}}"
|
||||||
|
nfmt = "{:,}"
|
||||||
|
|
||||||
|
fmt = fmt.format(len(nfmt.format(biggest)))
|
||||||
|
ret = [
|
||||||
|
"# {}: {}".format(x, ls[x])
|
||||||
|
for x in ["acct", "perms", "srvinf"]
|
||||||
|
if x in ls
|
||||||
|
]
|
||||||
|
ret += [
|
||||||
|
fmt.format(x["dt"], x["sz"], x["name"])
|
||||||
|
for y in [dirs, files]
|
||||||
|
for x in y
|
||||||
|
]
|
||||||
|
ret = "\n".join(ret)
|
||||||
|
mime = "text/plain; charset=utf-8"
|
||||||
|
else:
|
||||||
|
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
||||||
|
|
||||||
|
ret = json.dumps(ls)
|
||||||
|
mime = "application/json"
|
||||||
|
|
||||||
|
self.reply(ret.encode("utf-8", "replace") + b"\n", mime=mime)
|
||||||
|
return True
|
||||||
|
|
||||||
def tx_browser(self):
|
def tx_browser(self):
|
||||||
vpath = ""
|
vpath = ""
|
||||||
vpnodes = [["", "/"]]
|
vpnodes = [["", "/"]]
|
||||||
@@ -1782,15 +1953,15 @@ class HttpCli(object):
|
|||||||
try:
|
try:
|
||||||
st = bos.stat(abspath)
|
st = bos.stat(abspath)
|
||||||
except:
|
except:
|
||||||
raise Pebkac(404)
|
return self.tx_404()
|
||||||
|
|
||||||
|
if rem.startswith(".hist/up2k.") or (
|
||||||
|
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
|
||||||
|
):
|
||||||
|
raise Pebkac(403)
|
||||||
|
|
||||||
|
is_dir = stat.S_ISDIR(st.st_mode)
|
||||||
if self.can_read:
|
if self.can_read:
|
||||||
if rem.startswith(".hist/up2k.") or (
|
|
||||||
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
|
|
||||||
):
|
|
||||||
raise Pebkac(403)
|
|
||||||
|
|
||||||
is_dir = stat.S_ISDIR(st.st_mode)
|
|
||||||
th_fmt = self.uparam.get("th")
|
th_fmt = self.uparam.get("th")
|
||||||
if th_fmt is not None:
|
if th_fmt is not None:
|
||||||
if is_dir:
|
if is_dir:
|
||||||
@@ -1815,11 +1986,23 @@ class HttpCli(object):
|
|||||||
|
|
||||||
return self.tx_ico(rem)
|
return self.tx_ico(rem)
|
||||||
|
|
||||||
if not is_dir:
|
if not is_dir and (self.can_read or self.can_get):
|
||||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
if not self.can_read and "fk" in vn.flags:
|
||||||
return self.tx_md(abspath)
|
correct = gen_filekey(
|
||||||
|
self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
|
||||||
|
)[: vn.flags["fk"]]
|
||||||
|
got = self.uparam.get("k")
|
||||||
|
if got != correct:
|
||||||
|
self.log("wrong filekey, want {}, got {}".format(correct, got))
|
||||||
|
return self.tx_404()
|
||||||
|
|
||||||
return self.tx_file(abspath)
|
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||||
|
return self.tx_md(abspath)
|
||||||
|
|
||||||
|
return self.tx_file(abspath)
|
||||||
|
|
||||||
|
elif is_dir and not self.can_read and not self.can_write:
|
||||||
|
return self.tx_404(True)
|
||||||
|
|
||||||
srv_info = []
|
srv_info = []
|
||||||
|
|
||||||
@@ -1833,11 +2016,14 @@ class HttpCli(object):
|
|||||||
# some fuses misbehave
|
# some fuses misbehave
|
||||||
if not self.args.nid:
|
if not self.args.nid:
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
bfree = ctypes.c_ulonglong(0)
|
try:
|
||||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
bfree = ctypes.c_ulonglong(0)
|
||||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||||
)
|
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||||
srv_info.append(humansize(bfree.value) + " free")
|
)
|
||||||
|
srv_info.append(humansize(bfree.value) + " free")
|
||||||
|
except:
|
||||||
|
pass
|
||||||
else:
|
else:
|
||||||
sv = os.statvfs(fsenc(abspath))
|
sv = os.statvfs(fsenc(abspath))
|
||||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||||
@@ -1859,6 +2045,8 @@ class HttpCli(object):
|
|||||||
perms.append("move")
|
perms.append("move")
|
||||||
if self.can_delete:
|
if self.can_delete:
|
||||||
perms.append("delete")
|
perms.append("delete")
|
||||||
|
if self.can_get:
|
||||||
|
perms.append("get")
|
||||||
|
|
||||||
url_suf = self.urlq({}, [])
|
url_suf = self.urlq({}, [])
|
||||||
is_ls = "ls" in self.uparam
|
is_ls = "ls" in self.uparam
|
||||||
@@ -1904,6 +2092,7 @@ class HttpCli(object):
|
|||||||
"def_hcols": [],
|
"def_hcols": [],
|
||||||
"have_up2k_idx": ("e2d" in vn.flags),
|
"have_up2k_idx": ("e2d" in vn.flags),
|
||||||
"have_tags_idx": ("e2t" in vn.flags),
|
"have_tags_idx": ("e2t" in vn.flags),
|
||||||
|
"have_acode": (not self.args.no_acode),
|
||||||
"have_mv": (not self.args.no_mv),
|
"have_mv": (not self.args.no_mv),
|
||||||
"have_del": (not self.args.no_del),
|
"have_del": (not self.args.no_del),
|
||||||
"have_zip": (not self.args.no_zip),
|
"have_zip": (not self.args.no_zip),
|
||||||
@@ -1917,19 +2106,16 @@ class HttpCli(object):
|
|||||||
}
|
}
|
||||||
if not self.can_read:
|
if not self.can_read:
|
||||||
if is_ls:
|
if is_ls:
|
||||||
ret = json.dumps(ls_ret)
|
return self.tx_ls(ls_ret)
|
||||||
self.reply(
|
|
||||||
ret.encode("utf-8", "replace"),
|
|
||||||
mime="application/json",
|
|
||||||
headers=NO_STORE,
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
|
|
||||||
if not stat.S_ISDIR(st.st_mode):
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
raise Pebkac(404)
|
return self.tx_404(True)
|
||||||
|
|
||||||
|
if "zip" in self.uparam or "tar" in self.uparam:
|
||||||
|
raise Pebkac(403)
|
||||||
|
|
||||||
html = self.j2(tpl, **j2a)
|
html = self.j2(tpl, **j2a)
|
||||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
self.reply(html.encode("utf-8", "replace"))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
for k in ["zip", "tar"]:
|
for k in ["zip", "tar"]:
|
||||||
@@ -1973,12 +2159,14 @@ class HttpCli(object):
|
|||||||
idx = self.conn.get_u2idx()
|
idx = self.conn.get_u2idx()
|
||||||
icur = idx.get_cur(dbv.realpath)
|
icur = idx.get_cur(dbv.realpath)
|
||||||
|
|
||||||
|
add_fk = vn.flags.get("fk")
|
||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
files = []
|
files = []
|
||||||
for fn in vfs_ls:
|
for fn in vfs_ls:
|
||||||
base = ""
|
base = ""
|
||||||
href = fn
|
href = fn
|
||||||
if not is_ls and self.absolute_urls and vpath:
|
if not is_ls and not self.trailing_slash and vpath:
|
||||||
base = "/" + vpath + "/"
|
base = "/" + vpath + "/"
|
||||||
href = base + fn
|
href = base + fn
|
||||||
|
|
||||||
@@ -2015,12 +2203,24 @@ class HttpCli(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||||
|
if len(ext) > 16:
|
||||||
|
ext = ext[:16]
|
||||||
except:
|
except:
|
||||||
ext = "%"
|
ext = "%"
|
||||||
|
|
||||||
|
if add_fk:
|
||||||
|
href = "{}?k={}".format(
|
||||||
|
quotep(href),
|
||||||
|
gen_filekey(
|
||||||
|
self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
|
||||||
|
)[:add_fk],
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
href = quotep(href)
|
||||||
|
|
||||||
item = {
|
item = {
|
||||||
"lead": margin,
|
"lead": margin,
|
||||||
"href": quotep(href),
|
"href": href,
|
||||||
"name": fn,
|
"name": fn,
|
||||||
"sz": sz,
|
"sz": sz,
|
||||||
"ext": ext,
|
"ext": ext,
|
||||||
@@ -2083,28 +2283,46 @@ class HttpCli(object):
|
|||||||
f["tags"] = {}
|
f["tags"] = {}
|
||||||
|
|
||||||
if is_ls:
|
if is_ls:
|
||||||
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
|
||||||
ls_ret["dirs"] = dirs
|
ls_ret["dirs"] = dirs
|
||||||
ls_ret["files"] = files
|
ls_ret["files"] = files
|
||||||
ls_ret["taglist"] = taglist
|
ls_ret["taglist"] = taglist
|
||||||
ret = json.dumps(ls_ret)
|
return self.tx_ls(ls_ret)
|
||||||
self.reply(
|
|
||||||
ret.encode("utf-8", "replace"),
|
doc = self.uparam.get("doc") if self.can_read else None
|
||||||
mime="application/json",
|
if doc:
|
||||||
headers=NO_STORE,
|
doc = unquotep(doc.replace("+", " "))
|
||||||
)
|
j2a["docname"] = doc
|
||||||
return True
|
if next((x for x in files if x["name"] == doc), None):
|
||||||
|
with open(os.path.join(abspath, doc), "rb") as f:
|
||||||
|
doc = f.read().decode("utf-8", "replace")
|
||||||
|
else:
|
||||||
|
self.log("doc 404: [{}]".format(doc), c=6)
|
||||||
|
doc = "( textfile not found )"
|
||||||
|
|
||||||
|
j2a["doc"] = doc
|
||||||
|
|
||||||
|
if not self.conn.hsrv.prism:
|
||||||
|
j2a["no_prism"] = True
|
||||||
|
|
||||||
|
for d in dirs:
|
||||||
|
d["name"] += "/"
|
||||||
|
|
||||||
|
dirs.sort(key=itemgetter("name"))
|
||||||
|
|
||||||
j2a["files"] = dirs + files
|
j2a["files"] = dirs + files
|
||||||
j2a["logues"] = logues
|
j2a["logues"] = logues
|
||||||
j2a["taglist"] = taglist
|
j2a["taglist"] = taglist
|
||||||
|
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")
|
||||||
|
|
||||||
if "mth" in vn.flags:
|
if "mth" in vn.flags:
|
||||||
j2a["def_hcols"] = vn.flags["mth"].split(",")
|
j2a["def_hcols"] = vn.flags["mth"].split(",")
|
||||||
|
|
||||||
|
if self.args.js_browser:
|
||||||
|
j2a["js"] = self.args.js_browser
|
||||||
|
|
||||||
if self.args.css_browser:
|
if self.args.css_browser:
|
||||||
j2a["css"] = self.args.css_browser
|
j2a["css"] = self.args.css_browser
|
||||||
|
|
||||||
html = self.j2(tpl, **j2a)
|
html = self.j2(tpl, **j2a)
|
||||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
self.reply(html.encode("utf-8", "replace"))
|
||||||
return True
|
return True
|
||||||
|
|||||||
@@ -32,12 +32,14 @@ class HttpConn(object):
|
|||||||
self.addr = addr
|
self.addr = addr
|
||||||
self.hsrv = hsrv
|
self.hsrv = hsrv
|
||||||
|
|
||||||
|
self.mutex = hsrv.mutex
|
||||||
self.args = hsrv.args
|
self.args = hsrv.args
|
||||||
self.asrv = hsrv.asrv
|
self.asrv = hsrv.asrv
|
||||||
self.cert_path = hsrv.cert_path
|
self.cert_path = hsrv.cert_path
|
||||||
|
self.u2fh = hsrv.u2fh
|
||||||
|
|
||||||
enth = HAVE_PIL and not self.args.no_thumb
|
enth = HAVE_PIL and not self.args.no_thumb
|
||||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
self.thumbcli = ThumbCli(hsrv) if enth else None
|
||||||
self.ico = Ico(self.args)
|
self.ico = Ico(self.args)
|
||||||
|
|
||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ except ImportError:
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from .__init__ import E, PY2, MACOS
|
from .__init__ import E, PY2, MACOS
|
||||||
from .util import spack, min_ex, start_stackmon, start_log_thrs
|
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .httpconn import HttpConn
|
from .httpconn import HttpConn
|
||||||
|
|
||||||
@@ -50,7 +50,9 @@ class HttpSrv(object):
|
|||||||
self.log = broker.log
|
self.log = broker.log
|
||||||
self.asrv = broker.asrv
|
self.asrv = broker.asrv
|
||||||
|
|
||||||
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
|
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||||
|
|
||||||
|
self.name = "hsrv" + nsuf
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
|
||||||
@@ -58,7 +60,9 @@ class HttpSrv(object):
|
|||||||
self.tp_ncli = 0 # fading
|
self.tp_ncli = 0 # fading
|
||||||
self.tp_time = None # latest worker collect
|
self.tp_time = None # latest worker collect
|
||||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||||
|
self.t_periodic = None
|
||||||
|
|
||||||
|
self.u2fh = FHC()
|
||||||
self.srvs = []
|
self.srvs = []
|
||||||
self.ncli = 0 # exact
|
self.ncli = 0 # exact
|
||||||
self.clients = {} # laggy
|
self.clients = {} # laggy
|
||||||
@@ -72,6 +76,7 @@ class HttpSrv(object):
|
|||||||
x: env.get_template(x + ".html")
|
x: env.get_template(x + ".html")
|
||||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
|
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||||
}
|
}
|
||||||
|
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
|
||||||
|
|
||||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||||
if bos.path.exists(cert_path):
|
if bos.path.exists(cert_path):
|
||||||
@@ -82,11 +87,6 @@ class HttpSrv(object):
|
|||||||
if self.tp_q:
|
if self.tp_q:
|
||||||
self.start_threads(4)
|
self.start_threads(4)
|
||||||
|
|
||||||
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
|
|
||||||
t = threading.Thread(target=self.thr_scaler, name=name)
|
|
||||||
t.daemon = True
|
|
||||||
t.start()
|
|
||||||
|
|
||||||
if nid:
|
if nid:
|
||||||
if self.args.stackmon:
|
if self.args.stackmon:
|
||||||
start_stackmon(self.args.stackmon, nid)
|
start_stackmon(self.args.stackmon, nid)
|
||||||
@@ -115,13 +115,19 @@ class HttpSrv(object):
|
|||||||
for _ in range(n):
|
for _ in range(n):
|
||||||
self.tp_q.put(None)
|
self.tp_q.put(None)
|
||||||
|
|
||||||
def thr_scaler(self):
|
def periodic(self):
|
||||||
while True:
|
while True:
|
||||||
time.sleep(2 if self.tp_ncli else 30)
|
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
self.u2fh.clean()
|
||||||
if self.tp_nthr > self.tp_ncli + 8:
|
if self.tp_q:
|
||||||
self.stop_threads(4)
|
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||||
|
if self.tp_nthr > self.tp_ncli + 8:
|
||||||
|
self.stop_threads(4)
|
||||||
|
|
||||||
|
if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
|
||||||
|
self.t_periodic = None
|
||||||
|
return
|
||||||
|
|
||||||
def listen(self, sck, nlisteners):
|
def listen(self, sck, nlisteners):
|
||||||
ip, port = sck.getsockname()
|
ip, port = sck.getsockname()
|
||||||
@@ -141,7 +147,12 @@ class HttpSrv(object):
|
|||||||
fno = srv_sck.fileno()
|
fno = srv_sck.fileno()
|
||||||
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
||||||
self.log(self.name, msg)
|
self.log(self.name, msg)
|
||||||
self.broker.put(False, "cb_httpsrv_up")
|
|
||||||
|
def fun():
|
||||||
|
self.broker.put(False, "cb_httpsrv_up")
|
||||||
|
|
||||||
|
threading.Thread(target=fun).start()
|
||||||
|
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
if self.args.log_conn:
|
if self.args.log_conn:
|
||||||
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
|
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||||
@@ -181,6 +192,16 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.ncli += 1
|
self.ncli += 1
|
||||||
|
if not self.t_periodic:
|
||||||
|
name = "hsrv-pt"
|
||||||
|
if self.nid:
|
||||||
|
name += "-{}".format(self.nid)
|
||||||
|
|
||||||
|
t = threading.Thread(target=self.periodic, name=name)
|
||||||
|
self.t_periodic = t
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
|
||||||
if self.tp_q:
|
if self.tp_q:
|
||||||
self.tp_time = self.tp_time or now
|
self.tp_time = self.tp_time or now
|
||||||
self.tp_ncli = max(self.tp_ncli, self.ncli)
|
self.tp_ncli = max(self.tp_ncli, self.ncli)
|
||||||
|
|||||||
@@ -413,6 +413,9 @@ class MTag(object):
|
|||||||
return r1
|
return r1
|
||||||
|
|
||||||
def get_mutagen(self, abspath):
|
def get_mutagen(self, abspath):
|
||||||
|
if not bos.path.isfile(abspath):
|
||||||
|
return {}
|
||||||
|
|
||||||
import mutagen
|
import mutagen
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -458,10 +461,16 @@ class MTag(object):
|
|||||||
return self.normalize_tags(ret, md)
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
def get_ffprobe(self, abspath):
|
def get_ffprobe(self, abspath):
|
||||||
|
if not bos.path.isfile(abspath):
|
||||||
|
return {}
|
||||||
|
|
||||||
ret, md = ffprobe(abspath)
|
ret, md = ffprobe(abspath)
|
||||||
return self.normalize_tags(ret, md)
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
def get_bin(self, parsers, abspath):
|
def get_bin(self, parsers, abspath):
|
||||||
|
if not bos.path.isfile(abspath):
|
||||||
|
return {}
|
||||||
|
|
||||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||||
pypath = str(os.pathsep.join(pypath))
|
pypath = str(os.pathsep.join(pypath))
|
||||||
@@ -471,7 +480,10 @@ class MTag(object):
|
|||||||
ret = {}
|
ret = {}
|
||||||
for tagname, mp in parsers.items():
|
for tagname, mp in parsers.items():
|
||||||
try:
|
try:
|
||||||
cmd = [sys.executable, mp.bin, abspath]
|
cmd = [mp.bin, abspath]
|
||||||
|
if mp.bin.endswith(".py"):
|
||||||
|
cmd = [sys.executable] + cmd
|
||||||
|
|
||||||
args = {"env": env, "timeout": mp.timeout}
|
args = {"env": env, "timeout": mp.timeout}
|
||||||
|
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
@@ -19,6 +18,7 @@ from .authsrv import AuthSrv
|
|||||||
from .tcpsrv import TcpSrv
|
from .tcpsrv import TcpSrv
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||||
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||||
|
|
||||||
|
|
||||||
class SvcHub(object):
|
class SvcHub(object):
|
||||||
@@ -37,8 +37,11 @@ class SvcHub(object):
|
|||||||
self.argv = argv
|
self.argv = argv
|
||||||
self.logf = None
|
self.logf = None
|
||||||
self.stop_req = False
|
self.stop_req = False
|
||||||
|
self.reload_req = False
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
self.reloading = False
|
||||||
self.stop_cond = threading.Condition()
|
self.stop_cond = threading.Condition()
|
||||||
|
self.retcode = 0
|
||||||
self.httpsrv_up = 0
|
self.httpsrv_up = 0
|
||||||
|
|
||||||
self.log_mutex = threading.Lock()
|
self.log_mutex = threading.Lock()
|
||||||
@@ -54,6 +57,19 @@ class SvcHub(object):
|
|||||||
if args.log_thrs:
|
if args.log_thrs:
|
||||||
start_log_thrs(self.log, args.log_thrs, 0)
|
start_log_thrs(self.log, args.log_thrs, 0)
|
||||||
|
|
||||||
|
if not args.use_fpool and args.j != 1:
|
||||||
|
args.no_fpool = True
|
||||||
|
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
|
||||||
|
self.log("root", m.format(args.j))
|
||||||
|
|
||||||
|
if not args.no_fpool and args.j != 1:
|
||||||
|
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||||
|
if ANYWIN:
|
||||||
|
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||||
|
args.no_fpool = True
|
||||||
|
|
||||||
|
self.log("root", m, c=3)
|
||||||
|
|
||||||
# initiate all services to manage
|
# initiate all services to manage
|
||||||
self.asrv = AuthSrv(self.args, self.log)
|
self.asrv = AuthSrv(self.args, self.log)
|
||||||
if args.ls:
|
if args.ls:
|
||||||
@@ -77,31 +93,52 @@ class SvcHub(object):
|
|||||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if not args.no_acode and args.no_thumb:
|
||||||
|
msg = "setting --no-acode because --no-thumb (sorry)"
|
||||||
|
self.log("thumb", msg, c=6)
|
||||||
|
args.no_acode = True
|
||||||
|
|
||||||
|
if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||||
|
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
|
||||||
|
self.log("thumb", msg, c=6)
|
||||||
|
args.no_acode = True
|
||||||
|
|
||||||
|
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||||
|
|
||||||
# decide which worker impl to use
|
# decide which worker impl to use
|
||||||
if self.check_mp_enable():
|
if self.check_mp_enable():
|
||||||
from .broker_mp import BrokerMp as Broker
|
from .broker_mp import BrokerMp as Broker
|
||||||
else:
|
else:
|
||||||
self.log("root", "cannot efficiently use multiple CPU cores")
|
|
||||||
from .broker_thr import BrokerThr as Broker
|
from .broker_thr import BrokerThr as Broker
|
||||||
|
|
||||||
self.broker = Broker(self)
|
self.broker = Broker(self)
|
||||||
|
|
||||||
def thr_httpsrv_up(self):
|
def thr_httpsrv_up(self):
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
failed = self.broker.num_workers - self.httpsrv_up
|
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||||
|
failed = expected - self.httpsrv_up
|
||||||
if not failed:
|
if not failed:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if self.args.ign_ebind_all:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||||
|
return
|
||||||
|
|
||||||
m = "{}/{} workers failed to start"
|
m = "{}/{} workers failed to start"
|
||||||
m = m.format(failed, self.broker.num_workers)
|
m = m.format(failed, expected)
|
||||||
self.log("root", m, 1)
|
self.log("root", m, 1)
|
||||||
os._exit(1)
|
|
||||||
|
self.retcode = 1
|
||||||
|
os.kill(os.getpid(), signal.SIGTERM)
|
||||||
|
|
||||||
def cb_httpsrv_up(self):
|
def cb_httpsrv_up(self):
|
||||||
self.httpsrv_up += 1
|
self.httpsrv_up += 1
|
||||||
if self.httpsrv_up != self.broker.num_workers:
|
if self.httpsrv_up != self.broker.num_workers:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
time.sleep(0.1) # purely cosmetic dw
|
||||||
self.log("root", "workers OK\n")
|
self.log("root", "workers OK\n")
|
||||||
self.up2k.init_vols()
|
self.up2k.init_vols()
|
||||||
|
|
||||||
@@ -162,7 +199,11 @@ class SvcHub(object):
|
|||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
sigs = [signal.SIGINT, signal.SIGTERM]
|
||||||
|
if not ANYWIN:
|
||||||
|
sigs.append(signal.SIGUSR1)
|
||||||
|
|
||||||
|
for sig in sigs:
|
||||||
signal.signal(sig, self.signal_handler)
|
signal.signal(sig, self.signal_handler)
|
||||||
|
|
||||||
# macos hangs after shutdown on sigterm with while-sleep,
|
# macos hangs after shutdown on sigterm with while-sleep,
|
||||||
@@ -186,18 +227,45 @@ class SvcHub(object):
|
|||||||
else:
|
else:
|
||||||
self.stop_thr()
|
self.stop_thr()
|
||||||
|
|
||||||
|
def reload(self):
|
||||||
|
if self.reloading:
|
||||||
|
return "cannot reload; already in progress"
|
||||||
|
|
||||||
|
self.reloading = True
|
||||||
|
t = threading.Thread(target=self._reload)
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
return "reload initiated"
|
||||||
|
|
||||||
|
def _reload(self):
|
||||||
|
self.log("root", "reload scheduled")
|
||||||
|
with self.up2k.mutex:
|
||||||
|
self.asrv.reload()
|
||||||
|
self.up2k.reload()
|
||||||
|
self.broker.reload()
|
||||||
|
|
||||||
|
self.reloading = False
|
||||||
|
|
||||||
def stop_thr(self):
|
def stop_thr(self):
|
||||||
while not self.stop_req:
|
while not self.stop_req:
|
||||||
with self.stop_cond:
|
with self.stop_cond:
|
||||||
self.stop_cond.wait(9001)
|
self.stop_cond.wait(9001)
|
||||||
|
|
||||||
|
if self.reload_req:
|
||||||
|
self.reload_req = False
|
||||||
|
self.reload()
|
||||||
|
|
||||||
self.shutdown()
|
self.shutdown()
|
||||||
|
|
||||||
def signal_handler(self, sig, frame):
|
def signal_handler(self, sig, frame):
|
||||||
if self.stopping:
|
if self.stopping:
|
||||||
return
|
return
|
||||||
|
|
||||||
self.stop_req = True
|
if sig == signal.SIGUSR1:
|
||||||
|
self.reload_req = True
|
||||||
|
else:
|
||||||
|
self.stop_req = True
|
||||||
|
|
||||||
with self.stop_cond:
|
with self.stop_cond:
|
||||||
self.stop_cond.notify_all()
|
self.stop_cond.notify_all()
|
||||||
|
|
||||||
@@ -205,6 +273,8 @@ class SvcHub(object):
|
|||||||
if self.stopping:
|
if self.stopping:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# start_log_thrs(print, 0.1, 1)
|
||||||
|
|
||||||
self.stopping = True
|
self.stopping = True
|
||||||
self.stop_req = True
|
self.stop_req = True
|
||||||
with self.stop_cond:
|
with self.stop_cond:
|
||||||
@@ -230,7 +300,7 @@ class SvcHub(object):
|
|||||||
print("waiting for thumbsrv (10sec)...")
|
print("waiting for thumbsrv (10sec)...")
|
||||||
|
|
||||||
print("nailed it", end="")
|
print("nailed it", end="")
|
||||||
ret = 0
|
ret = self.retcode
|
||||||
finally:
|
finally:
|
||||||
print("\033[0m")
|
print("\033[0m")
|
||||||
if self.logf:
|
if self.logf:
|
||||||
@@ -327,10 +397,11 @@ class SvcHub(object):
|
|||||||
|
|
||||||
def check_mp_enable(self):
|
def check_mp_enable(self):
|
||||||
if self.args.j == 1:
|
if self.args.j == 1:
|
||||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
self.log("svchub", "multiprocessing disabled by argument -j 1")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if mp.cpu_count() <= 1:
|
if mp.cpu_count() <= 1:
|
||||||
|
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -345,6 +416,7 @@ class SvcHub(object):
|
|||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
self.log("svchub", err)
|
self.log("svchub", err)
|
||||||
|
self.log("svchub", "cannot efficiently use multiple CPU cores")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def sd_notify(self):
|
def sd_notify(self):
|
||||||
|
|||||||
@@ -21,6 +21,29 @@ class TcpSrv(object):
|
|||||||
|
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
|
||||||
|
self.srv = []
|
||||||
|
self.nsrv = 0
|
||||||
|
ok = {}
|
||||||
|
for ip in self.args.i:
|
||||||
|
ok[ip] = []
|
||||||
|
for port in self.args.p:
|
||||||
|
self.nsrv += 1
|
||||||
|
try:
|
||||||
|
self._listen(ip, port)
|
||||||
|
ok[ip].append(port)
|
||||||
|
except Exception as ex:
|
||||||
|
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||||
|
m = "could not listen on {}:{}: {}"
|
||||||
|
self.log("tcpsrv", m.format(ip, port, ex), c=3)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if not self.srv and not self.args.ign_ebind_all:
|
||||||
|
raise Exception("could not listen on any of the given interfaces")
|
||||||
|
|
||||||
|
if self.nsrv != len(self.srv):
|
||||||
|
self.log("tcpsrv", "")
|
||||||
|
|
||||||
ip = "127.0.0.1"
|
ip = "127.0.0.1"
|
||||||
eps = {ip: "local only"}
|
eps = {ip: "local only"}
|
||||||
nonlocals = [x for x in self.args.i if x != ip]
|
nonlocals = [x for x in self.args.i if x != ip]
|
||||||
@@ -34,6 +57,9 @@ class TcpSrv(object):
|
|||||||
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
|
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
|
||||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||||
for port in sorted(self.args.p):
|
for port in sorted(self.args.p):
|
||||||
|
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
|
||||||
|
continue
|
||||||
|
|
||||||
msgs.append(m.format(ip, port, desc))
|
msgs.append(m.format(ip, port, desc))
|
||||||
|
|
||||||
if msgs:
|
if msgs:
|
||||||
@@ -41,18 +67,13 @@ class TcpSrv(object):
|
|||||||
for m in msgs:
|
for m in msgs:
|
||||||
self.log("tcpsrv", m)
|
self.log("tcpsrv", m)
|
||||||
|
|
||||||
self.srv = []
|
|
||||||
for ip in self.args.i:
|
|
||||||
for port in self.args.p:
|
|
||||||
self.srv.append(self._listen(ip, port))
|
|
||||||
|
|
||||||
def _listen(self, ip, port):
|
def _listen(self, ip, port):
|
||||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
try:
|
try:
|
||||||
srv.bind((ip, port))
|
srv.bind((ip, port))
|
||||||
return srv
|
self.srv.append(srv)
|
||||||
except (OSError, socket.error) as ex:
|
except (OSError, socket.error) as ex:
|
||||||
if ex.errno in [98, 48]:
|
if ex.errno in [98, 48]:
|
||||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||||
|
|||||||
@@ -4,28 +4,44 @@ from __future__ import print_function, unicode_literals
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
from .util import Cooldown
|
from .util import Cooldown
|
||||||
from .th_srv import thumb_path, THUMBABLE, FMT_FF
|
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
|
|
||||||
|
|
||||||
class ThumbCli(object):
|
class ThumbCli(object):
|
||||||
def __init__(self, broker):
|
def __init__(self, hsrv):
|
||||||
self.broker = broker
|
self.broker = hsrv.broker
|
||||||
self.args = broker.args
|
self.log_func = hsrv.log
|
||||||
self.asrv = broker.asrv
|
self.args = hsrv.args
|
||||||
|
self.asrv = hsrv.asrv
|
||||||
|
|
||||||
# cache on both sides for less broker spam
|
# cache on both sides for less broker spam
|
||||||
self.cooldown = Cooldown(self.args.th_poke)
|
self.cooldown = Cooldown(self.args.th_poke)
|
||||||
|
|
||||||
|
def log(self, msg, c=0):
|
||||||
|
self.log_func("thumbcli", msg, c)
|
||||||
|
|
||||||
def get(self, ptop, rem, mtime, fmt):
|
def get(self, ptop, rem, mtime, fmt):
|
||||||
ext = rem.rsplit(".")[-1].lower()
|
ext = rem.rsplit(".")[-1].lower()
|
||||||
if ext not in THUMBABLE:
|
if ext not in THUMBABLE:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
is_vid = ext in FMT_FF
|
is_vid = ext in FMT_FFV
|
||||||
if is_vid and self.args.no_vthumb:
|
if is_vid and self.args.no_vthumb:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
want_opus = fmt == "opus"
|
||||||
|
is_au = ext in FMT_FFA
|
||||||
|
if is_au:
|
||||||
|
if want_opus:
|
||||||
|
if self.args.no_acode:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
if self.args.no_athumb:
|
||||||
|
return None
|
||||||
|
elif want_opus:
|
||||||
|
return None
|
||||||
|
|
||||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
|
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
|
||||||
return os.path.join(ptop, rem)
|
return os.path.join(ptop, rem)
|
||||||
|
|
||||||
@@ -33,10 +49,14 @@ class ThumbCli(object):
|
|||||||
fmt = "w"
|
fmt = "w"
|
||||||
|
|
||||||
if fmt == "w":
|
if fmt == "w":
|
||||||
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
|
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
|
||||||
fmt = "j"
|
fmt = "j"
|
||||||
|
|
||||||
histpath = self.asrv.vfs.histtab[ptop]
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
|
if not histpath:
|
||||||
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||||
ret = None
|
ret = None
|
||||||
try:
|
try:
|
||||||
@@ -53,6 +73,11 @@ class ThumbCli(object):
|
|||||||
if self.cooldown.poke(tdir):
|
if self.cooldown.poke(tdir):
|
||||||
self.broker.put(False, "thumbsrv.poke", tdir)
|
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||||
|
|
||||||
|
if want_opus:
|
||||||
|
# audio files expire individually
|
||||||
|
if self.cooldown.poke(tpath):
|
||||||
|
self.broker.put(False, "thumbsrv.poke", tpath)
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import threading
|
|||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
|
|
||||||
from .__init__ import PY2, unicode
|
from .__init__ import PY2, unicode
|
||||||
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex
|
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||||
|
|
||||||
@@ -50,7 +50,8 @@ except:
|
|||||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||||
# ffmpeg -formats
|
# ffmpeg -formats
|
||||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||||
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||||
|
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
|
||||||
|
|
||||||
if HAVE_HEIF:
|
if HAVE_HEIF:
|
||||||
FMT_PIL += " heif heifs heic heics"
|
FMT_PIL += " heif heifs heic heics"
|
||||||
@@ -58,7 +59,9 @@ if HAVE_HEIF:
|
|||||||
if HAVE_AVIF:
|
if HAVE_AVIF:
|
||||||
FMT_PIL += " avif avifs"
|
FMT_PIL += " avif avifs"
|
||||||
|
|
||||||
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
FMT_PIL, FMT_FFV, FMT_FFA = [
|
||||||
|
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
THUMBABLE = {}
|
THUMBABLE = {}
|
||||||
@@ -67,7 +70,8 @@ if HAVE_PIL:
|
|||||||
THUMBABLE.update(FMT_PIL)
|
THUMBABLE.update(FMT_PIL)
|
||||||
|
|
||||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||||
THUMBABLE.update(FMT_FF)
|
THUMBABLE.update(FMT_FFV)
|
||||||
|
THUMBABLE.update(FMT_FFA)
|
||||||
|
|
||||||
|
|
||||||
def thumb_path(histpath, rem, mtime, fmt):
|
def thumb_path(histpath, rem, mtime, fmt):
|
||||||
@@ -86,9 +90,13 @@ def thumb_path(histpath, rem, mtime, fmt):
|
|||||||
h = hashlib.sha512(fsenc(fn)).digest()
|
h = hashlib.sha512(fsenc(fn)).digest()
|
||||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||||
|
|
||||||
return "{}/th/{}/{}.{:x}.{}".format(
|
if fmt == "opus":
|
||||||
histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
|
cat = "ac"
|
||||||
)
|
else:
|
||||||
|
fmt = "webp" if fmt == "w" else "jpg"
|
||||||
|
cat = "th"
|
||||||
|
|
||||||
|
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||||
|
|
||||||
|
|
||||||
class ThumbSrv(object):
|
class ThumbSrv(object):
|
||||||
@@ -105,9 +113,7 @@ class ThumbSrv(object):
|
|||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.busy = {}
|
self.busy = {}
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.nthr = self.args.th_mt
|
self.nthr = max(1, self.args.th_mt)
|
||||||
if not self.nthr:
|
|
||||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
|
||||||
|
|
||||||
self.q = Queue(self.nthr * 4)
|
self.q = Queue(self.nthr * 4)
|
||||||
for n in range(self.nthr):
|
for n in range(self.nthr):
|
||||||
@@ -117,7 +123,8 @@ class ThumbSrv(object):
|
|||||||
t.daemon = True
|
t.daemon = True
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
want_ff = not self.args.no_vthumb or not self.args.no_athumb
|
||||||
|
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||||
missing = []
|
missing = []
|
||||||
if not HAVE_FFMPEG:
|
if not HAVE_FFMPEG:
|
||||||
missing.append("FFmpeg")
|
missing.append("FFmpeg")
|
||||||
@@ -125,12 +132,12 @@ class ThumbSrv(object):
|
|||||||
if not HAVE_FFPROBE:
|
if not HAVE_FFPROBE:
|
||||||
missing.append("FFprobe")
|
missing.append("FFprobe")
|
||||||
|
|
||||||
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
|
||||||
msg += ", ".join(missing)
|
msg += ", ".join(missing)
|
||||||
self.log(msg, c=3)
|
self.log(msg, c=3)
|
||||||
|
|
||||||
if self.args.th_clean:
|
if self.args.th_clean:
|
||||||
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
|
t = threading.Thread(target=self.cleaner, name="thumb.cln")
|
||||||
t.daemon = True
|
t.daemon = True
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
@@ -147,7 +154,11 @@ class ThumbSrv(object):
|
|||||||
return not self.nthr
|
return not self.nthr
|
||||||
|
|
||||||
def get(self, ptop, rem, mtime, fmt):
|
def get(self, ptop, rem, mtime, fmt):
|
||||||
histpath = self.asrv.vfs.histtab[ptop]
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
|
if not histpath:
|
||||||
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||||
abspath = os.path.join(ptop, rem)
|
abspath = os.path.join(ptop, rem)
|
||||||
cond = threading.Condition(self.mutex)
|
cond = threading.Condition(self.mutex)
|
||||||
@@ -183,6 +194,7 @@ class ThumbSrv(object):
|
|||||||
try:
|
try:
|
||||||
st = bos.stat(tpath)
|
st = bos.stat(tpath)
|
||||||
if st.st_size:
|
if st.st_size:
|
||||||
|
self.poke(tpath)
|
||||||
return tpath
|
return tpath
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
@@ -201,8 +213,13 @@ class ThumbSrv(object):
|
|||||||
if not bos.path.exists(tpath):
|
if not bos.path.exists(tpath):
|
||||||
if ext in FMT_PIL:
|
if ext in FMT_PIL:
|
||||||
fun = self.conv_pil
|
fun = self.conv_pil
|
||||||
elif ext in FMT_FF:
|
elif ext in FMT_FFV:
|
||||||
fun = self.conv_ffmpeg
|
fun = self.conv_ffmpeg
|
||||||
|
elif ext in FMT_FFA:
|
||||||
|
if tpath.endswith(".opus"):
|
||||||
|
fun = self.conv_opus
|
||||||
|
else:
|
||||||
|
fun = self.conv_spec
|
||||||
|
|
||||||
if fun:
|
if fun:
|
||||||
try:
|
try:
|
||||||
@@ -328,8 +345,10 @@ class ThumbSrv(object):
|
|||||||
]
|
]
|
||||||
|
|
||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
self._run_ff(cmd)
|
||||||
|
|
||||||
|
def _run_ff(self, cmd):
|
||||||
|
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||||
ret, sout, serr = runcmd(cmd)
|
ret, sout, serr = runcmd(cmd)
|
||||||
if ret != 0:
|
if ret != 0:
|
||||||
m = "FFmpeg failed (probably a corrupt video file):\n"
|
m = "FFmpeg failed (probably a corrupt video file):\n"
|
||||||
@@ -337,16 +356,81 @@ class ThumbSrv(object):
|
|||||||
self.log(m, c="1;30")
|
self.log(m, c="1;30")
|
||||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||||
|
|
||||||
|
def conv_spec(self, abspath, tpath):
|
||||||
|
ret, _ = ffprobe(abspath)
|
||||||
|
if "ac" not in ret:
|
||||||
|
raise Exception("not audio")
|
||||||
|
|
||||||
|
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||||
|
|
||||||
|
if self.args.th_ff_swr:
|
||||||
|
fco = ":filter_size=128:cutoff=0.877"
|
||||||
|
else:
|
||||||
|
fco = ":resampler=soxr"
|
||||||
|
|
||||||
|
fc = fc.format(fco)
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-v", b"error",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-i", fsenc(abspath),
|
||||||
|
b"-filter_complex", fc.encode("utf-8"),
|
||||||
|
b"-map", b"[o]"
|
||||||
|
]
|
||||||
|
# fmt: on
|
||||||
|
|
||||||
|
if tpath.endswith(".jpg"):
|
||||||
|
cmd += [
|
||||||
|
b"-q:v",
|
||||||
|
b"6", # default=??
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
cmd += [
|
||||||
|
b"-q:v",
|
||||||
|
b"50", # default=75
|
||||||
|
b"-compression_level:v",
|
||||||
|
b"6", # default=4, 0=fast, 6=max
|
||||||
|
]
|
||||||
|
|
||||||
|
cmd += [fsenc(tpath)]
|
||||||
|
self._run_ff(cmd)
|
||||||
|
|
||||||
|
def conv_opus(self, abspath, tpath):
|
||||||
|
if self.args.no_acode:
|
||||||
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
|
ret, _ = ffprobe(abspath)
|
||||||
|
if "ac" not in ret:
|
||||||
|
raise Exception("not audio")
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-v", b"error",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-i", fsenc(abspath),
|
||||||
|
b"-map", b"0:a:0",
|
||||||
|
b"-c:a", b"libopus",
|
||||||
|
b"-b:a", b"128k",
|
||||||
|
fsenc(tpath)
|
||||||
|
]
|
||||||
|
# fmt: on
|
||||||
|
|
||||||
|
self._run_ff(cmd)
|
||||||
|
|
||||||
def poke(self, tdir):
|
def poke(self, tdir):
|
||||||
if not self.poke_cd.poke(tdir):
|
if not self.poke_cd.poke(tdir):
|
||||||
return
|
return
|
||||||
|
|
||||||
ts = int(time.time())
|
ts = int(time.time())
|
||||||
try:
|
try:
|
||||||
p1 = os.path.dirname(tdir)
|
for _ in range(4):
|
||||||
p2 = os.path.dirname(p1)
|
bos.utime(tdir, (ts, ts))
|
||||||
for dp in [tdir, p1, p2]:
|
tdir = os.path.dirname(tdir)
|
||||||
bos.utime(dp, (ts, ts))
|
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -366,25 +450,36 @@ class ThumbSrv(object):
|
|||||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||||
|
|
||||||
def clean(self, histpath):
|
def clean(self, histpath):
|
||||||
thumbpath = os.path.join(histpath, "th")
|
ret = 0
|
||||||
|
for cat in ["th", "ac"]:
|
||||||
|
ret += self._clean(histpath, cat, None)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _clean(self, histpath, cat, thumbpath):
|
||||||
|
if not thumbpath:
|
||||||
|
thumbpath = os.path.join(histpath, cat)
|
||||||
|
|
||||||
# self.log("cln {}".format(thumbpath))
|
# self.log("cln {}".format(thumbpath))
|
||||||
maxage = self.args.th_maxage
|
exts = ["jpg", "webp"] if cat == "th" else ["opus"]
|
||||||
|
maxage = getattr(self.args, cat + "_maxage")
|
||||||
now = time.time()
|
now = time.time()
|
||||||
prev_b64 = None
|
prev_b64 = None
|
||||||
prev_fp = None
|
prev_fp = None
|
||||||
try:
|
try:
|
||||||
ents = bos.listdir(thumbpath)
|
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
|
||||||
|
ents = sorted(list(ents))
|
||||||
except:
|
except:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
ndirs = 0
|
ndirs = 0
|
||||||
for f in sorted(ents):
|
for f, inf in ents:
|
||||||
fp = os.path.join(thumbpath, f)
|
fp = os.path.join(thumbpath, f)
|
||||||
cmp = fp.lower().replace("\\", "/")
|
cmp = fp.lower().replace("\\", "/")
|
||||||
|
|
||||||
# "top" or b64 prefix/full (a folder)
|
# "top" or b64 prefix/full (a folder)
|
||||||
if len(f) <= 3 or len(f) == 24:
|
if len(f) <= 3 or len(f) == 24:
|
||||||
age = now - bos.path.getmtime(fp)
|
age = now - inf.st_mtime
|
||||||
if age > maxage:
|
if age > maxage:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
safe = True
|
safe = True
|
||||||
@@ -398,16 +493,15 @@ class ThumbSrv(object):
|
|||||||
self.log("rm -rf [{}]".format(fp))
|
self.log("rm -rf [{}]".format(fp))
|
||||||
shutil.rmtree(fp, ignore_errors=True)
|
shutil.rmtree(fp, ignore_errors=True)
|
||||||
else:
|
else:
|
||||||
ndirs += self.clean(fp)
|
self._clean(histpath, cat, fp)
|
||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# thumb file
|
# thumb file
|
||||||
try:
|
try:
|
||||||
b64, ts, ext = f.split(".")
|
b64, ts, ext = f.split(".")
|
||||||
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
|
if len(b64) != 24 or len(ts) != 8 or ext not in exts:
|
||||||
raise Exception()
|
raise Exception()
|
||||||
|
|
||||||
ts = int(ts, 16)
|
|
||||||
except:
|
except:
|
||||||
if f != "dir.txt":
|
if f != "dir.txt":
|
||||||
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
||||||
@@ -418,6 +512,10 @@ class ThumbSrv(object):
|
|||||||
self.log("rm replaced [{}]".format(fp))
|
self.log("rm replaced [{}]".format(fp))
|
||||||
bos.unlink(prev_fp)
|
bos.unlink(prev_fp)
|
||||||
|
|
||||||
|
if cat != "th" and inf.st_mtime + maxage < now:
|
||||||
|
self.log("rm expired [{}]".format(fp))
|
||||||
|
bos.unlink(fp)
|
||||||
|
|
||||||
prev_b64 = b64
|
prev_b64 = b64
|
||||||
prev_fp = fp
|
prev_fp = fp
|
||||||
|
|
||||||
|
|||||||
@@ -6,9 +6,10 @@ import os
|
|||||||
import time
|
import time
|
||||||
import threading
|
import threading
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from operator import itemgetter
|
||||||
|
|
||||||
from .__init__ import unicode
|
from .__init__ import ANYWIN, unicode
|
||||||
from .util import s3dec, Pebkac, min_ex
|
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .up2k import up2k_wark_from_hashlist
|
from .up2k import up2k_wark_from_hashlist
|
||||||
|
|
||||||
@@ -66,7 +67,11 @@ class U2idx(object):
|
|||||||
if cur:
|
if cur:
|
||||||
return cur
|
return cur
|
||||||
|
|
||||||
histpath = self.asrv.vfs.histtab[ptop]
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
|
if not histpath:
|
||||||
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
|
return None
|
||||||
|
|
||||||
db_path = os.path.join(histpath, "up2k.db")
|
db_path = os.path.join(histpath, "up2k.db")
|
||||||
if not bos.path.exists(db_path):
|
if not bos.path.exists(db_path):
|
||||||
return None
|
return None
|
||||||
@@ -242,6 +247,7 @@ class U2idx(object):
|
|||||||
self.active_cur = cur
|
self.active_cur = cur
|
||||||
|
|
||||||
sret = []
|
sret = []
|
||||||
|
fk = flags.get("fk")
|
||||||
c = cur.execute(q, v)
|
c = cur.execute(q, v)
|
||||||
for hit in c:
|
for hit in c:
|
||||||
w, ts, sz, rd, fn, ip, at = hit
|
w, ts, sz, rd, fn, ip, at = hit
|
||||||
@@ -252,7 +258,23 @@ class U2idx(object):
|
|||||||
if rd.startswith("//") or fn.startswith("//"):
|
if rd.startswith("//") or fn.startswith("//"):
|
||||||
rd, fn = s3dec(rd, fn)
|
rd, fn = s3dec(rd, fn)
|
||||||
|
|
||||||
rp = "/".join([x for x in [vtop, rd, fn] if x])
|
if not fk:
|
||||||
|
suf = ""
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
ap = absreal(os.path.join(ptop, rd, fn))
|
||||||
|
inf = bos.stat(ap)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
|
||||||
|
suf = (
|
||||||
|
"?k="
|
||||||
|
+ gen_filekey(
|
||||||
|
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||||
|
)[:fk]
|
||||||
|
)
|
||||||
|
|
||||||
|
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
|
||||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||||
|
|
||||||
for hit in sret:
|
for hit in sret:
|
||||||
@@ -275,9 +297,13 @@ class U2idx(object):
|
|||||||
# undupe hits from multiple metadata keys
|
# undupe hits from multiple metadata keys
|
||||||
if len(ret) > 1:
|
if len(ret) > 1:
|
||||||
ret = [ret[0]] + [
|
ret = [ret[0]] + [
|
||||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
y
|
||||||
|
for x, y in zip(ret[:-1], ret[1:])
|
||||||
|
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
|
||||||
]
|
]
|
||||||
|
|
||||||
|
ret.sort(key=itemgetter("rp"))
|
||||||
|
|
||||||
return ret, list(taglist.keys())
|
return ret, list(taglist.keys())
|
||||||
|
|
||||||
def terminator(self, identifier, done_flag):
|
def terminator(self, identifier, done_flag):
|
||||||
|
|||||||
@@ -27,7 +27,10 @@ from .util import (
|
|||||||
sanitize_fn,
|
sanitize_fn,
|
||||||
ren_open,
|
ren_open,
|
||||||
atomic_move,
|
atomic_move,
|
||||||
|
quotep,
|
||||||
vsplit,
|
vsplit,
|
||||||
|
w8b64enc,
|
||||||
|
w8b64dec,
|
||||||
s3enc,
|
s3enc,
|
||||||
s3dec,
|
s3dec,
|
||||||
rmdirs,
|
rmdirs,
|
||||||
@@ -60,12 +63,15 @@ class Up2k(object):
|
|||||||
|
|
||||||
# state
|
# state
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
|
self.rescan_cond = threading.Condition()
|
||||||
self.hashq = Queue()
|
self.hashq = Queue()
|
||||||
self.tagq = Queue()
|
self.tagq = Queue()
|
||||||
self.n_hashq = 0
|
self.n_hashq = 0
|
||||||
self.n_tagq = 0
|
self.n_tagq = 0
|
||||||
|
self.gid = 0
|
||||||
self.volstate = {}
|
self.volstate = {}
|
||||||
self.need_rescan = {}
|
self.need_rescan = {}
|
||||||
|
self.dupesched = {}
|
||||||
self.registry = {}
|
self.registry = {}
|
||||||
self.entags = {}
|
self.entags = {}
|
||||||
self.flags = {}
|
self.flags = {}
|
||||||
@@ -109,6 +115,12 @@ class Up2k(object):
|
|||||||
t.daemon = True
|
t.daemon = True
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
|
def reload(self):
|
||||||
|
self.gid += 1
|
||||||
|
self.log("reload #{} initiated".format(self.gid))
|
||||||
|
all_vols = self.asrv.vfs.all_vols
|
||||||
|
self.rescan(all_vols, list(all_vols.keys()), True)
|
||||||
|
|
||||||
def deferred_init(self):
|
def deferred_init(self):
|
||||||
all_vols = self.asrv.vfs.all_vols
|
all_vols = self.asrv.vfs.all_vols
|
||||||
have_e2d = self.init_indexes(all_vols)
|
have_e2d = self.init_indexes(all_vols)
|
||||||
@@ -127,9 +139,11 @@ class Up2k(object):
|
|||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
if self.mtag:
|
if self.mtag:
|
||||||
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
|
for n in range(max(1, self.args.mtag_mt)):
|
||||||
thr.daemon = True
|
name = "tagger-{}".format(n)
|
||||||
thr.start()
|
thr = threading.Thread(target=self._tagger, name=name)
|
||||||
|
thr.daemon = True
|
||||||
|
thr.start()
|
||||||
|
|
||||||
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
|
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
|
||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
@@ -161,15 +175,15 @@ class Up2k(object):
|
|||||||
}
|
}
|
||||||
return json.dumps(ret, indent=4)
|
return json.dumps(ret, indent=4)
|
||||||
|
|
||||||
def rescan(self, all_vols, scan_vols):
|
def rescan(self, all_vols, scan_vols, wait):
|
||||||
if hasattr(self, "pp"):
|
if not wait and hasattr(self, "pp"):
|
||||||
return "cannot initiate; scan is already in progress"
|
return "cannot initiate; scan is already in progress"
|
||||||
|
|
||||||
args = (all_vols, scan_vols)
|
args = (all_vols, scan_vols)
|
||||||
t = threading.Thread(
|
t = threading.Thread(
|
||||||
target=self.init_indexes,
|
target=self.init_indexes,
|
||||||
args=args,
|
args=args,
|
||||||
name="up2k-rescan-{}".format(scan_vols[0]),
|
name="up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"),
|
||||||
)
|
)
|
||||||
t.daemon = True
|
t.daemon = True
|
||||||
t.start()
|
t.start()
|
||||||
@@ -177,9 +191,23 @@ class Up2k(object):
|
|||||||
|
|
||||||
def _sched_rescan(self):
|
def _sched_rescan(self):
|
||||||
volage = {}
|
volage = {}
|
||||||
|
cooldown = 0
|
||||||
|
timeout = time.time() + 3
|
||||||
while True:
|
while True:
|
||||||
time.sleep(self.args.re_int)
|
timeout = max(timeout, cooldown)
|
||||||
|
wait = max(0.1, timeout + 0.1 - time.time())
|
||||||
|
with self.rescan_cond:
|
||||||
|
self.rescan_cond.wait(wait)
|
||||||
|
|
||||||
now = time.time()
|
now = time.time()
|
||||||
|
if now < cooldown:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if hasattr(self, "pp"):
|
||||||
|
cooldown = now + 5
|
||||||
|
continue
|
||||||
|
|
||||||
|
timeout = now + 9001
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
||||||
maxage = vol.flags.get("scan")
|
maxage = vol.flags.get("scan")
|
||||||
@@ -189,14 +217,18 @@ class Up2k(object):
|
|||||||
if vp not in volage:
|
if vp not in volage:
|
||||||
volage[vp] = now
|
volage[vp] = now
|
||||||
|
|
||||||
if now - volage[vp] >= maxage:
|
deadline = volage[vp] + maxage
|
||||||
|
if deadline <= now:
|
||||||
self.need_rescan[vp] = 1
|
self.need_rescan[vp] = 1
|
||||||
|
|
||||||
|
timeout = min(timeout, deadline)
|
||||||
|
|
||||||
vols = list(sorted(self.need_rescan.keys()))
|
vols = list(sorted(self.need_rescan.keys()))
|
||||||
self.need_rescan = {}
|
self.need_rescan = {}
|
||||||
|
|
||||||
if vols:
|
if vols:
|
||||||
err = self.rescan(self.asrv.vfs.all_vols, vols)
|
cooldown = now + 10
|
||||||
|
err = self.rescan(self.asrv.vfs.all_vols, vols, False)
|
||||||
if err:
|
if err:
|
||||||
for v in vols:
|
for v in vols:
|
||||||
self.need_rescan[v] = True
|
self.need_rescan[v] = True
|
||||||
@@ -218,8 +250,11 @@ class Up2k(object):
|
|||||||
if not cur:
|
if not cur:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
lifetime = int(lifetime)
|
||||||
|
timeout = min(timeout, now + lifetime)
|
||||||
|
|
||||||
nrm = 0
|
nrm = 0
|
||||||
deadline = time.time() - int(lifetime)
|
deadline = time.time() - lifetime
|
||||||
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
||||||
while True:
|
while True:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -242,6 +277,16 @@ class Up2k(object):
|
|||||||
if nrm:
|
if nrm:
|
||||||
self.log("{} files graduated in {}".format(nrm, vp))
|
self.log("{} files graduated in {}".format(nrm, vp))
|
||||||
|
|
||||||
|
if timeout < 10:
|
||||||
|
continue
|
||||||
|
|
||||||
|
q = "select at from up where at > 0 order by at limit 1"
|
||||||
|
with self.mutex:
|
||||||
|
hits = cur.execute(q).fetchone()
|
||||||
|
|
||||||
|
if hits:
|
||||||
|
timeout = min(timeout, now + lifetime - (now - hits[0]))
|
||||||
|
|
||||||
def _vis_job_progress(self, job):
|
def _vis_job_progress(self, job):
|
||||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||||
@@ -265,6 +310,16 @@ class Up2k(object):
|
|||||||
return True, ret
|
return True, ret
|
||||||
|
|
||||||
def init_indexes(self, all_vols, scan_vols=None):
|
def init_indexes(self, all_vols, scan_vols=None):
|
||||||
|
gid = self.gid
|
||||||
|
while hasattr(self, "pp") and gid == self.gid:
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
|
if gid != self.gid:
|
||||||
|
return
|
||||||
|
|
||||||
|
if gid:
|
||||||
|
self.log("reload #{} running".format(self.gid))
|
||||||
|
|
||||||
self.pp = ProgressPrinter()
|
self.pp = ProgressPrinter()
|
||||||
vols = all_vols.values()
|
vols = all_vols.values()
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
@@ -395,7 +450,11 @@ class Up2k(object):
|
|||||||
return have_e2d
|
return have_e2d
|
||||||
|
|
||||||
def register_vpath(self, ptop, flags):
|
def register_vpath(self, ptop, flags):
|
||||||
histpath = self.asrv.vfs.histtab[ptop]
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
|
if not histpath:
|
||||||
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
|
return None
|
||||||
|
|
||||||
db_path = os.path.join(histpath, "up2k.db")
|
db_path = os.path.join(histpath, "up2k.db")
|
||||||
if ptop in self.registry:
|
if ptop in self.registry:
|
||||||
try:
|
try:
|
||||||
@@ -462,7 +521,8 @@ class Up2k(object):
|
|||||||
def _build_file_index(self, vol, all_vols):
|
def _build_file_index(self, vol, all_vols):
|
||||||
do_vac = False
|
do_vac = False
|
||||||
top = vol.realpath
|
top = vol.realpath
|
||||||
nohash = "dhash" in vol.flags
|
rei = vol.flags.get("noidx")
|
||||||
|
reh = vol.flags.get("nohash")
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
cur, _ = self.register_vpath(top, vol.flags)
|
cur, _ = self.register_vpath(top, vol.flags)
|
||||||
|
|
||||||
@@ -477,38 +537,55 @@ class Up2k(object):
|
|||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
excl = [x.replace("/", "\\") for x in excl]
|
excl = [x.replace("/", "\\") for x in excl]
|
||||||
|
|
||||||
n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
|
n_add = n_rm = 0
|
||||||
n_rm = self._drop_lost(dbw[0], top)
|
try:
|
||||||
|
n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
|
||||||
|
n_rm = self._drop_lost(dbw[0], top)
|
||||||
|
except:
|
||||||
|
m = "failed to index volume [{}]:\n{}"
|
||||||
|
self.log(m.format(top, min_ex()), c=1)
|
||||||
|
|
||||||
if dbw[1]:
|
if dbw[1]:
|
||||||
self.log("commit {} new files".format(dbw[1]))
|
self.log("commit {} new files".format(dbw[1]))
|
||||||
dbw[0].connection.commit()
|
|
||||||
|
dbw[0].connection.commit()
|
||||||
|
|
||||||
return True, n_add or n_rm or do_vac
|
return True, n_add or n_rm or do_vac
|
||||||
|
|
||||||
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
|
def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
|
||||||
rcdir = absreal(cdir) # a bit expensive but worth
|
rcdir = absreal(cdir) # a bit expensive but worth
|
||||||
if rcdir in seen:
|
if rcdir in seen:
|
||||||
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
||||||
self.log(m.format(seen[-1], rcdir, cdir), 3)
|
self.log(m.format(seen[-1], rcdir, cdir), 3)
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
seen = seen + [cdir]
|
seen = seen + [rcdir]
|
||||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||||
histpath = self.asrv.vfs.histtab[top]
|
histpath = self.asrv.vfs.histtab[top]
|
||||||
ret = 0
|
ret = 0
|
||||||
|
seen_files = {}
|
||||||
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
|
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
|
||||||
for iname, inf in sorted(g):
|
for iname, inf in sorted(g):
|
||||||
abspath = os.path.join(cdir, iname)
|
abspath = os.path.join(cdir, iname)
|
||||||
|
if rei and rei.search(abspath):
|
||||||
|
continue
|
||||||
|
|
||||||
|
nohash = reh.search(abspath) if reh else False
|
||||||
lmod = int(inf.st_mtime)
|
lmod = int(inf.st_mtime)
|
||||||
sz = inf.st_size
|
sz = inf.st_size
|
||||||
if stat.S_ISDIR(inf.st_mode):
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
if abspath in excl or abspath == histpath:
|
if abspath in excl or abspath == histpath:
|
||||||
continue
|
continue
|
||||||
# self.log(" dir: {}".format(abspath))
|
# self.log(" dir: {}".format(abspath))
|
||||||
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
|
try:
|
||||||
|
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
|
||||||
|
except:
|
||||||
|
m = "failed to index subdir [{}]:\n{}"
|
||||||
|
self.log(m.format(abspath, min_ex()), c=1)
|
||||||
else:
|
else:
|
||||||
# self.log("file: {}".format(abspath))
|
# self.log("file: {}".format(abspath))
|
||||||
rp = abspath[len(top) + 1 :]
|
seen_files[iname] = 1
|
||||||
|
rp = abspath[len(top) :].lstrip("/")
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
rp = rp.replace("\\", "/").strip("/")
|
rp = rp.replace("\\", "/").strip("/")
|
||||||
|
|
||||||
@@ -566,34 +643,65 @@ class Up2k(object):
|
|||||||
dbw[0].connection.commit()
|
dbw[0].connection.commit()
|
||||||
dbw[1] = 0
|
dbw[1] = 0
|
||||||
dbw[2] = time.time()
|
dbw[2] = time.time()
|
||||||
|
|
||||||
|
# drop missing files
|
||||||
|
rd = cdir[len(top) + 1 :].strip("/")
|
||||||
|
if WINDOWS:
|
||||||
|
rd = rd.replace("\\", "/").strip("/")
|
||||||
|
|
||||||
|
q = "select fn from up where rd = ?"
|
||||||
|
try:
|
||||||
|
c = dbw[0].execute(q, (rd,))
|
||||||
|
except:
|
||||||
|
c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
|
||||||
|
|
||||||
|
hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
|
||||||
|
rm_files = [x for x in hits if x not in seen_files]
|
||||||
|
n_rm = len(rm_files)
|
||||||
|
for fn in rm_files:
|
||||||
|
self.db_rm(dbw[0], rd, fn)
|
||||||
|
|
||||||
|
if n_rm:
|
||||||
|
self.log("forgot {} deleted files".format(n_rm))
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def _drop_lost(self, cur, top):
|
def _drop_lost(self, cur, top):
|
||||||
rm = []
|
rm = []
|
||||||
|
n_rm = 0
|
||||||
nchecked = 0
|
nchecked = 0
|
||||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
# `_build_dir` did all the files, now do dirs
|
||||||
c = cur.execute("select rd, fn from up")
|
ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
|
||||||
for drd, dfn in c:
|
c = cur.execute("select distinct rd from up order by rd desc")
|
||||||
|
for (drd,) in c:
|
||||||
nchecked += 1
|
nchecked += 1
|
||||||
if drd.startswith("//") or dfn.startswith("//"):
|
if drd.startswith("//"):
|
||||||
drd, dfn = s3dec(drd, dfn)
|
rd = w8b64dec(drd[2:])
|
||||||
|
else:
|
||||||
|
rd = drd
|
||||||
|
|
||||||
abspath = os.path.join(top, drd, dfn)
|
abspath = os.path.join(top, rd)
|
||||||
# almost zero overhead dw
|
self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
|
||||||
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
|
|
||||||
try:
|
try:
|
||||||
if not bos.path.exists(abspath):
|
if os.path.isdir(abspath):
|
||||||
rm.append([drd, dfn])
|
continue
|
||||||
except Exception as ex:
|
except:
|
||||||
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
|
pass
|
||||||
|
|
||||||
if rm:
|
rm.append(drd)
|
||||||
self.log("forgetting {} deleted files".format(len(rm)))
|
|
||||||
for rd, fn in rm:
|
|
||||||
# self.log("{} / {}".format(rd, fn))
|
|
||||||
self.db_rm(cur, rd, fn)
|
|
||||||
|
|
||||||
return len(rm)
|
if not rm:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
q = "select count(w) from up where rd = ?"
|
||||||
|
for rd in rm:
|
||||||
|
n_rm += next(cur.execute(q, (rd,)))[0]
|
||||||
|
|
||||||
|
self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
|
||||||
|
for rd in rm:
|
||||||
|
cur.execute("delete from up where rd = ?", (rd,))
|
||||||
|
|
||||||
|
return n_rm
|
||||||
|
|
||||||
def _build_tags_index(self, vol):
|
def _build_tags_index(self, vol):
|
||||||
ptop = vol.realpath
|
ptop = vol.realpath
|
||||||
@@ -647,7 +755,7 @@ class Up2k(object):
|
|||||||
return n_add, n_rm, False
|
return n_add, n_rm, False
|
||||||
|
|
||||||
mpool = False
|
mpool = False
|
||||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
if self.mtag.prefer_mt and self.args.mtag_mt > 1:
|
||||||
mpool = self._start_mpool()
|
mpool = self._start_mpool()
|
||||||
|
|
||||||
conn = sqlite3.connect(db_path, timeout=15)
|
conn = sqlite3.connect(db_path, timeout=15)
|
||||||
@@ -714,10 +822,11 @@ class Up2k(object):
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
def _run_all_mtp(self):
|
def _run_all_mtp(self):
|
||||||
|
gid = self.gid
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
for ptop, flags in self.flags.items():
|
for ptop, flags in self.flags.items():
|
||||||
if "mtp" in flags:
|
if "mtp" in flags:
|
||||||
self._run_one_mtp(ptop)
|
self._run_one_mtp(ptop, gid)
|
||||||
|
|
||||||
td = time.time() - t0
|
td = time.time() - t0
|
||||||
msg = "mtp finished in {:.2f} sec ({})"
|
msg = "mtp finished in {:.2f} sec ({})"
|
||||||
@@ -728,7 +837,10 @@ class Up2k(object):
|
|||||||
if "OFFLINE" not in self.volstate[k]:
|
if "OFFLINE" not in self.volstate[k]:
|
||||||
self.volstate[k] = "online, idle"
|
self.volstate[k] = "online, idle"
|
||||||
|
|
||||||
def _run_one_mtp(self, ptop):
|
def _run_one_mtp(self, ptop, gid):
|
||||||
|
if gid != self.gid:
|
||||||
|
return
|
||||||
|
|
||||||
entags = self.entags[ptop]
|
entags = self.entags[ptop]
|
||||||
|
|
||||||
parsers = {}
|
parsers = {}
|
||||||
@@ -761,6 +873,9 @@ class Up2k(object):
|
|||||||
in_progress = {}
|
in_progress = {}
|
||||||
while True:
|
while True:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
|
if gid != self.gid:
|
||||||
|
break
|
||||||
|
|
||||||
q = "select w from mt where k = 't:mtp' limit ?"
|
q = "select w from mt where k = 't:mtp' limit ?"
|
||||||
warks = cur.execute(q, (batch_sz,)).fetchall()
|
warks = cur.execute(q, (batch_sz,)).fetchall()
|
||||||
warks = [x[0] for x in warks]
|
warks = [x[0] for x in warks]
|
||||||
@@ -838,6 +953,7 @@ class Up2k(object):
|
|||||||
|
|
||||||
cur.connection.commit()
|
cur.connection.commit()
|
||||||
if n_done:
|
if n_done:
|
||||||
|
self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
|
||||||
cur.execute("vacuum")
|
cur.execute("vacuum")
|
||||||
|
|
||||||
wcur.close()
|
wcur.close()
|
||||||
@@ -879,9 +995,7 @@ class Up2k(object):
|
|||||||
def _start_mpool(self):
|
def _start_mpool(self):
|
||||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||||
# both do crazy runahead so lets reinvent another wheel
|
# both do crazy runahead so lets reinvent another wheel
|
||||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
nw = max(1, self.args.mtag_mt)
|
||||||
if self.args.no_mtag_mt:
|
|
||||||
nw = 1
|
|
||||||
|
|
||||||
if self.pending_tags is None:
|
if self.pending_tags is None:
|
||||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||||
@@ -939,7 +1053,15 @@ class Up2k(object):
|
|||||||
|
|
||||||
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
||||||
if tags is None:
|
if tags is None:
|
||||||
tags = self.mtag.get(abspath)
|
try:
|
||||||
|
tags = self.mtag.get(abspath)
|
||||||
|
except Exception as ex:
|
||||||
|
msg = "failed to read tags from {}:\n{}"
|
||||||
|
self.log(msg.format(abspath, ex), c=3)
|
||||||
|
return 0
|
||||||
|
|
||||||
|
if not bos.path.isfile(abspath):
|
||||||
|
return 0
|
||||||
|
|
||||||
if entags:
|
if entags:
|
||||||
tags = {k: v for k, v in tags.items() if k in entags}
|
tags = {k: v for k, v in tags.items() if k in entags}
|
||||||
@@ -1111,9 +1233,18 @@ class Up2k(object):
|
|||||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||||
|
|
||||||
|
if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
|
||||||
|
continue
|
||||||
|
|
||||||
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||||
# relying on path.exists to return false on broken symlinks
|
# relying on this to fail on broken symlinks
|
||||||
if bos.path.exists(dp_abs):
|
try:
|
||||||
|
sz = bos.path.getsize(dp_abs)
|
||||||
|
except:
|
||||||
|
sz = 0
|
||||||
|
|
||||||
|
if sz:
|
||||||
|
# self.log("--- " + wark + " " + dp_abs + " found file", 4)
|
||||||
job = {
|
job = {
|
||||||
"name": dp_fn,
|
"name": dp_fn,
|
||||||
"prel": dp_dir,
|
"prel": dp_dir,
|
||||||
@@ -1126,9 +1257,9 @@ class Up2k(object):
|
|||||||
"hash": [],
|
"hash": [],
|
||||||
"need": [],
|
"need": [],
|
||||||
}
|
}
|
||||||
break
|
|
||||||
|
|
||||||
if job and wark in reg:
|
if job and wark in reg:
|
||||||
|
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
|
||||||
del reg[wark]
|
del reg[wark]
|
||||||
|
|
||||||
if job or wark in reg:
|
if job or wark in reg:
|
||||||
@@ -1156,11 +1287,20 @@ class Up2k(object):
|
|||||||
if job["need"]:
|
if job["need"]:
|
||||||
self.log("unfinished:\n {0}\n {1}".format(src, dst))
|
self.log("unfinished:\n {0}\n {1}".format(src, dst))
|
||||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||||
err += "/" + vsrc + " "
|
err += "/" + quotep(vsrc) + " "
|
||||||
|
|
||||||
|
dupe = [cj["prel"], cj["name"]]
|
||||||
|
try:
|
||||||
|
self.dupesched[src].append(dupe)
|
||||||
|
except:
|
||||||
|
self.dupesched[src] = [dupe]
|
||||||
|
|
||||||
raise Pebkac(400, err)
|
raise Pebkac(400, err)
|
||||||
|
|
||||||
elif "nodupe" in self.flags[job["ptop"]]:
|
elif "nodupe" in self.flags[job["ptop"]]:
|
||||||
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
|
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||||
err = "upload rejected, file already exists:\n/" + vsrc + " "
|
err = "upload rejected, file already exists:\n"
|
||||||
|
err += "/" + quotep(vsrc) + " "
|
||||||
raise Pebkac(400, err)
|
raise Pebkac(400, err)
|
||||||
else:
|
else:
|
||||||
# symlink to the client-provided name,
|
# symlink to the client-provided name,
|
||||||
@@ -1241,7 +1381,7 @@ class Up2k(object):
|
|||||||
|
|
||||||
# TODO broker which avoid this race and
|
# TODO broker which avoid this race and
|
||||||
# provides a new filename if taken (same as bup)
|
# provides a new filename if taken (same as bup)
|
||||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
|
||||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||||
return f["orz"][1]
|
return f["orz"][1]
|
||||||
|
|
||||||
@@ -1253,6 +1393,9 @@ class Up2k(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if self.args.no_symlink:
|
||||||
|
raise Exception("disabled in config")
|
||||||
|
|
||||||
lsrc = src
|
lsrc = src
|
||||||
ldst = dst
|
ldst = dst
|
||||||
fs1 = bos.stat(os.path.dirname(src)).st_dev
|
fs1 = bos.stat(os.path.dirname(src)).st_dev
|
||||||
@@ -1333,20 +1476,57 @@ class Up2k(object):
|
|||||||
# del self.registry[ptop][wark]
|
# del self.registry[ptop][wark]
|
||||||
return ret, dst
|
return ret, dst
|
||||||
|
|
||||||
atomic_move(src, dst)
|
# windows cant rename open files
|
||||||
|
if not ANYWIN or src == dst:
|
||||||
if ANYWIN:
|
self._finish_upload(ptop, wark)
|
||||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
|
||||||
self.lastmod_q.put(a)
|
|
||||||
|
|
||||||
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
|
||||||
a += [job.get("at") or time.time()]
|
|
||||||
if self.idx_wark(*a):
|
|
||||||
del self.registry[ptop][wark]
|
|
||||||
# in-memory registry is reserved for unfinished uploads
|
|
||||||
|
|
||||||
return ret, dst
|
return ret, dst
|
||||||
|
|
||||||
|
def finish_upload(self, ptop, wark):
|
||||||
|
with self.mutex:
|
||||||
|
self._finish_upload(ptop, wark)
|
||||||
|
|
||||||
|
def _finish_upload(self, ptop, wark):
|
||||||
|
try:
|
||||||
|
job = self.registry[ptop][wark]
|
||||||
|
pdir = os.path.join(job["ptop"], job["prel"])
|
||||||
|
src = os.path.join(pdir, job["tnam"])
|
||||||
|
dst = os.path.join(pdir, job["name"])
|
||||||
|
except Exception as ex:
|
||||||
|
return "finish_upload, wark, " + repr(ex)
|
||||||
|
|
||||||
|
# self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
|
||||||
|
atomic_move(src, dst)
|
||||||
|
|
||||||
|
if ANYWIN:
|
||||||
|
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||||
|
self.lastmod_q.put(a)
|
||||||
|
|
||||||
|
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||||
|
a += [job.get("at") or time.time()]
|
||||||
|
if self.idx_wark(*a):
|
||||||
|
# self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
|
||||||
|
del self.registry[ptop][wark]
|
||||||
|
# in-memory registry is reserved for unfinished uploads
|
||||||
|
|
||||||
|
dupes = self.dupesched.pop(dst, [])
|
||||||
|
if not dupes:
|
||||||
|
return
|
||||||
|
|
||||||
|
cur = self.cur.get(ptop)
|
||||||
|
for rd, fn in dupes:
|
||||||
|
d2 = os.path.join(ptop, rd, fn)
|
||||||
|
if os.path.exists(d2):
|
||||||
|
continue
|
||||||
|
|
||||||
|
self._symlink(dst, d2)
|
||||||
|
if cur:
|
||||||
|
self.db_rm(cur, rd, fn)
|
||||||
|
self.db_add(cur, wark, rd, fn, *a[-4:])
|
||||||
|
|
||||||
|
if cur:
|
||||||
|
cur.connection.commit()
|
||||||
|
|
||||||
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
|
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
|
||||||
cur = self.cur.get(ptop)
|
cur = self.cur.get(ptop)
|
||||||
if not cur:
|
if not cur:
|
||||||
@@ -1401,15 +1581,17 @@ class Up2k(object):
|
|||||||
try:
|
try:
|
||||||
permsets = [[True, False, False, True]]
|
permsets = [[True, False, False, True]]
|
||||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||||
|
vn, rem = vn.get_dbv(rem)
|
||||||
unpost = False
|
unpost = False
|
||||||
except:
|
except:
|
||||||
# unpost with missing permissions? try read+write and verify with db
|
# unpost with missing permissions? try read+write and verify with db
|
||||||
if not self.args.unpost:
|
if not self.args.unpost:
|
||||||
raise Pebkac(400, "the unpost feature was disabled by server config")
|
raise Pebkac(400, "the unpost feature is disabled in server config")
|
||||||
|
|
||||||
unpost = True
|
unpost = True
|
||||||
permsets = [[True, True]]
|
permsets = [[True, True]]
|
||||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||||
|
vn, rem = vn.get_dbv(rem)
|
||||||
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
|
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
|
||||||
|
|
||||||
m = "you cannot delete this: "
|
m = "you cannot delete this: "
|
||||||
@@ -1455,16 +1637,18 @@ class Up2k(object):
|
|||||||
self.log("rm {}\n {}".format(vpath, abspath))
|
self.log("rm {}\n {}".format(vpath, abspath))
|
||||||
_ = dbv.get(volpath, uname, *permsets[0])
|
_ = dbv.get(volpath, uname, *permsets[0])
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
|
cur = None
|
||||||
try:
|
try:
|
||||||
ptop = dbv.realpath
|
ptop = dbv.realpath
|
||||||
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
|
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
|
||||||
self._forget_file(ptop, volpath, cur, wark, True)
|
self._forget_file(ptop, volpath, cur, wark, True)
|
||||||
finally:
|
finally:
|
||||||
cur.connection.commit()
|
if cur:
|
||||||
|
cur.connection.commit()
|
||||||
|
|
||||||
bos.unlink(abspath)
|
bos.unlink(abspath)
|
||||||
|
|
||||||
rm = rmdirs(self.log_func, scandir, True, atop)
|
rm = rmdirs(self.log_func, scandir, True, atop, 1)
|
||||||
return n_files, rm[0], rm[1]
|
return n_files, rm[0], rm[1]
|
||||||
|
|
||||||
def handle_mv(self, uname, svp, dvp):
|
def handle_mv(self, uname, svp, dvp):
|
||||||
@@ -1506,7 +1690,7 @@ class Up2k(object):
|
|||||||
with self.mutex:
|
with self.mutex:
|
||||||
self._mv_file(uname, svpf, dvpf)
|
self._mv_file(uname, svpf, dvpf)
|
||||||
|
|
||||||
rmdirs(self.log_func, scandir, True, sabs)
|
rmdirs(self.log_func, scandir, True, sabs, 1)
|
||||||
return "k"
|
return "k"
|
||||||
|
|
||||||
def _mv_file(self, uname, svp, dvp):
|
def _mv_file(self, uname, svp, dvp):
|
||||||
@@ -1520,13 +1704,13 @@ class Up2k(object):
|
|||||||
dabs = dvn.canonical(drem)
|
dabs = dvn.canonical(drem)
|
||||||
drd, dfn = vsplit(drem)
|
drd, dfn = vsplit(drem)
|
||||||
|
|
||||||
n1 = svp.split('/')[-1]
|
n1 = svp.split("/")[-1]
|
||||||
n2 = dvp.split('/')[-1]
|
n2 = dvp.split("/")[-1]
|
||||||
if n1.startswith('.') or n2.startswith('.'):
|
if n1.startswith(".") or n2.startswith("."):
|
||||||
if self.args.no_dot_mv:
|
if self.args.no_dot_mv:
|
||||||
raise Pebkac(400, "moving dotfiles was disabled by server config")
|
raise Pebkac(400, "moving dotfiles is disabled in server config")
|
||||||
elif self.args.no_dot_ren and n1 != n2:
|
elif self.args.no_dot_ren and n1 != n2:
|
||||||
raise Pebkac(400, "renaming dotfiles was disabled by server config")
|
raise Pebkac(400, "renaming dotfiles is disabled in server config")
|
||||||
|
|
||||||
if bos.path.exists(dabs):
|
if bos.path.exists(dabs):
|
||||||
raise Pebkac(400, "mv2: target file exists")
|
raise Pebkac(400, "mv2: target file exists")
|
||||||
@@ -1543,6 +1727,9 @@ class Up2k(object):
|
|||||||
# folders are too scary, schedule rescan of both vols
|
# folders are too scary, schedule rescan of both vols
|
||||||
self.need_rescan[svn.vpath] = 1
|
self.need_rescan[svn.vpath] = 1
|
||||||
self.need_rescan[dvn.vpath] = 1
|
self.need_rescan[dvn.vpath] = 1
|
||||||
|
with self.rescan_cond:
|
||||||
|
self.rescan_cond.notify_all()
|
||||||
|
|
||||||
return "k"
|
return "k"
|
||||||
|
|
||||||
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
|
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
|
||||||
@@ -1583,7 +1770,7 @@ class Up2k(object):
|
|||||||
def _find_from_vpath(self, ptop, vrem):
|
def _find_from_vpath(self, ptop, vrem):
|
||||||
cur = self.cur.get(ptop)
|
cur = self.cur.get(ptop)
|
||||||
if not cur:
|
if not cur:
|
||||||
return None, None
|
return [None] * 6
|
||||||
|
|
||||||
rd, fn = vsplit(vrem)
|
rd, fn = vsplit(vrem)
|
||||||
q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
|
q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
|
||||||
@@ -1620,7 +1807,7 @@ class Up2k(object):
|
|||||||
wark = [
|
wark = [
|
||||||
x
|
x
|
||||||
for x, y in reg.items()
|
for x, y in reg.items()
|
||||||
if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
||||||
]
|
]
|
||||||
|
|
||||||
if wark and wark in reg:
|
if wark and wark in reg:
|
||||||
@@ -1703,7 +1890,13 @@ class Up2k(object):
|
|||||||
except:
|
except:
|
||||||
cj["lmod"] = int(time.time())
|
cj["lmod"] = int(time.time())
|
||||||
|
|
||||||
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
if cj["hash"]:
|
||||||
|
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
||||||
|
else:
|
||||||
|
wark = up2k_wark_from_metadata(
|
||||||
|
self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
|
||||||
|
)
|
||||||
|
|
||||||
return wark
|
return wark
|
||||||
|
|
||||||
def _hashlist_from_file(self, path):
|
def _hashlist_from_file(self, path):
|
||||||
@@ -1746,9 +1939,12 @@ class Up2k(object):
|
|||||||
|
|
||||||
if self.args.nw:
|
if self.args.nw:
|
||||||
job["tnam"] = tnam
|
job["tnam"] = tnam
|
||||||
|
if not job["hash"]:
|
||||||
|
del self.registry[job["ptop"]][job["wark"]]
|
||||||
return
|
return
|
||||||
|
|
||||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
dip = job["addr"].replace(":", ".")
|
||||||
|
suffix = "-{:.6f}-{}".format(job["t0"], dip)
|
||||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||||
f, job["tnam"] = f["orz"]
|
f, job["tnam"] = f["orz"]
|
||||||
if (
|
if (
|
||||||
@@ -1762,8 +1958,12 @@ class Up2k(object):
|
|||||||
except:
|
except:
|
||||||
self.log("could not sparse [{}]".format(fp), 3)
|
self.log("could not sparse [{}]".format(fp), 3)
|
||||||
|
|
||||||
f.seek(job["size"] - 1)
|
if job["hash"]:
|
||||||
f.write(b"e")
|
f.seek(job["size"] - 1)
|
||||||
|
f.write(b"e")
|
||||||
|
|
||||||
|
if not job["hash"]:
|
||||||
|
self._finish_upload(job["ptop"], job["wark"])
|
||||||
|
|
||||||
def _lastmodder(self):
|
def _lastmodder(self):
|
||||||
while True:
|
while True:
|
||||||
@@ -1792,7 +1992,8 @@ class Up2k(object):
|
|||||||
self.snap_prev = {}
|
self.snap_prev = {}
|
||||||
while True:
|
while True:
|
||||||
time.sleep(self.snap_persist_interval)
|
time.sleep(self.snap_persist_interval)
|
||||||
self.do_snapshot()
|
if not hasattr(self, "pp"):
|
||||||
|
self.do_snapshot()
|
||||||
|
|
||||||
def do_snapshot(self):
|
def do_snapshot(self):
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -1801,7 +2002,10 @@ class Up2k(object):
|
|||||||
|
|
||||||
def _snap_reg(self, ptop, reg):
|
def _snap_reg(self, ptop, reg):
|
||||||
now = time.time()
|
now = time.time()
|
||||||
histpath = self.asrv.vfs.histtab[ptop]
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
|
if not histpath:
|
||||||
|
return
|
||||||
|
|
||||||
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
|
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
|
||||||
if rm:
|
if rm:
|
||||||
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
|
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
|
||||||
@@ -1861,11 +2065,16 @@ class Up2k(object):
|
|||||||
|
|
||||||
# self.log("\n " + repr([ptop, rd, fn]))
|
# self.log("\n " + repr([ptop, rd, fn]))
|
||||||
abspath = os.path.join(ptop, rd, fn)
|
abspath = os.path.join(ptop, rd, fn)
|
||||||
tags = self.mtag.get(abspath)
|
try:
|
||||||
ntags1 = len(tags)
|
tags = self.mtag.get(abspath)
|
||||||
parsers = self._get_parsers(ptop, tags, abspath)
|
ntags1 = len(tags)
|
||||||
if parsers:
|
parsers = self._get_parsers(ptop, tags, abspath)
|
||||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
if parsers:
|
||||||
|
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||||
|
except Exception as ex:
|
||||||
|
msg = "failed to read tags from {}:\n{}"
|
||||||
|
self.log(msg.format(abspath, ex), c=3)
|
||||||
|
continue
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
cur = self.cur[ptop]
|
cur = self.cur[ptop]
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ import subprocess as sp # nosec
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
|
|
||||||
from .__init__ import PY2, WINDOWS, ANYWIN, VT100
|
from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
|
||||||
from .stolen import surrogateescape
|
from .stolen import surrogateescape
|
||||||
|
|
||||||
FAKE_MP = False
|
FAKE_MP = False
|
||||||
@@ -100,10 +100,24 @@ IMPLICATIONS = [
|
|||||||
|
|
||||||
|
|
||||||
MIMES = {
|
MIMES = {
|
||||||
"md": "text/plain; charset=UTF-8",
|
"md": "text/plain",
|
||||||
|
"txt": "text/plain",
|
||||||
|
"js": "text/javascript",
|
||||||
"opus": "audio/ogg; codecs=opus",
|
"opus": "audio/ogg; codecs=opus",
|
||||||
"webp": "image/webp",
|
"mp3": "audio/mpeg",
|
||||||
|
"m4a": "audio/mp4",
|
||||||
|
"jpg": "image/jpeg",
|
||||||
}
|
}
|
||||||
|
for ln in """text css html csv
|
||||||
|
application json wasm xml pdf rtf zip
|
||||||
|
image webp jpeg png gif bmp
|
||||||
|
audio aac ogg wav
|
||||||
|
video webm mp4 mpeg
|
||||||
|
font woff woff2 otf ttf
|
||||||
|
""".splitlines():
|
||||||
|
k, vs = ln.split(" ", 1)
|
||||||
|
for v in vs.strip().split():
|
||||||
|
MIMES[v] = "{}/{}".format(k, v)
|
||||||
|
|
||||||
|
|
||||||
REKOBO_KEY = {
|
REKOBO_KEY = {
|
||||||
@@ -169,7 +183,7 @@ class Cooldown(object):
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
class Unrecv(object):
|
class _Unrecv(object):
|
||||||
"""
|
"""
|
||||||
undo any number of socket recv ops
|
undo any number of socket recv ops
|
||||||
"""
|
"""
|
||||||
@@ -189,10 +203,117 @@ class Unrecv(object):
|
|||||||
except:
|
except:
|
||||||
return b""
|
return b""
|
||||||
|
|
||||||
|
def recv_ex(self, nbytes):
|
||||||
|
"""read an exact number of bytes"""
|
||||||
|
ret = self.recv(nbytes)
|
||||||
|
while ret and len(ret) < nbytes:
|
||||||
|
buf = self.recv(nbytes - len(ret))
|
||||||
|
if not buf:
|
||||||
|
break
|
||||||
|
|
||||||
|
ret += buf
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
def unrecv(self, buf):
|
def unrecv(self, buf):
|
||||||
self.buf = buf + self.buf
|
self.buf = buf + self.buf
|
||||||
|
|
||||||
|
|
||||||
|
class _LUnrecv(object):
|
||||||
|
"""
|
||||||
|
with expensive debug logging
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, s):
|
||||||
|
self.s = s
|
||||||
|
self.buf = b""
|
||||||
|
|
||||||
|
def recv(self, nbytes):
|
||||||
|
if self.buf:
|
||||||
|
ret = self.buf[:nbytes]
|
||||||
|
self.buf = self.buf[nbytes:]
|
||||||
|
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||||
|
print(m.format(ret, self.buf), end="")
|
||||||
|
return ret
|
||||||
|
|
||||||
|
try:
|
||||||
|
ret = self.s.recv(nbytes)
|
||||||
|
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
|
||||||
|
print(m.format(ret), end="")
|
||||||
|
return ret
|
||||||
|
except:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
def recv_ex(self, nbytes):
|
||||||
|
"""read an exact number of bytes"""
|
||||||
|
ret = self.recv(nbytes)
|
||||||
|
while ret and len(ret) < nbytes:
|
||||||
|
buf = self.recv(nbytes - len(ret))
|
||||||
|
if not buf:
|
||||||
|
break
|
||||||
|
|
||||||
|
ret += buf
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def unrecv(self, buf):
|
||||||
|
self.buf = buf + self.buf
|
||||||
|
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||||
|
print(m.format(buf, self.buf), end="")
|
||||||
|
|
||||||
|
|
||||||
|
Unrecv = _Unrecv
|
||||||
|
|
||||||
|
|
||||||
|
class FHC(object):
|
||||||
|
class CE(object):
|
||||||
|
def __init__(self, fh):
|
||||||
|
self.ts = 0
|
||||||
|
self.fhs = [fh]
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.cache = {}
|
||||||
|
|
||||||
|
def close(self, path):
|
||||||
|
try:
|
||||||
|
ce = self.cache[path]
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
for fh in ce.fhs:
|
||||||
|
fh.close()
|
||||||
|
|
||||||
|
del self.cache[path]
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
if not self.cache:
|
||||||
|
return
|
||||||
|
|
||||||
|
keep = {}
|
||||||
|
now = time.time()
|
||||||
|
for path, ce in self.cache.items():
|
||||||
|
if now < ce.ts + 5:
|
||||||
|
keep[path] = ce
|
||||||
|
else:
|
||||||
|
for fh in ce.fhs:
|
||||||
|
fh.close()
|
||||||
|
|
||||||
|
self.cache = keep
|
||||||
|
|
||||||
|
def pop(self, path):
|
||||||
|
return self.cache[path].fhs.pop()
|
||||||
|
|
||||||
|
def put(self, path, fh):
|
||||||
|
try:
|
||||||
|
ce = self.cache[path]
|
||||||
|
ce.fhs.append(fh)
|
||||||
|
except:
|
||||||
|
ce = self.CE(fh)
|
||||||
|
self.cache[path] = ce
|
||||||
|
|
||||||
|
ce.ts = time.time()
|
||||||
|
|
||||||
|
|
||||||
class ProgressPrinter(threading.Thread):
|
class ProgressPrinter(threading.Thread):
|
||||||
"""
|
"""
|
||||||
periodically print progress info without linefeeds
|
periodically print progress info without linefeeds
|
||||||
@@ -317,7 +438,7 @@ def stackmon(fp, ival, suffix):
|
|||||||
|
|
||||||
|
|
||||||
def start_log_thrs(logger, ival, nid):
|
def start_log_thrs(logger, ival, nid):
|
||||||
ival = int(ival)
|
ival = float(ival)
|
||||||
tname = lname = "log-thrs"
|
tname = lname = "log-thrs"
|
||||||
if nid:
|
if nid:
|
||||||
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
|
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
|
||||||
@@ -338,7 +459,7 @@ def log_thrs(log, ival, name):
|
|||||||
tv = [x.name for x in threading.enumerate()]
|
tv = [x.name for x in threading.enumerate()]
|
||||||
tv = [
|
tv = [
|
||||||
x.split("-")[0]
|
x.split("-")[0]
|
||||||
if x.startswith("httpconn-") or x.startswith("thumb-")
|
if x.split("-")[0] in ["httpconn", "thumb", "tagger"]
|
||||||
else "listen"
|
else "listen"
|
||||||
if "-listen-" in x
|
if "-listen-" in x
|
||||||
else x
|
else x
|
||||||
@@ -352,6 +473,10 @@ def log_thrs(log, ival, name):
|
|||||||
def vol_san(vols, txt):
|
def vol_san(vols, txt):
|
||||||
for vol in vols:
|
for vol in vols:
|
||||||
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
|
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
|
||||||
|
txt = txt.replace(
|
||||||
|
vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
|
||||||
|
vol.vpath.encode("utf-8"),
|
||||||
|
)
|
||||||
|
|
||||||
return txt
|
return txt
|
||||||
|
|
||||||
@@ -367,11 +492,12 @@ def min_ex():
|
|||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def ren_open(fname, *args, **kwargs):
|
def ren_open(fname, *args, **kwargs):
|
||||||
|
fun = kwargs.pop("fun", open)
|
||||||
fdir = kwargs.pop("fdir", None)
|
fdir = kwargs.pop("fdir", None)
|
||||||
suffix = kwargs.pop("suffix", None)
|
suffix = kwargs.pop("suffix", None)
|
||||||
|
|
||||||
if fname == os.devnull:
|
if fname == os.devnull:
|
||||||
with open(fname, *args, **kwargs) as f:
|
with fun(fname, *args, **kwargs) as f:
|
||||||
yield {"orz": [f, fname]}
|
yield {"orz": [f, fname]}
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -405,7 +531,7 @@ def ren_open(fname, *args, **kwargs):
|
|||||||
fname += suffix
|
fname += suffix
|
||||||
ext += suffix
|
ext += suffix
|
||||||
|
|
||||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||||
if b64:
|
if b64:
|
||||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||||
fp2 = os.path.join(fdir, fp2)
|
fp2 = os.path.join(fdir, fp2)
|
||||||
@@ -450,8 +576,8 @@ class MultipartParser(object):
|
|||||||
self.log = log_func
|
self.log = log_func
|
||||||
self.headers = http_headers
|
self.headers = http_headers
|
||||||
|
|
||||||
self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE)
|
self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE)
|
||||||
self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE)
|
self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE)
|
||||||
self.re_cdisp_field = re.compile(
|
self.re_cdisp_field = re.compile(
|
||||||
r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
|
r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
|
||||||
)
|
)
|
||||||
@@ -587,19 +713,21 @@ class MultipartParser(object):
|
|||||||
yields [fieldname, unsanitized_filename, fieldvalue]
|
yields [fieldname, unsanitized_filename, fieldvalue]
|
||||||
where fieldvalue yields chunks of data
|
where fieldvalue yields chunks of data
|
||||||
"""
|
"""
|
||||||
while True:
|
run = True
|
||||||
|
while run:
|
||||||
fieldname, filename = self._read_header()
|
fieldname, filename = self._read_header()
|
||||||
yield [fieldname, filename, self._read_data()]
|
yield [fieldname, filename, self._read_data()]
|
||||||
|
|
||||||
tail = self.sr.recv(2)
|
tail = self.sr.recv_ex(2)
|
||||||
|
|
||||||
if tail == b"--":
|
if tail == b"--":
|
||||||
# EOF indicated by this immediately after final boundary
|
# EOF indicated by this immediately after final boundary
|
||||||
self.sr.recv(2)
|
tail = self.sr.recv_ex(2)
|
||||||
return
|
run = False
|
||||||
|
|
||||||
if tail != b"\r\n":
|
if tail != b"\r\n":
|
||||||
raise Pebkac(400, "protocol error after field value")
|
m = "protocol error after field value: want b'\\r\\n', got {!r}"
|
||||||
|
raise Pebkac(400, m.format(tail))
|
||||||
|
|
||||||
def _read_value(self, iterator, max_len):
|
def _read_value(self, iterator, max_len):
|
||||||
ret = b""
|
ret = b""
|
||||||
@@ -648,7 +776,7 @@ class MultipartParser(object):
|
|||||||
def get_boundary(headers):
|
def get_boundary(headers):
|
||||||
# boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
|
# boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
|
||||||
# (whitespace allowed except as the last char)
|
# (whitespace allowed except as the last char)
|
||||||
ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
|
ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
|
||||||
ct = headers["content-type"]
|
ct = headers["content-type"]
|
||||||
m = re.match(ptn, ct, re.IGNORECASE)
|
m = re.match(ptn, ct, re.IGNORECASE)
|
||||||
if not m:
|
if not m:
|
||||||
@@ -685,6 +813,14 @@ def read_header(sr):
|
|||||||
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
|
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
|
||||||
|
|
||||||
|
|
||||||
|
def gen_filekey(salt, fspath, fsize, inode):
|
||||||
|
return base64.urlsafe_b64encode(
|
||||||
|
hashlib.sha512(
|
||||||
|
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
|
||||||
|
).digest()
|
||||||
|
).decode("ascii")
|
||||||
|
|
||||||
|
|
||||||
def humansize(sz, terse=False):
|
def humansize(sz, terse=False):
|
||||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||||
if sz < 1024:
|
if sz < 1024:
|
||||||
@@ -985,8 +1121,12 @@ def read_socket_chunked(sr, log=None):
|
|||||||
raise Pebkac(400, err)
|
raise Pebkac(400, err)
|
||||||
|
|
||||||
if chunklen == 0:
|
if chunklen == 0:
|
||||||
sr.recv(2) # \r\n after final chunk
|
x = sr.recv_ex(2)
|
||||||
return
|
if x == b"\r\n":
|
||||||
|
return
|
||||||
|
|
||||||
|
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
|
||||||
|
raise Pebkac(400, m.format(x))
|
||||||
|
|
||||||
if log:
|
if log:
|
||||||
log("receiving {} byte chunk".format(chunklen))
|
log("receiving {} byte chunk".format(chunklen))
|
||||||
@@ -994,7 +1134,10 @@ def read_socket_chunked(sr, log=None):
|
|||||||
for chunk in read_socket(sr, chunklen):
|
for chunk in read_socket(sr, chunklen):
|
||||||
yield chunk
|
yield chunk
|
||||||
|
|
||||||
sr.recv(2) # \r\n after each chunk too
|
x = sr.recv_ex(2)
|
||||||
|
if x != b"\r\n":
|
||||||
|
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
|
||||||
|
raise Pebkac(400, m.format(x))
|
||||||
|
|
||||||
|
|
||||||
def yieldfile(fn):
|
def yieldfile(fn):
|
||||||
@@ -1021,12 +1164,14 @@ def hashcopy(fin, fout):
|
|||||||
return tlen, hashobj.hexdigest(), digest_b64
|
return tlen, hashobj.hexdigest(), digest_b64
|
||||||
|
|
||||||
|
|
||||||
def sendfile_py(lower, upper, f, s):
|
def sendfile_py(lower, upper, f, s, bufsz, slp):
|
||||||
remains = upper - lower
|
remains = upper - lower
|
||||||
f.seek(lower)
|
f.seek(lower)
|
||||||
while remains > 0:
|
while remains > 0:
|
||||||
# time.sleep(0.01)
|
if slp:
|
||||||
buf = f.read(min(1024 * 32, remains))
|
time.sleep(slp)
|
||||||
|
|
||||||
|
buf = f.read(min(bufsz, remains))
|
||||||
if not buf:
|
if not buf:
|
||||||
return remains
|
return remains
|
||||||
|
|
||||||
@@ -1039,7 +1184,7 @@ def sendfile_py(lower, upper, f, s):
|
|||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def sendfile_kern(lower, upper, f, s):
|
def sendfile_kern(lower, upper, f, s, bufsz, slp):
|
||||||
out_fd = s.fileno()
|
out_fd = s.fileno()
|
||||||
in_fd = f.fileno()
|
in_fd = f.fileno()
|
||||||
ofs = lower
|
ofs = lower
|
||||||
@@ -1062,6 +1207,9 @@ def sendfile_kern(lower, upper, f, s):
|
|||||||
|
|
||||||
|
|
||||||
def statdir(logger, scandir, lstat, top):
|
def statdir(logger, scandir, lstat, top):
|
||||||
|
if lstat and ANYWIN:
|
||||||
|
lstat = False
|
||||||
|
|
||||||
if lstat and not os.supports_follow_symlinks:
|
if lstat and not os.supports_follow_symlinks:
|
||||||
scandir = False
|
scandir = False
|
||||||
|
|
||||||
@@ -1089,9 +1237,10 @@ def statdir(logger, scandir, lstat, top):
|
|||||||
logger(src, "{} @ {}".format(repr(ex), top), 1)
|
logger(src, "{} @ {}".format(repr(ex), top), 1)
|
||||||
|
|
||||||
|
|
||||||
def rmdirs(logger, scandir, lstat, top):
|
def rmdirs(logger, scandir, lstat, top, depth):
|
||||||
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
|
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
|
||||||
top = os.path.dirname(top)
|
top = os.path.dirname(top)
|
||||||
|
depth -= 1
|
||||||
|
|
||||||
dirs = statdir(logger, scandir, lstat, top)
|
dirs = statdir(logger, scandir, lstat, top)
|
||||||
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
|
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
|
||||||
@@ -1099,15 +1248,16 @@ def rmdirs(logger, scandir, lstat, top):
|
|||||||
ok = []
|
ok = []
|
||||||
ng = []
|
ng = []
|
||||||
for d in dirs[::-1]:
|
for d in dirs[::-1]:
|
||||||
a, b = rmdirs(logger, scandir, lstat, d)
|
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||||
ok += a
|
ok += a
|
||||||
ng += b
|
ng += b
|
||||||
|
|
||||||
try:
|
if depth:
|
||||||
os.rmdir(fsenc(top))
|
try:
|
||||||
ok.append(top)
|
os.rmdir(fsenc(top))
|
||||||
except:
|
ok.append(top)
|
||||||
ng.append(top)
|
except:
|
||||||
|
ng.append(top)
|
||||||
|
|
||||||
return ok, ng
|
return ok, ng
|
||||||
|
|
||||||
@@ -1146,11 +1296,18 @@ def guess_mime(url, fallback="application/octet-stream"):
|
|||||||
except:
|
except:
|
||||||
return fallback
|
return fallback
|
||||||
|
|
||||||
ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
|
ret = MIMES.get(ext)
|
||||||
|
|
||||||
|
if not ret:
|
||||||
|
x = mimetypes.guess_type(url)
|
||||||
|
ret = "application/{}".format(x[1]) if x[1] else x[0]
|
||||||
|
|
||||||
|
if not ret:
|
||||||
|
ret = fallback
|
||||||
|
|
||||||
if ";" not in ret:
|
if ";" not in ret:
|
||||||
if ret.startswith("text/") or ret.endswith("/javascript"):
|
if ret.startswith("text/") or ret.endswith("/javascript"):
|
||||||
ret += "; charset=UTF-8"
|
ret += "; charset=utf-8"
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|||||||
@@ -237,7 +237,7 @@ window.baguetteBox = (function () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function keyDownHandler(e) {
|
function keyDownHandler(e) {
|
||||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
var k = e.code + '', v = vid();
|
var k = e.code + '', v = vid();
|
||||||
@@ -331,7 +331,7 @@ window.baguetteBox = (function () {
|
|||||||
|
|
||||||
function tglsel() {
|
function tglsel() {
|
||||||
var thumb = currentGallery[currentIndex].imageElement,
|
var thumb = currentGallery[currentIndex].imageElement,
|
||||||
name = vsplit(thumb.href)[1],
|
name = vsplit(thumb.href)[1].split('?')[0],
|
||||||
files = msel.getall();
|
files = msel.getall();
|
||||||
|
|
||||||
for (var a = 0; a < files.length; a++)
|
for (var a = 0; a < files.length; a++)
|
||||||
@@ -345,7 +345,7 @@ window.baguetteBox = (function () {
|
|||||||
function selbg() {
|
function selbg() {
|
||||||
var img = vidimg(),
|
var img = vidimg(),
|
||||||
thumb = currentGallery[currentIndex].imageElement,
|
thumb = currentGallery[currentIndex].imageElement,
|
||||||
name = vsplit(thumb.href)[1],
|
name = vsplit(thumb.href)[1].split('?')[0],
|
||||||
files = msel.getsel(),
|
files = msel.getsel(),
|
||||||
sel = false;
|
sel = false;
|
||||||
|
|
||||||
@@ -530,9 +530,7 @@ window.baguetteBox = (function () {
|
|||||||
if (options.bodyClass && document.body.classList)
|
if (options.bodyClass && document.body.classList)
|
||||||
document.body.classList.remove(options.bodyClass);
|
document.body.classList.remove(options.bodyClass);
|
||||||
|
|
||||||
var h = ebi('bbox-halp');
|
qsr('#bbox-halp');
|
||||||
if (h)
|
|
||||||
h.parentNode.removeChild(h);
|
|
||||||
|
|
||||||
if (options.afterHide)
|
if (options.afterHide)
|
||||||
options.afterHide();
|
options.afterHide();
|
||||||
@@ -590,8 +588,7 @@ window.baguetteBox = (function () {
|
|||||||
|
|
||||||
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
||||||
// Remove loader element
|
// Remove loader element
|
||||||
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
|
qsr('#baguette-img-' + index + ' .bbox-spinner');
|
||||||
figure.removeChild(spinner);
|
|
||||||
if (!options.async && callback)
|
if (!options.async && callback)
|
||||||
callback();
|
callback();
|
||||||
});
|
});
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -18,9 +18,9 @@
|
|||||||
|
|
||||||
<div id="op_search" class="opview">
|
<div id="op_search" class="opview">
|
||||||
{%- if have_tags_idx %}
|
{%- if have_tags_idx %}
|
||||||
<div id="srch_form" class="tags"></div>
|
<div id="srch_form" class="tags opbox"></div>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<div id="srch_form"></div>
|
<div id="srch_form" class="opbox"></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
<div id="srch_q"></div>
|
<div id="srch_q"></div>
|
||||||
</div>
|
</div>
|
||||||
@@ -31,7 +31,7 @@
|
|||||||
<div id="u2err"></div>
|
<div id="u2err"></div>
|
||||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<input type="hidden" name="act" value="bput" />
|
<input type="hidden" name="act" value="bput" />
|
||||||
<input type="file" name="f" multiple><br />
|
<input type="file" name="f" multiple /><br />
|
||||||
<input type="submit" value="start upload">
|
<input type="submit" value="start upload">
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
@@ -39,7 +39,7 @@
|
|||||||
<div id="op_mkdir" class="opview opbox act">
|
<div id="op_mkdir" class="opview opbox act">
|
||||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<input type="hidden" name="act" value="mkdir" />
|
<input type="hidden" name="act" value="mkdir" />
|
||||||
📂<input type="text" name="name" size="30">
|
📂<input type="text" name="name" class="i">
|
||||||
<input type="submit" value="make directory">
|
<input type="submit" value="make directory">
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
@@ -47,15 +47,15 @@
|
|||||||
<div id="op_new_md" class="opview opbox">
|
<div id="op_new_md" class="opview opbox">
|
||||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<input type="hidden" name="act" value="new_md" />
|
<input type="hidden" name="act" value="new_md" />
|
||||||
📝<input type="text" name="name" size="30">
|
📝<input type="text" name="name" class="i">
|
||||||
<input type="submit" value="new markdown doc">
|
<input type="submit" value="new markdown doc">
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="op_msg" class="opview opbox act">
|
<div id="op_msg" class="opview opbox act">
|
||||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
📟<input type="text" name="msg" size="30">
|
📟<input type="text" name="msg" class="i">
|
||||||
<input type="submit" value="send msg to server log">
|
<input type="submit" value="send msg to srv log">
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -76,6 +76,12 @@
|
|||||||
|
|
||||||
<div id="wrap">
|
<div id="wrap">
|
||||||
|
|
||||||
|
{%- if doc %}
|
||||||
|
<div id="bdoc"><pre>{{ doc|e }}</pre></div>
|
||||||
|
{%- else %}
|
||||||
|
<div id="bdoc"></div>
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||||
|
|
||||||
<table id="files">
|
<table id="files">
|
||||||
@@ -130,15 +136,23 @@
|
|||||||
def_hcols = {{ def_hcols|tojson }},
|
def_hcols = {{ def_hcols|tojson }},
|
||||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||||
|
have_acode = {{ have_acode|tojson }},
|
||||||
have_mv = {{ have_mv|tojson }},
|
have_mv = {{ have_mv|tojson }},
|
||||||
have_del = {{ have_del|tojson }},
|
have_del = {{ have_del|tojson }},
|
||||||
have_unpost = {{ have_unpost|tojson }},
|
have_unpost = {{ have_unpost|tojson }},
|
||||||
have_zip = {{ have_zip|tojson }},
|
have_zip = {{ have_zip|tojson }},
|
||||||
|
txt_ext = "{{ txt_ext }}",
|
||||||
|
{% if no_prism %}no_prism = 1,{% endif %}
|
||||||
readme = {{ readme|tojson }};
|
readme = {{ readme|tojson }};
|
||||||
|
|
||||||
|
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||||
|
{%- if js %}
|
||||||
|
<script src="{{ js }}?_={{ ts }}"></script>
|
||||||
|
{%- endif %}
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -212,6 +212,10 @@ blink {
|
|||||||
#toolsbox a+a {
|
#toolsbox a+a {
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
}
|
}
|
||||||
|
#lno {
|
||||||
|
position: absolute;
|
||||||
|
right: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@
|
|||||||
<a id="lightswitch" href="#">go dark</a>
|
<a id="lightswitch" href="#">go dark</a>
|
||||||
<a id="navtoggle" href="#">hide nav</a>
|
<a id="navtoggle" href="#">hide nav</a>
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
<a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
|
||||||
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||||
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||||
<div id="toolsbox">
|
<div id="toolsbox">
|
||||||
@@ -26,10 +26,11 @@
|
|||||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||||
<a id="help" href="#">help</a>
|
<a id="help" href="#">help</a>
|
||||||
</div>
|
</div>
|
||||||
|
<span id="lno">L#</span>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||||
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||||
<a href="?raw">view raw</a>
|
<a href="{{ arg_base }}raw">view raw</a>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</div>
|
</div>
|
||||||
<div id="toc"></div>
|
<div id="toc"></div>
|
||||||
@@ -134,13 +135,13 @@ var md_opt = {
|
|||||||
|
|
||||||
(function () {
|
(function () {
|
||||||
var l = localStorage,
|
var l = localStorage,
|
||||||
drk = l.getItem('lightmode') != 1,
|
drk = l.lightmode != 1,
|
||||||
btn = document.getElementById("lightswitch"),
|
btn = document.getElementById("lightswitch"),
|
||||||
f = function (e) {
|
f = function (e) {
|
||||||
if (e) { e.preventDefault(); drk = !drk; }
|
if (e) { e.preventDefault(); drk = !drk; }
|
||||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||||
l.setItem('lightmode', drk? 0:1);
|
l.lightmode = drk? 0:1;
|
||||||
};
|
};
|
||||||
|
|
||||||
btn.onclick = f;
|
btn.onclick = f;
|
||||||
|
|||||||
@@ -164,10 +164,7 @@ function copydom(src, dst, lv) {
|
|||||||
|
|
||||||
|
|
||||||
function md_plug_err(ex, js) {
|
function md_plug_err(ex, js) {
|
||||||
var errbox = ebi('md_errbox');
|
qsr('#md_errbox');
|
||||||
if (errbox)
|
|
||||||
errbox.parentNode.removeChild(errbox);
|
|
||||||
|
|
||||||
if (!ex)
|
if (!ex)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@@ -183,7 +180,7 @@ function md_plug_err(ex, js) {
|
|||||||
o.textContent = lns[ln - 1];
|
o.textContent = lns[ln - 1];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
errbox = mknod('div');
|
var errbox = mknod('div');
|
||||||
errbox.setAttribute('id', 'md_errbox');
|
errbox.setAttribute('id', 'md_errbox');
|
||||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||||
errbox.textContent = msg;
|
errbox.textContent = msg;
|
||||||
@@ -267,7 +264,14 @@ function convert_markdown(md_text, dest_dom) {
|
|||||||
|
|
||||||
throw ex;
|
throw ex;
|
||||||
}
|
}
|
||||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
var md_dom = dest_dom;
|
||||||
|
try {
|
||||||
|
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
md_dom.innerHTML = md_html;
|
||||||
|
window.copydom = noop;
|
||||||
|
}
|
||||||
|
|
||||||
var nodes = md_dom.getElementsByTagName('a');
|
var nodes = md_dom.getElementsByTagName('a');
|
||||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||||
@@ -374,8 +378,7 @@ function convert_markdown(md_text, dest_dom) {
|
|||||||
|
|
||||||
|
|
||||||
function init_toc() {
|
function init_toc() {
|
||||||
var loader = ebi('ml');
|
qsr('#ml');
|
||||||
loader.parentNode.removeChild(loader);
|
|
||||||
|
|
||||||
var anchors = []; // list of toc entries, complex objects
|
var anchors = []; // list of toc entries, complex objects
|
||||||
var anchor = null; // current toc node
|
var anchor = null; // current toc node
|
||||||
@@ -502,9 +505,11 @@ img_load.callbacks = [toc.refresh];
|
|||||||
|
|
||||||
// scroll handler
|
// scroll handler
|
||||||
var redraw = (function () {
|
var redraw = (function () {
|
||||||
var sbs = false;
|
var sbs = true;
|
||||||
function onresize() {
|
function onresize() {
|
||||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
if (window.matchMedia)
|
||||||
|
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||||
|
|
||||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||||
if (sbs) {
|
if (sbs) {
|
||||||
dom_toc.style.top = y;
|
dom_toc.style.top = y;
|
||||||
|
|||||||
@@ -230,44 +230,40 @@ redraw = (function () {
|
|||||||
|
|
||||||
// modification checker
|
// modification checker
|
||||||
function Modpoll() {
|
function Modpoll() {
|
||||||
this.skip_one = true;
|
var r = {
|
||||||
this.disabled = false;
|
skip_one: true,
|
||||||
|
disabled: false
|
||||||
this.periodic = function () {
|
};
|
||||||
var that = this;
|
|
||||||
setTimeout(function () {
|
|
||||||
that.periodic();
|
|
||||||
}, 1000 * md_opt.modpoll_freq);
|
|
||||||
|
|
||||||
|
r.periodic = function () {
|
||||||
var skip = null;
|
var skip = null;
|
||||||
|
|
||||||
if (toast.visible)
|
if (toast.visible)
|
||||||
skip = 'toast';
|
skip = 'toast';
|
||||||
|
|
||||||
else if (this.skip_one)
|
else if (r.skip_one)
|
||||||
skip = 'saved';
|
skip = 'saved';
|
||||||
|
|
||||||
else if (this.disabled)
|
else if (r.disabled)
|
||||||
skip = 'disabled';
|
skip = 'disabled';
|
||||||
|
|
||||||
if (skip) {
|
if (skip) {
|
||||||
console.log('modpoll skip, ' + skip);
|
console.log('modpoll skip, ' + skip);
|
||||||
this.skip_one = false;
|
r.skip_one = false;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('modpoll...');
|
console.log('modpoll...');
|
||||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||||
var xhr = new XMLHttpRequest();
|
var xhr = new XMLHttpRequest();
|
||||||
xhr.modpoll = this;
|
|
||||||
xhr.open('GET', url, true);
|
xhr.open('GET', url, true);
|
||||||
xhr.responseType = 'text';
|
xhr.responseType = 'text';
|
||||||
xhr.onreadystatechange = this.cb;
|
xhr.onreadystatechange = r.cb;
|
||||||
xhr.send();
|
xhr.send();
|
||||||
}
|
};
|
||||||
|
|
||||||
this.cb = function () {
|
r.cb = function () {
|
||||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
if (r.disabled || r.skip_one) {
|
||||||
console.log('modpoll abort');
|
console.log('modpoll abort');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -288,7 +284,7 @@ function Modpoll() {
|
|||||||
|
|
||||||
if (server_ref != server_now) {
|
if (server_ref != server_now) {
|
||||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||||
this.modpoll.disabled = true;
|
r.disabled = true;
|
||||||
var msg = [
|
var msg = [
|
||||||
"The document has changed on the server.",
|
"The document has changed on the server.",
|
||||||
"The changes will NOT be loaded into your editor automatically.",
|
"The changes will NOT be loaded into your editor automatically.",
|
||||||
@@ -302,12 +298,12 @@ function Modpoll() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
console.log('modpoll eq');
|
console.log('modpoll eq');
|
||||||
}
|
};
|
||||||
|
|
||||||
if (md_opt.modpoll_freq > 0)
|
if (md_opt.modpoll_freq > 0)
|
||||||
this.periodic();
|
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
|
||||||
|
|
||||||
return this;
|
return r;
|
||||||
}
|
}
|
||||||
var modpoll = new Modpoll();
|
var modpoll = new Modpoll();
|
||||||
|
|
||||||
@@ -879,6 +875,40 @@ function cfg_uni(e) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
var set_lno = (function () {
|
||||||
|
var t = null,
|
||||||
|
pi = null,
|
||||||
|
pv = null,
|
||||||
|
lno = ebi('lno');
|
||||||
|
|
||||||
|
function poke() {
|
||||||
|
clearTimeout(t);
|
||||||
|
t = setTimeout(fire, 20);
|
||||||
|
}
|
||||||
|
|
||||||
|
function fire() {
|
||||||
|
try {
|
||||||
|
clearTimeout(t);
|
||||||
|
|
||||||
|
var i = dom_src.selectionStart;
|
||||||
|
if (i === pi)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
|
||||||
|
if (v != pv)
|
||||||
|
lno.innerHTML = v;
|
||||||
|
|
||||||
|
pi = i;
|
||||||
|
pv = v;
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
timer.add(fire);
|
||||||
|
return poke;
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
// hotkeys / toolbar
|
// hotkeys / toolbar
|
||||||
(function () {
|
(function () {
|
||||||
function keydown(ev) {
|
function keydown(ev) {
|
||||||
@@ -897,6 +927,8 @@ function cfg_uni(e) {
|
|||||||
if (document.activeElement != dom_src)
|
if (document.activeElement != dom_src)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
|
set_lno();
|
||||||
|
|
||||||
if (ctrl(ev)) {
|
if (ctrl(ev)) {
|
||||||
if (ev.code == "KeyH" || kc == 72) {
|
if (ev.code == "KeyH" || kc == 72) {
|
||||||
md_header(ev.shiftKey);
|
md_header(ev.shiftKey);
|
||||||
|
|||||||
@@ -33,11 +33,11 @@ var md_opt = {
|
|||||||
|
|
||||||
var lightswitch = (function () {
|
var lightswitch = (function () {
|
||||||
var l = localStorage,
|
var l = localStorage,
|
||||||
drk = l.getItem('lightmode') != 1,
|
drk = l.lightmode != 1,
|
||||||
f = function (e) {
|
f = function (e) {
|
||||||
if (e) drk = !drk;
|
if (e) drk = !drk;
|
||||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||||
l.setItem('lightmode', drk? 0:1);
|
l.lightmode = drk? 0:1;
|
||||||
};
|
};
|
||||||
f();
|
f();
|
||||||
return f;
|
return f;
|
||||||
@@ -45,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
|
|||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
|
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|||||||
@@ -65,8 +65,7 @@ var mde = (function () {
|
|||||||
mde.codemirror.on("change", function () {
|
mde.codemirror.on("change", function () {
|
||||||
md_changed(mde);
|
md_changed(mde);
|
||||||
});
|
});
|
||||||
var loader = ebi('ml');
|
qsr('#ml');
|
||||||
loader.parentNode.removeChild(loader);
|
|
||||||
return mde;
|
return mde;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
|
|||||||
@@ -25,10 +25,23 @@ a {
|
|||||||
color: #047;
|
color: #047;
|
||||||
background: #fff;
|
background: #fff;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
border-bottom: 1px solid #aaa;
|
border-bottom: 1px solid #8ab;
|
||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
padding: .2em .8em;
|
padding: .2em .8em;
|
||||||
}
|
}
|
||||||
|
a+a {
|
||||||
|
margin-left: .5em;
|
||||||
|
}
|
||||||
|
.refresh,
|
||||||
|
.logout {
|
||||||
|
float: right;
|
||||||
|
margin: -.2em 0 0 .5em;
|
||||||
|
}
|
||||||
|
.logout,
|
||||||
|
.btns a {
|
||||||
|
color: #c04;
|
||||||
|
border-color: #c7a;
|
||||||
|
}
|
||||||
#repl {
|
#repl {
|
||||||
border: none;
|
border: none;
|
||||||
background: none;
|
background: none;
|
||||||
@@ -42,6 +55,7 @@ table {
|
|||||||
.vols th {
|
.vols th {
|
||||||
padding: .3em .6em;
|
padding: .3em .6em;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
.num {
|
.num {
|
||||||
border-right: 1px solid #bbb;
|
border-right: 1px solid #bbb;
|
||||||
@@ -55,6 +69,16 @@ table {
|
|||||||
.btns {
|
.btns {
|
||||||
margin: 1em 0;
|
margin: 1em 0;
|
||||||
}
|
}
|
||||||
|
#msg {
|
||||||
|
margin: 3em 0;
|
||||||
|
}
|
||||||
|
#msg h1 {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
#msg h1 + p {
|
||||||
|
margin-top: .3em;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
html.dark,
|
html.dark,
|
||||||
@@ -71,10 +95,15 @@ html.dark a {
|
|||||||
background: #057;
|
background: #057;
|
||||||
border-color: #37a;
|
border-color: #37a;
|
||||||
}
|
}
|
||||||
|
html.dark .logout,
|
||||||
|
html.dark .btns a {
|
||||||
|
background: #804;
|
||||||
|
border-color: #c28;
|
||||||
|
}
|
||||||
html.dark input {
|
html.dark input {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
background: #624;
|
background: #626;
|
||||||
border: 1px solid #c27;
|
border: 1px solid #c2c;
|
||||||
border-width: 1px 0 0 0;
|
border-width: 1px 0 0 0;
|
||||||
border-radius: .5em;
|
border-radius: .5em;
|
||||||
padding: .5em .7em;
|
padding: .5em .7em;
|
||||||
|
|||||||
@@ -12,7 +12,20 @@
|
|||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="wrap">
|
<div id="wrap">
|
||||||
<p>hello {{ this.uname }}</p>
|
<a href="/?h" class="refresh">refresh</a>
|
||||||
|
|
||||||
|
{%- if this.uname == '*' %}
|
||||||
|
<p>howdy stranger <small>(you're not logged in)</small></p>
|
||||||
|
{%- else %}
|
||||||
|
<a href="/?pw=x" class="logout">logout</a>
|
||||||
|
<p>welcome back, <strong>{{ this.uname }}</strong></p>
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
{%- if msg %}
|
||||||
|
<div id="msg">
|
||||||
|
{{ msg }}
|
||||||
|
</div>
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
{%- if avol %}
|
{%- if avol %}
|
||||||
<h1>admin panel:</h1>
|
<h1>admin panel:</h1>
|
||||||
@@ -36,7 +49,8 @@
|
|||||||
</table>
|
</table>
|
||||||
</td></tr></table>
|
</td></tr></table>
|
||||||
<div class="btns">
|
<div class="btns">
|
||||||
<a href="/?stack">dump stack</a>
|
<a href="/?stack" tt="shows the state of all active threads">dump stack</a>
|
||||||
|
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
|
||||||
</div>
|
</div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
@@ -60,7 +74,7 @@
|
|||||||
|
|
||||||
<h1>login for more:</h1>
|
<h1>login for more:</h1>
|
||||||
<ul>
|
<ul>
|
||||||
<form method="post" enctype="multipart/form-data" action="/">
|
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||||
<input type="hidden" name="act" value="login" />
|
<input type="hidden" name="act" value="login" />
|
||||||
<input type="password" name="cppwd" />
|
<input type="password" name="cppwd" />
|
||||||
<input type="submit" value="Login" />
|
<input type="submit" value="Login" />
|
||||||
@@ -70,10 +84,11 @@
|
|||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
<script>
|
<script>
|
||||||
|
|
||||||
if (localStorage.getItem('lightmode') != 1)
|
if (localStorage.lightmode != 1)
|
||||||
document.documentElement.setAttribute("class", "dark");
|
document.documentElement.setAttribute("class", "dark");
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
|
<script>tt.init();</script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'scp';
|
font-family: 'scp';
|
||||||
|
font-display: swap;
|
||||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||||
}
|
}
|
||||||
html {
|
html {
|
||||||
@@ -10,9 +11,9 @@ html {
|
|||||||
max-width: 34em;
|
max-width: 34em;
|
||||||
max-width: min(34em, 90%);
|
max-width: min(34em, 90%);
|
||||||
max-width: min(34em, calc(100% - 7em));
|
max-width: min(34em, calc(100% - 7em));
|
||||||
background: #222;
|
background: #333;
|
||||||
border: 0 solid #777;
|
border: 0 solid #777;
|
||||||
box-shadow: 0 .2em .5em #222;
|
box-shadow: 0 .2em .5em #111;
|
||||||
border-radius: .4em;
|
border-radius: .4em;
|
||||||
z-index: 9001;
|
z-index: 9001;
|
||||||
}
|
}
|
||||||
@@ -78,7 +79,8 @@ html {
|
|||||||
}
|
}
|
||||||
#toast.vis {
|
#toast.vis {
|
||||||
right: 1.3em;
|
right: 1.3em;
|
||||||
transform: unset;
|
transform: inherit;
|
||||||
|
transform: initial;
|
||||||
}
|
}
|
||||||
#toast.vis #toastc {
|
#toast.vis #toastc {
|
||||||
left: -2em;
|
left: -2em;
|
||||||
@@ -131,7 +133,8 @@ html {
|
|||||||
}
|
}
|
||||||
#modalc code,
|
#modalc code,
|
||||||
#tt code {
|
#tt code {
|
||||||
background: #3c3c3c;
|
color: #eee;
|
||||||
|
background: #444;
|
||||||
padding: .1em .3em;
|
padding: .1em .3em;
|
||||||
border-top: 1px solid #777;
|
border-top: 1px solid #777;
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
@@ -246,6 +249,27 @@ html.light #tt em {
|
|||||||
#repl_pre {
|
#repl_pre {
|
||||||
max-width: 24em;
|
max-width: 24em;
|
||||||
}
|
}
|
||||||
|
*:focus,
|
||||||
|
#pctl *:focus,
|
||||||
|
.btn:focus {
|
||||||
|
box-shadow: 0 .1em .2em #fc0 inset;
|
||||||
|
border-radius: .2em;
|
||||||
|
}
|
||||||
|
html.light *:focus,
|
||||||
|
html.light #pctl *:focus,
|
||||||
|
html.light .btn:focus {
|
||||||
|
box-shadow: 0 .1em .2em #037 inset;
|
||||||
|
}
|
||||||
|
input[type="text"]:focus,
|
||||||
|
input:not([type]):focus,
|
||||||
|
textarea:focus {
|
||||||
|
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
|
||||||
|
}
|
||||||
|
html.light input[type="text"]:focus,
|
||||||
|
html.light input:not([type]):focus,
|
||||||
|
html.light textarea:focus {
|
||||||
|
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,10 @@ catch (ex) {
|
|||||||
try {
|
try {
|
||||||
up2k = up2k_init(false);
|
up2k = up2k_init(false);
|
||||||
}
|
}
|
||||||
catch (ex) { }
|
catch (ex) {
|
||||||
|
console.log('up2k init failed:', ex);
|
||||||
|
toast.err(10, 'could not initialze up2k\n\n' + basenames(ex));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
treectl.onscroll();
|
treectl.onscroll();
|
||||||
|
|
||||||
@@ -210,14 +213,14 @@ function U2pvis(act, btns) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
r.setat = function (nfile, blocktab) {
|
r.setat = function (nfile, blocktab) {
|
||||||
r.tab[nfile].cb = blocktab;
|
var fo = r.tab[nfile], bd = 0;
|
||||||
|
|
||||||
var bd = 0;
|
|
||||||
for (var a = 0; a < blocktab.length; a++)
|
for (var a = 0; a < blocktab.length; a++)
|
||||||
bd += blocktab[a];
|
bd += blocktab[a];
|
||||||
|
|
||||||
r.tab[nfile].bd = bd;
|
fo.bd = bd;
|
||||||
r.tab[nfile].bd0 = bd;
|
fo.bd0 = bd;
|
||||||
|
fo.cb = blocktab;
|
||||||
};
|
};
|
||||||
|
|
||||||
r.perc = function (bd, bd0, sz, t0) {
|
r.perc = function (bd, bd0, sz, t0) {
|
||||||
@@ -246,7 +249,7 @@ function U2pvis(act, btns) {
|
|||||||
|
|
||||||
obj.innerHTML = fo.hp;
|
obj.innerHTML = fo.hp;
|
||||||
obj.style.color = '#fff';
|
obj.style.color = '#fff';
|
||||||
obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)';
|
obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
|
||||||
};
|
};
|
||||||
|
|
||||||
r.prog = function (fobj, nchunk, cbd) {
|
r.prog = function (fobj, nchunk, cbd) {
|
||||||
@@ -303,7 +306,7 @@ function U2pvis(act, btns) {
|
|||||||
|
|
||||||
obj.innerHTML = fo.hp;
|
obj.innerHTML = fo.hp;
|
||||||
obj.style.color = '#fff';
|
obj.style.color = '#fff';
|
||||||
obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)';
|
obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
|
||||||
};
|
};
|
||||||
|
|
||||||
r.move = function (nfile, newcat) {
|
r.move = function (nfile, newcat) {
|
||||||
@@ -329,8 +332,7 @@ function U2pvis(act, btns) {
|
|||||||
r.head++;
|
r.head++;
|
||||||
|
|
||||||
if (!bz_act) {
|
if (!bz_act) {
|
||||||
var tr = ebi("f" + nfile);
|
qsr("#f" + nfile);
|
||||||
tr.parentNode.removeChild(tr);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else return;
|
else return;
|
||||||
@@ -349,9 +351,7 @@ function U2pvis(act, btns) {
|
|||||||
last = parseInt(last.getAttribute('id').slice(1));
|
last = parseInt(last.getAttribute('id').slice(1));
|
||||||
|
|
||||||
while (r.head - first > r.wsz) {
|
while (r.head - first > r.wsz) {
|
||||||
var obj = ebi('f' + (first++));
|
qsr('#f' + (first++));
|
||||||
if (obj)
|
|
||||||
obj.parentNode.removeChild(obj);
|
|
||||||
}
|
}
|
||||||
while (last - r.tail < r.wsz && last < r.tab.length - 2) {
|
while (last - r.tail < r.wsz && last < r.tab.length - 2) {
|
||||||
var obj = ebi('f' + (++last));
|
var obj = ebi('f' + (++last));
|
||||||
@@ -477,14 +477,94 @@ function U2pvis(act, btns) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function Donut(uc, st) {
|
||||||
|
var r = this,
|
||||||
|
el = null,
|
||||||
|
psvg = null,
|
||||||
|
o = 20 * 2 * Math.PI,
|
||||||
|
optab = QS('#ops a[data-dest="up2k"]');
|
||||||
|
|
||||||
|
optab.setAttribute('ico', optab.textContent);
|
||||||
|
|
||||||
|
function svg(v) {
|
||||||
|
var ico = v !== undefined,
|
||||||
|
bg = ico ? '#333' : 'transparent',
|
||||||
|
fg = '#fff',
|
||||||
|
fsz = 52,
|
||||||
|
rc = 32;
|
||||||
|
|
||||||
|
if (r.eta && (r.eta > 99 || (uc.fsearch ? st.time.hashing : st.time.uploading) < 20))
|
||||||
|
r.eta = null;
|
||||||
|
|
||||||
|
if (r.eta) {
|
||||||
|
if (r.eta < 10) {
|
||||||
|
fg = '#fa0';
|
||||||
|
fsz = 72;
|
||||||
|
}
|
||||||
|
rc = 8;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
|
||||||
|
(ico ? '<rect width="100%" height="100%" rx="' + rc + '" fill="#333" />\n' :
|
||||||
|
'<circle stroke="white" stroke-width="6" r="3" cx="32" cy="32" />\n') +
|
||||||
|
(r.eta ? (
|
||||||
|
'<text x="55%" y="58%" dominant-baseline="middle" text-anchor="middle"' +
|
||||||
|
' font-family="sans-serif" font-weight="bold" font-size="' + fsz + 'px"' +
|
||||||
|
' fill="' + fg + '">' + r.eta + '</text></svg>'
|
||||||
|
) : (
|
||||||
|
'<circle class="donut" stroke="white" fill="' + bg +
|
||||||
|
'" stroke-dashoffset="' + (ico ? v : o) + '" stroke-dasharray="' + o + ' ' + o +
|
||||||
|
'" transform="rotate(270 32 32)" stroke-width="12" r="20" cx="32" cy="32" /></svg>'
|
||||||
|
))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function pos() {
|
||||||
|
return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
|
||||||
|
}
|
||||||
|
|
||||||
|
r.on = function (ya) {
|
||||||
|
r.fc = 99;
|
||||||
|
r.eta = null;
|
||||||
|
r.base = pos();
|
||||||
|
optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
|
||||||
|
el = QS('#ops a .donut');
|
||||||
|
if (!ya)
|
||||||
|
favico.upd();
|
||||||
|
};
|
||||||
|
r.do = function () {
|
||||||
|
if (!el)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var t = st.bytes.total - r.base,
|
||||||
|
v = pos() - r.base,
|
||||||
|
ofs = el.style.strokeDashoffset = o - o * v / t;
|
||||||
|
|
||||||
|
if (favico.txt) {
|
||||||
|
if (++r.fc < 10 && r.eta && r.eta > 99)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var s = svg(ofs);
|
||||||
|
if (s == psvg || (r.eta === null && r.fc < 10))
|
||||||
|
return;
|
||||||
|
|
||||||
|
favico.upd('', s);
|
||||||
|
psvg = s;
|
||||||
|
r.fc = 0;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function fsearch_explain(n) {
|
function fsearch_explain(n) {
|
||||||
if (n)
|
if (n)
|
||||||
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));
|
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));
|
||||||
|
|
||||||
if (bcfg_get('fsearch', false))
|
if (bcfg_get('fsearch', false))
|
||||||
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and then refresh\n\nsorry');
|
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and try uploading again\n\nsorry');
|
||||||
|
|
||||||
return toast.inf(60, 'refresh the page and try again, it should work now');
|
return toast.inf(60, 'try again, it should work now');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -512,9 +592,13 @@ function up2k_init(subtle) {
|
|||||||
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
||||||
shame = 'your browser is impressively ancient';
|
shame = 'your browser is impressively ancient';
|
||||||
|
|
||||||
var got_deps = false;
|
function got_deps() {
|
||||||
|
return subtle || window.asmCrypto || window.hashwasm;
|
||||||
|
}
|
||||||
|
|
||||||
|
var loading_deps = false;
|
||||||
function init_deps() {
|
function init_deps() {
|
||||||
if (!got_deps && !subtle && !window.asmCrypto) {
|
if (!loading_deps && !got_deps()) {
|
||||||
var fn = 'sha512.' + sha_js + '.js';
|
var fn = 'sha512.' + sha_js + '.js';
|
||||||
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
|
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
|
||||||
import_js('/.cpr/deps/' + fn, unmodal);
|
import_js('/.cpr/deps/' + fn, unmodal);
|
||||||
@@ -525,10 +609,10 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
|
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
|
||||||
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
|
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
|
||||||
}
|
}
|
||||||
got_deps = true;
|
loading_deps = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (perms.length && !has(perms, 'read'))
|
if (perms.length && !has(perms, 'read') && has(perms, 'write'))
|
||||||
goto('up2k');
|
goto('up2k');
|
||||||
|
|
||||||
function setmsg(msg, type) {
|
function setmsg(msg, type) {
|
||||||
@@ -572,17 +656,20 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var parallel_uploads = icfg_get('nthread'),
|
var parallel_uploads = icfg_get('nthread'),
|
||||||
multitask = bcfg_get('multitask', true),
|
uc = {},
|
||||||
ask_up = bcfg_get('ask_up', true),
|
|
||||||
flag_en = bcfg_get('flag_en', false),
|
|
||||||
fsearch = bcfg_get('fsearch', false),
|
|
||||||
turbo = bcfg_get('u2turbo', false),
|
|
||||||
datechk = bcfg_get('u2tdate', true),
|
|
||||||
fdom_ctr = 0,
|
fdom_ctr = 0,
|
||||||
min_filebuf = 0;
|
min_filebuf = 0;
|
||||||
|
|
||||||
|
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||||
|
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||||
|
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||||
|
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||||
|
bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false);
|
||||||
|
bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false);
|
||||||
|
|
||||||
var st = {
|
var st = {
|
||||||
"files": [],
|
"files": [],
|
||||||
|
"seen": {},
|
||||||
"todo": {
|
"todo": {
|
||||||
"head": [],
|
"head": [],
|
||||||
"hash": [],
|
"hash": [],
|
||||||
@@ -617,7 +704,8 @@ function up2k_init(subtle) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
var pvis = new U2pvis("bz", '#u2cards');
|
var pvis = new U2pvis("bz", '#u2cards'),
|
||||||
|
donut = new Donut(uc, st);
|
||||||
|
|
||||||
var bobslice = null;
|
var bobslice = null;
|
||||||
if (window.File)
|
if (window.File)
|
||||||
@@ -633,20 +721,97 @@ function up2k_init(subtle) {
|
|||||||
function nav() {
|
function nav() {
|
||||||
ebi('file' + fdom_ctr).click();
|
ebi('file' + fdom_ctr).click();
|
||||||
}
|
}
|
||||||
ebi('u2btn').addEventListener('click', nav, false);
|
ebi('u2btn').onclick = nav;
|
||||||
|
|
||||||
|
var nenters = 0;
|
||||||
function ondrag(e) {
|
function ondrag(e) {
|
||||||
e.stopPropagation();
|
if (++nenters <= 0)
|
||||||
e.preventDefault();
|
nenters = 1;
|
||||||
|
|
||||||
|
//console.log(nenters, Date.now(), 'enter', this, e.target);
|
||||||
|
if (onover.bind(this)(e))
|
||||||
|
return true;
|
||||||
|
|
||||||
|
var mup, up = QS('#up_zd');
|
||||||
|
var msr, sr = QS('#srch_zd');
|
||||||
|
if (!has(perms, 'write'))
|
||||||
|
mup = 'you do not have write-access to this folder';
|
||||||
|
if (!has(perms, 'read'))
|
||||||
|
msr = 'you do not have read-access to this folder';
|
||||||
|
if (!have_up2k_idx)
|
||||||
|
msr = 'file-search is not enabled in server config';
|
||||||
|
|
||||||
|
up.querySelector('span').textContent = mup || 'drop it here';
|
||||||
|
sr.querySelector('span').textContent = msr || 'drop it here';
|
||||||
|
clmod(up, 'err', mup);
|
||||||
|
clmod(sr, 'err', msr);
|
||||||
|
clmod(up, 'ok', !mup);
|
||||||
|
clmod(sr, 'ok', !msr);
|
||||||
|
ebi('up_dz').setAttribute('err', mup || '');
|
||||||
|
ebi('srch_dz').setAttribute('err', msr || '');
|
||||||
|
}
|
||||||
|
function onover(e) {
|
||||||
|
try {
|
||||||
|
var ok = false, dt = e.dataTransfer.types;
|
||||||
|
for (var a = 0; a < dt.length; a++)
|
||||||
|
if (dt[a] == 'Files')
|
||||||
|
ok = true;
|
||||||
|
|
||||||
|
if (!ok)
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
|
||||||
|
ev(e);
|
||||||
e.dataTransfer.dropEffect = 'copy';
|
e.dataTransfer.dropEffect = 'copy';
|
||||||
e.dataTransfer.effectAllowed = 'copy';
|
e.dataTransfer.effectAllowed = 'copy';
|
||||||
|
clmod(ebi('drops'), 'vis', 1);
|
||||||
|
var v = this.getAttribute('v');
|
||||||
|
if (v)
|
||||||
|
clmod(ebi(v), 'hl', 1);
|
||||||
}
|
}
|
||||||
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
function offdrag(e) {
|
||||||
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
ev(e);
|
||||||
|
|
||||||
|
var v = this.getAttribute('v');
|
||||||
|
if (v)
|
||||||
|
clmod(ebi(v), 'hl');
|
||||||
|
|
||||||
|
if (--nenters <= 0) {
|
||||||
|
clmod(ebi('drops'), 'vis');
|
||||||
|
clmod(ebi('up_dz'), 'hl');
|
||||||
|
clmod(ebi('srch_dz'), 'hl');
|
||||||
|
}
|
||||||
|
|
||||||
|
//console.log(nenters, Date.now(), 'leave', this, e && e.target);
|
||||||
|
}
|
||||||
|
document.body.ondragenter = ondrag;
|
||||||
|
document.body.ondragleave = offdrag;
|
||||||
|
|
||||||
|
var drops = [ebi('up_dz'), ebi('srch_dz')];
|
||||||
|
for (var a = 0; a < 2; a++) {
|
||||||
|
drops[a].ondragenter = ondrag;
|
||||||
|
drops[a].ondragover = onover;
|
||||||
|
drops[a].ondragleave = offdrag;
|
||||||
|
drops[a].ondrop = gotfile;
|
||||||
|
}
|
||||||
|
ebi('drops').onclick = offdrag; // old ff
|
||||||
|
|
||||||
function gotfile(e) {
|
function gotfile(e) {
|
||||||
e.stopPropagation();
|
ev(e);
|
||||||
e.preventDefault();
|
nenters = 0;
|
||||||
|
offdrag.bind(this)();
|
||||||
|
var dz = (this && this.getAttribute('id'));
|
||||||
|
|
||||||
|
var err = this.getAttribute('err');
|
||||||
|
if (err)
|
||||||
|
return modal.alert('sorry, ' + err);
|
||||||
|
|
||||||
|
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
|
||||||
|
tgl_fsearch();
|
||||||
|
|
||||||
|
if (!QS('#op_up2k.act'))
|
||||||
|
goto('up2k');
|
||||||
|
|
||||||
var files,
|
var files,
|
||||||
is_itemlist = false;
|
is_itemlist = false;
|
||||||
@@ -665,11 +830,14 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
more_one_file();
|
more_one_file();
|
||||||
var bad_files = [],
|
var bad_files = [],
|
||||||
|
nil_files = [],
|
||||||
good_files = [],
|
good_files = [],
|
||||||
dirs = [];
|
dirs = [];
|
||||||
|
|
||||||
for (var a = 0; a < files.length; a++) {
|
for (var a = 0; a < files.length; a++) {
|
||||||
var fobj = files[a];
|
var fobj = files[a],
|
||||||
|
dst = good_files;
|
||||||
|
|
||||||
if (is_itemlist) {
|
if (is_itemlist) {
|
||||||
if (fobj.kind !== 'file')
|
if (fobj.kind !== 'file')
|
||||||
continue;
|
continue;
|
||||||
@@ -686,16 +854,15 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
if (fobj.size < 1)
|
if (fobj.size < 1)
|
||||||
throw 1;
|
dst = nil_files;
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
bad_files.push(fobj.name);
|
dst = bad_files;
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
good_files.push([fobj, fobj.name]);
|
dst.push([fobj, fobj.name]);
|
||||||
}
|
}
|
||||||
if (dirs) {
|
if (dirs) {
|
||||||
return read_dirs(null, [], dirs, good_files, bad_files);
|
return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -709,7 +876,7 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var rd_missing_ref = [];
|
var rd_missing_ref = [];
|
||||||
function read_dirs(rd, pf, dirs, good, bad, spins) {
|
function read_dirs(rd, pf, dirs, good, nil, bad, spins) {
|
||||||
spins = spins || 0;
|
spins = spins || 0;
|
||||||
if (++spins == 5)
|
if (++spins == 5)
|
||||||
rd_missing_ref = rd_flatten(pf, dirs);
|
rd_missing_ref = rd_flatten(pf, dirs);
|
||||||
@@ -730,7 +897,7 @@ function up2k_init(subtle) {
|
|||||||
msg.push('<li>' + esc(missing[a]) + '</li>');
|
msg.push('<li>' + esc(missing[a]) + '</li>');
|
||||||
|
|
||||||
return modal.alert(msg.join('') + '</ul>', function () {
|
return modal.alert(msg.join('') + '</ul>', function () {
|
||||||
read_dirs(rd, [], [], good, bad, spins);
|
read_dirs(rd, [], [], good, nil, bad, spins);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
spins = 0;
|
spins = 0;
|
||||||
@@ -738,11 +905,11 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
if (!dirs.length) {
|
if (!dirs.length) {
|
||||||
if (!pf.length)
|
if (!pf.length)
|
||||||
return gotallfiles(good, bad);
|
return gotallfiles(good, nil, bad);
|
||||||
|
|
||||||
console.log("retry pf, " + pf.length);
|
console.log("retry pf, " + pf.length);
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
read_dirs(rd, pf, dirs, good, bad, spins);
|
read_dirs(rd, pf, dirs, good, nil, bad, spins);
|
||||||
}, 50);
|
}, 50);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -764,14 +931,15 @@ function up2k_init(subtle) {
|
|||||||
pf.push(name);
|
pf.push(name);
|
||||||
dn.file(function (fobj) {
|
dn.file(function (fobj) {
|
||||||
apop(pf, name);
|
apop(pf, name);
|
||||||
|
var dst = good;
|
||||||
try {
|
try {
|
||||||
if (fobj.size > 0) {
|
if (fobj.size < 1)
|
||||||
good.push([fobj, name]);
|
dst = nil;
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
catch (ex) { }
|
catch (ex) {
|
||||||
bad.push(name);
|
dst = bad;
|
||||||
|
}
|
||||||
|
dst.push([fobj, name]);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
ngot += 1;
|
ngot += 1;
|
||||||
@@ -780,23 +948,33 @@ function up2k_init(subtle) {
|
|||||||
dirs.shift();
|
dirs.shift();
|
||||||
rd = null;
|
rd = null;
|
||||||
}
|
}
|
||||||
return read_dirs(rd, pf, dirs, good, bad, spins);
|
return read_dirs(rd, pf, dirs, good, nil, bad, spins);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function gotallfiles(good_files, bad_files) {
|
function gotallfiles(good_files, nil_files, bad_files) {
|
||||||
|
var ntot = good_files.concat(nil_files, bad_files).length;
|
||||||
if (bad_files.length) {
|
if (bad_files.length) {
|
||||||
var ntot = bad_files.length + good_files.length,
|
var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot);
|
||||||
msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
|
|
||||||
|
|
||||||
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
|
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
|
||||||
msg += '-- ' + bad_files[a] + '\n';
|
msg += '-- ' + bad_files[a][1] + '\n';
|
||||||
|
|
||||||
if (good_files.length - bad_files.length <= 1 && ANDROID)
|
|
||||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
|
||||||
|
|
||||||
|
msg += '\nMaybe it works better if you select just one file';
|
||||||
return modal.alert(msg, function () {
|
return modal.alert(msg, function () {
|
||||||
gotallfiles(good_files, []);
|
gotallfiles(good_files, nil_files, []);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nil_files.length) {
|
||||||
|
var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot);
|
||||||
|
for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
|
||||||
|
msg += '-- ' + nil_files[a][1] + '\n';
|
||||||
|
|
||||||
|
msg += '\nMaybe it works better if you select just one file';
|
||||||
|
return modal.confirm(msg, function () {
|
||||||
|
gotallfiles(good_files.concat(nil_files), [], []);
|
||||||
|
}, function () {
|
||||||
|
gotallfiles(good_files, [], []);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -806,24 +984,20 @@ function up2k_init(subtle) {
|
|||||||
return a < b ? -1 : a > b ? 1 : 0;
|
return a < b ? -1 : a > b ? 1 : 0;
|
||||||
});
|
});
|
||||||
|
|
||||||
var msg = ['{0} these {1} files?<ul>'.format(fsearch ? 'search' : 'upload', good_files.length)];
|
var msg = ['{0} these {1} files?<ul>'.format(uc.fsearch ? 'search' : 'upload', good_files.length)];
|
||||||
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
|
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
|
||||||
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
|
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
|
||||||
|
|
||||||
if (ask_up && !fsearch)
|
if (uc.ask_up && !uc.fsearch)
|
||||||
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null);
|
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null);
|
||||||
|
|
||||||
up_them(good_files);
|
up_them(good_files);
|
||||||
}
|
}
|
||||||
|
|
||||||
function up_them(good_files) {
|
function up_them(good_files) {
|
||||||
var seen = {},
|
var evpath = get_evpath(),
|
||||||
evpath = get_evpath(),
|
|
||||||
draw_each = good_files.length < 50;
|
draw_each = good_files.length < 50;
|
||||||
|
|
||||||
for (var a = 0; a < st.files.length; a++)
|
|
||||||
seen[st.files[a].name + '\n' + st.files[a].size] = 1;
|
|
||||||
|
|
||||||
for (var a = 0; a < good_files.length; a++) {
|
for (var a = 0; a < good_files.length; a++) {
|
||||||
var fobj = good_files[a][0],
|
var fobj = good_files[a][0],
|
||||||
name = good_files[a][1],
|
name = good_files[a][1],
|
||||||
@@ -842,32 +1016,40 @@ function up2k_init(subtle) {
|
|||||||
"t0": now,
|
"t0": now,
|
||||||
"fobj": fobj,
|
"fobj": fobj,
|
||||||
"name": name,
|
"name": name,
|
||||||
"size": fobj.size,
|
"size": fobj.size || 0,
|
||||||
"lmod": lmod / 1000,
|
"lmod": lmod / 1000,
|
||||||
"purl": fdir,
|
"purl": fdir,
|
||||||
"done": false,
|
"done": false,
|
||||||
|
"bytes_uploaded": 0,
|
||||||
"hash": []
|
"hash": []
|
||||||
},
|
},
|
||||||
key = entry.name + '\n' + entry.size;
|
key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch;
|
||||||
|
|
||||||
if (fsearch)
|
if (uc.fsearch)
|
||||||
entry.srch = 1;
|
entry.srch = 1;
|
||||||
|
|
||||||
if (seen[key])
|
try {
|
||||||
continue;
|
if (st.seen[fdir][key])
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
st.seen[fdir] = {};
|
||||||
|
}
|
||||||
|
|
||||||
seen[key] = 1;
|
st.seen[fdir][key] = 1;
|
||||||
|
|
||||||
pvis.addfile([
|
pvis.addfile([
|
||||||
fsearch ? esc(entry.name) : linksplit(
|
uc.fsearch ? esc(entry.name) : linksplit(
|
||||||
uricom_dec(entry.purl)[0] + entry.name).join(' '),
|
entry.purl + uricom_enc(entry.name)).join(' '),
|
||||||
'📐 hash',
|
'📐 hash',
|
||||||
''
|
''
|
||||||
], fobj.size, draw_each);
|
], fobj.size, draw_each);
|
||||||
|
|
||||||
st.bytes.total += fobj.size;
|
st.bytes.total += fobj.size;
|
||||||
st.files.push(entry);
|
st.files.push(entry);
|
||||||
if (turbo)
|
if (!entry.size)
|
||||||
|
push_t(st.todo.handshake, entry);
|
||||||
|
else if (uc.turbo)
|
||||||
push_t(st.todo.head, entry);
|
push_t(st.todo.head, entry);
|
||||||
else
|
else
|
||||||
push_t(st.todo.hash, entry);
|
push_t(st.todo.hash, entry);
|
||||||
@@ -877,33 +1059,16 @@ function up2k_init(subtle) {
|
|||||||
pvis.changecard(pvis.act);
|
pvis.changecard(pvis.act);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ebi('u2btn').addEventListener('drop', gotfile, false);
|
|
||||||
|
|
||||||
function more_one_file() {
|
function more_one_file() {
|
||||||
fdom_ctr++;
|
fdom_ctr++;
|
||||||
var elm = mknod('div');
|
var elm = mknod('div');
|
||||||
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
|
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
|
||||||
ebi('u2form').appendChild(elm);
|
ebi('u2form').appendChild(elm);
|
||||||
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
ebi('file' + fdom_ctr).onchange = gotfile;
|
||||||
}
|
}
|
||||||
more_one_file();
|
more_one_file();
|
||||||
|
|
||||||
function u2cleanup(e) {
|
|
||||||
ev(e);
|
|
||||||
for (var a = 0; a < st.files.length; a++) {
|
|
||||||
var t = st.files[a];
|
|
||||||
if (t.done && t.name) {
|
|
||||||
var tr = ebi('f' + t.n);
|
|
||||||
if (!tr)
|
|
||||||
continue;
|
|
||||||
|
|
||||||
tr.parentNode.removeChild(tr);
|
|
||||||
t.name = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ebi('u2cleanup').onclick = u2cleanup;
|
|
||||||
|
|
||||||
var etaref = 0, etaskip = 0, op_minh = 0;
|
var etaref = 0, etaskip = 0, op_minh = 0;
|
||||||
function etafun() {
|
function etafun() {
|
||||||
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
|
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
|
||||||
@@ -943,14 +1108,14 @@ function up2k_init(subtle) {
|
|||||||
if (nhash) {
|
if (nhash) {
|
||||||
st.time.hashing += td;
|
st.time.hashing += td;
|
||||||
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||||
if (fsearch)
|
if (uc.fsearch)
|
||||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||||
}
|
}
|
||||||
if (nsend) {
|
if (nsend) {
|
||||||
st.time.uploading += td;
|
st.time.uploading += td;
|
||||||
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
||||||
}
|
}
|
||||||
if ((nhash || nsend) && !fsearch) {
|
if ((nhash || nsend) && !uc.fsearch) {
|
||||||
if (!st.bytes.finished) {
|
if (!st.bytes.finished) {
|
||||||
ebi('u2etat').innerHTML = '(preparing to upload)';
|
ebi('u2etat').innerHTML = '(preparing to upload)';
|
||||||
}
|
}
|
||||||
@@ -969,6 +1134,7 @@ function up2k_init(subtle) {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
donut.eta = eta;
|
||||||
if (etaskip)
|
if (etaskip)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
@@ -1003,12 +1169,7 @@ function up2k_init(subtle) {
|
|||||||
st.busy.handshake.length)
|
st.busy.handshake.length)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (st.busy.handshake.length)
|
if ((uc.multitask ? 1 : 0) <
|
||||||
for (var n = t.n - 1; n >= t.n - parallel_uploads && n >= 0; n--)
|
|
||||||
if (st.files[n].t_uploading)
|
|
||||||
return false;
|
|
||||||
|
|
||||||
if ((multitask ? 1 : 0) <
|
|
||||||
st.todo.upload.length +
|
st.todo.upload.length +
|
||||||
st.busy.upload.length)
|
st.busy.upload.length)
|
||||||
return false;
|
return false;
|
||||||
@@ -1020,7 +1181,7 @@ function up2k_init(subtle) {
|
|||||||
if (!parallel_uploads)
|
if (!parallel_uploads)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (multitask) {
|
if (uc.multitask) {
|
||||||
var ahead = st.bytes.hashed - st.bytes.finished;
|
var ahead = st.bytes.hashed - st.bytes.finished;
|
||||||
return ahead < 1024 * 1024 * 1024 * 4 &&
|
return ahead < 1024 * 1024 * 1024 * 4 &&
|
||||||
st.todo.handshake.length + st.busy.handshake.length < 16;
|
st.todo.handshake.length + st.busy.handshake.length < 16;
|
||||||
@@ -1044,7 +1205,7 @@ function up2k_init(subtle) {
|
|||||||
if (running)
|
if (running)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (crashed)
|
if (crashed || !got_deps())
|
||||||
return defer();
|
return defer();
|
||||||
|
|
||||||
running = true;
|
running = true;
|
||||||
@@ -1060,20 +1221,39 @@ function up2k_init(subtle) {
|
|||||||
st.busy.handshake.length +
|
st.busy.handshake.length +
|
||||||
st.busy.upload.length;
|
st.busy.upload.length;
|
||||||
|
|
||||||
|
if (was_busy && !is_busy) {
|
||||||
|
for (var a = 0; a < st.files.length; a++) {
|
||||||
|
var t = st.files[a];
|
||||||
|
if (t.want_recheck) {
|
||||||
|
t.rechecks++;
|
||||||
|
t.want_recheck = false;
|
||||||
|
push_t(st.todo.handshake, t);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
is_busy = st.todo.handshake.length;
|
||||||
|
try {
|
||||||
|
if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
|
||||||
|
treectl.goto(get_evpath());
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
}
|
||||||
|
|
||||||
if (was_busy != is_busy) {
|
if (was_busy != is_busy) {
|
||||||
was_busy = is_busy;
|
was_busy = is_busy;
|
||||||
|
|
||||||
window[(is_busy ? "add" : "remove") +
|
window[(is_busy ? "add" : "remove") +
|
||||||
"EventListener"]("beforeunload", warn_uploader_busy);
|
"EventListener"]("beforeunload", warn_uploader_busy);
|
||||||
|
|
||||||
|
donut.on(is_busy);
|
||||||
|
|
||||||
if (!is_busy) {
|
if (!is_busy) {
|
||||||
var k = fsearch ? 'searches' : 'uploads',
|
var k = uc.fsearch ? 'searches' : 'uploads',
|
||||||
ks = fsearch ? 'Search' : 'Upload',
|
ks = uc.fsearch ? 'Search' : 'Upload',
|
||||||
tok = fsearch ? 'successful (found on server)' : 'completed successfully',
|
tok = uc.fsearch ? 'successful (found on server)' : 'completed successfully',
|
||||||
tng = fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
|
tng = uc.fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
|
||||||
ok = pvis.ctr["ok"],
|
ok = pvis.ctr["ok"],
|
||||||
ng = pvis.ctr["ng"],
|
ng = pvis.ctr["ng"],
|
||||||
t = ask_up ? 0 : 10;
|
t = uc.ask_up ? 0 : 10;
|
||||||
|
|
||||||
if (ok && ng)
|
if (ok && ng)
|
||||||
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng));
|
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng));
|
||||||
@@ -1087,13 +1267,17 @@ function up2k_init(subtle) {
|
|||||||
toast.err(t, '{0} {1}'.format(ks, tng));
|
toast.err(t, '{0} {1}'.format(ks, tng));
|
||||||
|
|
||||||
timer.rm(etafun);
|
timer.rm(etafun);
|
||||||
|
timer.rm(donut.do);
|
||||||
op_minh = 0;
|
op_minh = 0;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
timer.add(donut.do);
|
||||||
timer.add(etafun, false);
|
timer.add(etafun, false);
|
||||||
ebi('u2etas').style.textAlign = 'left';
|
ebi('u2etas').style.textAlign = 'left';
|
||||||
}
|
}
|
||||||
etafun();
|
etafun();
|
||||||
|
if (pvis.act == 'bz')
|
||||||
|
pvis.changecard('bz');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (flag) {
|
if (flag) {
|
||||||
@@ -1235,7 +1419,6 @@ function up2k_init(subtle) {
|
|||||||
function exec_hash() {
|
function exec_hash() {
|
||||||
var t = st.todo.hash.shift();
|
var t = st.todo.hash.shift();
|
||||||
st.busy.hash.push(t);
|
st.busy.hash.push(t);
|
||||||
t.bytes_uploaded = 0;
|
|
||||||
|
|
||||||
var bpend = 0,
|
var bpend = 0,
|
||||||
nchunk = 0,
|
nchunk = 0,
|
||||||
@@ -1292,7 +1475,7 @@ function up2k_init(subtle) {
|
|||||||
pvis.move(t.n, 'ng');
|
pvis.move(t.n, 'ng');
|
||||||
apop(st.busy.hash, t);
|
apop(st.busy.hash, t);
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
return tasker();
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
|
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
|
||||||
@@ -1368,7 +1551,6 @@ function up2k_init(subtle) {
|
|||||||
console.log('head onerror, retrying', t);
|
console.log('head onerror, retrying', t);
|
||||||
apop(st.busy.head, t);
|
apop(st.busy.head, t);
|
||||||
st.todo.head.unshift(t);
|
st.todo.head.unshift(t);
|
||||||
tasker();
|
|
||||||
};
|
};
|
||||||
function orz(e) {
|
function orz(e) {
|
||||||
var ok = false;
|
var ok = false;
|
||||||
@@ -1377,7 +1559,7 @@ function up2k_init(subtle) {
|
|||||||
srv_ts = xhr.getResponseHeader('Last-Modified');
|
srv_ts = xhr.getResponseHeader('Last-Modified');
|
||||||
|
|
||||||
ok = t.size == srv_sz;
|
ok = t.size == srv_sz;
|
||||||
if (ok && datechk) {
|
if (ok && uc.datechk) {
|
||||||
srv_ts = new Date(srv_ts) / 1000;
|
srv_ts = new Date(srv_ts) / 1000;
|
||||||
ok = Math.abs(srv_ts - t.lmod) < 2;
|
ok = Math.abs(srv_ts - t.lmod) < 2;
|
||||||
}
|
}
|
||||||
@@ -1390,6 +1572,7 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
t.done = true;
|
t.done = true;
|
||||||
|
t.fobj = null;
|
||||||
st.bytes.hashed += t.size;
|
st.bytes.hashed += t.size;
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
pvis.move(t.n, 'bz');
|
pvis.move(t.n, 'bz');
|
||||||
@@ -1433,7 +1616,6 @@ function up2k_init(subtle) {
|
|||||||
apop(st.busy.handshake, t);
|
apop(st.busy.handshake, t);
|
||||||
st.todo.handshake.unshift(t);
|
st.todo.handshake.unshift(t);
|
||||||
t.keepalive = keepalive;
|
t.keepalive = keepalive;
|
||||||
tasker();
|
|
||||||
};
|
};
|
||||||
function orz(e) {
|
function orz(e) {
|
||||||
if (t.t_busied != me) {
|
if (t.t_busied != me) {
|
||||||
@@ -1459,15 +1641,18 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
smsg = 'found';
|
smsg = 'found';
|
||||||
var hit = response.hits[0],
|
var msg = [];
|
||||||
msg = linksplit(hit.rp).join(''),
|
for (var a = 0, aa = Math.min(20, response.hits.length); a < aa; a++) {
|
||||||
tr = unix2iso(hit.ts),
|
var hit = response.hits[a],
|
||||||
tu = unix2iso(t.lmod),
|
tr = unix2iso(hit.ts),
|
||||||
diff = parseInt(t.lmod) - parseInt(hit.ts),
|
tu = unix2iso(t.lmod),
|
||||||
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
diff = parseInt(t.lmod) - parseInt(hit.ts),
|
||||||
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
||||||
|
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
||||||
|
|
||||||
msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>';
|
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||||
|
}
|
||||||
|
msg = msg.join('<br />\n');
|
||||||
}
|
}
|
||||||
pvis.seth(t.n, 2, msg);
|
pvis.seth(t.n, 2, msg);
|
||||||
pvis.seth(t.n, 1, smsg);
|
pvis.seth(t.n, 1, smsg);
|
||||||
@@ -1475,6 +1660,7 @@ function up2k_init(subtle) {
|
|||||||
apop(st.busy.handshake, t);
|
apop(st.busy.handshake, t);
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
t.done = true;
|
t.done = true;
|
||||||
|
t.fobj = null;
|
||||||
tasker();
|
tasker();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -1485,7 +1671,7 @@ function up2k_init(subtle) {
|
|||||||
console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
|
console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
|
||||||
t.purl = rsp_purl;
|
t.purl = rsp_purl;
|
||||||
t.name = response.name;
|
t.name = response.name;
|
||||||
pvis.seth(t.n, 0, linksplit(uricom_dec(t.purl)[0] + t.name).join(' '));
|
pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
|
||||||
}
|
}
|
||||||
|
|
||||||
var chunksize = get_chunksize(t.size),
|
var chunksize = get_chunksize(t.size),
|
||||||
@@ -1541,6 +1727,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
if (done) {
|
if (done) {
|
||||||
t.done = true;
|
t.done = true;
|
||||||
|
t.fobj = null;
|
||||||
st.bytes.finished += t.size - t.bytes_uploaded;
|
st.bytes.finished += t.size - t.bytes_uploaded;
|
||||||
var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.),
|
var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.),
|
||||||
spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
|
spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
|
||||||
@@ -1575,12 +1762,18 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
if (rsp.indexOf('partial upload exists') !== -1 ||
|
var err_pend = rsp.indexOf('partial upload exists') + 1,
|
||||||
rsp.indexOf('file already exists') !== -1) {
|
err_dupe = rsp.indexOf('file already exists') + 1;
|
||||||
|
|
||||||
|
if (err_pend || err_dupe) {
|
||||||
err = rsp;
|
err = rsp;
|
||||||
ofs = err.indexOf('\n/');
|
ofs = err.indexOf('\n/');
|
||||||
if (ofs !== -1) {
|
if (ofs !== -1) {
|
||||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2)).join(' ');
|
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
||||||
|
}
|
||||||
|
if (!t.rechecks && err_pend) {
|
||||||
|
t.rechecks = 0;
|
||||||
|
t.want_recheck = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (err != "") {
|
if (err != "") {
|
||||||
@@ -1627,7 +1820,8 @@ function up2k_init(subtle) {
|
|||||||
st.busy.upload.push(upt);
|
st.busy.upload.push(upt);
|
||||||
|
|
||||||
var npart = upt.npart,
|
var npart = upt.npart,
|
||||||
t = st.files[upt.nfile];
|
t = st.files[upt.nfile],
|
||||||
|
tries = 0;
|
||||||
|
|
||||||
if (!t.t_uploading)
|
if (!t.t_uploading)
|
||||||
t.t_uploading = Date.now();
|
t.t_uploading = Date.now();
|
||||||
@@ -1678,8 +1872,9 @@ function up2k_init(subtle) {
|
|||||||
if (crashed)
|
if (crashed)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
console.log('chunkpit onerror, retrying', t);
|
toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
|
||||||
do_send();
|
console.log('chunkpit onerror,', ++tries, t);
|
||||||
|
setTimeout(do_send, 10 * 1000);
|
||||||
};
|
};
|
||||||
xhr.open('POST', t.purl, true);
|
xhr.open('POST', t.purl, true);
|
||||||
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
|
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
|
||||||
@@ -1704,16 +1899,28 @@ function up2k_init(subtle) {
|
|||||||
wpx = window.innerWidth,
|
wpx = window.innerWidth,
|
||||||
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
||||||
wem = wpx * 1.0 / fpx,
|
wem = wpx * 1.0 / fpx,
|
||||||
wide = wem > 54,
|
write = has(perms, 'write'),
|
||||||
parent = ebi(wide && has(perms, 'write') ? 'u2btn_cw' : 'u2btn_ct'),
|
wide = write && wem > 54 ? 'w' : '',
|
||||||
|
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
|
||||||
btn = ebi('u2btn');
|
btn = ebi('u2btn');
|
||||||
|
|
||||||
//console.log([wpx, fpx, wem]);
|
//console.log([wpx, fpx, wem]);
|
||||||
if (btn.parentNode !== parent) {
|
if (btn.parentNode !== parent) {
|
||||||
parent.appendChild(btn);
|
parent.appendChild(btn);
|
||||||
ebi('u2conf').setAttribute('class', wide ? 'has_btn' : '');
|
ebi('u2conf').setAttribute('class', wide);
|
||||||
ebi('u2cards').setAttribute('class', wide ? 'w' : '');
|
ebi('u2cards').setAttribute('class', wide);
|
||||||
ebi('u2etaw').setAttribute('class', wide ? 'w' : '');
|
ebi('u2etaw').setAttribute('class', wide);
|
||||||
|
}
|
||||||
|
|
||||||
|
wide = write && wem > 78 ? 'ww' : wide;
|
||||||
|
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
|
||||||
|
var its = [ebi('u2etaw'), ebi('u2cards')];
|
||||||
|
if (its[0].parentNode !== parent) {
|
||||||
|
ebi('u2conf').setAttribute('class', wide);
|
||||||
|
for (var a = 0; a < 2; a++) {
|
||||||
|
parent.appendChild(its[a]);
|
||||||
|
its[a].setAttribute('class', wide);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
window.addEventListener('resize', onresize);
|
window.addEventListener('resize', onresize);
|
||||||
@@ -1726,7 +1933,7 @@ function up2k_init(subtle) {
|
|||||||
setTimeout(onresize, 500);
|
setTimeout(onresize, 500);
|
||||||
}
|
}
|
||||||
|
|
||||||
var o = QSA('#u2conf *[tt]');
|
var o = QSA('#u2conf .c *[tt]');
|
||||||
for (var a = o.length - 1; a >= 0; a--) {
|
for (var a = o.length - 1; a >= 0; a--) {
|
||||||
o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt'));
|
o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt'));
|
||||||
}
|
}
|
||||||
@@ -1774,42 +1981,21 @@ function up2k_init(subtle) {
|
|||||||
bumpthread({ "target": 1 })
|
bumpthread({ "target": 1 })
|
||||||
}
|
}
|
||||||
|
|
||||||
function tgl_multitask() {
|
|
||||||
multitask = !multitask;
|
|
||||||
bcfg_set('multitask', multitask);
|
|
||||||
}
|
|
||||||
|
|
||||||
function tgl_ask_up() {
|
|
||||||
ask_up = !ask_up;
|
|
||||||
bcfg_set('ask_up', ask_up);
|
|
||||||
}
|
|
||||||
|
|
||||||
function tgl_fsearch() {
|
function tgl_fsearch() {
|
||||||
set_fsearch(!fsearch);
|
set_fsearch(!uc.fsearch);
|
||||||
}
|
|
||||||
|
|
||||||
function tgl_turbo() {
|
|
||||||
turbo = !turbo;
|
|
||||||
bcfg_set('u2turbo', turbo);
|
|
||||||
draw_turbo();
|
|
||||||
}
|
|
||||||
|
|
||||||
function tgl_datechk() {
|
|
||||||
datechk = !datechk;
|
|
||||||
bcfg_set('u2tdate', datechk);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function draw_turbo() {
|
function draw_turbo() {
|
||||||
var msgu = '<p class="warn">WARNING: turbo enabled, <span> client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
|
var msgu = '<p class="warn">WARNING: turbo enabled, <span> client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
|
||||||
msgs = '<p class="warn">WARNING: turbo enabled, <span> search results can be incorrect; see turbo-button tooltip</span></p>',
|
msgs = '<p class="warn">WARNING: turbo enabled, <span> search results can be incorrect; see turbo-button tooltip</span></p>',
|
||||||
msg = fsearch ? msgs : msgu,
|
msg = uc.fsearch ? msgs : msgu,
|
||||||
omsg = fsearch ? msgu : msgs,
|
omsg = uc.fsearch ? msgu : msgs,
|
||||||
html = ebi('u2foot').innerHTML,
|
html = ebi('u2foot').innerHTML,
|
||||||
ohtml = html;
|
ohtml = html;
|
||||||
|
|
||||||
if (turbo && html.indexOf(msg) === -1)
|
if (uc.turbo && html.indexOf(msg) === -1)
|
||||||
html = html.replace(omsg, '') + msg;
|
html = html.replace(omsg, '') + msg;
|
||||||
else if (!turbo)
|
else if (!uc.turbo)
|
||||||
html = html.replace(msgu, '').replace(msgs, '');
|
html = html.replace(msgu, '').replace(msgs, '');
|
||||||
|
|
||||||
if (html !== ohtml)
|
if (html !== ohtml)
|
||||||
@@ -1835,8 +2021,8 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (new_state !== undefined) {
|
if (new_state !== undefined) {
|
||||||
fsearch = new_state;
|
uc.fsearch = new_state;
|
||||||
bcfg_set('fsearch', fsearch);
|
bcfg_set('fsearch', uc.fsearch);
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -1845,10 +2031,10 @@ function up2k_init(subtle) {
|
|||||||
catch (ex) { }
|
catch (ex) { }
|
||||||
|
|
||||||
try {
|
try {
|
||||||
var ico = fsearch ? '🔎' : '🚀',
|
var ico = uc.fsearch ? '🔎' : '🚀',
|
||||||
desc = fsearch ? 'Search' : 'Upload';
|
desc = uc.fsearch ? 'Search' : 'Upload';
|
||||||
|
|
||||||
clmod(ebi('op_up2k'), 'srch', fsearch);
|
clmod(ebi('op_up2k'), 'srch', uc.fsearch);
|
||||||
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
|
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
|
||||||
}
|
}
|
||||||
catch (ex) { }
|
catch (ex) { }
|
||||||
@@ -1857,23 +2043,17 @@ function up2k_init(subtle) {
|
|||||||
onresize();
|
onresize();
|
||||||
}
|
}
|
||||||
|
|
||||||
function tgl_flag_en() {
|
|
||||||
flag_en = !flag_en;
|
|
||||||
bcfg_set('flag_en', flag_en);
|
|
||||||
apply_flag_cfg();
|
|
||||||
}
|
|
||||||
|
|
||||||
function apply_flag_cfg() {
|
function apply_flag_cfg() {
|
||||||
if (flag_en && !flag) {
|
if (uc.flag_en && !flag) {
|
||||||
try {
|
try {
|
||||||
flag = up2k_flagbus();
|
flag = up2k_flagbus();
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
toast.err(5, "not supported on your browser:\n" + ex);
|
toast.err(5, "not supported on your browser:\n" + esc(basenames(ex)));
|
||||||
tgl_flag_en();
|
bcfg_set('flag_en', false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (!flag_en && flag) {
|
else if (!uc.flag_en && flag) {
|
||||||
if (flag.ours)
|
if (flag.ours)
|
||||||
flag.give();
|
flag.give();
|
||||||
|
|
||||||
@@ -1898,14 +2078,6 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
ebi('nthread').onkeydown = bumpthread2;
|
ebi('nthread').onkeydown = bumpthread2;
|
||||||
ebi('nthread').oninput = bumpthread;
|
ebi('nthread').oninput = bumpthread;
|
||||||
ebi('multitask').onclick = tgl_multitask;
|
|
||||||
ebi('ask_up').onclick = tgl_ask_up;
|
|
||||||
ebi('flag_en').onclick = tgl_flag_en;
|
|
||||||
ebi('u2turbo').onclick = tgl_turbo;
|
|
||||||
ebi('u2tdate').onclick = tgl_datechk;
|
|
||||||
var o = ebi('fsearch');
|
|
||||||
if (o)
|
|
||||||
o.addEventListener('click', tgl_fsearch, false);
|
|
||||||
|
|
||||||
ebi('u2etas').onclick = function (e) {
|
ebi('u2etas').onclick = function (e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
@@ -1929,6 +2101,15 @@ function warn_uploader_busy(e) {
|
|||||||
|
|
||||||
|
|
||||||
tt.init();
|
tt.init();
|
||||||
|
favico.init();
|
||||||
|
ebi('ico1').onclick = function () {
|
||||||
|
var a = favico.txt == this.textContent;
|
||||||
|
swrite('icot', a ? 'c' : this.textContent);
|
||||||
|
swrite('icof', a ? null : '000');
|
||||||
|
swrite('icob', a ? null : '');
|
||||||
|
favico.init();
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
if (QS('#op_up2k.act'))
|
if (QS('#op_up2k.act'))
|
||||||
goto_up2k();
|
goto_up2k();
|
||||||
|
|||||||
@@ -18,6 +18,15 @@ var ebi = document.getElementById.bind(document),
|
|||||||
mknod = document.createElement.bind(document);
|
mknod = document.createElement.bind(document);
|
||||||
|
|
||||||
|
|
||||||
|
function qsr(sel) {
|
||||||
|
var el = QS(sel);
|
||||||
|
if (el)
|
||||||
|
el.parentNode.removeChild(el);
|
||||||
|
|
||||||
|
return el;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// error handler for mobile devices
|
// error handler for mobile devices
|
||||||
function esc(txt) {
|
function esc(txt) {
|
||||||
return txt.replace(/[&"<>]/g, function (c) {
|
return txt.replace(/[&"<>]/g, function (c) {
|
||||||
@@ -29,9 +38,24 @@ function esc(txt) {
|
|||||||
}[c];
|
}[c];
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
window.onunhandledrejection = function (e) {
|
function basenames(txt) {
|
||||||
console.log("REJ: " + e.reason);
|
return (txt + '').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js');
|
||||||
};
|
}
|
||||||
|
if ((document.location + '').indexOf(',rej,') + 1)
|
||||||
|
window.onunhandledrejection = function (e) {
|
||||||
|
var err = e.reason;
|
||||||
|
try {
|
||||||
|
err += '\n' + e.reason.stack;
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
err = basenames(err);
|
||||||
|
console.log("REJ: " + err);
|
||||||
|
try {
|
||||||
|
toast.warn(30, err);
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
console.hist = [];
|
console.hist = [];
|
||||||
var hook = function (t) {
|
var hook = function (t) {
|
||||||
@@ -56,7 +80,7 @@ try {
|
|||||||
catch (ex) {
|
catch (ex) {
|
||||||
if (console.stdlog)
|
if (console.stdlog)
|
||||||
console.log = console.stdlog;
|
console.log = console.stdlog;
|
||||||
console.log(ex);
|
console.log('console capture failed', ex);
|
||||||
}
|
}
|
||||||
var crashed = false, ignexd = {};
|
var crashed = false, ignexd = {};
|
||||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||||
@@ -72,7 +96,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
var html = [
|
var html = [
|
||||||
'<h1>you hit a bug!</h1>',
|
'<h1>you hit a bug!</h1>',
|
||||||
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p>',
|
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p>',
|
||||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code></p>',
|
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
|
||||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>',
|
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>',
|
||||||
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
||||||
];
|
];
|
||||||
@@ -131,7 +155,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
|
|
||||||
var s = mknod('style');
|
var s = mknod('style');
|
||||||
s.innerHTML = (
|
s.innerHTML = (
|
||||||
'#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
|
'#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
|
||||||
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
|
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
|
||||||
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
|
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
|
||||||
'#exbox a{text-decoration:underline;color:#fc0} ' +
|
'#exbox a{text-decoration:underline;color:#fc0} ' +
|
||||||
@@ -142,13 +166,12 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
);
|
);
|
||||||
document.head.appendChild(s);
|
document.head.appendChild(s);
|
||||||
}
|
}
|
||||||
exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js');
|
exbox.innerHTML = basenames(html.join('\n')).replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
|
||||||
exbox.style.display = 'block';
|
exbox.style.display = 'block';
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
document.body.innerHTML = html.join('\n');
|
document.body.innerHTML = html.join('\n');
|
||||||
}
|
}
|
||||||
throw 'fatal_err';
|
|
||||||
}
|
}
|
||||||
function ignex(all) {
|
function ignex(all) {
|
||||||
var o = ebi('exbox');
|
var o = ebi('exbox');
|
||||||
@@ -160,6 +183,9 @@ function ignex(all) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function noop() { }
|
||||||
|
|
||||||
|
|
||||||
function ctrl(e) {
|
function ctrl(e) {
|
||||||
return e && (e.ctrlKey || e.metaKey);
|
return e && (e.ctrlKey || e.metaKey);
|
||||||
}
|
}
|
||||||
@@ -185,36 +211,40 @@ function ev(e) {
|
|||||||
|
|
||||||
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||||
if (!String.prototype.endsWith) {
|
if (!String.prototype.endsWith)
|
||||||
String.prototype.endsWith = function (search, this_len) {
|
String.prototype.endsWith = function (search, this_len) {
|
||||||
if (this_len === undefined || this_len > this.length) {
|
if (this_len === undefined || this_len > this.length) {
|
||||||
this_len = this.length;
|
this_len = this.length;
|
||||||
}
|
}
|
||||||
return this.substring(this_len - search.length, this_len) === search;
|
return this.substring(this_len - search.length, this_len) === search;
|
||||||
};
|
};
|
||||||
}
|
|
||||||
if (!String.startsWith) {
|
if (!String.startsWith)
|
||||||
String.prototype.startsWith = function (s, i) {
|
String.prototype.startsWith = function (s, i) {
|
||||||
i = i > 0 ? i | 0 : 0;
|
i = i > 0 ? i | 0 : 0;
|
||||||
return this.substring(i, i + s.length) === s;
|
return this.substring(i, i + s.length) === s;
|
||||||
};
|
};
|
||||||
}
|
|
||||||
if (!Element.prototype.matches) {
|
if (!String.trimEnd)
|
||||||
|
String.prototype.trimEnd = String.prototype.trimRight = function () {
|
||||||
|
return this.replace(/[ \t\r\n]+$/m, '');
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!Element.prototype.matches)
|
||||||
Element.prototype.matches =
|
Element.prototype.matches =
|
||||||
Element.prototype.oMatchesSelector ||
|
Element.prototype.oMatchesSelector ||
|
||||||
Element.prototype.msMatchesSelector ||
|
Element.prototype.msMatchesSelector ||
|
||||||
Element.prototype.mozMatchesSelector ||
|
Element.prototype.mozMatchesSelector ||
|
||||||
Element.prototype.webkitMatchesSelector;
|
Element.prototype.webkitMatchesSelector;
|
||||||
}
|
|
||||||
if (!Element.prototype.closest) {
|
if (!Element.prototype.closest)
|
||||||
Element.prototype.closest = function (s) {
|
Element.prototype.closest = function (s) {
|
||||||
var el = this;
|
var el = this;
|
||||||
do {
|
do {
|
||||||
if (el.matches(s)) return el;
|
if (el.matches(s)) return el;
|
||||||
el = el.parentElement || el.parentNode;
|
el = el.parentElement || el.parentNode;
|
||||||
} while (el !== null && el.nodeType === 1);
|
} while (el !== null && el.nodeType === 1);
|
||||||
}
|
};
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// https://stackoverflow.com/a/950146
|
// https://stackoverflow.com/a/950146
|
||||||
@@ -225,7 +255,9 @@ function import_js(url, cb) {
|
|||||||
script.src = url;
|
script.src = url;
|
||||||
script.onload = cb;
|
script.onload = cb;
|
||||||
script.onerror = function () {
|
script.onerror = function () {
|
||||||
toast.err(0, 'Failed to load module:\n' + url);
|
var m = 'Failed to load module:\n' + url;
|
||||||
|
console.log(m);
|
||||||
|
toast.err(0, m);
|
||||||
};
|
};
|
||||||
head.appendChild(script);
|
head.appendChild(script);
|
||||||
}
|
}
|
||||||
@@ -361,8 +393,16 @@ function makeSortable(table, cb) {
|
|||||||
|
|
||||||
|
|
||||||
function linksplit(rp) {
|
function linksplit(rp) {
|
||||||
var ret = [];
|
var ret = [],
|
||||||
var apath = '/';
|
apath = '/',
|
||||||
|
q = null;
|
||||||
|
|
||||||
|
if (rp && rp.indexOf('?') + 1) {
|
||||||
|
q = rp.split('?', 2);
|
||||||
|
rp = q[0];
|
||||||
|
q = '?' + q[1];
|
||||||
|
}
|
||||||
|
|
||||||
if (rp && rp.charAt(0) == '/')
|
if (rp && rp.charAt(0) == '/')
|
||||||
rp = rp.slice(1);
|
rp = rp.slice(1);
|
||||||
|
|
||||||
@@ -376,16 +416,17 @@ function linksplit(rp) {
|
|||||||
link = rp.slice(0, ofs + 1);
|
link = rp.slice(0, ofs + 1);
|
||||||
rp = rp.slice(ofs + 1);
|
rp = rp.slice(ofs + 1);
|
||||||
}
|
}
|
||||||
var vlink = esc(link),
|
var vlink = esc(uricom_dec(link)[0]);
|
||||||
elink = uricom_enc(link);
|
|
||||||
|
|
||||||
if (link.indexOf('/') !== -1) {
|
if (link.indexOf('/') !== -1) {
|
||||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||||
elink = elink.slice(0, -3) + '/';
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
|
if (!rp && q)
|
||||||
apath += elink;
|
link += q;
|
||||||
|
|
||||||
|
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||||
|
apath += link;
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
@@ -467,6 +508,11 @@ function get_vpath() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function noq_href(el) {
|
||||||
|
return el.getAttribute('href').split('?')[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function get_pwd() {
|
function get_pwd() {
|
||||||
var pwd = ('; ' + document.cookie).split('; cppwd=');
|
var pwd = ('; ' + document.cookie).split('; cppwd=');
|
||||||
if (pwd.length < 2)
|
if (pwd.length < 2)
|
||||||
@@ -545,14 +591,22 @@ function jcp(obj) {
|
|||||||
|
|
||||||
|
|
||||||
function sread(key) {
|
function sread(key) {
|
||||||
return localStorage.getItem(key);
|
try {
|
||||||
|
return localStorage.getItem(key);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function swrite(key, val) {
|
function swrite(key, val) {
|
||||||
if (val === undefined || val === null)
|
try {
|
||||||
localStorage.removeItem(key);
|
if (val === undefined || val === null)
|
||||||
else
|
localStorage.removeItem(key);
|
||||||
localStorage.setItem(key, val);
|
else
|
||||||
|
localStorage.setItem(key, val);
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
}
|
}
|
||||||
|
|
||||||
function jread(key, fb) {
|
function jread(key, fb) {
|
||||||
@@ -575,9 +629,9 @@ function icfg_get(name, defval) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function fcfg_get(name, defval) {
|
function fcfg_get(name, defval) {
|
||||||
var o = ebi(name);
|
var o = ebi(name),
|
||||||
|
val = parseFloat(sread(name));
|
||||||
|
|
||||||
var val = parseFloat(sread(name));
|
|
||||||
if (isNaN(val))
|
if (isNaN(val))
|
||||||
return parseFloat(o ? o.value : defval);
|
return parseFloat(o ? o.value : defval);
|
||||||
|
|
||||||
@@ -587,6 +641,19 @@ function fcfg_get(name, defval) {
|
|||||||
return val;
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function scfg_get(name, defval) {
|
||||||
|
var o = ebi(name),
|
||||||
|
val = sread(name);
|
||||||
|
|
||||||
|
if (val === null)
|
||||||
|
val = defval;
|
||||||
|
|
||||||
|
if (o)
|
||||||
|
o.value = val;
|
||||||
|
|
||||||
|
return val;
|
||||||
|
}
|
||||||
|
|
||||||
function bcfg_get(name, defval) {
|
function bcfg_get(name, defval) {
|
||||||
var o = ebi(name);
|
var o = ebi(name);
|
||||||
if (!o)
|
if (!o)
|
||||||
@@ -620,15 +687,59 @@ function bcfg_upd_ui(name, val) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||||
|
var v = bcfg_get(cname, defval),
|
||||||
|
el = ebi(cname);
|
||||||
|
|
||||||
|
obj[oname] = v;
|
||||||
|
if (el)
|
||||||
|
el.onclick = function (e) {
|
||||||
|
if (un_ev !== false)
|
||||||
|
ev(e);
|
||||||
|
|
||||||
|
obj[oname] = bcfg_set(cname, !obj[oname]);
|
||||||
|
if (cb)
|
||||||
|
cb(obj[oname]);
|
||||||
|
};
|
||||||
|
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
function scfg_bind(obj, oname, cname, defval, cb) {
|
||||||
|
var v = scfg_get(cname, defval),
|
||||||
|
el = ebi(cname);
|
||||||
|
|
||||||
|
obj[oname] = v;
|
||||||
|
if (el)
|
||||||
|
el.oninput = function (e) {
|
||||||
|
swrite(cname, obj[oname] = this.value);
|
||||||
|
if (cb)
|
||||||
|
cb(obj[oname]);
|
||||||
|
};
|
||||||
|
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function hist_push(url) {
|
function hist_push(url) {
|
||||||
console.log("h-push " + url);
|
console.log("h-push " + url);
|
||||||
history.pushState(url, url, url);
|
if (window.history && history.pushState)
|
||||||
|
history.pushState(url, url, url);
|
||||||
}
|
}
|
||||||
|
|
||||||
function hist_replace(url) {
|
function hist_replace(url) {
|
||||||
console.log("h-repl " + url);
|
console.log("h-repl " + url);
|
||||||
history.replaceState(url, url, url);
|
if (window.history && history.replaceState)
|
||||||
|
history.replaceState(url, url, url);
|
||||||
|
}
|
||||||
|
|
||||||
|
function sethash(hv) {
|
||||||
|
if (window.history && history.replaceState) {
|
||||||
|
hist_replace(document.location.pathname + document.location.search + '#' + hv);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
document.location.hash = hv;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -679,6 +790,14 @@ var tt = (function () {
|
|||||||
r.tt.setAttribute('id', 'tt');
|
r.tt.setAttribute('id', 'tt');
|
||||||
document.body.appendChild(r.tt);
|
document.body.appendChild(r.tt);
|
||||||
|
|
||||||
|
var prev = null;
|
||||||
|
r.cshow = function () {
|
||||||
|
if (this !== prev)
|
||||||
|
r.show.bind(this)();
|
||||||
|
|
||||||
|
prev = this;
|
||||||
|
};
|
||||||
|
|
||||||
r.show = function () {
|
r.show = function () {
|
||||||
if (r.skip) {
|
if (r.skip) {
|
||||||
r.skip = false;
|
r.skip = false;
|
||||||
@@ -732,6 +851,7 @@ var tt = (function () {
|
|||||||
ev(e);
|
ev(e);
|
||||||
window.removeEventListener('scroll', r.hide);
|
window.removeEventListener('scroll', r.hide);
|
||||||
clmod(r.tt, 'show');
|
clmod(r.tt, 'show');
|
||||||
|
clmod(r.tt, 'b');
|
||||||
if (r.el)
|
if (r.el)
|
||||||
r.el.removeEventListener('mouseleave', r.hide);
|
r.el.removeEventListener('mouseleave', r.hide);
|
||||||
};
|
};
|
||||||
@@ -761,12 +881,13 @@ var tt = (function () {
|
|||||||
r.tt.onclick = r.hide;
|
r.tt.onclick = r.hide;
|
||||||
|
|
||||||
r.att = function (ctr) {
|
r.att = function (ctr) {
|
||||||
var _show = r.en ? r.show : null,
|
var _cshow = r.en ? r.cshow : null,
|
||||||
|
_show = r.en ? r.show : null,
|
||||||
_hide = r.en ? r.hide : null,
|
_hide = r.en ? r.hide : null,
|
||||||
o = ctr.querySelectorAll('*[tt]');
|
o = ctr.querySelectorAll('*[tt]');
|
||||||
|
|
||||||
for (var a = o.length - 1; a >= 0; a--) {
|
for (var a = o.length - 1; a >= 0; a--) {
|
||||||
o[a].onfocus = _show;
|
o[a].onfocus = _cshow;
|
||||||
o[a].onblur = _hide;
|
o[a].onblur = _hide;
|
||||||
o[a].onmouseenter = _show;
|
o[a].onmouseenter = _show;
|
||||||
o[a].onmouseleave = _hide;
|
o[a].onmouseleave = _hide;
|
||||||
@@ -775,16 +896,7 @@ var tt = (function () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
r.init = function () {
|
r.init = function () {
|
||||||
var ttb = ebi('tooltips');
|
bcfg_bind(r, 'en', 'tooltips', r.en, r.init);
|
||||||
if (ttb) {
|
|
||||||
ttb.onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
r.en = !r.en;
|
|
||||||
bcfg_set('tooltips', r.en);
|
|
||||||
r.init();
|
|
||||||
};
|
|
||||||
r.en = bcfg_get('tooltips', true)
|
|
||||||
}
|
|
||||||
r.att(document);
|
r.att(document);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -847,6 +959,9 @@ var toast = (function () {
|
|||||||
if (sec)
|
if (sec)
|
||||||
te = setTimeout(r.hide, sec * 1000);
|
te = setTimeout(r.hide, sec * 1000);
|
||||||
|
|
||||||
|
if (txt.indexOf('<body>') + 1)
|
||||||
|
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||||
|
|
||||||
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
||||||
obj.className = cl;
|
obj.className = cl;
|
||||||
sec += obj.offsetWidth;
|
sec += obj.offsetWidth;
|
||||||
@@ -950,15 +1065,22 @@ var modal = (function () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function onkey(e) {
|
function onkey(e) {
|
||||||
if (e.code == 'Enter') {
|
var k = e.code,
|
||||||
var a = ebi('modal-ng');
|
eok = ebi('modal-ok'),
|
||||||
if (a && document.activeElement == a)
|
eng = ebi('modal-ng'),
|
||||||
|
ae = document.activeElement;
|
||||||
|
|
||||||
|
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
||||||
|
k = 'Enter';
|
||||||
|
|
||||||
|
if (k == 'Enter') {
|
||||||
|
if (ae && ae == eng)
|
||||||
return ng();
|
return ng();
|
||||||
|
|
||||||
return ok();
|
return ok();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (e.code == 'Escape')
|
if (k == 'Escape')
|
||||||
return ng();
|
return ng();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -988,7 +1110,7 @@ var modal = (function () {
|
|||||||
}
|
}
|
||||||
function _confirm(html, cok, cng, fun) {
|
function _confirm(html, cok, cng, fun) {
|
||||||
cb_ok = cok;
|
cb_ok = cok;
|
||||||
cb_ng = cng === undefined ? cok : null;
|
cb_ng = cng === undefined ? cok : cng;
|
||||||
cb_up = fun;
|
cb_up = fun;
|
||||||
html += '<div id="modalb">' + ok_cancel + '</div>';
|
html += '<div id="modalb">' + ok_cancel + '</div>';
|
||||||
r.show(html);
|
r.show(html);
|
||||||
@@ -1035,6 +1157,7 @@ function repl_load() {
|
|||||||
if (!ret.length)
|
if (!ret.length)
|
||||||
ret = [
|
ret = [
|
||||||
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
|
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
|
||||||
|
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
|
||||||
'console.hist.slice(-10).join("\\n")'
|
'console.hist.slice(-10).join("\\n")'
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -1104,3 +1227,57 @@ function repl(e) {
|
|||||||
}
|
}
|
||||||
if (ebi('repl'))
|
if (ebi('repl'))
|
||||||
ebi('repl').onclick = repl;
|
ebi('repl').onclick = repl;
|
||||||
|
|
||||||
|
|
||||||
|
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
|
||||||
|
|
||||||
|
|
||||||
|
var favico = (function () {
|
||||||
|
var r = {};
|
||||||
|
r.en = true;
|
||||||
|
r.tag = null;
|
||||||
|
|
||||||
|
function gx(txt) {
|
||||||
|
return (svg_decl +
|
||||||
|
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
|
||||||
|
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
|
||||||
|
'<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' +
|
||||||
|
' font-family="sans-serif" font-weight="bold" font-size="64px"' +
|
||||||
|
' fill="#' + r.fg + '">' + txt + '</text></svg>'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
r.upd = function (txt, svg) {
|
||||||
|
if (!r.txt)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var b64;
|
||||||
|
try {
|
||||||
|
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
|
||||||
|
function x(m, v) { return String.fromCharCode('0x' + v); });
|
||||||
|
|
||||||
|
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!r.tag) {
|
||||||
|
r.tag = mknod('link');
|
||||||
|
r.tag.rel = 'icon';
|
||||||
|
document.head.appendChild(r.tag);
|
||||||
|
}
|
||||||
|
r.tag.href = 'data:image/svg+xml;base64,' + b64;
|
||||||
|
};
|
||||||
|
|
||||||
|
r.init = function () {
|
||||||
|
clearTimeout(r.to);
|
||||||
|
scfg_bind(r, 'txt', 'icot', '', r.upd);
|
||||||
|
scfg_bind(r, 'fg', 'icof', 'fc5', r.upd);
|
||||||
|
scfg_bind(r, 'bg', 'icob', '222', r.upd);
|
||||||
|
r.upd();
|
||||||
|
};
|
||||||
|
|
||||||
|
r.to = setTimeout(r.init, 100);
|
||||||
|
return r;
|
||||||
|
})();
|
||||||
|
|||||||
@@ -1,11 +1,21 @@
|
|||||||
# example `.epilogue.html`
|
**NOTE:** there's more stuff (sharex config, service scripts, nginx configs, ...) in [`/contrib/`](/contrib/)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# example resource files
|
||||||
|
|
||||||
|
can be provided to copyparty to tweak things
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## example `.epilogue.html`
|
||||||
save one of these as `.epilogue.html` inside a folder to customize it:
|
save one of these as `.epilogue.html` inside a folder to customize it:
|
||||||
|
|
||||||
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# example browser-css
|
## example browser-css
|
||||||
point `--css-browser` to one of these by URL:
|
point `--css-browser` to one of these by URL:
|
||||||
|
|
||||||
* [`browser.css`](browser.css) changes the background
|
* [`browser.css`](browser.css) changes the background
|
||||||
@@ -19,4 +29,23 @@ point `--css-browser` to one of these by URL:
|
|||||||
* notes on using rclone as a fuse client/server
|
* notes on using rclone as a fuse client/server
|
||||||
|
|
||||||
## [`example.conf`](example.conf)
|
## [`example.conf`](example.conf)
|
||||||
* example config file for `-c` which never really happened
|
* example config file for `-c`
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# junk
|
||||||
|
|
||||||
|
alphabetical list of the remaining files
|
||||||
|
|
||||||
|
| what | why |
|
||||||
|
| -- | -- |
|
||||||
|
| [biquad.html](biquad.html) | bruteforce calibrator for the audio equalizer since im not that good at maths |
|
||||||
|
| [design.txt](design.txt) | initial brainstorming of the copyparty design, unmaintained, incorrect, sentimental value only |
|
||||||
|
| [hls.html](hls.html) | experimenting with hls playback using `hls.js`, works p well, almost became a thing |
|
||||||
|
| [music-analysis.sh](music-analysis.sh) | testing various bpm/key detection libraries before settling on the ones used in [`/bin/mtag/`](/bin/mtag/) |
|
||||||
|
| [notes.sh](notes.sh) | notepad, just scraps really |
|
||||||
|
| [nuitka.txt](nuitka.txt) | how to build a copyparty exe using nuitka (not maintained) |
|
||||||
|
| [pretend-youre-qnap.patch](pretend-youre-qnap.patch) | simulate a NAS which keeps returning old cached data even though you just modified the file yourself |
|
||||||
|
| [tcp-debug.sh](tcp-debug.sh) | looks like this was to debug stuck tcp connections? |
|
||||||
|
| [unirange.py](unirange.py) | uhh |
|
||||||
|
| [up2k.txt](up2k.txt) | initial ideas for how up2k should work, another unmaintained sentimental-value-only thing |
|
||||||
|
|||||||
@@ -3,6 +3,24 @@
|
|||||||
setTimeout(location.reload.bind(location), 700);
|
setTimeout(location.reload.bind(location), 700);
|
||||||
document.documentElement.scrollLeft = 0;
|
document.documentElement.scrollLeft = 0;
|
||||||
|
|
||||||
|
var cali = (function() {
|
||||||
|
var ac = new AudioContext(),
|
||||||
|
fi = ac.createBiquadFilter(),
|
||||||
|
freqs = new Float32Array(1),
|
||||||
|
mag = new Float32Array(1),
|
||||||
|
phase = new Float32Array(1);
|
||||||
|
|
||||||
|
freqs[0] = 14000;
|
||||||
|
fi.type = 'peaking';
|
||||||
|
fi.frequency.value = 18000;
|
||||||
|
fi.Q.value = 0.8;
|
||||||
|
fi.gain.value = 1;
|
||||||
|
fi.getFrequencyResponse(freqs, mag, phase);
|
||||||
|
|
||||||
|
return mag[0]; // 1.0407 good, 1.0563 bad
|
||||||
|
})(),
|
||||||
|
mp = cali < 1.05;
|
||||||
|
|
||||||
var can = document.createElement('canvas'),
|
var can = document.createElement('canvas'),
|
||||||
cc = can.getContext('2d'),
|
cc = can.getContext('2d'),
|
||||||
w = 2048,
|
w = 2048,
|
||||||
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
|
|||||||
[1000, 0.9, 1.1],
|
[1000, 0.9, 1.1],
|
||||||
[2000, 0.9, 1.105],
|
[2000, 0.9, 1.105],
|
||||||
[4000, 0.88, 1.05],
|
[4000, 0.88, 1.05],
|
||||||
[8000 * 1.006, 0.73, 1.24],
|
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
|
||||||
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
||||||
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
||||||
[16000 * 0.89, 0.7, 1.26], // peak
|
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
|
||||||
[16000 * 1.13, 0.82, 1.09], // peak
|
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
|
||||||
[16000 * 1.205, 0, 1.9] // shelf
|
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
|
||||||
];
|
];
|
||||||
|
|
||||||
var freqs = new Float32Array(22000),
|
var freqs = new Float32Array(22000),
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
html {
|
html {
|
||||||
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||||
}
|
}
|
||||||
#files th {
|
#files th {
|
||||||
background: rgba(32, 32, 32, 0.9) !important;
|
background: rgba(32, 32, 32, 0.9) !important;
|
||||||
}
|
}
|
||||||
#ops,
|
#ops,
|
||||||
#treeul,
|
#tree,
|
||||||
#files td {
|
#files td {
|
||||||
background: rgba(32, 32, 32, 0.3) !important;
|
background: rgba(32, 32, 32, 0.3) !important;
|
||||||
}
|
}
|
||||||
@@ -17,8 +17,9 @@ html.light {
|
|||||||
html.light #files th {
|
html.light #files th {
|
||||||
background: rgba(255, 255, 255, 0.9) !important;
|
background: rgba(255, 255, 255, 0.9) !important;
|
||||||
}
|
}
|
||||||
|
html.light .logue,
|
||||||
html.light #ops,
|
html.light #ops,
|
||||||
html.light #treeul,
|
html.light #tree,
|
||||||
html.light #files td {
|
html.light #files td {
|
||||||
background: rgba(248, 248, 248, 0.8) !important;
|
background: rgba(248, 248, 248, 0.8) !important;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,10 @@
|
|||||||
|
# append some arguments to the commandline;
|
||||||
|
# the first space in a line counts as a separator,
|
||||||
|
# any additional spaces are part of the value
|
||||||
|
-e2dsa
|
||||||
|
-e2ts
|
||||||
|
-i 127.0.0.1
|
||||||
|
|
||||||
# create users:
|
# create users:
|
||||||
# u username:password
|
# u username:password
|
||||||
u ed:123
|
u ed:123
|
||||||
@@ -24,7 +31,8 @@ rw ed
|
|||||||
r k
|
r k
|
||||||
rw ed
|
rw ed
|
||||||
|
|
||||||
# this does the same thing:
|
# this does the same thing,
|
||||||
|
# and will cause an error on startup since /priv is already taken:
|
||||||
./priv
|
./priv
|
||||||
/priv
|
/priv
|
||||||
r ed k
|
r ed k
|
||||||
@@ -47,5 +55,5 @@ c e2d
|
|||||||
c nodupe
|
c nodupe
|
||||||
|
|
||||||
# this entire config file can be replaced with these arguments:
|
# this entire config file can be replaced with these arguments:
|
||||||
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
|
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
|
||||||
# but note that the config file always wins in case of conflicts
|
# but note that the config file always wins in case of conflicts
|
||||||
|
|||||||
@@ -9,7 +9,9 @@
|
|||||||
|
|
||||||
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
|
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
|
||||||
|
|
||||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||||
|
|
||||||
|
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||||
|
|
||||||
#u2cards, #u2etaw /* and the upload progress tabs */
|
#u2cards, #u2etaw /* and the upload progress tabs */
|
||||||
|
|
||||||
@@ -25,7 +27,7 @@
|
|||||||
#u2conf #u2btn, #u2btn {padding:1.5em 0}
|
#u2conf #u2btn, #u2btn {padding:1.5em 0}
|
||||||
|
|
||||||
/* adjust the button area a bit */
|
/* adjust the button area a bit */
|
||||||
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
|
#u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto}
|
||||||
|
|
||||||
/* a */
|
/* a */
|
||||||
#op_up2k {min-height: 0}
|
#op_up2k {min-height: 0}
|
||||||
|
|||||||
@@ -1,26 +0,0 @@
|
|||||||
|
|
||||||
method = self.s.recv(4)
|
|
||||||
self.s.unrecv(method)
|
|
||||||
print("xxx unrecv'd [{}]".format(method))
|
|
||||||
|
|
||||||
# jython used to do this, they stopped since it's broken
|
|
||||||
# but reimplementing sendall is out of scope for now
|
|
||||||
if not getattr(self.s.s, "sendall", None):
|
|
||||||
self.s.s.sendall = self.s.s.send
|
|
||||||
|
|
||||||
# TODO this is also pretty bad
|
|
||||||
have = dir(self.s)
|
|
||||||
for k in self.s.s.__dict__:
|
|
||||||
if k not in have and not k.startswith("__"):
|
|
||||||
if k == "recv":
|
|
||||||
raise Exception("wait what")
|
|
||||||
|
|
||||||
self.s.__dict__[k] = self.s.s.__dict__[k]
|
|
||||||
|
|
||||||
have = dir(self.s)
|
|
||||||
for k in dir(self.s.s):
|
|
||||||
if k not in have and not k.startswith("__"):
|
|
||||||
if k == "recv":
|
|
||||||
raise Exception("wait what")
|
|
||||||
|
|
||||||
setattr(self.s, k, getattr(self.s.s, k))
|
|
||||||
@@ -38,12 +38,19 @@ para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}
|
|||||||
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
|
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
|
||||||
|
|
||||||
|
|
||||||
|
##
|
||||||
|
## time between first and last upload
|
||||||
|
|
||||||
|
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
|
||||||
|
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
|
||||||
|
|
||||||
|
|
||||||
##
|
##
|
||||||
## bad filenames
|
## bad filenames
|
||||||
|
|
||||||
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
|
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh')
|
||||||
mkdir -p "${dirs[@]}"
|
mkdir -p "${dirs[@]}"
|
||||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||||
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
||||||
|
|
||||||
##
|
##
|
||||||
@@ -79,10 +86,8 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
|
|||||||
# get all up2k search result URLs
|
# get all up2k search result URLs
|
||||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||||
|
|
||||||
# rename all selected songs to <leading-track-number> + <Title> + <extension>
|
# debug md-editor line tracking
|
||||||
var sel=msel.getsel(), ci=find_file_col('Title')[0], re=[]; for (var a=0; a<sel.length; a++) { var url=sel[a].vp, tag=ebi(sel[a].id).closest('tr').querySelectorAll('td')[ci].textContent, name=uricom_dec(vsplit(url)[1])[0], m=/^([0-9]+[\. -]+)?.*(\.[^\.]+$)/.exec(name), name2=(m[1]||'')+tag+m[2], url2=vsplit(url)[0]+uricom_enc(name2,false); if (url!=url2) re.push([url, url2]); }
|
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
|
||||||
console.log(JSON.stringify(re, null, ' '));
|
|
||||||
function f() { if (!re.length) return treectl.goto(get_evpath()); var [u1,u2] = re.shift(); fetch(u1+'?move='+u2).then((rsp) => {if (rsp.ok) f(); }); }; f();
|
|
||||||
|
|
||||||
##
|
##
|
||||||
## bash oneliners
|
## bash oneliners
|
||||||
@@ -164,7 +169,7 @@ brew install python@2
|
|||||||
pip install virtualenv
|
pip install virtualenv
|
||||||
|
|
||||||
# readme toc
|
# readme toc
|
||||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
|
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
|
||||||
|
|
||||||
# fix firefox phantom breakpoints,
|
# fix firefox phantom breakpoints,
|
||||||
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
||||||
|
|||||||
@@ -10,14 +10,41 @@ set -e
|
|||||||
# (and those are usually linux so bash is good inaff)
|
# (and those are usually linux so bash is good inaff)
|
||||||
# (but that said this even has macos support)
|
# (but that said this even has macos support)
|
||||||
#
|
#
|
||||||
# bundle will look like:
|
# output summary (filesizes and contents):
|
||||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
#
|
||||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
# 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
# 550760 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
# `- original unmodified sfx from github
|
||||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
#
|
||||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
# 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
|
||||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
# `- unmodified but recompressed from bzip2 to gzip
|
||||||
|
#
|
||||||
|
# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
|
||||||
|
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
|
||||||
|
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
|
||||||
|
# `- removed iOS ogg/opus/vorbis audio decoder,
|
||||||
|
# removed the audio tray mouse cursor,
|
||||||
|
# "enterprise edition"
|
||||||
|
#
|
||||||
|
# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||||
|
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||||
|
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
|
||||||
|
# `- also removed the codemirror markdown editor
|
||||||
|
# and the text-viewer syntax hilighting,
|
||||||
|
# only essential features remaining
|
||||||
|
#
|
||||||
|
# 646297 copyparty-extras/copyparty-1.0.14.tar.gz
|
||||||
|
# 4823 copyparty-extras/copyparty-repack.sh
|
||||||
|
# `- source files from github
|
||||||
|
#
|
||||||
|
# 23663 copyparty-extras/up2k.py
|
||||||
|
# `- standalone utility to upload or search for files
|
||||||
|
#
|
||||||
|
# 32280 copyparty-extras/copyparty-fuse.py
|
||||||
|
# `- standalone to mount a URL as a local read-only filesystem
|
||||||
|
#
|
||||||
|
# 270004 copyparty
|
||||||
|
# `- minimal binary, same as sfx-lite/copyparty-sfx.py
|
||||||
|
|
||||||
|
|
||||||
command -v gnutar && tar() { gnutar "$@"; }
|
command -v gnutar && tar() { gnutar "$@"; }
|
||||||
@@ -54,6 +81,7 @@ cache="$od/.copyparty-repack.cache"
|
|||||||
# fallback to awk (sorry)
|
# fallback to awk (sorry)
|
||||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||||
) |
|
) |
|
||||||
|
grep -E '(sfx\.(sh|py)|tar\.gz)$' |
|
||||||
tee /dev/stderr |
|
tee /dev/stderr |
|
||||||
tr -d '\r' | tr '\n' '\0' |
|
tr -d '\r' | tr '\n' '\0' |
|
||||||
xargs -0 bash -c 'dl_files "$@"' _
|
xargs -0 bash -c 'dl_files "$@"' _
|
||||||
@@ -64,7 +92,7 @@ cache="$od/.copyparty-repack.cache"
|
|||||||
|
|
||||||
# move src into copyparty-extras/,
|
# move src into copyparty-extras/,
|
||||||
# move sfx into copyparty-extras/sfx-full/
|
# move sfx into copyparty-extras/sfx-full/
|
||||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
mkdir -p copyparty-extras/sfx-{full,ent,lite}
|
||||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||||
mv copyparty-*.tar.gz copyparty-extras/
|
mv copyparty-*.tar.gz copyparty-extras/
|
||||||
|
|
||||||
@@ -112,14 +140,17 @@ repack() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
repack sfx-full "re gz no-sh"
|
repack sfx-full "re gz no-sh"
|
||||||
repack sfx-lite "re no-ogv no-cm"
|
repack sfx-ent "re no-dd no-ogv"
|
||||||
repack sfx-lite "re no-ogv no-cm gz no-sh"
|
repack sfx-ent "re no-dd no-ogv gz no-sh"
|
||||||
|
repack sfx-lite "re no-dd no-ogv no-cm no-hl"
|
||||||
|
repack sfx-lite "re no-dd no-ogv no-cm no-hl gz no-sh"
|
||||||
|
|
||||||
|
|
||||||
# move fuse client into copyparty-extras/,
|
# move fuse and up2k clients into copyparty-extras/,
|
||||||
# copy lite-sfx.py to ./copyparty,
|
# copy lite-sfx.py to ./copyparty,
|
||||||
# delete extracted source code
|
# delete extracted source code
|
||||||
( cd copyparty-extras/
|
( cd copyparty-extras/
|
||||||
|
mv copyparty-*/bin/up2k.py .
|
||||||
mv copyparty-*/bin/copyparty-fuse.py .
|
mv copyparty-*/bin/copyparty-fuse.py .
|
||||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
FROM alpine:3.13
|
FROM alpine:3.14
|
||||||
WORKDIR /z
|
WORKDIR /z
|
||||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||||
ver_hashwasm=4.7.0 \
|
ver_hashwasm=4.9.0 \
|
||||||
ver_marked=1.1.0 \
|
ver_marked=3.0.4 \
|
||||||
ver_ogvjs=1.8.4 \
|
ver_ogvjs=1.8.4 \
|
||||||
ver_mde=2.14.0 \
|
ver_mde=2.15.0 \
|
||||||
ver_codemirror=5.59.3 \
|
ver_codemirror=5.62.3 \
|
||||||
ver_fontawesome=5.13.0 \
|
ver_fontawesome=5.13.0 \
|
||||||
ver_zopfli=1.0.3
|
ver_zopfli=1.0.3
|
||||||
|
|
||||||
@@ -45,6 +45,12 @@ RUN mkdir -p /z/dist/no-pk \
|
|||||||
&& tar -xf zopfli.tgz
|
&& tar -xf zopfli.tgz
|
||||||
|
|
||||||
|
|
||||||
|
# todo
|
||||||
|
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
|
||||||
|
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
|
||||||
|
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
|
||||||
|
|
||||||
|
|
||||||
# build fonttools (which needs zopfli)
|
# build fonttools (which needs zopfli)
|
||||||
RUN tar -xf zopfli.tgz \
|
RUN tar -xf zopfli.tgz \
|
||||||
&& cd zopfli* \
|
&& cd zopfli* \
|
||||||
@@ -113,9 +119,10 @@ RUN cd CodeMirror-$ver_codemirror \
|
|||||||
COPY easymde.patch /z/
|
COPY easymde.patch /z/
|
||||||
RUN cd easy-markdown-editor-$ver_mde \
|
RUN cd easy-markdown-editor-$ver_mde \
|
||||||
&& patch -p1 < /z/easymde.patch \
|
&& patch -p1 < /z/easymde.patch \
|
||||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||||
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
||||||
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
||||||
|
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
|
||||||
&& npm install
|
&& npm install
|
||||||
|
|
||||||
COPY easymde-ln.patch /z/
|
COPY easymde-ln.patch /z/
|
||||||
|
|||||||
@@ -1,15 +1,15 @@
|
|||||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||||
adds linetracking to marked.js v1.0.0 +git;
|
adds linetracking to marked.js v3.0.4;
|
||||||
add data-ln="%d" to most tags, %d is the source markdown line
|
add data-ln="%d" to most tags, %d is the source markdown line
|
||||||
--- a/src/Lexer.js
|
--- a/src/Lexer.js
|
||||||
+++ b/src/Lexer.js
|
+++ b/src/Lexer.js
|
||||||
@@ -49,4 +49,5 @@ function mangle(text) {
|
@@ -50,4 +50,5 @@ function mangle(text) {
|
||||||
module.exports = class Lexer {
|
module.exports = class Lexer {
|
||||||
constructor(options) {
|
constructor(options) {
|
||||||
+ this.ln = 1; // like most editors, start couting from 1
|
+ this.ln = 1; // like most editors, start couting from 1
|
||||||
this.tokens = [];
|
this.tokens = [];
|
||||||
this.tokens.links = Object.create(null);
|
this.tokens.links = Object.create(null);
|
||||||
@@ -108,4 +109,15 @@ module.exports = class Lexer {
|
@@ -127,4 +128,15 @@ module.exports = class Lexer {
|
||||||
}
|
}
|
||||||
|
|
||||||
+ set_ln(token, ln = this.ln) {
|
+ set_ln(token, ln = this.ln) {
|
||||||
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
|||||||
+
|
+
|
||||||
/**
|
/**
|
||||||
* Lexing
|
* Lexing
|
||||||
@@ -113,10 +125,15 @@ module.exports = class Lexer {
|
@@ -134,7 +146,11 @@ module.exports = class Lexer {
|
||||||
blockTokens(src, tokens = [], top = true) {
|
src = src.replace(/^ +$/gm, '');
|
||||||
src = src.replace(/^ +$/gm, '');
|
}
|
||||||
- let token, i, l, lastToken;
|
- let token, lastToken, cutSrc, lastParagraphClipped;
|
||||||
+ let token, i, l, lastToken, ln;
|
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
|
||||||
|
|
||||||
while (src) {
|
while (src) {
|
||||||
+ // this.ln will be bumped by recursive calls into this func;
|
+ // this.ln will be bumped by recursive calls into this func;
|
||||||
+ // reset the count and rely on the outermost token's raw only
|
+ // reset the count and rely on the outermost token's raw only
|
||||||
+ ln = this.ln;
|
+ ln = this.ln;
|
||||||
+
|
+
|
||||||
// newline
|
if (this.options.extensions
|
||||||
|
&& this.options.extensions.block
|
||||||
|
@@ -142,4 +158,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||||
|
src = src.substring(token.raw.length);
|
||||||
|
+ this.set_ln(token, ln);
|
||||||
|
tokens.push(token);
|
||||||
|
return true;
|
||||||
|
@@ -153,4 +170,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.space(src)) {
|
if (token = this.tokenizer.space(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token); // is \n if not type
|
+ this.set_ln(token, ln); // is \n if not type
|
||||||
if (token.type) {
|
if (token.type) {
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -128,4 +145,5 @@ module.exports = class Lexer {
|
@@ -162,4 +180,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.code(src, tokens)) {
|
if (token = this.tokenizer.code(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
if (token.type) {
|
lastToken = tokens[tokens.length - 1];
|
||||||
tokens.push(token);
|
// An indented code block cannot interrupt a paragraph.
|
||||||
@@ -141,4 +159,5 @@ module.exports = class Lexer {
|
@@ -177,4 +196,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.fences(src)) {
|
if (token = this.tokenizer.fences(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -148,4 +167,5 @@ module.exports = class Lexer {
|
@@ -184,4 +204,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.heading(src)) {
|
if (token = this.tokenizer.heading(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -155,4 +175,5 @@ module.exports = class Lexer {
|
@@ -191,4 +212,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.nptable(src)) {
|
|
||||||
src = src.substring(token.raw.length);
|
|
||||||
+ this.set_ln(token);
|
|
||||||
tokens.push(token);
|
|
||||||
continue;
|
|
||||||
@@ -162,4 +183,5 @@ module.exports = class Lexer {
|
|
||||||
if (token = this.tokenizer.hr(src)) {
|
if (token = this.tokenizer.hr(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -170,4 +192,7 @@ module.exports = class Lexer {
|
@@ -198,4 +220,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.blockquote(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
token.tokens = this.blockTokens(token.text, [], top);
|
|
||||||
+ // recursive call to blockTokens probably bumped this.ln,
|
|
||||||
+ // token.raw is more reliable so reset this.ln and use that
|
|
||||||
+ this.set_ln(token, ln);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -180,5 +205,9 @@ module.exports = class Lexer {
|
@@ -205,4 +228,5 @@ module.exports = class Lexer {
|
||||||
for (i = 0; i < l; i++) {
|
if (token = this.tokenizer.list(src)) {
|
||||||
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
|
src = src.substring(token.raw.length);
|
||||||
+ // list entries don't bump the linecounter, so let's
|
|
||||||
+ this.ln++;
|
|
||||||
}
|
|
||||||
+ // then reset like blockquote
|
|
||||||
+ this.set_ln(token, ln);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -188,4 +217,5 @@ module.exports = class Lexer {
|
@@ -212,4 +236,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.html(src)) {
|
if (token = this.tokenizer.html(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -195,4 +225,5 @@ module.exports = class Lexer {
|
@@ -219,4 +244,5 @@ module.exports = class Lexer {
|
||||||
if (top && (token = this.tokenizer.def(src))) {
|
if (token = this.tokenizer.def(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
if (!this.tokens.links[token.tag]) {
|
lastToken = tokens[tokens.length - 1];
|
||||||
this.tokens.links[token.tag] = {
|
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
|
||||||
@@ -207,4 +238,5 @@ module.exports = class Lexer {
|
@@ -236,4 +262,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.table(src)) {
|
if (token = this.tokenizer.table(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -214,4 +246,5 @@ module.exports = class Lexer {
|
@@ -243,4 +270,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.lheading(src)) {
|
if (token = this.tokenizer.lheading(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -221,4 +254,5 @@ module.exports = class Lexer {
|
@@ -263,4 +291,5 @@ module.exports = class Lexer {
|
||||||
if (top && (token = this.tokenizer.paragraph(src))) {
|
}
|
||||||
|
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
|
||||||
|
+ this.set_ln(token, ln);
|
||||||
|
lastToken = tokens[tokens.length - 1];
|
||||||
|
if (lastParagraphClipped && lastToken.type === 'paragraph') {
|
||||||
|
@@ -280,4 +309,6 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.text(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
+ this.ln++;
|
||||||
continue;
|
lastToken = tokens[tokens.length - 1];
|
||||||
@@ -228,4 +262,5 @@ module.exports = class Lexer {
|
if (lastToken && lastToken.type === 'text') {
|
||||||
if (token = this.tokenizer.text(src, tokens)) {
|
@@ -355,4 +386,5 @@ module.exports = class Lexer {
|
||||||
src = src.substring(token.raw.length);
|
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||||
+ this.set_ln(token);
|
src = src.substring(token.raw.length);
|
||||||
if (token.type) {
|
+ this.ln = token.ln || this.ln;
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -263,4 +298,7 @@ module.exports = class Lexer {
|
return true;
|
||||||
for (i = 0; i < l; i++) {
|
@@ -420,4 +452,6 @@ module.exports = class Lexer {
|
||||||
token = tokens[i];
|
|
||||||
+ // this.ln is at EOF when inline() is invoked;
|
|
||||||
+ // all this affects <br> tags only so no biggie if it breaks
|
|
||||||
+ this.ln = token.ln || this.ln;
|
|
||||||
switch (token.type) {
|
|
||||||
case 'paragraph':
|
|
||||||
@@ -386,4 +424,6 @@ module.exports = class Lexer {
|
|
||||||
if (token = this.tokenizer.br(src)) {
|
if (token = this.tokenizer.br(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ // no need to reset (no more blockTokens anyways)
|
+ // no need to reset (no more blockTokens anyways)
|
||||||
+ token.ln = this.ln++;
|
+ token.ln = this.ln++;
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
|
@@ -462,4 +496,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||||
|
src = src.substring(token.raw.length);
|
||||||
|
+ this.ln = token.ln || this.ln;
|
||||||
|
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
|
||||||
|
prevChar = token.raw.slice(-1);
|
||||||
diff --git a/src/Parser.js b/src/Parser.js
|
diff --git a/src/Parser.js b/src/Parser.js
|
||||||
--- a/src/Parser.js
|
--- a/src/Parser.js
|
||||||
+++ b/src/Parser.js
|
+++ b/src/Parser.js
|
||||||
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -55,4 +56,9 @@ module.exports = class Parser {
|
@@ -64,4 +65,8 @@ module.exports = class Parser {
|
||||||
for (i = 0; i < l; i++) {
|
for (i = 0; i < l; i++) {
|
||||||
token = tokens[i];
|
token = tokens[i];
|
||||||
+ // take line-numbers from tokens whenever possible
|
+ // take line-numbers from tokens whenever possible
|
||||||
+ // and update the renderer's html attribute with the new value
|
+ // and update the renderer's html attribute with the new value
|
||||||
+ this.ln = token.ln || this.ln;
|
+ this.ln = token.ln || this.ln;
|
||||||
+ this.renderer.tag_ln(this.ln);
|
+ this.renderer.tag_ln(this.ln);
|
||||||
+
|
|
||||||
switch (token.type) {
|
// Run any renderer extensions
|
||||||
case 'space': {
|
@@ -124,7 +129,10 @@ module.exports = class Parser {
|
||||||
@@ -105,7 +111,10 @@ module.exports = class Parser {
|
|
||||||
}
|
}
|
||||||
|
|
||||||
- body += this.renderer.tablerow(cell);
|
- body += this.renderer.tablerow(cell);
|
||||||
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -148,8 +157,12 @@ module.exports = class Parser {
|
@@ -167,8 +175,12 @@ module.exports = class Parser {
|
||||||
|
|
||||||
itemBody += this.parse(item.tokens, loose);
|
itemBody += this.parse(item.tokens, loose);
|
||||||
- body += this.renderer.listitem(itemBody, task, checked);
|
- body += this.renderer.listitem(itemBody, task, checked);
|
||||||
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -160,5 +173,6 @@ module.exports = class Parser {
|
@@ -179,5 +191,6 @@ module.exports = class Parser {
|
||||||
}
|
}
|
||||||
case 'paragraph': {
|
case 'paragraph': {
|
||||||
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||||
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -199,4 +213,6 @@ module.exports = class Parser {
|
@@ -221,4 +234,7 @@ module.exports = class Parser {
|
||||||
for (i = 0; i < l; i++) {
|
|
||||||
token = tokens[i];
|
token = tokens[i];
|
||||||
|
|
||||||
+ // another thing that only affects <br/> and other inlines
|
+ // another thing that only affects <br/> and other inlines
|
||||||
+ this.ln = token.ln || this.ln;
|
+ this.ln = token.ln || this.ln;
|
||||||
switch (token.type) {
|
+
|
||||||
case 'escape': {
|
// Run any renderer extensions
|
||||||
@@ -229,5 +245,7 @@ module.exports = class Parser {
|
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
|
||||||
}
|
|
||||||
case 'br': {
|
|
||||||
- out += renderer.br();
|
|
||||||
+ // update the html attribute before writing each <br/>,
|
|
||||||
+ // don't care about the others
|
|
||||||
+ out += renderer.tag_ln(this.ln).br();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||||
--- a/src/Renderer.js
|
--- a/src/Renderer.js
|
||||||
+++ b/src/Renderer.js
|
+++ b/src/Renderer.js
|
||||||
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
+
|
+
|
||||||
code(code, infostring, escaped) {
|
code(code, infostring, escaped) {
|
||||||
const lang = (infostring || '').match(/\S*/)[0];
|
const lang = (infostring || '').match(/\S*/)[0];
|
||||||
@@ -24,10 +30,10 @@ module.exports = class Renderer {
|
@@ -26,10 +32,10 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
if (!lang) {
|
if (!lang) {
|
||||||
- return '<pre><code>'
|
- return '<pre><code>'
|
||||||
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
+ return '<pre' + this.ln + '><code class="'
|
+ return '<pre' + this.ln + '><code class="'
|
||||||
+ this.options.langPrefix
|
+ this.options.langPrefix
|
||||||
+ escape(lang, true)
|
+ escape(lang, true)
|
||||||
@@ -38,5 +44,5 @@ module.exports = class Renderer {
|
@@ -40,5 +46,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
blockquote(quote) {
|
blockquote(quote) {
|
||||||
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
||||||
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -49,4 +55,5 @@ module.exports = class Renderer {
|
@@ -51,4 +57,5 @@ module.exports = class Renderer {
|
||||||
return '<h'
|
return '<h'
|
||||||
+ level
|
+ level
|
||||||
+ + this.ln
|
+ + this.ln
|
||||||
+ ' id="'
|
+ ' id="'
|
||||||
+ this.options.headerPrefix
|
+ this.options.headerPrefix
|
||||||
@@ -59,5 +66,5 @@ module.exports = class Renderer {
|
@@ -61,5 +68,5 @@ module.exports = class Renderer {
|
||||||
}
|
}
|
||||||
// ignore IDs
|
// ignore IDs
|
||||||
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
||||||
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -73,5 +80,5 @@ module.exports = class Renderer {
|
@@ -75,5 +82,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
listitem(text) {
|
listitem(text) {
|
||||||
- return '<li>' + text + '</li>\n';
|
- return '<li>' + text + '</li>\n';
|
||||||
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,5 +92,5 @@ module.exports = class Renderer {
|
@@ -87,5 +94,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
paragraph(text) {
|
paragraph(text) {
|
||||||
- return '<p>' + text + '</p>\n';
|
- return '<p>' + text + '</p>\n';
|
||||||
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -100,5 +107,5 @@ module.exports = class Renderer {
|
@@ -102,5 +109,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
tablerow(content) {
|
tablerow(content) {
|
||||||
- return '<tr>\n' + content + '</tr>\n';
|
- return '<tr>\n' + content + '</tr>\n';
|
||||||
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -125,5 +132,5 @@ module.exports = class Renderer {
|
@@ -127,5 +134,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
br() {
|
br() {
|
||||||
- return this.options.xhtml ? '<br/>' : '<br>';
|
- return this.options.xhtml ? '<br/>' : '<br>';
|
||||||
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -151,5 +158,5 @@ module.exports = class Renderer {
|
@@ -153,5 +160,5 @@ module.exports = class Renderer {
|
||||||
}
|
}
|
||||||
|
|
||||||
- let out = '<img src="' + href + '" alt="' + text + '"';
|
- let out = '<img src="' + href + '" alt="' + text + '"';
|
||||||
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
||||||
if (title) {
|
if (title) {
|
||||||
out += ' title="' + title + '"';
|
out += ' title="' + title + '"';
|
||||||
|
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||||
|
--- a/src/Tokenizer.js
|
||||||
|
+++ b/src/Tokenizer.js
|
||||||
|
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
|
||||||
|
const l = list.items.length;
|
||||||
|
|
||||||
|
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad
|
||||||
|
+ this.lexer.ln--;
|
||||||
|
+
|
||||||
|
// Item child tokens handled here at end because we needed to have the final item to trim it first
|
||||||
|
for (i = 0; i < l; i++) {
|
||||||
|
|||||||
@@ -1,52 +1,52 @@
|
|||||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||||
--- a/src/Lexer.js
|
--- a/src/Lexer.js
|
||||||
+++ b/src/Lexer.js
|
+++ b/src/Lexer.js
|
||||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
|
||||||
/**
|
/**
|
||||||
* smartypants text replacement
|
* smartypants text replacement
|
||||||
- */
|
- */
|
||||||
+ *
|
+ *
|
||||||
function smartypants(text) {
|
function smartypants(text) {
|
||||||
return text
|
return text
|
||||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
@@ -27,5 +27,5 @@ function smartypants(text) {
|
||||||
/**
|
/**
|
||||||
* mangle email addresses
|
* mangle email addresses
|
||||||
- */
|
- */
|
||||||
+ *
|
+ *
|
||||||
function mangle(text) {
|
function mangle(text) {
|
||||||
let out = '',
|
let out = '',
|
||||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
@@ -465,5 +465,5 @@ module.exports = class Lexer {
|
||||||
|
|
||||||
// autolink
|
// autolink
|
||||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||||
+ if (token = this.tokenizer.autolink(src)) {
|
+ if (token = this.tokenizer.autolink(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
@@ -472,5 +472,5 @@ module.exports = class Lexer {
|
||||||
|
|
||||||
// url (gfm)
|
// url (gfm)
|
||||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
@@ -493,5 +493,5 @@ module.exports = class Lexer {
|
||||||
|
}
|
||||||
// text
|
}
|
||||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
+ if (token = this.tokenizer.inlineText(cutSrc)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
this.ln = token.ln || this.ln;
|
||||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||||
--- a/src/Renderer.js
|
--- a/src/Renderer.js
|
||||||
+++ b/src/Renderer.js
|
+++ b/src/Renderer.js
|
||||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
@@ -142,5 +142,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
link(href, title, text) {
|
link(href, title, text) {
|
||||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||||
+ href = cleanUrl(this.options.baseUrl, href);
|
+ href = cleanUrl(this.options.baseUrl, href);
|
||||||
if (href === null) {
|
if (href === null) {
|
||||||
return text;
|
return text;
|
||||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
@@ -155,5 +155,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
image(href, title, text) {
|
image(href, title, text) {
|
||||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||||
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||||
--- a/src/Tokenizer.js
|
--- a/src/Tokenizer.js
|
||||||
+++ b/src/Tokenizer.js
|
+++ b/src/Tokenizer.js
|
||||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
|
||||||
if (cap) {
|
type: 'html',
|
||||||
return {
|
|
||||||
- type: this.options.sanitize
|
|
||||||
- ? 'paragraph'
|
|
||||||
- : 'html',
|
|
||||||
+ type: 'html',
|
|
||||||
raw: cap[0],
|
raw: cap[0],
|
||||||
- pre: !this.options.sanitizer
|
- pre: !this.options.sanitizer
|
||||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
+ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
text: cap[0]
|
||||||
+ text: cap[0]
|
|
||||||
};
|
};
|
||||||
|
- if (this.options.sanitize) {
|
||||||
|
- token.type = 'paragraph';
|
||||||
|
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
|
||||||
|
- token.tokens = [];
|
||||||
|
- this.lexer.inline(token.text, token.tokens);
|
||||||
|
- }
|
||||||
|
return token;
|
||||||
}
|
}
|
||||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
@@ -477,15 +470,9 @@ module.exports = class Tokenizer {
|
||||||
|
|
||||||
return {
|
return {
|
||||||
- type: this.options.sanitize
|
- type: this.options.sanitize
|
||||||
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
|||||||
- : 'html',
|
- : 'html',
|
||||||
+ type: 'html',
|
+ type: 'html',
|
||||||
raw: cap[0],
|
raw: cap[0],
|
||||||
inLink,
|
inLink: this.lexer.state.inLink,
|
||||||
inRawBlock,
|
inRawBlock: this.lexer.state.inRawBlock,
|
||||||
- text: this.options.sanitize
|
- text: this.options.sanitize
|
||||||
- ? (this.options.sanitizer
|
- ? (this.options.sanitizer
|
||||||
- ? this.options.sanitizer(cap[0])
|
- ? this.options.sanitizer(cap[0])
|
||||||
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
|||||||
+ text: cap[0]
|
+ text: cap[0]
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
|
||||||
}
|
}
|
||||||
|
|
||||||
- autolink(src, mangle) {
|
- autolink(src, mangle) {
|
||||||
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
|||||||
+ text = escape(cap[1]);
|
+ text = escape(cap[1]);
|
||||||
href = 'mailto:' + text;
|
href = 'mailto:' + text;
|
||||||
} else {
|
} else {
|
||||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
|
||||||
}
|
}
|
||||||
|
|
||||||
- url(src, mangle) {
|
- url(src, mangle) {
|
||||||
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
|||||||
+ text = escape(cap[0]);
|
+ text = escape(cap[0]);
|
||||||
href = 'mailto:' + text;
|
href = 'mailto:' + text;
|
||||||
} else {
|
} else {
|
||||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
|
||||||
}
|
}
|
||||||
|
|
||||||
- inlineText(src, inRawBlock, smartypants) {
|
- inlineText(src, smartypants) {
|
||||||
+ inlineText(src, inRawBlock) {
|
+ inlineText(src) {
|
||||||
const cap = this.rules.inline.text.exec(src);
|
const cap = this.rules.inline.text.exec(src);
|
||||||
if (cap) {
|
if (cap) {
|
||||||
let text;
|
let text;
|
||||||
if (inRawBlock) {
|
if (this.lexer.state.inRawBlock) {
|
||||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||||
+ text = cap[0];
|
+ text = cap[0];
|
||||||
} else {
|
} else {
|
||||||
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
|||||||
diff --git a/src/defaults.js b/src/defaults.js
|
diff --git a/src/defaults.js b/src/defaults.js
|
||||||
--- a/src/defaults.js
|
--- a/src/defaults.js
|
||||||
+++ b/src/defaults.js
|
+++ b/src/defaults.js
|
||||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
@@ -9,12 +9,8 @@ function getDefaults() {
|
||||||
highlight: null,
|
highlight: null,
|
||||||
langPrefix: 'language-',
|
langPrefix: 'language-',
|
||||||
- mangle: true,
|
- mangle: true,
|
||||||
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
|
|||||||
+function cleanUrl(base, href) {
|
+function cleanUrl(base, href) {
|
||||||
if (base && !originIndependentUrl.test(href)) {
|
if (base && !originIndependentUrl.test(href)) {
|
||||||
href = resolveUrl(base, href);
|
href = resolveUrl(base, href);
|
||||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
|
||||||
}
|
}
|
||||||
|
|
||||||
-function checkSanitizeDeprecation(opt) {
|
-function checkSanitizeDeprecation(opt) {
|
||||||
@@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
|
|||||||
- }
|
- }
|
||||||
-}
|
-}
|
||||||
-
|
-
|
||||||
module.exports = {
|
// copied from https://stackoverflow.com/a/5450113/806777
|
||||||
escape,
|
function repeatString(pattern, count) {
|
||||||
@@ -239,5 +220,4 @@ module.exports = {
|
@@ -260,5 +241,4 @@ module.exports = {
|
||||||
splitCells,
|
|
||||||
rtrim,
|
rtrim,
|
||||||
- findClosingBracket,
|
findClosingBracket,
|
||||||
- checkSanitizeDeprecation
|
- checkSanitizeDeprecation,
|
||||||
+ findClosingBracket
|
repeatString
|
||||||
};
|
};
|
||||||
diff --git a/src/marked.js b/src/marked.js
|
diff --git a/src/marked.js b/src/marked.js
|
||||||
--- a/src/marked.js
|
--- a/src/marked.js
|
||||||
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
|
|||||||
- checkSanitizeDeprecation(opt);
|
- checkSanitizeDeprecation(opt);
|
||||||
|
|
||||||
if (callback) {
|
if (callback) {
|
||||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
|
||||||
return Parser.parse(tokens, opt);
|
|
||||||
|
opt = merge({}, marked.defaults, opt || {});
|
||||||
|
- checkSanitizeDeprecation(opt);
|
||||||
|
|
||||||
|
try {
|
||||||
|
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
|
||||||
|
return Parser.parseInline(tokens, opt);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||||
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
|
|||||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||||
--- a/test/specs/run-spec.js
|
--- a/test/specs/run-spec.js
|
||||||
+++ b/test/specs/run-spec.js
|
+++ b/test/specs/run-spec.js
|
||||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||||
}
|
}
|
||||||
|
|
||||||
- if (spec.options.sanitizer) {
|
- if (spec.options.sanitizer) {
|
||||||
- // eslint-disable-next-line no-eval
|
- // eslint-disable-next-line no-eval
|
||||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||||
- }
|
- }
|
||||||
|
-
|
||||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
const before = process.hrtime();
|
||||||
|
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||||
runSpecs('New', './new');
|
runSpecs('New', './new');
|
||||||
runSpecs('ReDOS', './redos');
|
runSpecs('ReDOS', './redos');
|
||||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||||
--- a/test/unit/Lexer-spec.js
|
--- a/test/unit/Lexer-spec.js
|
||||||
+++ b/test/unit/Lexer-spec.js
|
+++ b/test/unit/Lexer-spec.js
|
||||||
@@ -465,5 +465,5 @@ a | b
|
@@ -589,5 +589,5 @@ paragraph
|
||||||
});
|
});
|
||||||
|
|
||||||
- it('sanitize', () => {
|
- it('sanitize', () => {
|
||||||
+ /*it('sanitize', () => {
|
+ /*it('sanitize', () => {
|
||||||
expectTokens({
|
expectTokens({
|
||||||
md: '<div>html</div>',
|
md: '<div>html</div>',
|
||||||
@@ -483,5 +483,5 @@ a | b
|
@@ -607,5 +607,5 @@ paragraph
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
- });
|
- });
|
||||||
+ });*/
|
+ });*/
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -587,5 +587,5 @@ a | b
|
@@ -652,5 +652,5 @@ paragraph
|
||||||
});
|
});
|
||||||
|
|
||||||
- it('html sanitize', () => {
|
- it('html sanitize', () => {
|
||||||
+ /*it('html sanitize', () => {
|
+ /*it('html sanitize', () => {
|
||||||
expectInlineTokens({
|
expectInlineTokens({
|
||||||
md: '<div>html</div>',
|
md: '<div>html</div>',
|
||||||
@@ -597,5 +597,5 @@ a | b
|
@@ -660,5 +660,5 @@ paragraph
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
- });
|
- });
|
||||||
+ });*/
|
+ });*/
|
||||||
|
|
||||||
it('link', () => {
|
it('link', () => {
|
||||||
@@ -909,5 +909,5 @@ a | b
|
@@ -971,5 +971,5 @@ paragraph
|
||||||
});
|
});
|
||||||
|
|
||||||
- it('autolink mangle email', () => {
|
- it('autolink mangle email', () => {
|
||||||
+ /*it('autolink mangle email', () => {
|
+ /*it('autolink mangle email', () => {
|
||||||
expectInlineTokens({
|
expectInlineTokens({
|
||||||
md: '<test@example.com>',
|
md: '<test@example.com>',
|
||||||
@@ -929,5 +929,5 @@ a | b
|
@@ -991,5 +991,5 @@ paragraph
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
- });
|
- });
|
||||||
+ });*/
|
+ });*/
|
||||||
|
|
||||||
it('url', () => {
|
it('url', () => {
|
||||||
@@ -966,5 +966,5 @@ a | b
|
@@ -1028,5 +1028,5 @@ paragraph
|
||||||
});
|
});
|
||||||
|
|
||||||
- it('url mangle email', () => {
|
- it('url mangle email', () => {
|
||||||
+ /*it('url mangle email', () => {
|
+ /*it('url mangle email', () => {
|
||||||
expectInlineTokens({
|
expectInlineTokens({
|
||||||
md: 'test@example.com',
|
md: 'test@example.com',
|
||||||
@@ -986,5 +986,5 @@ a | b
|
@@ -1048,5 +1048,5 @@ paragraph
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
- });
|
- });
|
||||||
+ });*/
|
+ });*/
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1002,5 +1002,5 @@ a | b
|
@@ -1064,5 +1064,5 @@ paragraph
|
||||||
});
|
});
|
||||||
|
|
||||||
- describe('smartypants', () => {
|
- describe('smartypants', () => {
|
||||||
+ /*describe('smartypants', () => {
|
+ /*describe('smartypants', () => {
|
||||||
it('single quotes', () => {
|
it('single quotes', () => {
|
||||||
expectInlineTokens({
|
expectInlineTokens({
|
||||||
@@ -1072,5 +1072,5 @@ a | b
|
@@ -1134,5 +1134,5 @@ paragraph
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
- });
|
- });
|
||||||
|
|||||||
@@ -19,9 +19,11 @@ help() { exec cat <<'EOF'
|
|||||||
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
|
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
|
||||||
# (only affects apple devices; everything else has native support)
|
# (only affects apple devices; everything else has native support)
|
||||||
#
|
#
|
||||||
# `no-cm` saves ~92k by removing easymde/codemirror
|
# `no-cm` saves ~82k by removing easymde/codemirror
|
||||||
# (the fancy markdown editor)
|
# (the fancy markdown editor)
|
||||||
#
|
#
|
||||||
|
# `no-hl` saves ~41k by removing syntax hilighting in the text viewer
|
||||||
|
#
|
||||||
# `no-fnt` saves ~9k by removing the source-code-pro font
|
# `no-fnt` saves ~9k by removing the source-code-pro font
|
||||||
# (browsers will try to use 'Consolas' instead)
|
# (browsers will try to use 'Consolas' instead)
|
||||||
#
|
#
|
||||||
@@ -67,6 +69,7 @@ pybin=$(command -v python3 || command -v python) || {
|
|||||||
use_gz=
|
use_gz=
|
||||||
do_sh=1
|
do_sh=1
|
||||||
do_py=1
|
do_py=1
|
||||||
|
zopf=2560
|
||||||
while [ ! -z "$1" ]; do
|
while [ ! -z "$1" ]; do
|
||||||
case $1 in
|
case $1 in
|
||||||
clean) clean=1 ; ;;
|
clean) clean=1 ; ;;
|
||||||
@@ -74,10 +77,12 @@ while [ ! -z "$1" ]; do
|
|||||||
gz) use_gz=1 ; ;;
|
gz) use_gz=1 ; ;;
|
||||||
no-ogv) no_ogv=1 ; ;;
|
no-ogv) no_ogv=1 ; ;;
|
||||||
no-fnt) no_fnt=1 ; ;;
|
no-fnt) no_fnt=1 ; ;;
|
||||||
|
no-hl) no_hl=1 ; ;;
|
||||||
no-dd) no_dd=1 ; ;;
|
no-dd) no_dd=1 ; ;;
|
||||||
no-cm) no_cm=1 ; ;;
|
no-cm) no_cm=1 ; ;;
|
||||||
no-sh) do_sh= ; ;;
|
no-sh) do_sh= ; ;;
|
||||||
no-py) do_py= ; ;;
|
no-py) do_py= ; ;;
|
||||||
|
fast) zopf=100 ; ;;
|
||||||
*) help ; ;;
|
*) help ; ;;
|
||||||
esac
|
esac
|
||||||
shift
|
shift
|
||||||
@@ -136,7 +141,7 @@ tmpdir="$(
|
|||||||
# msys2 tar is bad, make the best of it
|
# msys2 tar is bad, make the best of it
|
||||||
echo collecting source
|
echo collecting source
|
||||||
[ $clean ] && {
|
[ $clean ] && {
|
||||||
(cd .. && git archive master >tar) && tar -xf ../tar copyparty
|
(cd .. && git archive hovudstraum >tar) && tar -xf ../tar copyparty
|
||||||
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
|
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
|
||||||
}
|
}
|
||||||
[ $clean ] || {
|
[ $clean ] || {
|
||||||
@@ -204,6 +209,15 @@ while IFS= read -r x; do
|
|||||||
tmv "$x"
|
tmv "$x"
|
||||||
done
|
done
|
||||||
|
|
||||||
|
find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
|
||||||
|
cat have | while IFS= read -r x; do
|
||||||
|
grep -qF -- "$x" ../scripts/sfx.ls || {
|
||||||
|
echo "unexpected file: $x"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
done
|
||||||
|
rm have
|
||||||
|
|
||||||
[ $no_ogv ] &&
|
[ $no_ogv ] &&
|
||||||
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
|
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
|
||||||
|
|
||||||
@@ -215,6 +229,9 @@ done
|
|||||||
tmv "$f"
|
tmv "$f"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[ $no_hl ] &&
|
||||||
|
rm -rf copyparty/web/deps/prism*
|
||||||
|
|
||||||
[ $no_fnt ] && {
|
[ $no_fnt ] && {
|
||||||
rm -f copyparty/web/deps/scp.woff2
|
rm -f copyparty/web/deps/scp.woff2
|
||||||
f=copyparty/web/ui.css
|
f=copyparty/web/ui.css
|
||||||
@@ -227,7 +244,7 @@ done
|
|||||||
rm -rf copyparty/web/dd
|
rm -rf copyparty/web/dd
|
||||||
f=copyparty/web/browser.css
|
f=copyparty/web/browser.css
|
||||||
gzip -d "$f.gz" || true
|
gzip -d "$f.gz" || true
|
||||||
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
|
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
|
||||||
tmv "$f"
|
tmv "$f"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -260,7 +277,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
|
|||||||
}
|
}
|
||||||
!/\}$/ {printf "%s",$0;next}
|
!/\}$/ {printf "%s",$0;next}
|
||||||
1
|
1
|
||||||
' <$f | sed 's/;\}$/}/' >t
|
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
|
||||||
tmv "$f"
|
tmv "$f"
|
||||||
done
|
done
|
||||||
unexpand -h 2>/dev/null &&
|
unexpand -h 2>/dev/null &&
|
||||||
@@ -271,7 +288,7 @@ done
|
|||||||
|
|
||||||
gzres() {
|
gzres() {
|
||||||
command -v pigz &&
|
command -v pigz &&
|
||||||
pk='pigz -11 -I 2560' ||
|
pk="pigz -11 -I $zopf" ||
|
||||||
pk='gzip'
|
pk='gzip'
|
||||||
|
|
||||||
np=$(nproc)
|
np=$(nproc)
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ rls_dir="$tmp/copyparty-$ver"
|
|||||||
mkdir "$rls_dir"
|
mkdir "$rls_dir"
|
||||||
|
|
||||||
echo ">>> export from git"
|
echo ">>> export from git"
|
||||||
git archive master | tar -xC "$rls_dir"
|
git archive hovudstraum | tar -xC "$rls_dir"
|
||||||
|
|
||||||
echo ">>> export untracked deps"
|
echo ">>> export untracked deps"
|
||||||
tar -c copyparty/web/deps | tar -xC "$rls_dir"
|
tar -c copyparty/web/deps | tar -xC "$rls_dir"
|
||||||
@@ -122,5 +122,5 @@ echo " $zip_path"
|
|||||||
echo " $tgz_path"
|
echo " $tgz_path"
|
||||||
echo
|
echo
|
||||||
|
|
||||||
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
|
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in hovudstraum rls src ; do alr $x; done
|
||||||
|
|
||||||
|
|||||||
36
scripts/rls.sh
Executable file
36
scripts/rls.sh
Executable file
@@ -0,0 +1,36 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cd ~/dev/copyparty/scripts
|
||||||
|
|
||||||
|
v=$1
|
||||||
|
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
|
||||||
|
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
|
||||||
|
|
||||||
|
git tag v$v
|
||||||
|
git push origin --tags
|
||||||
|
|
||||||
|
rm -rf ../dist
|
||||||
|
|
||||||
|
./make-pypi-release.sh u
|
||||||
|
(cd .. && python3 ./setup.py clean2)
|
||||||
|
|
||||||
|
./make-tgz-release.sh $v
|
||||||
|
|
||||||
|
rm -f ../dist/copyparty-sfx.*
|
||||||
|
./make-sfx.sh no-sh
|
||||||
|
../dist/copyparty-sfx.py -h
|
||||||
|
|
||||||
|
ar=
|
||||||
|
while true; do
|
||||||
|
for ((a=0; a<100; a++)); do
|
||||||
|
for f in ../dist/copyparty-sfx.{py,sh}; do
|
||||||
|
[ -e $f ] || continue;
|
||||||
|
mv $f $f.$(wc -c <$f | awk '{print$1}')
|
||||||
|
done
|
||||||
|
./make-sfx.sh re $ar
|
||||||
|
done
|
||||||
|
ar=no-sh
|
||||||
|
done
|
||||||
|
|
||||||
|
# git tag -d v$v; git push --delete origin v$v
|
||||||
80
scripts/sfx.ls
Normal file
80
scripts/sfx.ls
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
copyparty,
|
||||||
|
copyparty/__init__.py,
|
||||||
|
copyparty/__main__.py,
|
||||||
|
copyparty/__version__.py,
|
||||||
|
copyparty/authsrv.py,
|
||||||
|
copyparty/bos,
|
||||||
|
copyparty/bos/__init__.py,
|
||||||
|
copyparty/bos/bos.py,
|
||||||
|
copyparty/bos/path.py,
|
||||||
|
copyparty/broker_mp.py,
|
||||||
|
copyparty/broker_mpw.py,
|
||||||
|
copyparty/broker_thr.py,
|
||||||
|
copyparty/broker_util.py,
|
||||||
|
copyparty/httpcli.py,
|
||||||
|
copyparty/httpconn.py,
|
||||||
|
copyparty/httpsrv.py,
|
||||||
|
copyparty/ico.py,
|
||||||
|
copyparty/mtag.py,
|
||||||
|
copyparty/res,
|
||||||
|
copyparty/res/insecure.pem,
|
||||||
|
copyparty/star.py,
|
||||||
|
copyparty/stolen,
|
||||||
|
copyparty/stolen/__init__.py,
|
||||||
|
copyparty/stolen/surrogateescape.py,
|
||||||
|
copyparty/sutil.py,
|
||||||
|
copyparty/svchub.py,
|
||||||
|
copyparty/szip.py,
|
||||||
|
copyparty/tcpsrv.py,
|
||||||
|
copyparty/th_cli.py,
|
||||||
|
copyparty/th_srv.py,
|
||||||
|
copyparty/u2idx.py,
|
||||||
|
copyparty/up2k.py,
|
||||||
|
copyparty/util.py,
|
||||||
|
copyparty/web,
|
||||||
|
copyparty/web/baguettebox.js,
|
||||||
|
copyparty/web/browser.css,
|
||||||
|
copyparty/web/browser.html,
|
||||||
|
copyparty/web/browser.js,
|
||||||
|
copyparty/web/browser2.html,
|
||||||
|
copyparty/web/copyparty.gif,
|
||||||
|
copyparty/web/dd,
|
||||||
|
copyparty/web/dd/2.png,
|
||||||
|
copyparty/web/dd/3.png,
|
||||||
|
copyparty/web/dd/4.png,
|
||||||
|
copyparty/web/dd/5.png,
|
||||||
|
copyparty/web/deps,
|
||||||
|
copyparty/web/deps/easymde.css,
|
||||||
|
copyparty/web/deps/easymde.js,
|
||||||
|
copyparty/web/deps/marked.js,
|
||||||
|
copyparty/web/deps/mini-fa.css,
|
||||||
|
copyparty/web/deps/mini-fa.woff,
|
||||||
|
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
|
||||||
|
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
|
||||||
|
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
|
||||||
|
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
|
||||||
|
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
|
||||||
|
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
|
||||||
|
copyparty/web/deps/ogv-worker-audio.js,
|
||||||
|
copyparty/web/deps/ogv.js,
|
||||||
|
copyparty/web/deps/prism.js,
|
||||||
|
copyparty/web/deps/prism.css,
|
||||||
|
copyparty/web/deps/prismd.css,
|
||||||
|
copyparty/web/deps/scp.woff2,
|
||||||
|
copyparty/web/deps/sha512.ac.js,
|
||||||
|
copyparty/web/deps/sha512.hw.js,
|
||||||
|
copyparty/web/md.css,
|
||||||
|
copyparty/web/md.html,
|
||||||
|
copyparty/web/md.js,
|
||||||
|
copyparty/web/md2.css,
|
||||||
|
copyparty/web/md2.js,
|
||||||
|
copyparty/web/mde.css,
|
||||||
|
copyparty/web/mde.html,
|
||||||
|
copyparty/web/mde.js,
|
||||||
|
copyparty/web/msg.css,
|
||||||
|
copyparty/web/msg.html,
|
||||||
|
copyparty/web/splash.css,
|
||||||
|
copyparty/web/splash.html,
|
||||||
|
copyparty/web/ui.css,
|
||||||
|
copyparty/web/up2k.js,
|
||||||
|
copyparty/web/util.js,
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: latin-1
|
# coding: latin-1
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
@@ -9,7 +9,7 @@ import subprocess as sp
|
|||||||
to edit this file, use HxD or "vim -b"
|
to edit this file, use HxD or "vim -b"
|
||||||
(there is compressed stuff at the end)
|
(there is compressed stuff at the end)
|
||||||
|
|
||||||
run me with any version of python, i will unpack and run copyparty
|
run me with python 2.7 or 3.3+ to unpack and run copyparty
|
||||||
|
|
||||||
there's zero binaries! just plaintext python scripts all the way down
|
there's zero binaries! just plaintext python scripts all the way down
|
||||||
so you can easily unpack the archive and inspect it for shady stuff
|
so you can easily unpack the archive and inspect it for shady stuff
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|||||||
@@ -60,7 +60,7 @@ class Cpp(object):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def tc1():
|
def tc1(vflags):
|
||||||
ub = "http://127.0.0.1:4321/"
|
ub = "http://127.0.0.1:4321/"
|
||||||
td = os.path.join("srv", "smoketest")
|
td = os.path.join("srv", "smoketest")
|
||||||
try:
|
try:
|
||||||
@@ -100,17 +100,17 @@ def tc1():
|
|||||||
for d1 in ["r", "w", "a"]:
|
for d1 in ["r", "w", "a"]:
|
||||||
pdirs.append("{}/{}".format(td, d1))
|
pdirs.append("{}/{}".format(td, d1))
|
||||||
pdirs.append("{}/{}/j".format(td, d1))
|
pdirs.append("{}/{}/j".format(td, d1))
|
||||||
for d2 in ["r", "w", "a"]:
|
for d2 in ["r", "w", "a", "c"]:
|
||||||
d = os.path.join(td, d1, "j", d2)
|
d = os.path.join(td, d1, "j", d2)
|
||||||
pdirs.append(d)
|
pdirs.append(d)
|
||||||
os.makedirs(d)
|
os.makedirs(d)
|
||||||
|
|
||||||
pdirs = [x.replace("\\", "/") for x in pdirs]
|
pdirs = [x.replace("\\", "/") for x in pdirs]
|
||||||
udirs = [x.split("/", 2)[2] for x in pdirs]
|
udirs = [x.split("/", 2)[2] for x in pdirs]
|
||||||
perms = [x.rstrip("j/")[-1] for x in pdirs]
|
perms = [x.rstrip("cj/")[-1] for x in pdirs]
|
||||||
perms = ["rw" if x == "a" else x for x in perms]
|
perms = ["rw" if x == "a" else x for x in perms]
|
||||||
for pd, ud, p in zip(pdirs, udirs, perms):
|
for pd, ud, p in zip(pdirs, udirs, perms):
|
||||||
if ud[-1] == "j":
|
if ud[-1] == "j" or ud[-1] == "c":
|
||||||
continue
|
continue
|
||||||
|
|
||||||
hp = None
|
hp = None
|
||||||
@@ -123,29 +123,37 @@ def tc1():
|
|||||||
hp = "-"
|
hp = "-"
|
||||||
hpaths[ud] = os.path.join(pd, ".hist")
|
hpaths[ud] = os.path.join(pd, ".hist")
|
||||||
|
|
||||||
arg = "{}:{}:{}".format(pd, ud, p, hp)
|
arg = "{}:{}:{}".format(pd, ud, p)
|
||||||
if hp:
|
if hp:
|
||||||
arg += ":c,hist=" + hp
|
arg += ":c,hist=" + hp
|
||||||
|
|
||||||
args += ["-v", arg]
|
args += ["-v", arg + vflags]
|
||||||
|
|
||||||
# return
|
# return
|
||||||
cpp = Cpp(args)
|
cpp = Cpp(args)
|
||||||
CPP.append(cpp)
|
CPP.append(cpp)
|
||||||
cpp.await_idle(ub, 3)
|
cpp.await_idle(ub, 3)
|
||||||
|
|
||||||
for d in udirs:
|
for d, p in zip(udirs, perms):
|
||||||
vid = ovid + "\n{}".format(d).encode("utf-8")
|
vid = ovid + "\n{}".format(d).encode("utf-8")
|
||||||
try:
|
r = requests.post(
|
||||||
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
|
ub + d,
|
||||||
except:
|
data={"act": "bput"},
|
||||||
pass
|
files={"f": (d.replace("/", "") + ".h264", vid)},
|
||||||
|
)
|
||||||
|
c = r.status_code
|
||||||
|
if c == 200 and p not in ["w", "rw"]:
|
||||||
|
raise Exception("post {} with perm {} at {}".format(c, p, d))
|
||||||
|
elif c == 403 and p not in ["r"]:
|
||||||
|
raise Exception("post {} with perm {} at {}".format(c, p, d))
|
||||||
|
elif c not in [200, 403]:
|
||||||
|
raise Exception("post {} with perm {} at {}".format(c, p, d))
|
||||||
|
|
||||||
cpp.clean()
|
cpp.clean()
|
||||||
|
|
||||||
# GET permission
|
# GET permission
|
||||||
for d, p in zip(udirs, perms):
|
for d, p in zip(udirs, perms):
|
||||||
u = "{}{}/a.h264".format(ub, d)
|
u = "{}{}/{}.h264".format(ub, d, d.replace("/", ""))
|
||||||
r = requests.get(u)
|
r = requests.get(u)
|
||||||
ok = bool(r)
|
ok = bool(r)
|
||||||
if ok != (p in ["rw"]):
|
if ok != (p in ["rw"]):
|
||||||
@@ -153,14 +161,14 @@ def tc1():
|
|||||||
|
|
||||||
# stat filesystem
|
# stat filesystem
|
||||||
for d, p in zip(pdirs, perms):
|
for d, p in zip(pdirs, perms):
|
||||||
u = "{}/a.h264".format(d)
|
u = "{}/{}.h264".format(d, d.split("test/")[-1].replace("/", ""))
|
||||||
ok = os.path.exists(u)
|
ok = os.path.exists(u)
|
||||||
if ok != (p in ["rw", "w"]):
|
if ok != (p in ["rw", "w"]):
|
||||||
raise Exception("stat {} with perm {} at {}".format(ok, p, u))
|
raise Exception("stat {} with perm {} at {}".format(ok, p, u))
|
||||||
|
|
||||||
# GET thumbnail, vreify contents
|
# GET thumbnail, vreify contents
|
||||||
for d, p in zip(udirs, perms):
|
for d, p in zip(udirs, perms):
|
||||||
u = "{}{}/a.h264?th=j".format(ub, d)
|
u = "{}{}/{}.h264?th=j".format(ub, d, d.replace("/", ""))
|
||||||
r = requests.get(u)
|
r = requests.get(u)
|
||||||
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
|
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
|
||||||
if ok != (p in ["rw"]):
|
if ok != (p in ["rw"]):
|
||||||
@@ -192,9 +200,9 @@ def tc1():
|
|||||||
cpp.stop(True)
|
cpp.stop(True)
|
||||||
|
|
||||||
|
|
||||||
def run(tc):
|
def run(tc, *a):
|
||||||
try:
|
try:
|
||||||
tc()
|
tc(*a)
|
||||||
finally:
|
finally:
|
||||||
try:
|
try:
|
||||||
CPP[0].stop(False)
|
CPP[0].stop(False)
|
||||||
@@ -203,7 +211,8 @@ def run(tc):
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
run(tc1)
|
run(tc1, "")
|
||||||
|
run(tc1, ":c,fk")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
@@ -8,7 +8,7 @@ import tokenize
|
|||||||
|
|
||||||
|
|
||||||
def uncomment(fpath):
|
def uncomment(fpath):
|
||||||
""" modified https://stackoverflow.com/a/62074206 """
|
"""modified https://stackoverflow.com/a/62074206"""
|
||||||
|
|
||||||
with open(fpath, "rb") as f:
|
with open(fpath, "rb") as f:
|
||||||
orig = f.read().decode("utf-8")
|
orig = f.read().decode("utf-8")
|
||||||
|
|||||||
9
setup.py
9
setup.py
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
@@ -61,7 +61,7 @@ class clean2(Command):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
nuke = []
|
nuke = []
|
||||||
for (dirpath, dirnames, filenames) in os.walk("."):
|
for (dirpath, _, filenames) in os.walk("."):
|
||||||
for fn in filenames:
|
for fn in filenames:
|
||||||
if (
|
if (
|
||||||
fn.startswith("MANIFEST")
|
fn.startswith("MANIFEST")
|
||||||
@@ -86,7 +86,7 @@ args = {
|
|||||||
"url": "https://github.com/9001/copyparty",
|
"url": "https://github.com/9001/copyparty",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"classifiers": [
|
"classifiers": [
|
||||||
"Development Status :: 4 - Beta",
|
"Development Status :: 5 - Production/Stable",
|
||||||
"License :: OSI Approved :: MIT License",
|
"License :: OSI Approved :: MIT License",
|
||||||
"Programming Language :: Python",
|
"Programming Language :: Python",
|
||||||
"Programming Language :: Python :: 2",
|
"Programming Language :: Python :: 2",
|
||||||
@@ -101,6 +101,7 @@ args = {
|
|||||||
"Programming Language :: Python :: 3.9",
|
"Programming Language :: Python :: 3.9",
|
||||||
"Programming Language :: Python :: 3.10",
|
"Programming Language :: Python :: 3.10",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: Implementation :: Jython",
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
"Environment :: Console",
|
"Environment :: Console",
|
||||||
"Environment :: No Input/Output (Daemon)",
|
"Environment :: No Input/Output (Daemon)",
|
||||||
@@ -113,7 +114,7 @@ args = {
|
|||||||
"install_requires": ["jinja2"],
|
"install_requires": ["jinja2"],
|
||||||
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
|
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
|
||||||
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
|
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
|
||||||
"scripts": ["bin/copyparty-fuse.py"],
|
"scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
|
||||||
"cmdclass": {"clean2": clean2},
|
"cmdclass": {"clean2": clean2},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
20
srv/test.md
20
srv/test.md
@@ -1,11 +1,17 @@
|
|||||||
### hello world
|
### hello world
|
||||||
|
|
||||||
* qwe
|
* qwe
|
||||||
* asd
|
* rty
|
||||||
* zxc
|
* uio
|
||||||
* 573
|
* asd
|
||||||
* one
|
* fgh
|
||||||
* two
|
* jkl
|
||||||
|
* zxc
|
||||||
|
* vbn
|
||||||
|
* 573
|
||||||
|
* one
|
||||||
|
* two
|
||||||
|
* three
|
||||||
|
|
||||||
* |||
|
* |||
|
||||||
|--|--|
|
|--|--|
|
||||||
@@ -134,12 +140,12 @@ a newline toplevel
|
|||||||
| a table | on the right |
|
| a table | on the right |
|
||||||
| second row | foo bar |
|
| second row | foo bar |
|
||||||
|
|
||||||
||
|
a||a
|
||||||
--|:-:|-:
|
--|:-:|-:
|
||||||
a table | big text in this | aaakbfddd
|
a table | big text in this | aaakbfddd
|
||||||
second row | centred | bbb
|
second row | centred | bbb
|
||||||
|
|
||||||
||
|
||||
|
||||||
--|--|--
|
--|--|--
|
||||||
foo
|
foo
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
@@ -39,16 +39,21 @@ class Cfg(Namespace):
|
|||||||
no_scandir=False,
|
no_scandir=False,
|
||||||
no_sendfile=True,
|
no_sendfile=True,
|
||||||
no_rescan=True,
|
no_rescan=True,
|
||||||
|
no_logues=False,
|
||||||
|
no_readme=False,
|
||||||
re_maxage=0,
|
re_maxage=0,
|
||||||
ihead=False,
|
ihead=False,
|
||||||
nih=True,
|
nih=True,
|
||||||
mtp=[],
|
mtp=[],
|
||||||
mte="a",
|
mte="a",
|
||||||
mth="",
|
mth="",
|
||||||
|
textfiles="",
|
||||||
hist=None,
|
hist=None,
|
||||||
no_hash=False,
|
no_idx=None,
|
||||||
|
no_hash=None,
|
||||||
|
js_browser=None,
|
||||||
css_browser=None,
|
css_browser=None,
|
||||||
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
|
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -96,7 +101,7 @@ class TestHttpCli(unittest.TestCase):
|
|||||||
if not vol.startswith(top):
|
if not vol.startswith(top):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
mode = vol[-2].replace("a", "rwmd")
|
mode = vol[-2].replace("a", "rw")
|
||||||
usr = vol[-1]
|
usr = vol[-1]
|
||||||
if usr == "a":
|
if usr == "a":
|
||||||
usr = ""
|
usr = ""
|
||||||
@@ -151,6 +156,7 @@ class TestHttpCli(unittest.TestCase):
|
|||||||
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
|
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
|
||||||
except:
|
except:
|
||||||
tar = []
|
tar = []
|
||||||
|
tar = [x[4:] if x.startswith("top/") else x for x in tar]
|
||||||
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
|
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
|
||||||
tar = [[x] + self.can_rw(x) for x in tar]
|
tar = [[x] + self.can_rw(x) for x in tar]
|
||||||
tar_ok = [x[0] for x in tar if x[1]]
|
tar_ok = [x[0] for x in tar if x[1]]
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
@@ -17,13 +17,16 @@ from copyparty import util
|
|||||||
|
|
||||||
class Cfg(Namespace):
|
class Cfg(Namespace):
|
||||||
def __init__(self, a=None, v=None, c=None):
|
def __init__(self, a=None, v=None, c=None):
|
||||||
ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()}
|
ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode"
|
||||||
|
ex = {k: False for k in ex.split()}
|
||||||
ex2 = {
|
ex2 = {
|
||||||
"mtp": [],
|
"mtp": [],
|
||||||
"mte": "a",
|
"mte": "a",
|
||||||
"mth": "",
|
"mth": "",
|
||||||
"hist": None,
|
"hist": None,
|
||||||
"no_hash": False,
|
"no_idx": None,
|
||||||
|
"no_hash": None,
|
||||||
|
"js_browser": None,
|
||||||
"css_browser": None,
|
"css_browser": None,
|
||||||
"no_voldump": True,
|
"no_voldump": True,
|
||||||
"re_maxage": 0,
|
"re_maxage": 0,
|
||||||
@@ -195,10 +198,10 @@ class TestVFS(unittest.TestCase):
|
|||||||
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
||||||
self.assertAxs(n.axs.uread, ["*"])
|
self.assertAxs(n.axs.uread, ["*"])
|
||||||
self.assertAxs(n.axs.uwrite, [])
|
self.assertAxs(n.axs.uwrite, [])
|
||||||
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
|
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
|
||||||
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
|
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
|
||||||
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
|
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
|
||||||
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
|
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])
|
||||||
|
|
||||||
# breadth-first construction
|
# breadth-first construction
|
||||||
vfs = AuthSrv(
|
vfs = AuthSrv(
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import sys
|
|||||||
import time
|
import time
|
||||||
import shutil
|
import shutil
|
||||||
import jinja2
|
import jinja2
|
||||||
|
import threading
|
||||||
import tempfile
|
import tempfile
|
||||||
import platform
|
import platform
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
@@ -28,7 +29,7 @@ if MACOS:
|
|||||||
# 25% faster; until any tests do symlink stuff
|
# 25% faster; until any tests do symlink stuff
|
||||||
|
|
||||||
|
|
||||||
from copyparty.util import Unrecv
|
from copyparty.util import Unrecv, FHC
|
||||||
|
|
||||||
|
|
||||||
def runcmd(argv):
|
def runcmd(argv):
|
||||||
@@ -112,6 +113,7 @@ class VSock(object):
|
|||||||
class VHttpSrv(object):
|
class VHttpSrv(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.broker = NullBroker()
|
self.broker = NullBroker()
|
||||||
|
self.prism = None
|
||||||
|
|
||||||
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
|
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||||
self.j2 = {x: J2_FILES for x in aliases}
|
self.j2 = {x: J2_FILES for x in aliases}
|
||||||
@@ -132,8 +134,10 @@ class VHttpConn(object):
|
|||||||
self.log_src = "a"
|
self.log_src = "a"
|
||||||
self.lf_url = None
|
self.lf_url = None
|
||||||
self.hsrv = VHttpSrv()
|
self.hsrv = VHttpSrv()
|
||||||
|
self.u2fh = FHC()
|
||||||
|
self.mutex = threading.Lock()
|
||||||
self.nreq = 0
|
self.nreq = 0
|
||||||
self.nbyte = 0
|
self.nbyte = 0
|
||||||
self.ico = None
|
self.ico = None
|
||||||
self.thumbcli = None
|
self.thumbcli = None
|
||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
|
|||||||
Reference in New Issue
Block a user