Compare commits

15 Commits

| SHA1 |
|---|
| c80307e8ff |
| c1d77e1041 |
| d9e83650dc |
| f6d635acd9 |
| 0dbd8a01ff |
| 8d755d41e0 |
| 190473bd32 |
| 030d1ec254 |
| 5a2b91a084 |
| a50a05e4e7 |
| 6cb5a87c79 |
| b9f89ca552 |
| 26c9fd5dea |
| e81a9b6fe0 |
| 452450e451 |
```diff
@@ -487,6 +487,7 @@ you can set upload rules using volume flags, some examples:
 * if someone uploads to `/foo/bar` the path would be rewritten to `/foo/bar/2021/08/06/23` for example
 * but the actual value is not verified, just the structure, so the uploader can choose any values which conform to the format string
 * just to avoid additional complexity in up2k which is enough of a mess already
+* `:c,lifetime=300` delete uploaded files when they become 5 minutes old

 you can also set transaction limits which apply per-IP and per-volume, but these assume `-j 1` (default) otherwise the limits will be off, for example `-j 4` would allow anywhere between 1x and 4x the limits you set depending on which processing node the client gets routed to
```
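To make the new flag concrete, here is a hedged sketch of a write-only dump volume combining the upload rules above on the command line; the account, paths and limits are placeholders borrowed from the examples elsewhere in this changeset:

```sh
# 16k-1m uploads only, max 10 per 5 minutes per IP,
# sorted into year-month/day-hour subfolders, deleted after 5 minutes
python3 copyparty.py -a ed:hunter2 \
  -v ../inc:dump:w:rw,ed:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H:c,lifetime=300
```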
````diff
@@ -61,3 +61,8 @@ cd /mnt/nas/music/.hist
 ~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
 ~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
 ```
+
+# [`prisonparty.sh`](prisonparty.sh)
+
+* run copyparty in a chroot, preventing any accidental file access
+* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
````
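A quick way to try it, mirroring the usage example built into the script itself (shown further down); the jail path and uid/gid are illustrative:

```sh
# run copyparty-sfx.py in a chroot at /var/jail as uid/gid 1000,
# exposing /mnt/nas/music as a read-write-move-delete volume
./prisonparty.sh /var/jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd
```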
bin/mtag/res/yt-ipr.conf (new file, 39 lines)

```
# example config file to use copyparty as a youtube manifest collector,
# use with copyparty like: python copyparty.py -c yt-ipr.conf
#
# see docs/example.conf for a better explanation of the syntax, but
# newlines are block separators, so adding blank lines inside a volume definition is bad
# (use comments as separators instead)


# create user ed, password wark
u ed:wark


# create a volume at /ytm which stores files at ./srv/ytm
./srv/ytm
/ytm
# write-only, but read-write for user ed
w
rw ed
# rescan the volume on startup
c e2dsa
# collect tags from all new files since last scan
c e2ts
# optionally enable compression to make the files 50% smaller
c pk
# only allow uploads which are between 16k and 1m large
c sz=16k-1m
# allow up to 10 uploads over 5 minutes from each ip
c maxn=10,300
# move uploads into subfolders: YEAR-MONTH / DAY-HOUR / <upload>
c rotf=%Y-%m/%d-%H
# delete uploads when they are 24 hours old
c lifetime=86400
# add the parser and tell copyparty what tags it can expect from it
c mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
# decide which tags we want to index and in what order
c mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires


# create any other volumes you'd like down here, or merge this with an existing config file
```
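If you prefer plain arguments over a config file, the same volume can be declared on the command line; a sketch assembled from the flags in the config above and the parser docstring further down (the account comes from `-a`, and `lifetime` is carried over from the config):

```sh
python3 copyparty.py -a ed:wark \
  -v srv/ytm:ytm:w:rw,ed:c,e2ts:c,e2dsa:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H:c,lifetime=86400:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
```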
```diff
@@ -7,26 +7,42 @@
 // ==/UserScript==

 function main() {
+    var server = 'https://127.0.0.1:3923/ytm?pw=wark',
+        interval = 60; // sec
+
     var sent = {};
-    function send(txt) {
-        if (sent[txt])
+    function send(txt, mf_url, desc) {
+        if (sent[mf_url])
             return;

-        fetch('https://127.0.0.1:3923/playerdata?_=' + Date.now(), { method: "PUT", body: txt });
-        console.log('[yt-ipr] yeet %d bytes', txt.length);
-        sent[txt] = 1;
+        fetch(server + '&_=' + Date.now(), { method: "PUT", body: txt });
+        console.log('[yt-pdh] yeet %d bytes, %s', txt.length, desc);
+        sent[mf_url] = 1;
     }

     function collect() {
-        setTimeout(collect, 60 * 1000);
-        var pd = document.querySelector('ytd-watch-flexy');
-        if (pd)
-            send(JSON.stringify(pd.playerData));
+        setTimeout(collect, interval * 1000);
+        try {
+            var pd = document.querySelector('ytd-watch-flexy');
+            if (!pd)
+                return console.log('[yt-pdh] no video found');
+
+            pd = pd.playerData;
+            var mu = pd.streamingData.dashManifestUrl || pd.streamingData.hlsManifestUrl;
+            if (!mu || !mu.length)
+                return console.log('[yt-pdh] no manifest found');
+
+            var desc = pd.videoDetails.videoId + ', ' + pd.videoDetails.title;
+            send(JSON.stringify(pd), mu, desc);
+        }
+        catch (ex) {
+            console.log("[yt-pdh]", ex);
+        }
     }
-    setTimeout(collect, 5000);
+    collect();
 }

 var scr = document.createElement('script');
 scr.textContent = '(' + main.toString() + ')();';
 (document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
-console.log('[yt-ipr] a');
+console.log('[yt-pdh] a');
```
```diff
@@ -1,19 +1,26 @@
 #!/usr/bin/env python

 import re
+import os
 import sys
 import gzip
 import json
+import base64
+import string
+import urllib.request
 from datetime import datetime

 """
 youtube initial player response

-example usage:
-  -v srv/playerdata:playerdata:w
-  :c,e2tsr:c,e2dsa
-  :c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-expires=bin/mtag/yt-ipr.py
-  :c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-expires
+it's probably best to use this through a config file; see res/yt-ipr.conf
+
+but if you want to use plain arguments instead then:
+  -v srv/ytm:ytm:w:rw,ed
+  :c,e2ts:c,e2dsa
+  :c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
+  :c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
+  :c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires

 see res/yt-ipr.user.js for the example userscript to go with this
 """
@@ -30,19 +37,36 @@ def main():
     txt = "{" + txt.split("{", 1)[1]

     try:
-        obj = json.loads(txt)
+        pd = json.loads(txt)
     except json.decoder.JSONDecodeError as ex:
-        obj = json.loads(txt[: ex.pos])
+        pd = json.loads(txt[: ex.pos])

-    # print(json.dumps(obj, indent=2))
+    # print(json.dumps(pd, indent=2))

-    vd = obj["videoDetails"]
-    sd = obj["streamingData"]
+    if "videoDetails" in pd:
+        parse_youtube(pd)
+    else:
+        parse_freg(pd)
+
+
+def get_expiration(url):
+    et = re.search(r"[?&]expire=([0-9]+)", url).group(1)
+    et = datetime.utcfromtimestamp(int(et))
+    return et.strftime("%Y-%m-%d, %H:%M")
+
+
+def parse_youtube(pd):
+    vd = pd["videoDetails"]
+    sd = pd["streamingData"]

     et = sd["adaptiveFormats"][0]["url"]
-    et = re.search(r"[?&]expire=([0-9]+)", et).group(1)
-    et = datetime.utcfromtimestamp(int(et))
-    et = et.strftime("%Y-%m-%d, %H:%M")
+    et = get_expiration(et)
+
+    mf = []
+    if "dashManifestUrl" in sd:
+        mf.append("dash")
+    if "hlsManifestUrl" in sd:
+        mf.append("hls")

     r = {
         "yt-id": vd["videoId"],
@@ -52,10 +76,123 @@ def main():
         "yt-views": vd["viewCount"],
         "yt-private": vd["isPrivate"],
         # "yt-expires": sd["expiresInSeconds"],
+        "yt-manifest": ",".join(mf),
         "yt-expires": et,
     }
     print(json.dumps(r))

+    freg_conv(pd)
+
+
+def parse_freg(pd):
+    md = pd["metadata"]
+    r = {
+        "yt-id": md["id"],
+        "yt-title": md["title"],
+        "yt-author": md["channelName"],
+        "yt-channel": md["channelURL"].strip("/").split("/")[-1],
+        "yt-expires": get_expiration(list(pd["video"].values())[0]),
+    }
+    print(json.dumps(r))
+
+
+def freg_conv(pd):
+    # based on getURLs.js v1.5 (2021-08-07)
+    # fmt: off
+    priority = {
+        "video": [
+            337, 315, 266, 138,  # 2160p60
+            313, 336,  # 2160p
+            308,  # 1440p60
+            271, 264,  # 1440p
+            335, 303, 299,  # 1080p60
+            248, 169, 137,  # 1080p
+            334, 302, 298,  # 720p60
+            247, 136  # 720p
+        ],
+        "audio": [
+            251, 141, 171, 140, 250, 249, 139
+        ]
+    }
+
+    vid_id = pd["videoDetails"]["videoId"]
+    chan_id = pd["videoDetails"]["channelId"]
+
+    try:
+        thumb_url = pd["microformat"]["playerMicroformatRenderer"]["thumbnail"]["thumbnails"][0]["url"]
+        start_ts = pd["microformat"]["playerMicroformatRenderer"]["liveBroadcastDetails"]["startTimestamp"]
+    except:
+        thumb_url = f"https://img.youtube.com/vi/{vid_id}/maxresdefault.jpg"
+        start_ts = ""
+
+    # fmt: on
+
+    metadata = {
+        "title": pd["videoDetails"]["title"],
+        "id": vid_id,
+        "channelName": pd["videoDetails"]["author"],
+        "channelURL": "https://www.youtube.com/channel/" + chan_id,
+        "description": pd["videoDetails"]["shortDescription"],
+        "thumbnailUrl": thumb_url,
+        "startTimestamp": start_ts,
+    }
+
+    if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
+        print(f"malicious json", file=sys.stderr)
+        return
+
+    basepath = os.path.dirname(sys.argv[1])
+
+    thumb_fn = f"{basepath}/{vid_id}.jpg"
+    tmp_fn = f"{thumb_fn}.{os.getpid()}"
+    if not os.path.exists(thumb_fn) and (
+        thumb_url.startswith("https://img.youtube.com/vi/")
+        or thumb_url.startswith("https://i.ytimg.com/vi/")
+    ):
+        try:
+            with urllib.request.urlopen(thumb_url) as fi:
+                with open(tmp_fn, "wb") as fo:
+                    fo.write(fi.read())
+
+            os.rename(tmp_fn, thumb_fn)
+        except:
+            if os.path.exists(tmp_fn):
+                os.unlink(tmp_fn)
+
+    try:
+        with open(thumb_fn, "rb") as f:
+            thumb = base64.b64encode(f.read()).decode("ascii")
+    except:
+        thumb = "/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k="
+
+    metadata["thumbnail"] = "data:image/jpeg;base64," + thumb
+
+    ret = {
+        "metadata": metadata,
+        "version": "1.5",
+        "createTime": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
+    }
+
+    for stream, itags in priority.items():
+        for itag in itags:
+            url = None
+            for afmt in pd["streamingData"]["adaptiveFormats"]:
+                if itag == afmt["itag"]:
+                    url = afmt["url"]
+                    break
+
+            if url:
+                ret[stream] = {itag: url}
+                break
+
+    fn = f"{basepath}/{vid_id}.urls.json"
+    with open(fn, "w", encoding="utf-8", errors="replace") as f:
+        f.write(json.dumps(ret, indent=4))
+

 if __name__ == "__main__":
-    main()
+    try:
+        main()
+    except:
+        # raise
+        pass
```
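copyparty hands mtp parsers the uploaded file's path as the first argument (the script reads `sys.argv[1]` above), so the parser can also be tried by hand on a capture the userscript has already uploaded; a sketch, with a purely hypothetical filename:

```sh
# any json saved by the userscript into the volume should work
python3 bin/mtag/yt-ipr.py srv/ytm/2021-08/09-12/some-upload.json
```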
bin/prisonparty.sh (new executable file, 94 lines)

```bash
#!/bin/bash
set -e

# runs copyparty (or any other python script really) in a chroot
#
# assumption: these directories, and everything within, are owned by root
sysdirs=(bin lib lib32 lib64 sbin usr)


# error-handler
help() { cat <<'EOF'

usage:
  ./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"

example:
  ./prisonparty.sh /var/jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"

EOF
	exit 1
}


# read arguments
trap help EXIT
jail="$1"; shift
uid="$1"; shift
gid="$1"; shift

vols=()
while true; do
	v="$1"; shift
	[ "$v" = -- ] && break  # end of volumes
	[ "$#" -eq 0 ] && break  # invalid usage
	vols+=("$v")
done
cpp="$1"; shift
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
trap - EXIT


# debug/vis
echo
echo "chroot-dir = $jail"
echo "user:group = $uid:$gid"
echo " copyparty = $cpp"
echo
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
for v in "${vols[@]}"; do
	printf '\033[36m ├─\033[0m %s \033[36m ── added by (You)\033[0m\n' "$v"
done
printf '\033[36m ├─\033[0m %s \033[36m ── where the copyparty binary is\033[0m\n' "$cppdir"
printf '\033[36m ╰─\033[0m %s \033[36m ── the folder you are currently in\033[0m\n' "$PWD"
vols+=("$cppdir" "$PWD")
echo


# resolve and remove trailing slash
jail="$(realpath "$jail")"
jail="${jail%/}"


# bind-mount system directories and volumes
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
while IFS= read -r v; do
	[ -e "/$v" ] || {
		# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
		continue
	}
	mkdir -p "$jail/$v"
	mount | grep -qF " on $jail/$v " ||
		mount --bind /$v "$jail/$v"
done


# create a tmp
mkdir -p "$jail/tmp"
chown -R "$uid:$gid" "$jail/tmp"


# run copyparty
/sbin/chroot --userspec=$uid:$gid "$jail" "$(which python3)" "$cpp" "$@" && rv=0 || rv=$?


# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
	echo "chroot is in use, will not cleanup" ||
{
	mount | grep -F " on $jail" |
	awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
	LC_ALL=C sort -r | tr '\n' '\0' | xargs -r0 umount
}
exit $rv
```
```diff
@@ -25,6 +25,28 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
 MACOS = platform.system() == "Darwin"


+def get_unix_home():
+    try:
+        v = os.environ["XDG_CONFIG_HOME"]
+        if not v:
+            raise Exception()
+        ret = os.path.normpath(v)
+        os.listdir(ret)
+        return ret
+    except:
+        pass
+
+    try:
+        v = os.path.expanduser("~/.config")
+        if v.startswith("~"):
+            raise Exception()
+        ret = os.path.normpath(v)
+        os.listdir(ret)
+        return ret
+    except:
+        return "/tmp"
+
+
 class EnvParams(object):
     def __init__(self):
         self.t0 = time.time()
@@ -37,10 +59,7 @@ class EnvParams(object):
         elif sys.platform == "darwin":
             self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
         else:
-            self.cfg = os.path.normpath(
-                os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
-                + "/copyparty"
-            )
+            self.cfg = get_unix_home() + "/copyparty"

         self.cfg = self.cfg.replace("\\", "/")
         try:
```
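On non-mac unixes the config location now resolves through `get_unix_home()`: it follows `XDG_CONFIG_HOME` when that is set and readable, falls back to `~/.config`, and finally to `/tmp`. A quick sketch of steering it, assuming the target directory already exists (the function lists it to verify access):

```sh
# put copyparty's config/state under /srv/copyparty-cfg instead of ~/.config
XDG_CONFIG_HOME=/srv/copyparty-cfg python3 copyparty-sfx.py
# config then lands in /srv/copyparty-cfg/copyparty
```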
```diff
@@ -210,9 +210,9 @@ def run_argparse(argv, formatter):
 dedent(
 """
 -a takes username:password,
--v takes src:dst:perm1:perm2:permN:cflag1:cflag2:cflagN:...
+-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
 where "perm" is "accesslevels,username1,username2,..."
-and "cflag" is config flags to set on this volume
+and "volflag" is config flags to set on this volume

 list of accesslevels:
   "r" (read): list folder contents, download files
@@ -220,7 +220,7 @@ def run_argparse(argv, formatter):
   "m" (move): move files and folders; need "w" at destination
   "d" (delete): permanently delete files and folders

-too many cflags to list here, see the other sections
+too many volflags to list here, see the other sections

 example:\033[35m
   -a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
@@ -241,11 +241,11 @@ def run_argparse(argv, formatter):
 ),
 ],
 [
-"cflags",
+"flags",
-"list of cflags",
+"list of volflags",
 dedent(
 """
-cflags are appended to volume definitions, for example,
+volflags are appended to volume definitions, for example,
 to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
   \033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub

@@ -264,13 +264,15 @@ def run_argparse(argv, formatter):
   (moves all uploads into the specified folder structure)
   \033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
   \033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
+  \033[36mlifetime=3600\033[35m uploads are deleted after 1 hour

 \033[0mdatabase, general:
-  \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* cflags)
+  \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
   \033[36md2t\033[35m disables metadata collection, overrides -e2t*
   \033[36md2d\033[35m disables all database stuff, overrides -e2*
   \033[36mdhash\033[35m disables file hashing on initial scans, also ehash
   \033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
+  \033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage

 \033[0mdatabase, audio tags:
   "mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
@@ -353,6 +355,7 @@ def run_argparse(argv, formatter):
     ap2.add_argument("-nih", action="store_true", help="no info hostname")
     ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
+    ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")

     ap2 = ap.add_argument_group('safety options')
     ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
@@ -391,7 +394,7 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
     ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
     ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
-    ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval (0=off)")
+    ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
    ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")

    ap2 = ap.add_argument_group('metadata db options')
```
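With the new `scan` volflag, the rescan interval can differ per volume instead of relying on the global `--re-maxage`; a minimal sketch, with an illustrative path and interval (the global default stays off):

```sh
# rescan only the music volume, every 60 seconds
python3 copyparty-sfx.py -v /mnt/nas/music:music:r:c,e2dsa:c,scan=60
```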
```diff
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (0, 13, 0)
+VERSION = (0, 13, 1)
 CODENAME = "future-proof"
-BUILD_DT = (2021, 8, 8)
+BUILD_DT = (2021, 8, 9)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
```
```diff
@@ -25,6 +25,9 @@ from .util import (
 from .bos import bos


+LEELOO_DALLAS = "leeloo_dallas"
+
+
 class AXS(object):
     def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
         self.uread = {} if uread is None else {k: 1 for k in uread}
@@ -327,7 +330,7 @@ class VFS(object):
             [will_move, c.umove, "move"],
             [will_del, c.udel, "delete"],
         ]:
-            if req and (uname not in d and "*" not in d):
+            if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
                 m = "you don't have {}-access for this location"
                 raise Pebkac(403, m.format(msg))

@@ -554,6 +557,9 @@ class AuthSrv(object):

     def _read_vol_str(self, lvl, uname, axs, flags):
         # type: (str, str, AXS, any) -> None
+        if lvl.strip("crwmd"):
+            raise Exception("invalid volume flag: {},{}".format(lvl, uname))
+
         if lvl == "c":
             cval = True
             if "=" in uname:
@@ -709,6 +715,9 @@ class AuthSrv(object):
                 )
                 raise Exception("invalid config")

+        if LEELOO_DALLAS in all_users:
+            raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
+
         promote = []
         demote = []
         for vol in vfs.all_vols.values():
@@ -807,6 +816,11 @@ class AuthSrv(object):
             if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
                 vol.flags["gz"] = False  # def.pk

+            if "scan" in vol.flags:
+                vol.flags["scan"] = int(vol.flags["scan"])
+            elif self.args.re_maxage:
+                vol.flags["scan"] = self.args.re_maxage
+
         all_mte = {}
         errors = False
         for vol in vfs.all_vols.values():
```
```diff
@@ -112,7 +112,7 @@ class HttpCli(object):
             self.http_ver = "HTTP/1.1"
             # self.log("pebkac at httpcli.run #1: " + repr(ex))
             self.keepalive = False
-            self.loud_reply(unicode(ex), status=ex.code)
+            self.loud_reply(unicode(ex), status=ex.code, volsan=True)
             return self.keepalive

             # time.sleep(0.4)
@@ -224,19 +224,24 @@ class HttpCli(object):
             else:
                 raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))

-        except Pebkac as ex:
+        except Exception as ex:
+            pex = ex
+            if not hasattr(ex, "code"):
+                pex = Pebkac(500)
+
             try:
-                # self.log("pebkac at httpcli.run #2: " + repr(ex))
                 post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
-                if not self._check_nonfatal(ex, post):
+                if not self._check_nonfatal(pex, post):
                     self.keepalive = False

-                self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
+                msg = str(ex) if pex == ex else min_ex()
+                self.log("{}\033[0m, {}".format(msg, self.vpath), 3)
+
                 msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
                 if self.hint:
                     msg += "hint: {}\r\n".format(self.hint)

-                self.reply(msg.encode("utf-8", "replace"), status=ex.code)
+                self.reply(msg.encode("utf-8", "replace"), status=pex.code, volsan=True)
                 return self.keepalive
             except Pebkac:
                 return False
@@ -269,8 +274,12 @@ class HttpCli(object):
         except:
             raise Pebkac(400, "client d/c while replying headers")

-    def reply(self, body, status=200, mime=None, headers=None):
+    def reply(self, body, status=200, mime=None, headers=None, volsan=False):
         # TODO something to reply with user-supplied values safely
+
+        if volsan:
+            body = vol_san(self.asrv.vfs.all_vols.values(), body)
+
         self.send_headers(len(body), status, mime, headers)

         try:
@@ -1029,7 +1038,7 @@ class HttpCli(object):
                 raise

         except Pebkac as ex:
-            errmsg = unicode(ex)
+            errmsg = volsan(self.asrv.vfs.all_vols.values(), unicode(ex))

             td = max(0.1, time.time() - t0)
             sz_total = sum(x[0] for x in files)
```
```diff
@@ -36,7 +36,7 @@ from .util import (
     min_ex,
 )
 from .bos import bos
-from .authsrv import AuthSrv
+from .authsrv import AuthSrv, LEELOO_DALLAS
 from .mtag import MTag, MParser

 try:
@@ -176,36 +176,71 @@ class Up2k(object):
         return None

     def _sched_rescan(self):
-        maxage = self.args.re_maxage
         volage = {}
         while True:
             time.sleep(self.args.re_int)
             now = time.time()
-            vpaths = list(sorted(self.asrv.vfs.all_vols.keys()))
             with self.mutex:
-                if maxage:
-                    for vp in vpaths:
-                        if vp not in volage:
-                            volage[vp] = now
+                for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
+                    maxage = vol.flags.get("scan")
+                    if not maxage:
+                        continue

-                        if now - volage[vp] >= maxage:
-                            self.need_rescan[vp] = 1
+                    if vp not in volage:
+                        volage[vp] = now

-                if not self.need_rescan:
-                    continue
+                    if now - volage[vp] >= maxage:
+                        self.need_rescan[vp] = 1

                 vols = list(sorted(self.need_rescan.keys()))
                 self.need_rescan = {}

-            err = self.rescan(self.asrv.vfs.all_vols, vols)
-            if err:
-                for v in vols:
-                    self.need_rescan[v] = True
+            if vols:
+                err = self.rescan(self.asrv.vfs.all_vols, vols)
+                if err:
+                    for v in vols:
+                        self.need_rescan[v] = True
+
+                    continue
+
+                for v in vols:
+                    volage[v] = now

+            if self.args.no_lifetime:
                 continue

-            for v in vols:
-                volage[v] = now
+            for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
+                lifetime = vol.flags.get("lifetime")
+                if not lifetime:
+                    continue
+
+                cur = self.cur.get(vol.realpath)
+                if not cur:
+                    continue
+
+                nrm = 0
+                deadline = time.time() - int(lifetime)
+                q = "select rd, fn from up where at > 0 and at < ? limit 100"
+                while True:
+                    with self.mutex:
+                        hits = cur.execute(q, (deadline,)).fetchall()
+
+                    if not hits:
+                        break
+
+                    for rd, fn in hits:
+                        if rd.startswith("//") or fn.startswith("//"):
+                            rd, fn = s3dec(rd, fn)
+
+                        fvp = "{}/{}".format(rd, fn).strip("/")
+                        if vp:
+                            fvp = "{}/{}".format(vp, fvp)
+
+                        self._handle_rm(LEELOO_DALLAS, None, fvp)
+                        nrm += 1
+
+                if nrm:
+                    self.log("{} files graduated in {}".format(nrm, vp))

     def _vis_job_progress(self, job):
         perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
```
```diff
@@ -344,6 +344,15 @@ def log_thrs(log, ival, name):
     log(name, "\033[0m \033[33m".join(tv), 3)


+def vol_san(vols, txt):
+    for vol in vols:
+        txt = txt.replace(
+            vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8")
+        )
+
+    return txt
+
+
 def min_ex():
     et, ev, tb = sys.exc_info()
     tb = traceback.extract_tb(tb)
```
```diff
@@ -923,7 +923,9 @@ function up2k_init(subtle) {
     }
     return handshakes_permitted() && 0 ==
         st.todo.handshake.length +
-        st.busy.handshake.length;
+        st.busy.handshake.length +
+        st.todo.upload.length +
+        st.busy.upload.length;
 }

 var tasker = (function () {
```
```diff
@@ -239,7 +239,8 @@ awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/
 tmv "$f"

 # up2k goes from 28k to 22k laff
-echo entabbening
+awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
+echo entabbening &&
 find | grep -E '\.css$' | while IFS= read -r f; do
   awk '{
     sub(/^[ \t]+/,"");
@@ -253,6 +254,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
   ' <$f | sed 's/;\}$/}/' >t
   tmv "$f"
 done
+unexpand -h 2>/dev/null &&
 find | grep -E '\.(js|html)$' | while IFS= read -r f; do
   unexpand -t 4 --first-only <"$f" >t
   tmv "$f"
```
```diff
@@ -364,7 +364,7 @@ def confirm(rv):
     except:
         pass

-    sys.exit(rv)
+    sys.exit(rv or 1)


 def run(tmp, j2):
```
```diff
@@ -39,6 +39,7 @@ class Cfg(Namespace):
         no_scandir=False,
         no_sendfile=True,
         no_rescan=True,
+        re_maxage=0,
         ihead=False,
         nih=True,
         mtp=[],
```

```diff
@@ -26,6 +26,7 @@ class Cfg(Namespace):
     "no_hash": False,
     "css_browser": None,
     "no_voldump": True,
+    "re_maxage": 0,
     "rproxy": 0,
 }
 ex.update(ex2)
```