override domain for uploads (closes #255);

`up-site` will override the scheme and domain (https://example.com/)
in the links to newly uploaded files, making it possible to upload a
file from a LAN IP while obtaining an external URL in return.
This commit is contained in:
ed
2026-01-18 00:30:46 +00:00
parent 41d3bae929
commit d925553810
7 changed files with 42 additions and 26 deletions

View File

@@ -1229,6 +1229,7 @@ def add_upload(ap):
ap2 = ap.add_argument_group("upload options") ap2 = ap.add_argument_group("upload options")
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip") ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
ap2.add_argument("--up-site", metavar="URL", type=u, default="--site", help="public URL to assume when creating links to uploaded files; example: [\033[32mhttps://example.com/\033[0m]")
ap2.add_argument("--put-name", metavar="TXT", type=u, default="put-{now.6f}-{cip}.bin", help="filename for nameless uploads (when uploader doesn't provide a name); default is [\033[32mput-UNIXTIME-IP.bin\033[0m] (the \033[32m.6f\033[0m means six decimal places) (volflag=put_name)") ap2.add_argument("--put-name", metavar="TXT", type=u, default="put-{now.6f}-{cip}.bin", help="filename for nameless uploads (when uploader doesn't provide a name); default is [\033[32mput-UNIXTIME-IP.bin\033[0m] (the \033[32m.6f\033[0m means six decimal places) (volflag=put_name)")
ap2.add_argument("--put-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for PUT/WebDAV uploads: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=put_ck)") ap2.add_argument("--put-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for PUT/WebDAV uploads: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=put_ck)")
ap2.add_argument("--bup-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for bup/basic-uploader: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=bup_ck)") ap2.add_argument("--bup-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for bup/basic-uploader: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=bup_ck)")

View File

@@ -2684,11 +2684,20 @@ class HttpCli(object):
vpath = "/".join([x for x in [vfs.vpath, rem, fn] if x]) vpath = "/".join([x for x in [vfs.vpath, rem, fn] if x])
vpath = quotep(vpath) vpath = quotep(vpath)
url = "{}://{}/{}".format( if self.args.up_site:
"https" if self.is_https else "http", url = "%s%s%s" % (
self.host, self.args.up_site,
self.args.RS + vpath + vsuf, vpath,
) vsuf,
)
else:
url = "%s://%s/%s%s%s" % (
"https" if self.is_https else "http",
self.host,
self.args.RS,
vpath,
vsuf,
)
return post_sz, halg, sha_hex, sha_b64, remains, path, url return post_sz, halg, sha_hex, sha_b64, remains, path, url
@@ -2962,7 +2971,7 @@ class HttpCli(object):
raise Pebkac(500, t % zt) raise Pebkac(500, t % zt)
ret["purl"] = vp_req + ret["purl"][len(vp_vfs) :] ret["purl"] = vp_req + ret["purl"][len(vp_vfs) :]
if self.is_vproxied: if self.is_vproxied and not self.args.up_site:
if "purl" in ret: if "purl" in ret:
ret["purl"] = self.args.SR + ret["purl"] ret["purl"] = self.args.SR + ret["purl"]
@@ -3840,9 +3849,9 @@ class HttpCli(object):
errmsg = "ERROR: " + errmsg errmsg = "ERROR: " + errmsg
if halg: if halg:
file_fmt = '{0}: {1} // {2} // {3} bytes // <a href="/{4}">{5}</a> {6}\n' file_fmt = '{0}: {1} // {2} // {3} bytes // <a href="{4}">{5}</a> {6}\n'
else: else:
file_fmt = '{3} bytes // <a href="/{4}">{5}</a> {6}\n' file_fmt = '{3} bytes // <a href="{4}">{5}</a> {6}\n'
for sz, sha_hex, sha_b64, ofn, lfn, ap in files: for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
vsuf = "" vsuf = ""
@@ -3860,25 +3869,31 @@ class HttpCli(object):
if "media" in self.uparam or "medialinks" in vfs.flags: if "media" in self.uparam or "medialinks" in vfs.flags:
vsuf += "&v" if vsuf else "?v" vsuf += "&v" if vsuf else "?v"
vpath = "{}/{}".format(upload_vpath, lfn).strip("/") vpath = vjoin(upload_vpath, lfn)
rel_url = quotep(self.args.RS + vpath) + vsuf if self.args.up_site:
ah_url = j_url = self.args.up_site + quotep(vpath) + vsuf
rel_url = "/" + j_url.split("//", 1)[-1].split("/", 1)[-1]
else:
ah_url = rel_url = "/%s%s%s" % (self.args.RS, quotep(vpath), vsuf)
j_url = "%s://%s%s" % (
"https" if self.is_https else "http",
self.host,
rel_url,
)
msg += file_fmt.format( msg += file_fmt.format(
halg, halg,
sha_hex[:56], sha_hex[:56],
sha_b64, sha_b64,
sz, sz,
rel_url, ah_url,
html_escape(ofn, crlf=True), html_escape(ofn, crlf=True),
vsuf, vsuf,
) )
# truncated SHA-512 prevents length extension attacks; # truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
jpart = { jpart = {
"url": "{}://{}/{}".format( "url": j_url,
"https" if self.is_https else "http",
self.host,
rel_url,
),
"sz": sz, "sz": sz,
"fn": lfn, "fn": lfn,
"fn_orig": ofn, "fn_orig": ofn,

View File

@@ -315,7 +315,7 @@ class SvcHub(object):
args.doctitle = args.doctitle.replace("--name", args.vname) args.doctitle = args.doctitle.replace("--name", args.vname)
args.bname = args.bname.replace("--name", args.vname) or args.vname args.bname = args.bname.replace("--name", args.vname) or args.vname
for zs in ("shr_site",): for zs in "shr_site up_site".split():
if getattr(args, zs) == "--site": if getattr(args, zs) == "--site":
setattr(args, zs, args.site) setattr(args, zs, args.site)

View File

@@ -6525,7 +6525,7 @@ var search_ui = (function () {
if (ext.length > 8) if (ext.length > 8)
ext = '%'; ext = '%';
var links = linksplit(r.rp + '', id).join('<span>/</span>'), var links = linksplit(r.rp + '', null, id).join('<span>/</span>'),
nodes = ['<tr><td>-</td><td><div>' + links + nodes = ['<tr><td>-</td><td><div>' + links +
'</div></td><td sortv="' + sz + '">' + hsz]; '</div></td><td sortv="' + sz + '">' + hsz];

View File

@@ -1535,7 +1535,7 @@ function up2k_init(subtle) {
pvis.addfile([ pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit( uc.fsearch ? esc(entry.name) : linksplit(
entry.purl + uricom_enc(entry.name)).join(' / '), entry.purl + uricom_enc(entry.name), window.up_site).join(' / '),
'📐 ' + L.u_hashing, '📐 ' + L.u_hashing,
'' ''
], entry.size, draw_each); ], entry.size, draw_each);
@@ -1575,8 +1575,8 @@ function up2k_init(subtle) {
more_one_file(); more_one_file();
function linklist() { function linklist() {
var ret = [], var ret = [],
base = location.origin.replace(/\/$/, ''); base = (window.up_site || location.origin).replace(/\/$/, '');
for (var a = 0; a < st.files.length; a++) { for (var a = 0; a < st.files.length; a++) {
var t = st.files[a], var t = st.files[a],
@@ -2562,7 +2562,7 @@ function up2k_init(subtle) {
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b', cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff; sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>'); msg.push(linksplit(hit.rp, window.up_site).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
} }
msg = msg.join('<br />\n'); msg = msg.join('<br />\n');
} }
@@ -2596,7 +2596,7 @@ function up2k_init(subtle) {
url += '?k=' + fk; url += '?k=' + fk;
} }
pvis.seth(t.n, 0, linksplit(url).join(' / ')); pvis.seth(t.n, 0, linksplit(url, window.up_site).join(' / '));
} }
var chunksize = get_chunksize(t.size), var chunksize = get_chunksize(t.size),

View File

@@ -766,9 +766,9 @@ function assert_vp(path) {
} }
function linksplit(rp, id) { function linksplit(rp, base, id) {
var ret = [], var ret = [],
apath = '/', apath = base || '/',
q = null; q = null;
if (rp && rp.indexOf('?') + 1) { if (rp && rp.indexOf('?') + 1) {

View File

@@ -164,7 +164,7 @@ class Cfg(Namespace):
ex = "ctl_re db_act forget_ip idp_cookie idp_store k304 loris no304 nosubtle qr_pin qr_wait re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo u2ow zipmaxn zipmaxs" ex = "ctl_re db_act forget_ip idp_cookie idp_store k304 loris no304 nosubtle qr_pin qr_wait re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo u2ow zipmaxn zipmaxs"
ka.update(**{k: 0 for k in ex.split()}) ka.update(**{k: 0 for k in ex.split()})
ex = "ah_alg bname chdir chmod_f chpw_db db_xattr doctitle df epilogues exit favico ipa ipar html_head html_head_d html_head_s idp_login idp_logout lg_sba lg_sbf log_date log_fk md_sba md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i opds_exts preadmes prologues readmes shr tcolor textfiles txt_eol ufavico ufavico_h unlist vname xff_src zipmaxt R RS SR" ex = "ah_alg bname chdir chmod_f chpw_db db_xattr doctitle df epilogues exit favico ipa ipar html_head html_head_d html_head_s idp_login idp_logout lg_sba lg_sbf log_date log_fk md_sba md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i opds_exts preadmes prologues readmes shr shr_site site tcolor textfiles txt_eol ufavico ufavico_h unlist up_site vname xff_src zipmaxt R RS SR"
ka.update(**{k: "" for k in ex.split()}) ka.update(**{k: "" for k in ex.split()})
ex = "apnd_who ban_403 ban_404 ban_422 ban_pw ban_pwc ban_url dont_ban cachectl http_vary rss_fmt_d rss_fmt_t spinner" ex = "apnd_who ban_403 ban_404 ban_422 ban_pw ban_pwc ban_url dont_ban cachectl http_vary rss_fmt_d rss_fmt_t spinner"