mirror of
https://github.com/notwa/rc
synced 2024-11-05 07:19:02 -08:00
#!/usr/bin/env sh
# compat: +ash +bash +dash +zsh

__notice_warn() {
	printf >&2 'notice: %s\n' "$*"
}

__notice_curl() {
	# probe: an older curl exits with status 2 on the unrecognized
	# --no-progress-meter option, so fall back to plain -sS there.
	curl -q --no-progress-meter 127.255.255.255 >/dev/null 2>&1
	if [ $? = 2 ]
	then __notice_curl() { curl -fsS "$@"; }
	else __notice_curl() { curl -f --no-progress-meter "$@"; }
	fi
	__notice_curl "$@"
}

__notice_commaize() { # fun with POSIX shell
	while [ -n "$1" ]; do
		set -- "${1%???}" "${1#"${1%???}"},$2" "$1"
		if [ "$1" = "$3" ]; then
			set -- "" "$1$2"
			break
		fi
	done
	REPLY="${2%,}"
}
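
# usage sketch (illustrative, not executed): this peels three digits at a
# time off the right of its argument and rebuilds the string with commas,
# leaving the result in REPLY:
#   __notice_commaize 1234567   # REPLY=1,234,567
#   __notice_commaize 999       # REPLY=999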

__notice_escape() {
	# doesn't work in posh. it doesn't like "${1%%[$2]*}", bug?
	# warning: mksh is O(n^2) with string length!
	# note: bash is horribly slow without `export LC_ALL=C`. zsh is fastest.
	set -- "" "" "$1" "$2"
	while :; do
		set -- "$1" "${3%%[$4]*}" "$3" "$4"
		[ "$2" != "$3" ] || break
		set -- "$1$2\\" "${3#"$2"}" "$3" "$4"
		set -- "$1${2%"${2#?}"}" "$2" "${2#?}" "$4"
	done
	REPLY="$1$3"
}
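
# usage sketch (illustrative): backslash-escapes every occurrence in $1 of
# any character from the set given in $2, leaving the result in REPLY:
#   __notice_escape 'a*b_c' '_*'   # REPLY=a\*b\_c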

__notice_urlencode() {
	while test "$1"; do
		case "$1" in
		([a-zA-Z0-9~_.-]*) printf '%.1s' "$1";; # RFC 3986
		(*) printf '%%%02X' "'$1";; # bash complains but works
		esac
		set -- "${1#?}"
	done
} 2>&- # send bash's complaints to the abyss

__notice_urlencode2() {
	while test "$1"; do
		case "$1" in
		([:/a-zA-Z0-9~_.-]*) printf '%.1s' "$1";; # : and / added here
		(*) printf '%%%02X' "'$1";; # bash complains but works
		esac
		set -- "${1#?}"
	done
} 2>&- # send bash's complaints to the abyss
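
# usage sketch (illustrative): both helpers write the encoded string to
# stdout (they do not set REPLY); the second leaves ':' and '/' intact so it
# can be applied to whole URLs:
#   __notice_urlencode 'hello world.txt'     # hello%20world.txt
#   __notice_urlencode2 'https://x/a file'   # https://x/a%20file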

__notice_generate_id() {
	: "${1:?missing seed}"
	unset REPLY

	case "$1" in
	# when "$(date -u +%s)" is the argument, this will last until 2096.
	(*[!0-9]*) false;; (???????????*) false;; ([4-9]?????????) false;;
	esac || return

	# use REPLY as a temporary variable, since we'll overwrite it later anyway.
	REPLY="$(( (2250438373 * $1 + 1) & 4294967295 ))" # shuffle IDs
	# all this checking for negatives is due to mksh which overflows an i32.
	REPLY="$(( (REPLY < 0 ? REPLY + 216313691 : REPLY) % 815730721 ))"
	REPLY="$(( REPLY < 0 ? REPLY + 815730721 : REPLY ))"

	set -- "$REPLY"
	REPLY=
	while [ "${#REPLY}" -lt 8 ]; do
		case "$(($1%13))" in
		(0) REPLY="${REPLY}a";; (1) REPLY="${REPLY}c";; (2) REPLY="${REPLY}e";;
		(3) REPLY="${REPLY}m";; (4) REPLY="${REPLY}n";; (5) REPLY="${REPLY}o";;
		(6) REPLY="${REPLY}r";; (7) REPLY="${REPLY}s";; (8) REPLY="${REPLY}u";;
		(9) REPLY="${REPLY}v";; (10) REPLY="${REPLY}w";; (11) REPLY="${REPLY}x";;
		(12) REPLY="${REPLY}z";;
		esac
		set -- "$(($1/13))"
	done
}
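
# usage sketch (illustrative): given a decimal seed such as a Unix timestamp,
# this derives a deterministic 8-character ID over the 13-letter alphabet
# "acemnorsuvwxz"; seeds that are not 1-10 digits (or that would be
# timestamps past roughly 2096) are rejected with a nonzero status:
#   __notice_generate_id "$(date -u +%s)" && printf '%s\n' "$REPLY"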

__notice_log() {
	printf '## [%s](%s)\\n\\nUploaded %s byte%s to %s [(aux)](%s) with hashes:\\n* sha1sum: `%s`' \
		"$sanitized" "$normal" "$pretty" "$fancy" "$target" "$aux" "$sha1"
}
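
# illustrative output (hypothetical values); the \n stay as literal
# backslash-n pairs because the string later lands inside a JSON payload:
#   ## [foo.txt](https://host/id/foo.txt)\n\nUploaded 12,345 bytes (12 KiB) to host [(aux)](https://host/id) with hashes:\n* sha1sum: `da39a3ee...`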

# __notice_upload_to_...
# (SLOW) com_cockfile          134217728+???      or  128 MiB (wtf is going on here)
# (slow) io_file               2000000000-200-4-x or 1907 MiB (untested)
#        sh_envs               536870912-200-4-x  or  512 MiB (untested)
#        se_uguu               67108864-200-7-x   or   64 MiB (tested)
#        org_c_net_paste       50000000           or   47 MiB (tested)
#        at_oshi               5000000000-200-1-x or 4768 MiB (untested)
#        net_filebin           unlimited?
#        com_bashupload        50000000000        or   47 GiB (untested)
# (fast) at_x0                 232783872-200-4-x  or  222 MiB (tested) (used to be 100 MiB)
# (FAST) moe_catbox_litterbox  1000000000         or    1 GB (untested)

# NOTE: for sites that use `curl -F` to upload, you automatically lose
# at least 200 bytes to multipart form-data. names count toward this.

# EXAMPLE: curl -F files[]=@myfile https://example.com
# let's say example.com advertises a file size limit of 1000 bytes.
# you lose 200, then another 7 for "files[]", then another 6 for "myfile".
# therefore the maximum size that "myfile" can be is instead 787.
__notice_compute_form_limit() {
	# usage: $0 {FIELD NAME} {FILE NAME} {FILE SIZE} {SIZE LIMIT}
	# returns 1 when over the limit.
	return "$(( ($3) + 200 + (${#1}) + (${#2}) > ($4) ))"
}
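
# tying this to the example above (illustrative, not executed):
#   __notice_compute_form_limit "files[]" "myfile" 787 1000   # returns 0, fits
#   __notice_compute_form_limit "files[]" "myfile" 788 1000   # returns 1, too big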

__notice_upload_to_org_c_net_paste() {
	target=paste.c-net.org
	# defaults: 180 days, permanent (remains after download)
	# TODO: retrieve deletion key from json response (see docs).
	# WARNING: there seems to be a bug where files with a hash collision
	# are not uploaded, and you get someone else's file instead!
	[ "${bytes:-0}" -le 50000000 ] || return 128 # approx, untested
	raw="$(__notice_curl -g --data-binary "@$filepath" -H "X-FileName: ${1##*/}" "https://$target")" || return
	normal="https:${raw#*http*:}"; normal="${normal%%[!!-~]*}"
	aux="$normal"
}

__notice_upload_to_io_file() {
	target=file.io
	# defaults: 14 days, ephemeral (deletes after 1 download)
	# wget should use --content-disposition when downloading
	__notice_compute_form_limit "file" "${1##*/}" "${bytes:-0}" 2000000000 || return 128
	raw="$(__notice_curl -F "file=@$1" "https://$target")" || return
	[ "${raw#'{"success":true,"status":200,'}" ] || return
	normal="https:${raw#*\"link\":\"https:}"; normal="${normal%%[\"]*}"
	aux="$normal" # no direct link, i think it's based on User-Agent
}

__notice_upload_to_at_oshi() {
	target=oshi.at
	# defaults: 1 day, semi-permanent (remains up to 1000 downloads?)
	# configured: 14 days instead
	# TODO: retrieve admin URL from response (suffixed with " [Admin]")
	# NOTE: spaces are automatically converted (by the server) to underscores.
	__notice_compute_form_limit "f" "${1##*/}" "${bytes:-0}" 5000000000 || return 128
	raw="$(__notice_curl -F "f=@$1" -F expire=20160 "https://$target")" || return
	normal="https:${raw##*DL: http*:}"; normal="${normal%%[!!-~]*}"
	aux="$normal"
}

__notice_upload_to_com_bashupload() {
	target=bashupload.com
	# defaults: 3 days, ephemeral (deletes after 1 download)
	# it also seems to accept `-F file=blah` multipart form-data
	# note that filenames with spaces are treated as if no filename was given!
	# TODO: when name contains spaces, replace with underscores and append to URL?
	[ "${bytes:-0}" -le 50000000000 ] || return 128 # approx, untested
	# https://$target/${${1##*/}// /%20}
	raw="$(__notice_curl -gT "$1" "https://$target")" || return
	normal="https:${raw#*http*:}"; normal="${normal%%[!!-~]*}"
	aux="$normal?download=1"
}

__notice_upload_to_net_filebin() {
	target=filebin.net
	# defaults: 7 days, permanent (but can be deleted by anyone with the link)
	# note that the site says 6 days, but this is rounded down. the API shows 7 days.
	# spaces are converted to underscores, and perhaps other characters are as well.
	# note that you need curl -L to retrieve the files; they redirect to an s3 store.
	__notice_generate_id "$(date -u +%s)" || return
	aux="https://$target/$REPLY"
	#raw="$(__notice_curl -gfT "$1" "$aux/$(__notice_urlencode ${1##*/})")" || return
	raw="$(__notice_curl -gT "$1" "$aux/")" || return
	normal="${raw##*\"filename\": \"}"; normal="$aux/${normal%%\"*}"
	# optional: lock the bin so nobody can edit it, but anyone can still delete it.
	curl -sSX PUT "$aux" >/dev/null || true
}

__notice_upload_to_at_x0() {
	target=x0.at
	# defaults: 100 days, permanent
	# note that file retention decreases as file size increases.
	# 100 MiB files are kept for 3 days, and 0 byte files are kept for 100 days.
	# filenames are randomized. cannot manually delete nor specify expiry.
	__notice_compute_form_limit "file" "${1##*/}" "${bytes:-0}" 232783872 || return 128
	raw="$(__notice_curl -F "file=@$1" "https://$target")" || return
	normal="$raw"
	aux="$normal"
}

__notice_upload_to_se_uguu() {
	target=uguu.se
	# defaults: 3 hours, permanent
	# filenames are randomized. cannot manually delete nor specify expiry.
	# TODO: use fixed filename to squeeze a couple more bytes out of the limit?
	__notice_compute_form_limit "files[]" "${1##*/}" "${bytes:-0}" 67108864 || return 128
	raw="$(__notice_curl -F "files[]=@$1" "https://$target/upload?output=text")" || return
	normal="$raw"
	aux="$normal"
}

__notice_upload_to_com_cockfile() {
	target=cockfile.com
	# defaults: 12 hours, permanent
	# filenames are randomized. cannot manually delete nor specify expiry.
	# must have a file extension or it gets rejected (415), so use `.bin`.
	__notice_compute_form_limit "files[]" ".bin" "${bytes:-0}" 999999999 || return 128
	raw="$(__notice_curl -F "files[]=@$1;filename=.bin" "https://$target/upload.php?output=text")" || return
	normal="$raw"
	aux="$normal"
}

__notice_upload_to_sh_envs() {
	target=envs.sh
	# configured: 14 days, permanent (TODO: add "defaults" to match other docs)
	# does not remember filenames in any capacity, BUT we can tack on our own to the URL.
	# you can delete files if you extract the X-Token field from the response HTTP headers.
	# banned MIME types: application/java-archive, application/java-vm
	__notice_compute_form_limit "file" "${1##*/}" "${bytes:-0}" 536870912 || return 128
	raw="$(__notice_curl -F "file=@$1" -Fsecret= -Fexpires=336 "https://$target")" || return
	aux="$raw"
	normal="$raw/${1##*/}"
}

__notice_upload_to_moe_catbox_litterbox() {
	target=litterbox.catbox.moe
	# defaults: 1 hour, permanent
	# configured: 3 days, permanent
	# filenames are randomized. cannot manually delete nor specify expiry.
	# SUPER fast. disallowed filetypes: .exe, .scr, .cpl, .doc*, .jar
	__notice_compute_form_limit "fileToUpload" "${1##*/}" "${bytes:-0}" 1000000000 || return 128
	if [ "${1%.exe}" != "$1" ] || [ "${1%.scr}" != "$1" ]; then
		set -- "$1" "fileToUpload=@$1;filename=${1%.???}.com" # bypass
	elif [ "${1%.cpl}" != "$1" ]; then
		set -- "$1" "fileToUpload=@$1;filename=${1%.???}.dll" # bypass
	else
		set -- "$1" "fileToUpload=@$1"
	fi
	raw="$(__notice_curl -F "$2" -Ftime=72h -Freqtype=fileupload "https://$target/resources/internals/api.php")" || return
	normal="$raw"
	aux="$normal"
}

__notice_retrying() {
	case $? in
	(0) return 0;;
	# https://curl.se/libcurl/c/libcurl-errors.html
	(127)
		interrupt=1
		__notice_warn "interrupted, exiting."
		return 0;;
	(128)
		__notice_warn "file unsuitable for $target, skipping to next service..."
		return 1;;
	(*)
		__notice_warn "failed to upload to $target, trying next service..."
		return 1;;
	esac
}
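
# convention used by the chain below (a reading of the code, not documented
# upstream): each __notice_upload_to_* helper returns 128 when the file cannot
# be offered to that service (too large, wrong type) and any other nonzero
# status on a transport failure; __notice_retrying turns both into "try the
# next service", while 0 and 127 stop the chain.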

__notice_upload() {
	unset REPLY aux bytes fancy file filepath interrupt normal pretty raw sanitized sha1 target
	file="${1:-missing argument}"

	file="$(readlink -f "$file")" || return 2 # also converts `\`s to `/`s
	# i have no idea why, but mingw64 curl on msys2 is replacing each unicode codepoint with a question mark.
	# the irony is that it seems to be properly decoding the UTF-8 encoding and then replacing it all anyway.
	[ -d /C ] && filepath="$(LC_ALL= cygpath -ws "$file")" || filepath="$file"

	REPLY="${file##*/}"
	# ksh and mksh dictate that the characters must be in this order:
	__notice_escape "$REPLY" '][!#()*+<>_`{|}\\\-' # removed '.' due to discord
	__notice_escape "$REPLY" '\\"'
	sanitized="$REPLY"

	bytes="$(du -b -- "$file")" || return 2
	sha1="$(sha1sum -- "$file")" || return 2
	bytes="${bytes%%[!!-~]*}"
	sha1="${sha1%%[!!-~]*}"

	__notice_commaize "$bytes"; pretty="$REPLY"
	test "$bytes" -ge 0 || return 2
	# each threshold is 9.5 of its unit, so the rounded figure shown in
	# parentheses is always at least 10.
	if [ "$bytes" -ge 10200547328 ]; then
		fancy="s ($(( (bytes + 536870912) / 1073741824)) GiB)"
	elif [ "$bytes" -ge 9961472 ]; then
		fancy="s ($(( (bytes + 524288) / 1048576)) MiB)"
	elif [ "$bytes" -ge 9728 ]; then
		fancy="s ($(( (bytes + 512) / 1024)) KiB)"
	elif [ "$bytes" -ne 1 ]; then
		fancy="s"
	fi

	REPLY=
	__notice_upload_to_sh_envs "$file" || __notice_retrying ||
	__notice_upload_to_at_oshi "$file" || __notice_retrying ||
	__notice_upload_to_net_filebin "$file" || __notice_retrying ||
	__notice_upload_to_org_c_net_paste "$file" || __notice_retrying ||
	__notice_upload_to_io_file "$file" || __notice_retrying ||
	__notice_upload_to_moe_catbox_litterbox "$file" || __notice_retrying ||
	__notice_upload_to_com_bashupload "$file" || __notice_retrying ||
	{
		__notice_warn "failed to upload to every service, exiting."
		return 1
	}
	[ "$interrupt" != 1 ] || return 127
	normal="$(__notice_urlencode2 "$normal")"
	aux="$(__notice_urlencode2 "$aux")"
	REPLY="$(__notice_log)"
	return 0
}

__notice_read_conf() {
	# TODO: ensure conf file is unreadable by others?
	while read -r line; do
		line="${line%%#*}" # strip comments
		line="${line#"${line%%[! ]*}"}" # strip left
		line="${line%"${line##*[! ]}"}" # strip right
		if [ "${line#gotify=}" != "$line" ]; then
			__notice_push_to_gotify "${line#*=}"
		elif [ "${line#discord=}" != "$line" ]; then
			__notice_push_to_discord "${line#*=}"
		elif [ -n "$line" ]; then
			__notice_warn "warning: ignoring line: $line"
		fi
	done < "$1"
}
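
# a notice.conf might look like this (hypothetical endpoints); one URL per
# line, `#` comments and surrounding spaces are ignored:
#   gotify=https://example.com/gotify/message?token=secret
#   discord=https://discord.com/api/webhooks/{id}/{token}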

__notice() {
	unset LC_ALL REPLY aux code conf extras line message normal title token url
	export LC_ALL=C
	conf="${XDG_CONFIG_HOME:-"$HOME/.config"}/notice.conf"
	lament() { printf >&2 %s\\n "$@"; }

	url='https://eaguru.guru/gotify/message?token='
	if ! [ -e "$conf" ] && [ -e ~/.gotify.secret ]; then
		lament 'notice: warning: ~/.gotify.secret is being phased out in favor of'
		lament '    '"$conf"' containing a list of URLs:'
		lament '    gotify=https://example.com/gotify/message?token=secret'
		read -r token <~/.gotify.secret
	fi

	if [ $# != 1 ] && [ $# != 2 ]; then
		lament 'usage:'
		printf >&2 '  %s %s\n' "$0" '{message}' "$0" '{title} {message}' "$0" '@{file}'
		return 64
	fi

	[ $# != 2 ] || { title="$1"; shift; }
	message="${1:?missing message}"
	if [ $# = 1 ] && [ "${message#@}" != "$message" ]; then
		__notice_upload "${message#@}" || return
		lament "$normal"
	fi

	if [ -e "$conf" ]; then
		__notice_read_conf "$conf"
	elif [ -n "$token" ]; then
		__notice_push_to_gotify "$url$token"
	fi
}

__notice_push_to_gotify() {
	url="${1%%\?*}"
	set -- -qLo /dev/null -w '%{http_code}' "$@"
	if [ -n "$REPLY" ]; then
		extras='{"client::display": {"contentType": "text/markdown"}, "client::notification": {"click": {"url": "'"$aux"'"}}}'
		set -- "$@" --data '{"title": "New file uploaded", "message": "'"$REPLY"'", "extras": '"$extras"'}'
		set -- "$@" -H 'Content-Type: application/json'
	else
		[ -z "$title" ] || set -- "$@" --form-string "title=$title"
		[ -z "$message" ] || set -- "$@" --form-string "message=$message"
	fi
	set -- "$@" -H "Accept: application/json"

	if code="$(__notice_curl "$@")" && [ "$code" = 200 ]; then
		:
	else
		__notice_warn "failed to push message to $url (\$code=$code, \$?=$?)"
		return 1
	fi
}

__notice_push_to_discord() {
	# flags: 2 (SUPPRESS_EMBEDS)
	url="${1%%\?*}"
	set -- -qLo /dev/null -w '%{http_code}' "$@"
	if [ -n "$REPLY" ]; then
		set -- "$@" --data '{"content": "# New file uploaded\n\n'"$REPLY"'", "flags": 2}'
	elif [ -z "$title" ]; then
		set -- "$@" --data '{"content": "'"$message"'", "flags": 2}'
	elif [ -z "$message" ]; then
		set -- "$@" --data '{"content": "# '"$title"'", "flags": 2}'
	else
		set -- "$@" --data '{"content": "# '"$title"'\n\n'"$message"'", "flags": 2}'
	fi
	set -- "$@" -H 'Content-Type: application/json'
	set -- "$@" -H "Accept: application/json"

	if code="$(__notice_curl "$@")" && { [ "$code" = 200 ] || [ "$code" = 204 ]; }; then
		:
	else
		__notice_warn "failed to push message to ${url%/*} (\$code=$code, \$?=$?)"
		return 1
	fi
}
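
# illustrative payload (hypothetical values): `notice deploy "build 123 done"`
# posts roughly this JSON to the webhook, with embeds suppressed:
#   {"content": "# deploy\n\nbuild 123 done", "flags": 2}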

notice()(__notice "$@")
[ -n "${preload+-}" ] || __notice "$@"
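
# usage sketch (the script name is assumed here): executing the file runs
# __notice directly, while sourcing it with `preload` set (even to an empty
# value) only defines the functions for later use:
#   notice 'backup finished'                 # message only
#   notice backups 'finished in 5 minutes'   # title + message
#   notice @/tmp/backup.log                  # upload the file, push its URL
#   preload= . ./notice                      # just load the functions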

# cursed modeline:
# vim:ft=bash ts=3 sw=3 noet sts=3