added scripts that I've been using for a while now

Alexander Bocken 2021-01-12 19:22:00 +01:00
parent 71c4ec4c42
commit ae4035c2da
Signed by: Alexander
GPG Key ID: 1D237BE83F9B05E8
12 changed files with 574 additions and 0 deletions

0
.config/mutt/mbsyncrc Normal file

5
.config/mutt/muttrc Normal file

@ -0,0 +1,5 @@
# vim: filetype=neomuttrc
source /usr/local/share/mutt-wizard/mutt-wizard.muttrc # mw-autogenerated
macro index,pager i2 '<sync-mailbox><enter-command>source /home/alex/.config/mutt/accounts/2-ethz.muttrc<enter><change-folder>!<enter>;<check-stats>' "switch to abocken@ethz.ch" # mw-autogenerated
source /home/alex/.config/mutt/accounts/1-bocken.muttrc # mw-autogenerated
macro index,pager i1 '<sync-mailbox><enter-command>source /home/alex/.config/mutt/accounts/1-bocken.muttrc<enter><change-folder>!<enter>;<check-stats>' "switch to alexander@bocken.org" # mw-autogenerated

1
.local/bin/emoji Symbolic link

@ -0,0 +1 @@
/home/alex/.local/bin/dmenuunicode

113
.local/bin/ripper Executable file

@ -0,0 +1,113 @@
#!/bin/sh
#A script that checks multiple YouTube and BitChute channels for new videos to download via youtube-dl
#This script works considerably faster than just giving youtube-dl a channel URI.
#The YouTube implementation now uses a YouTube Data API v3 key to work more reliably.
#This can be quite quota-taxing: a search.list call costs 100 of the 10,000 daily quota units, so each channel search uses 1% of the allotted quota for the day.
#-> checking n YT channels => n% of daily quota required to run this script
#Keep this in mind when running it as a cronjob
#Either insert this key in plain text below at the variable "APIKEY" or provide it via an ENV var or a password manager
#Since BitChute still doesn't have an API, lynx is used to emulate a user.
#This can limit the number of recent videos available. For a complete download of BitChute channels consider other methods first.
#For YouTube, scanning is limited to the 500 most recently uploaded videos per channel. For anything older you can just use youtube-dl itself
#needed if run as cronjob
XDG_VIDEOS_DIR=$HOME/vids #TODO ADJUST FOR PERSONAL USE HERE!
export XDG_VIDEOS_DIR
DLARCHIVE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.downloaded"
DLLOC="${XDG_VIDEOS_DIR:-$HOME/Videos}"
#FORMAT OF CHANNELSFILE:
#Youtube: include the channel URI: https://www.youtube.com/channel/<channelId>
#Bitchute: normal channel URI: https://www.bitchute.com/channel/<user>
#Lines starting with '#' will be ignored in this file
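#Example .channels file (placeholder IDs, adapt to your own subscriptions):
#  https://www.youtube.com/channel/UCXXXXXXXXXXXXXXXXXXXXXX
#  https://www.bitchute.com/channel/examplechannel
#  #this commented-out line would be skipped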
CHANNELSFILE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.channels"
BLACKLIST="${XDG_VIDEOS_DIR:-$HOME/Videos}/.blacklist"
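#The blacklist is passed to 'grep -vf': one pattern per line (e.g. a video ID or
#a keyword); any matching link is dropped before downloading. Hypothetical example:
#  dQw4w9WgXcQ
#  livestream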
# Required to display notifications if run as a cronjob:
DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u)/bus
export DBUS_SESSION_BUS_ADDRESS
export DISPLAY=:0.0
XDG_DATA_HOME=/home/alex/.local/share
# Required to have pass work if run as cronjob
export PASSWORD_STORE_GPG_OPTS="--homedir=$XDG_DATA_HOME/gnupg"
export GTK2_RC_FILES="${XDG_CONFIG_HOME:-$HOME/.config}/gtk-2.0/gtkrc-2.0"
[ -d "$HOME/.local/share/password-store" ] && export PASSWORD_STORE_DIR="$HOME/.local/share/password-store"
APIKEY="$(pass show Misc/Youtube\ Data\ API\ v3 | head -n1 )"
LEGACYMODE=$1 #set to any non-empty value to skip the YT API
[ -n "$LEGACYMODE" ] && printf "Using YT Legacy fallback mode...\nThis is less reliable than the API requests.\nOnly expect to find the last 5 videos or so per channel\n"
if [ "$(pgrep -c ripper)" -gt 1 ]; then
echo "Ripper already running, exiting new instance..."
exit
fi
echo "Scanning for new Videos to download"
##YOUTUBE
echo "Scanning on Youtube..."
IDs="$( grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep 'channel' | sed 's/https:\/\/www\.youtube\.com\/channel\///')"
not_correctly_formatted="$(grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep -v 'https:\/\/www\.youtube\.com\/channel\/')"
if [ -n "$not_correctly_formatted" ]; then
echo Please fix the following channel urls to be scannable:
echo "$not_correctly_formatted" | while read -r line; do
printf 'Given URI:\t%s\n' "$line"
printf 'Potentially correct channel URI:\n\thttps://www.youtube.com/channel/%s\n' "$(curl "$line" -s | grep -Eo 'externalId":"[^"]*"' | sed 's|^externalId":"||; s|"||g')"
done
echo "They need to be in the 'https://www.youtube.com/channel/...' format"
fi
for channel_id in $IDs; do
    echo "YT-ID: $channel_id"
    if [ -z "$LEGACYMODE" ]; then
        json="$(curl -s "https://www.googleapis.com/youtube/v3/search?key=$APIKEY&channelId=$channel_id&part=snippet,id&order=date&maxResults=500")"
        #Fallback to legacy mode if API quota is exceeded
        if [ "$(echo "$json" | jq '."error"."errors"[]."reason"' 2> /dev/null )" = '"quotaExceeded"' ];then
            echo "YT API Quota exceeded, using fallback"
            LEGACYMODE=1
        fi
    fi
    if [ -n "$LEGACYMODE" ];then
        curl -s "https://www.youtube.com/feeds/videos.xml?channel_id=$channel_id" > /tmp/"${channel_id}.xml"
        python -c "from lxml import etree
file=\"/tmp/${channel_id}.xml\"
root = etree.parse(file)
for el in root.iter():
    if el.tag == '{http://www.youtube.com/xml/schemas/2015}videoId':
        print(el.text)" |
            sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
        rm -f "/tmp/${channel_id}.xml"
    else
        echo "$json" | jq '."items"[].id."videoId"' | tr -d '"' | grep -v '^null$' | sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
    fi
done
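#youtube-dl's --download-archive stores one "extractor video_id" pair per line
#(e.g. "youtube dQw4w9WgXcQ"), so rebuild full URLs from it before comparing
#against the freshly collected links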
grep 'youtube' "$DLARCHIVE" | sed 's/youtube /https:\/\/www\.youtube\.com\/watch?v=/' > /tmp/alreadydownloaded$$
##BITCHUTE
#This section is quite generic and could probably be easily adapted for other video hosting websites
echo "Scanning on Bitchute..."
BC_CHANNELS="$(grep 'bitchute' "$CHANNELSFILE" | grep -v '^#')"
for channel in $BC_CHANNELS; do
    printf 'BC channel: %s\n' "$( echo "$channel" | sed 's|https://www.bitchute.com/channel/||; s|/||')"
    lynx --read_timeout=5 --dump --nonumbers -listonly "$channel" | grep 'bitchute\.com\/video' | sort -u | grep -vf "$BLACKLIST" >> /tmp/todownload$$
done
grep 'bitchute' "$DLARCHIVE" | sed 's/bitchute /https:\/\/www\.bitchute\.com\/video\//' >> /tmp/alreadydownloaded$$
##DOWNLOAD VIDEOS FROM ACCUMULATED LINKS
grep -vf /tmp/alreadydownloaded$$ /tmp/todownload$$ | sort -u > /tmp/new_videos$$
rm -f /tmp/alreadydownloaded$$ /tmp/todownload$$
number=$(wc -l /tmp/new_videos$$ | cut -d ' ' -f 1 )
if [ "$number" -gt 0 ]; then
[ "$number" -gt 1 ] && plural="s"
notify-send "Channel Ripper" "$number new video$plural available for download, downloading now."
echo "$number new video$plural for download available, downloading now."
if [ "$number" -lt 10 ];then
youtube-dl --get-filename -o "'%(uploader)s' '%(title)s'" -a /tmp/new_videos$$ | xargs -L1 notify-send
fi
youtube-dl --hls-prefer-native -i --download-archive "$DLARCHIVE" -f 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best' --add-metadata -o "$DLLOC/%(uploader)s/%(upload_date)s-%(title)s.%(ext)s" -a /tmp/new_videos$$
rm -f /tmp/new_videos$$
notify-send "Channel Ripper" "Finished downloading"
fi
if [ "$number" -eq 0 ]; then
echo "No new videos"
fi

3
.local/bin/tools/dlupdates Executable file

@ -0,0 +1,3 @@
#!/bin/sh
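# Download all pending updates without installing them (-w is pacman's
# --downloadonly), then signal dwmblocks so the update-count block refreshes
# (assumes that block listens on SIGRTMIN+6 in this setup's dwmblocks config)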
sudo pacman -Syuw --noconfirm
pkill -RTMIN+6 dwmblocks

7
.local/bin/tools/dropdowncalc Executable file

@ -0,0 +1,7 @@
#!/bin/sh
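# Interactive python calculator for a dropdown terminal, preloaded with numpy
# and matplotlib; 'ifinstalled' is presumably the LARBS-style helper that
# notifies and aborts when a dependency is missing.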
ifinstalled python && python -iq -c "print('Welcome to the Calculator')
from numpy import *
import sys
import matplotlib.pyplot as plt
sys.ps1=''
"

3
.local/bin/tools/killdwm Executable file

@ -0,0 +1,3 @@
#!/bin/sh
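# Presumably the session keeps dwm alive in a restart loop; the /tmp/killdwm
# flag tells that loop to exit for good. Assumed loop (not part of this repo):
#   while true; do dwm; [ -f /tmp/killdwm ] && rm -f /tmp/killdwm && break; done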
touch /tmp/killdwm
pkill dwm

81
.local/bin/tools/matrix_decrypt.py Executable file

@ -0,0 +1,81 @@
#!/usr/bin/env python3
# matrix_decrypt - Download and decrypt an encrypted attachment
# from a matrix server
# Copyright © 2019 Damir Jelić <poljar@termina.org.uk>
#
# Permission to use, copy, modify, and/or distribute this software for
# any purpose with or without fee is hereby granted, provided that the
# above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
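# Usage sketch (illustrative values; the URL's query string must carry the
# key, iv and hash parameters that an encrypting matrix client appends):
#   matrix_decrypt --plumber mpv 'https://matrix.org/_matrix/media/r0/download/...?key=...&iv=...&hash=...'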
import argparse
import requests
import tempfile
import subprocess
from urllib.parse import urlparse, parse_qs
from nio.crypto import decrypt_attachment
def save_file(data):
    """Save data to a temporary file and return its name."""
    tmp_dir = tempfile.gettempdir()

    with tempfile.NamedTemporaryFile(
        prefix='plumber-',
        dir=tmp_dir,
        delete=False
    ) as f:
        f.write(data)
        f.flush()

    return f.name


def main():
    parser = argparse.ArgumentParser(
        description='Download and decrypt matrix attachments'
    )
    parser.add_argument('url', help='the url of the attachment')
    parser.add_argument('--plumber',
                        help='program that gets called with the '
                             'downloaded file')
    args = parser.parse_args()
    url = urlparse(args.url)
    query = parse_qs(url.query)

    if not ("key" in query and "iv" in query and "hash" in query):
        print("Missing decryption argument")
        return -1

    key = query["key"][0]
    iv = query["iv"][0]
    hash = query["hash"][0]

    http_url = "https://{}{}".format(url.netloc, url.path)

    request = requests.get(http_url)

    if not request.ok:
        print("Error downloading file")
        return -2

    plumber = args.plumber or "/usr/bin/rifle"
    plaintext = decrypt_attachment(request.content, key, hash, iv)
    file_name = save_file(plaintext)
    subprocess.run([plumber, file_name])

    return 0


if __name__ == "__main__":
    main()

318
.local/bin/tools/matrix_upload.py Executable file

@ -0,0 +1,318 @@
#!/usr/bin/env -S python3 -u
# Copyright © 2018 Damir Jelić <poljar@termina.org.uk>
#
# Permission to use, copy, modify, and/or distribute this software for
# any purpose with or without fee is hereby granted, provided that the
# above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
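# Usage sketch (illustrative values): encrypts and uploads a file, printing
# JSON status/progress messages on stdout for the calling client to consume:
#   matrix_upload.py --encrypt photo.jpg matrix.org <access_token>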
import os
import json
import magic
import requests
import argparse
from urllib.parse import urlparse
from itertools import zip_longest
import urllib3
from nio import Api, UploadResponse, UploadError
from nio.crypto import encrypt_attachment
from json.decoder import JSONDecodeError
urllib3.disable_warnings()
def to_stdout(message):
    print(json.dumps(message), flush=True)


def error(e):
    message = {
        "type": "status",
        "status": "error",
        "message": str(e)
    }
    to_stdout(message)
    os.sys.exit()


def mime_from_file(file):
    try:
        t = magic.from_file(file, mime=True)
    except AttributeError:
        try:
            m = magic.open(magic.MIME)
            m.load()
            t, _ = m.file(file).split(';')
        except AttributeError:
            error('Your \'magic\' module is unsupported. '
                  'Install either https://github.com/ahupp/python-magic '
                  'or https://github.com/file/file/tree/master/python '
                  '(official \'file\' python bindings, available as the '
                  'python-magic package on many distros)')
            raise SystemExit

    return t


class Upload(object):
    def __init__(self, file, chunksize=1 << 13):
        self.file = file
        self.filename = os.path.basename(file)
        self.chunksize = chunksize
        self.totalsize = os.path.getsize(file)
        self.mimetype = mime_from_file(file)
        self.readsofar = 0

    def send_progress(self):
        message = {
            "type": "progress",
            "data": self.readsofar
        }
        to_stdout(message)

    def __iter__(self):
        with open(self.file, 'rb') as file:
            while True:
                data = file.read(self.chunksize)

                if not data:
                    break

                self.readsofar += len(data)
                self.send_progress()

                yield data

    def __len__(self):
        return self.totalsize


def chunk_bytes(iterable, n):
    args = [iter(iterable)] * n
    return (
        bytes(filter(lambda x: x is not None, chunk))
        for chunk in zip_longest(*args)
    )


class EncryptedUpload(Upload):
    def __init__(self, file, chunksize=1 << 13):
        super().__init__(file, chunksize)
        self.source_mimetype = self.mimetype
        self.mimetype = "application/octet-stream"

        with open(self.file, "rb") as file:  # read from the full path, not just the basename
            self.ciphertext, self.file_keys = encrypt_attachment(file.read())

    def send_progress(self):
        message = {
            "type": "progress",
            "data": self.readsofar
        }
        to_stdout(message)

    def __iter__(self):
        for chunk in chunk_bytes(self.ciphertext, self.chunksize):
            self.readsofar += len(chunk)
            self.send_progress()
            yield chunk

    def __len__(self):
        return len(self.ciphertext)


class IterableToFileAdapter(object):
    def __init__(self, iterable):
        self.iterator = iter(iterable)
        self.length = len(iterable)

    def read(self, size=-1):
        # requests treats this as a file-like object; size is ignored and the
        # next chunk of the iterable is handed back
        return next(self.iterator, b'')

    def __len__(self):
        return self.length


def upload_process(args):
    file_path = os.path.expanduser(args.file)
    thumbnail = None

    try:
        if args.encrypt:
            upload = EncryptedUpload(file_path)

            if upload.source_mimetype.startswith("image"):
                # TODO create a thumbnail
                thumbnail = None
        else:
            upload = Upload(file_path)
    except (FileNotFoundError, OSError, IOError) as e:
        error(e)

    try:
        url = urlparse(args.homeserver)
    except ValueError as e:
        error(e)

    upload_url = ("https://{}".format(args.homeserver)
                  if not url.scheme else args.homeserver)
    _, api_path, _ = Api.upload(args.access_token, upload.filename)
    upload_url += api_path

    headers = {
        "Content-type": upload.mimetype,
    }

    proxies = {}

    if args.proxy_address:
        user = args.proxy_user or ""

        if args.proxy_password:
            user += ":{}".format(args.proxy_password)

        if user:
            user += "@"

        proxies = {
            "https": "{}://{}{}:{}/".format(
                args.proxy_type,
                user,
                args.proxy_address,
                args.proxy_port
            )
        }

    message = {
        "type": "status",
        "status": "started",
        "total": upload.totalsize,
        "file_name": upload.filename,
    }

    if isinstance(upload, EncryptedUpload):
        message["mimetype"] = upload.source_mimetype
    else:
        message["mimetype"] = upload.mimetype

    to_stdout(message)

    session = requests.Session()
    session.trust_env = False

    try:
        r = session.post(
            url=upload_url,
            auth=None,
            headers=headers,
            data=IterableToFileAdapter(upload),
            verify=(not args.insecure),
            proxies=proxies
        )
    except (requests.exceptions.RequestException, OSError) as e:
        error(e)

    try:
        json_response = json.loads(r.content)
    except JSONDecodeError:
        error(r.content)

    response = UploadResponse.from_dict(json_response)

    if isinstance(response, UploadError):
        error(str(response))

    message = {
        "type": "status",
        "status": "done",
        "url": response.content_uri
    }

    if isinstance(upload, EncryptedUpload):
        message["file_keys"] = upload.file_keys

    to_stdout(message)

    return 0


def main():
    parser = argparse.ArgumentParser(
        description="Encrypt and upload matrix attachments"
    )
    parser.add_argument("file", help="the file that will be uploaded")
    parser.add_argument(
        "homeserver",
        type=str,
        help="the address of the homeserver"
    )
    parser.add_argument(
        "access_token",
        type=str,
        help="the access token to use for the upload"
    )
    parser.add_argument(
        "--encrypt",
        action="store_const",
        const=True,
        default=False,
        help="encrypt the file before uploading it"
    )
    parser.add_argument(
        "--insecure",
        action="store_const",
        const=True,
        default=False,
        help="disable SSL certificate verification"
    )
    parser.add_argument(
        "--proxy-type",
        choices=[
            "http",
            "socks4",
            "socks5"
        ],
        default="http",
        help="type of the proxy that will be used to establish a connection"
    )
    parser.add_argument(
        "--proxy-address",
        type=str,
        help="address of the proxy that will be used to establish a connection"
    )
    parser.add_argument(
        "--proxy-port",
        type=int,
        default=8080,
        help="port of the proxy that will be used to establish a connection"
    )
    parser.add_argument(
        "--proxy-user",
        type=str,
        help="user that will be used for authentication on the proxy"
    )
    parser.add_argument(
        "--proxy-password",
        type=str,
        help="password that will be used for authentication on the proxy"
    )

    args = parser.parse_args()
    upload_process(args)


if __name__ == "__main__":
    main()


@ -0,0 +1,16 @@
#!/bin/sh
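# Extracts URLs from text piped on stdin (e.g. from mutt or newsboat), lets you
# pick one via dmenu if there are several, and hands it to dmenuhandler.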
urlregex="(((http|https)://|www\\.)[a-zA-Z0-9.]*[:]?[a-zA-Z0-9./@$&%?$#=_-]*)|((magnet:\\?xt=urn:btih:)[a-zA-Z0-9]*)"
# First remove linebreaks and mutt sidebars:
urls="$(sed 's/.*│//g' | tr -d '\n' |
grep -aEo "$urlregex" | # grep only urls as defined above.
sed 's/Podcast$//' |
sed 's/^www./http:\/\/www\./g' | uniq )"
[ -z "$urls" ] && exit
if [ "$( echo "$urls" | wc -l )" -gt 1 ]; then
chosen="$(echo "$urls" | dmenu -i -p 'Follow which url?' -l 10)"
else
chosen="$urls"
fi
[ -z "$chosen" ] && exit
echo "$chosen" | xargs dmenuhandler

27
.local/bin/tools/stopwatch Executable file

@ -0,0 +1,27 @@
#!/bin/sh
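# Terminal stopwatch: redraws the elapsed time (MM:SS) about ten times per
# second using tput cursor addressing instead of clearing the whole screen.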
start_time_sec=$( date +'%s' )
TOPLEFT=$(tput cup 0 0) #cursor to the top-left corner (avoid clobbering $HOME)
ED=$(tput ed)
EL=$(tput el)
printf '%s%s' "$TOPLEFT" "$ED"
cleanup(){
    tput cnorm
    exit
}
trap cleanup INT
tput civis
while true
do
    diff=$(( $(date +'%s') - start_time_sec ))
    ROWS=$(tput lines)
    COLS=$(tput cols)
    date --date "@$diff" +'%M:%S' | head -n "$ROWS" | while IFS= read -r LINE; do
        printf '%-*.*s%s\n' "$COLS" "$COLS" "$LINE" "$EL"
    done
    printf '%s%s' "$ED" "$TOPLEFT"
    sleep 0.1
done

BIN
.local/bin/tools/zoomdl Executable file

Binary file not shown.