various smaller changes
Changes to be committed:
	modified:   .config/aliasrc
	modified:   .config/mpv/input.conf
	modified:   .config/zathura/zathurarc
	modified:   .local/bin/bt
	modified:   .local/bin/statusbar/clock
	modified:   .local/bin/tools/ce
	deleted:    .local/bin/tools/ripper
	modified:   .local/bin/tools/wv
	deleted:    .local/bin/transadd
parent a00598c4cb
commit 0feccdb787
.config/aliasrc
@@ -11,7 +11,7 @@ alias \
 	rm="rm -v" \
 	mkd="mkdir -pv" \
 	yt="youtube-dl --add-metadata -o '%(upload_date)s-%(title)s.%(ext)s' -i" \
-	yta="yt -x -f bestaudio/best" \
+	yta="yt -x --add-metadata -f bestaudio/best" -o '%(upload_date)s-%(title)s.%(ext)s'\
 	ffmpeg="ffmpeg -hide_banner" \
 	sage="sage -q"
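Both `yta` definitions above rely on alias chaining: when the alias is used interactively, the shell expands `yta` to its value and then expands the leading word `yt` again, so the youtube-dl options from both aliases apply. A minimal sketch of that behaviour (the definitions are copied from the hunk; the final `alias` call just prints the stored values):

#!/bin/sh
# Sketch of the alias chaining aliasrc relies on.
alias yt="youtube-dl --add-metadata -o '%(upload_date)s-%(title)s.%(ext)s' -i"
alias yta="yt -x -f bestaudio/best"
alias yt yta   # print both definitions; typing `yta URL` interactively runs the combined youtube-dl command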
.config/mpv/input.conf
@@ -176,6 +176,7 @@
 ! add chapter -1 # skip to previous chapter
 @ add chapter 1 # next
 
+#: run "/bin/sh" "-c" "notify-send MPV ${filename}"
 #
 # Not assigned by default
 # (not an exhaustive list of unbound commands)
.config/zathura/zathurarc
@@ -32,3 +32,4 @@ set completion-highlight-bg "#2C4E6A"
 set completion-highlight-fg "#4C5A6C"
 set recolor-lightcolor "#000000"
 set recolor-darkcolor "#a6c0d0"
+set adjust-open "width"
.local/bin/bt
@@ -26,8 +26,11 @@ done
 
 power(){
 	powerstatus="$( bluetoothctl show | grep Powered | awk '{print $2}' )"
-	[ "$powerstatus" = "no" ] && [ "$1" = on ] && bluetoothctl power on
-	[ "$powerstatus" = "yes" ] && [ "$1" = off ] && bluetoothctl power off
+	if [ "$powerstatus" = "no" ]; then
+		[ "$1" = on ] && bluetoothctl power on
+	elif [ "$powerstatus" = "yes" ]; then
+		[ "$1" = off ] && bluetoothctl power off
+	fi
 }
 
 scan(){
@@ -117,12 +120,8 @@ pair(){
 	start_scan="$(cat /tmp/bt_start_scan$$)"
 	if [ $((( "$(date +'%s')" - "$start_scan" ))) -lt $SCAN_PERIOD ]; then
 		sleep_period="$((( "$SCAN_PERIOD" - "$( date +'%s')" + "$start_scan" )))"
-		if [ "$sleep_period" -eq 1 ]; then
-			notify-send "Bluetooth" "Searching for devices, please wait 1 second"
-		else
-			notify-send "Bluetooth" "Searching for devices, please wait $sleep_period seconds"
-		fi
-
+		[ "$sleep_period" -gt 1 ] && plural="s"
+		notify-send "Bluetooth" "Searching for devices, please wait $sleep_period second$plural"
 		sleep "$sleep_period"
 	fi
 	all_devices="$( bluetoothctl devices )"
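The `second$plural` line that replaces the if/else is the same pluralization idiom the ripper script further down uses for its notifications. A standalone sketch of how it behaves (the loop values are made up):

#!/bin/sh
# Sketch of the pluralization idiom; the counts are hypothetical.
for sleep_period in 1 2 5; do
	plural=""                                # reset between iterations
	[ "$sleep_period" -gt 1 ] && plural="s"  # append "s" only for counts greater than one
	printf 'Searching for devices, please wait %s second%s\n' "$sleep_period" "$plural"
done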
.local/bin/statusbar/clock
@@ -1,3 +1,2 @@
 #!/bin/sh
-
 date '+%H:%M'
.local/bin/tools/ce
@@ -1,28 +1,31 @@
-#!/bin/bash
+#!/bin/sh
 #A script that launches documents in their respective viewers
 #either via dmenu or fzf depending on the context
 ppid () { ps -p ${1:-$$} -o ppid=; }
 shell="$(ps aux | grep $( ppid ) | head -n1 | awk '{print $11}' )"
 document_dirs="$HOME/.config/* $HOME/.local/src/* $HOME/.local/bin/*"
 #/bin/sh means it's probably from dmenu_run (it's ugly but works)
-if [ "$shell" = "/bin/sh" ]; then
-	file="$( du -a $document_dirs | awk '{for(i=2; i<NF; i++){printf("%s ", $i)}printf("%s\n", $NF)}'| dmenu -l 10 -p 'open what file?' -it "$*" )"
-	[ -z "$file" ] && exit
-	cd "$( dirname "$file" )" || exit
-	if rifle -l "$file" | head -n1 | grep -q 'EDITOR';then
-		st rifle "$file"
-	else
-		rifle "$file"
-	fi
-else
+if [ ! "$shell" = "/bin/sh" ]; then
 	currentdir="$(pwd)"
-	file="$( du -a $document_dirs | awk '{for(i=2; i<NF; i++){printf("%s ", $i)}printf("%s\n", $NF)}' | fzf -e --query="$*")"
+	file="$( find $document_dirs -type f | sed "s|$HOME|~|" | fzf -e --query="$*")"
+	path="$(printf '%s%s' "$HOME" "${file//\~/}")"
 	[ -z "$file" ] && exit
-	cd "$( dirname "$file" )" || exit
-	if rifle -l "$file" | head -n1 | grep -q 'EDITOR';then
-		rifle "$file"
+	cd "$( dirname "$path" )" || exit
+	if rifle -l "$path" | head -n1 | grep -q 'EDITOR';then
+		rifle "$path"
 	else
-		rifle "$file" & disown
+		rifle "$path" &
 	fi
 	cd "$currentdir" || exit
+#launch dmenu as it's probably in dmenu_run
+else
+	file="$( find $document_dirs -type f | sed "s|$HOME|~|" | dmenu -l 10 -p 'open what file?' -it "$*" )"
+	[ -z "$file" ] && exit
+	path="$(printf '%s%s' "$HOME" "${file//\~/}")"
+	cd "$( dirname "$path" )" || exit
+	if rifle -l "$path" | head -n1 | grep -q 'EDITOR';then
+		st rifle "$path" &
+	else
+		rifle "$path" &
+	fi
 fi
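One detail worth noting in the new ce: `${file//\~/}` is a bash-style substitution, and with the shebang now pointing at /bin/sh it fails on strictly POSIX shells such as dash with a "Bad substitution" error. A portable version of the same ~-to-$HOME round trip could look like this (a sketch only; the selected file is a made-up example):

#!/bin/sh
# Sketch of a POSIX-only ~ <-> $HOME round trip, mirroring what ce does.
# "file" stands in for a dmenu/fzf selection that was abbreviated with: sed "s|$HOME|~|"
file='~/.config/aliasrc'                            # hypothetical selection
path="$(printf '%s' "$file" | sed "s|^~|$HOME|")"   # expand the leading ~ back into $HOME
printf '%s\n' "$path"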
.local/bin/tools/ripper (deleted)
@@ -1,108 +0,0 @@
-#!/bin/sh
-#A script that checks multiple youtube and bitchute channels for new videos to download via youtube-dl
-#This script works considerably faster than just giving youtube-dl a channel URI.
-#The YouTube implementation now uses a YoutubeData API v3 key to work more reliably.
-#This can be quite quota taxing, as each channel search is 1% of the allotted qutoa for the day.
-#-> checking n YT channels => n% of daily quota required to run this script
-#Keep this in mind when running it as a cronjob
-#Either insert this key in plain text below at the variable "APIKEY" or do it via ENV vars or a password manager
-#Since bitchute still doesn't have an API I'm using lynx to emulate a user.
-#This can limit the number of recent videos available. For a whole download of bitchute channels consider other methods first.
-#For youtube the videos per channel are limited to the last 500 uploaded videos. For the rest you can just use youtube-dl itself
-
-#needed if run as cronjob
-XDG_VIDEOS_DIR=$HOME/vids #TODO ADJUST FOR PERSONAL USE HERE!
-export XDG_VIDEOS_DIR
-DLARCHIVE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.downloaded"
-DLLOC="${XDG_VIDEOS_DIR:-$HOME/Videos}"
-#FORMAT OF CHANNELSFILE:
-#Youtube: include the channel URI: https://www.youtube.com/channel/<channelId>
-#Bitchute: normal channel URI: https://www.bitchute.com/channel/<user>
-#Lines starting with '#' will be ignored in this file
-CHANNELSFILE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.channels"
-BLACKLIST="${XDG_VIDEOS_DIR:-$HOME/Videos}/.blacklist"
-
-# Required to display notifications if run as a cronjob:
-DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u)/bus
-export DBUS_SESSION_BUS_ADDRESS
-export DISPLAY=:0.0
-XDG_DATA_HOME=/home/alex/.local/share
-# Required to have pass work if run as cronjob
-export PASSWORD_STORE_GPG_OPTS="--homedir=$XDG_DATA_HOME/gnupg"
-export GTK2_RC_FILES="${XDG_CONFIG_HOME:-$HOME/.config}/gtk-2.0/gtkrc-2.0"
-[ -d "$HOME/.local/share/password-store" ] && export PASSWORD_STORE_DIR="$HOME/.local/share/password-store"
-
-APIKEY="$(pass show Misc/Youtube\ Data\ API\ v3 | head -n1 )"
-LEGACYMODE=$1 #set to anything nonzero to ignore YT API
-
-[ -n "$LEGACYMODE" ] && printf "Using YT Legacy fallback mode...\nThis is less reliable than the API requests.\nOnly expect to find the last 5 videos or so per channel\n"
-if [ "$(pgrep -c ripper)" -gt 1 ]; then
-	echo "Ripper already running, exiting new instance..."
-	exit
-fi
-
-echo "Scanning for new Videos to download"
-
-
-##YOUTUBE
-echo "Scanning on Youtube..."
-IDs="$( grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep 'channel' | sed 's/https:\/\/www\.youtube\.com\/channel\///')"
-not_correctly_formatted="$(grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep -v 'https:\/\/www\.youtube\.com\/channel\/')"
-if [ -n "$not_correctly_formatted" ]; then
-	echo Please fix the following channel urls to be scannable:
-	echo "$not_correctly_formatted" | while read -r line; do
-		printf 'Given URI:\t%s\n' "$line"
-		printf 'Potentiall correct channel URI:\n\thttps://youtube.com/channel/%s\n' "$(curl "$line" -s | grep -Eo 'externalId":"[^"]*"' | sed 's|^externalId":"||; s|"||g')"
-	done
-	echo "They need to be in the 'https://www.youtube.com/channel/...' format"
-fi
-for channel_id in $IDs; do
-	echo "ID: $channel_id"
-	if [ -z "$LEGACYMODE" ]; then
-		json="$(curl -s "https://www.googleapis.com/youtube/v3/search?key=$APIKEY&channelId=$channel_id&part=snippet,id&order=date&maxResults=500")"
-		#Fallback to legacy mode if API quota is exceeded
-		if [ "$(echo "$json" | jq '."error"."errors"[]."reason"' 2> /dev/null )" = '"quotaExceeded"' ];then
-			echo "YT API Quota exceeded, using fallback"
-			LEGACYMODE=1
-		fi
-	elif [ -n "$LEGACYMODE" ];then
-		lynx --dump --nonumbers -listonly "https://www.youtube.com/channel/$channel_id" | grep 'videos.xml' | xargs curl -s > /tmp/"${channel_id}.xml"
-		python -c "from lxml import etree
-file=\"/tmp/${channel_id}.xml\"
-root = etree.parse(file)
-for el in root.iter():
-	if(el.tag in '{http://www.youtube.com/xml/schemas/2015}videoId'):
-		print(el.text)" |
-		sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
-		rm -f "/tmp/${channel_id}.xml"
-	else
-		echo "$json" | jq '."items"[].id."videoId"' | tr -d '"' | grep -v '^null$'| sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
-	fi
-done
-grep 'youtube' "$DLARCHIVE" | sed 's/youtube /https:\/\/www\.youtube\.com\/watch?v=/' > /tmp/alreadydownloaded$$
-
-##BITCHUTE
-#This section is quite generic and could probably be easily adapted for other video hosting websites
-echo "Scanning on Bitchute..."
-grep 'bitchute' "$CHANNELSFILE" | grep -v '^#' | xargs -L1 lynx --dump --nonumbers -listonly | grep 'bitchute\.com\/video' | sort -u | grep -vf "$BLACKLIST" >> /tmp/todownload$$
-grep 'bitchute' "$DLARCHIVE" | sed 's/bitchute /https:\/\/www\.bitchute\.com\/video\//' >> /tmp/alreadydownloaded$$
-
-##DOWNLOAD VIDEOS FROM ACCUMULATED LINKS
-grep -vf /tmp/alreadydownloaded$$ /tmp/todownload$$ | sort -u > /tmp/new_videos$$
-rm -f /tmp/alreadydownloaded$$ /tmp/todownload$$
-number=$(wc -l /tmp/new_videos$$ | cut -d ' ' -f 1 )
-if [ "$number" -gt 0 ]; then
-	[ "$number" -gt 1 ] && plural="s"
-	notify-send "Channel Ripper" "$number new video$plural available for download, downloading now."
-	echo "$number new video$plural for download available, downloading now."
-	if [ "$number" -lt 10 ];then
-		youtube-dl --get-filename -o "'%(uploader)s' '%(title)s'" -a /tmp/new_videos$$ | xargs -L1 notify-send
-	fi
-	youtube-dl --hls-prefer-native -i --download-archive "$DLARCHIVE" -f 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best' --add-metadata -o "$DLLOC/%(uploader)s/%(upload_date)s-%(title)s.%(ext)s" -a /tmp/new_videos$$
-	rm -f /tmp/new_videos$$
-	notify-send "Channel Ripper" "Finished downloading"
-fi
-
-if [ "$number" -eq 0 ]; then
-	echo "No new videos"
-fi
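For reference, the removed ripper's header comments describe its YouTube Data API usage; a condensed sketch of the per-channel request it issued (APIKEY and CHANNEL_ID are placeholders, and the search endpoint accepts at most 50 results per page, so the sketch uses 50):

#!/bin/sh
# Condensed sketch of the API call the removed ripper made for each channel.
# APIKEY and CHANNEL_ID are placeholders; endpoint and filtering mirror the script above.
APIKEY="YOUR_API_KEY"
CHANNEL_ID="UCxxxxxxxxxxxxxxxxxxxxxx"
curl -s "https://www.googleapis.com/youtube/v3/search?key=$APIKEY&channelId=$CHANNEL_ID&part=snippet,id&order=date&maxResults=50" |
	jq -r '.items[].id.videoId' |                # non-video results come out as "null"
	grep -v '^null$' |                           # drop them, as the original did
	sed 's|^|https://www.youtube.com/watch?v=|'  # turn IDs into watch URLs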
.local/bin/tools/wv
@@ -1,6 +1,6 @@
 #!/bin/sh
 videos="/tmp/videos$$"
-du -a ~/vids/* --time | awk '!($1="")' | sort -bgr -k1 | awk '!($1="")' | awk '!($1="")' | sed 's/^ //' | grep -v "$(find ~/vids/* -type d | sed 's/$/\$/; s/\[/\\[/g; s/\]/\\]/g')" | sed "s|$HOME|\~|" > $videos
+du -a ~/vids/* ~/dls/* --time | awk '!($1="")' | sort -bgr -k1 | awk '!($1="")' | awk '!($1="")' | sed 's/^ //' | grep -v "$(find ~/vids/* ~/dls/* -type d | sed 's/$/\$/; s/\[/\\[/g; s/\]/\\]/g')" | sed "s|$HOME|\~|" > $videos
 path="$(dmenu -i -l 30 -p "Choose a video to watch:" <"$videos" | sed "s|\~|$HOME|")"
 rm "$videos"
 if [ -n "$path" ];then
.local/bin/transadd (deleted)
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-# Mimeapp script for adding torrent to transmission-daemon, but will also start the daemon first if not running.
-
-# transmission-daemon sometimes fails to take remote requests in its first moments, hence the sleep.
-
-pidof transmission-daemon >/dev/null || (transmission-daemon && notify-send "Starting transmission daemon..." && sleep 3 && pkill -RTMIN+7 "${STATUSBAR:-dwmblocks}") &
-
-transmission-remote -a "$@" && notify-send "🔽 Torrent added."