#!/bin/sh
#A script that checks multiple YouTube and BitChute channels for new videos to download via youtube-dl.
#This script works considerably faster than just giving youtube-dl a channel URI.
#The YouTube implementation now uses a YouTube Data API v3 key to work more reliably.
#This can be quite quota-taxing, as each channel search uses 1% of the allotted quota for the day.
#-> checking n YT channels => n% of the daily quota is required to run this script
#Keep this in mind when running it as a cronjob.
#Either insert this key in plain text below at the variable "APIKEY", or provide it via an environment variable or a password manager.
#Since BitChute still doesn't have an API, I'm using lynx to emulate a user.
#This can limit the number of recent videos available; for a complete download of BitChute channels, consider other methods first.
#For YouTube, the videos per channel are limited to the last 500 uploaded videos. Beyond that, use youtube-dl itself.

#Needed if run as a cronjob (cron does not source the user's environment, so the videos directory is set explicitly):
XDG_VIDEOS_DIR=$HOME/vids #TODO ADJUST FOR PERSONAL USE HERE!
export XDG_VIDEOS_DIR
DLARCHIVE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.downloaded"
DLLOC="${XDG_VIDEOS_DIR:-$HOME/Videos}"
CHANNELSFILE="${XDG_VIDEOS_DIR:-$HOME/Videos}/.channels"
#FORMAT OF CHANNELSFILE:
#Youtube: include the channel URI: https://www.youtube.com/channel/<channelId>
#Bitchute: normal channel URI: https://www.bitchute.com/channel/<user>
#Lines starting with '#' will be ignored in this file
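#Example .channels file contents (hypothetical channel ID and name, for illustration only):
#  https://www.youtube.com/channel/UCxxxxxxxxxxxxxxxxxxxxxx
#  https://www.bitchute.com/channel/examplechannel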
BLACKLIST="${XDG_VIDEOS_DIR:-$HOME/Videos}/.blacklist"
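#The blacklist is applied via 'grep -vf': each line is a pattern, and any candidate video URL matching one of them is skipped.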
# Required to display notifications if run as a cronjob:
DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u)/bus
export DBUS_SESSION_BUS_ADDRESS
export DISPLAY=:0.0
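#The API key is read from pass(1) here; if you don't use pass, replace this with the key itself or read it from an environment variable (see the note at the top).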
APIKEY="$(pass show Misc/Youtube\ Data\ API\ v3 | head -n1 )"
if [ "$(pgrep -c ripper)" -gt 1 ]; then
echo "Ripper already running, exiting new instance..."
exit
fi
echo "Scanning for new Videos to download"
echo "Scanning on Youtube..."
IDs="$( grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep 'channel' | sed 's/https:\/\/www\.youtube\.com\/channel\///')"
not_correctly_formatted="$(grep 'youtube' "$CHANNELSFILE" | grep -v '^#' | grep -v 'channel')"
if [ -n "$not_correctly_formatted" ]; then
echo "Please fix the following channel URLs to be scannable:"
echo "$not_correctly_formatted"
echo "They need to be in the 'https://www.youtube.com/channel/...' format"
fi
for channel_id in $IDs; do
echo "ID: $channel_id"
json="$(curl -s "https://www.googleapis.com/youtube/v3/search?key=$APIKEY&channelId=$channel_id&part=snippet,id&order=date&maxResults=500")"
#Fallback to legacy mode if API quota is exceeded
if [ "$(echo "$json" | jq '."error"."errors"[]."reason"')" = '"quotaExceeded"' ];then
echo "YT API Quota exceeded, using fallback"
lynx --dump --nonumbers -listonly "https://www.youtube.com/channel/$channel_id" | grep 'videos.xml' | xargs curl -s | grep -oE 'yt:video:[A-Za-z0-9_-]{11}' | sed 's|^yt:video:|https://www.youtube.com/watch?v=|' | grep -vf "$BLACKLIST" >> /tmp/todownload$$ #TODO: Use an actual XML parser instead of regexps
else
echo "$json" | jq '."items"[].id."videoId"' | tr -d '"' | grep -v '^null$'| sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
fi
done
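#youtube-dl's --download-archive file stores one '<extractor> <id>' pair per line (e.g. 'youtube <videoId>'), so turn the archived YouTube entries back into watch URLs for comparison.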
grep 'youtube' "$DLARCHIVE" | sed 's/youtube /https:\/\/www\.youtube\.com\/watch?v=/' > /tmp/alreadydownloaded$$
echo "Scanning on Bitchute..."
grep 'bitchute' "$CHANNELSFILE" | grep -v '^#' | xargs -L1 lynx --dump --nonumbers -listonly | grep 'bitchute\.com\/video' | sort -u | grep -vf "$BLACKLIST" >> /tmp/todownload$$
grep 'bitchute' "$DLARCHIVE" | sed 's/bitchute /https:\/\/www\.bitchute\.com\/video\//' >> /tmp/alreadydownloaded$$
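#Whatever was found above but is not yet in the download archive becomes the list of new videos.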
grep -vf /tmp/alreadydownloaded$$ /tmp/todownload$$ | sort -u > /tmp/new_videos$$
rm -f /tmp/alreadydownloaded$$ /tmp/todownload$$
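#wc -l prints '<count> <file>'; keep only the count.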
number=$(wc -l /tmp/new_videos$$ | cut -d ' ' -f 1 )
if [ "$number" -gt 0 ]; then
[ "$number" -gt 1 ] && plural="s"
notify-send "Channel Ripper" "$number new video$plural available for download, downloading now."
echo "$number new video$plural for download available, downloading now."
if [ "$number" -lt 10 ];then
youtube-dl --get-filename -o "'%(uploader)s' '%(title)s'" -a /tmp/new_videos$$ | xargs -L1 notify-send
fi
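#Download everything in one youtube-dl run: keep going on errors (-i), prefer mp4/m4a, embed metadata, save as <uploader>/<upload date>-<title>, and record finished IDs in the archive so they are skipped next time.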
youtube-dl --hls-prefer-native -i --download-archive "$DLARCHIVE" -f 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best' --add-metadata -o "$DLLOC/%(uploader)s/%(upload_date)s-%(title)s.%(ext)s" -a /tmp/new_videos$$
rm -f /tmp/new_videos$$
notify-send "Channel Ripper" "Finished downloading"
fi
if [ "$number" -eq 0 ]; then
echo "No new videos"
fi