various updates

parent d2a9963dad
commit 6859ef38c9
@@ -13,7 +13,8 @@ alias \
 yt="youtube-dl --add-metadata -o '%(upload_date)s-%(title)s.%(ext)s' -i" \
 yta="yt -x --add-metadata -f bestaudio/best" -o '%(upload_date)s-%(title)s.%(ext)s'\
 ffmpeg="ffmpeg -hide_banner" \
-sage="sage -q"
+sage="sage -q" \
+abook="abook --config $XDG_CONFIG_HOME/abook/abookrc --datafile $XDG_DATA_HOME/abook/addressbook"

 # Colorize commands when possible.
 alias \
@@ -509,23 +509,23 @@ use_console_editor = yes
 ## are used there is a possibility that you'll be able to use only colors with
 ## transparent background.
 #
-#colors_enabled = yes
+colors_enabled = yes
 #
 empty_tag_color = magenta
 #
-#header_window_color = magenta
+header_window_color = blue
 #
-#volume_color = default
+volume_color = default
 #
-#state_line_color = default
+state_line_color = default
 #
-#state_flags_color = default:b
+state_flags_color = default:b
 #
 main_window_color = white
 #
-#color1 = white
+color1 = white
 #
-#color2 = green
+color2 = green
 #
 progressbar_color = black:b
 #
@@ -535,11 +535,11 @@ statusbar_color = red
 #
 statusbar_time_color = cyan:b
 #
-#player_state_color = default:b
+player_state_color = default:b
 #
-#alternative_ui_separator_color = black:b
+alternative_ui_separator_color = black:b
 #
-#window_border_color = green
+window_border_color = green
 #
-#active_window_border = red
+active_window_border = red
 #
@@ -27,6 +27,7 @@ Plug 'dense-analysis/ale'
 Plug 'arcticicestudio/nord-vim'
 Plug 'tpope/vim-fugitive'
 Plug 'rhysd/vim-grammarous'
+" Plug 'goballooning/vim-live-latex-preview'
 call plug#end()

 " Integrated spell-check
@@ -31,6 +31,8 @@ settings:
 ""
 ',v': hint links spawn -d startmpv {hint-url}
 ',x': hint links spawn -d dmenuhandler {hint-url}
+ao: download-open;; download-remove
+ar: download-remove
 colors.webpage.darkmode.algorithm:
 global: lightness-cielab
 colors.webpage.darkmode.enabled:
@@ -52,11 +54,15 @@ settings:
 https://teams.microsoft.com: true
 content.media.audio_video_capture:
 https://teams.microsoft.com: true
+content.media.video_capture:
+https://teams.microsoft.com: true
 content.notifications.enabled:
 https://1337x.to: false
 https://boards.4chan.org: true
 https://boards.4channel.org: true
+https://forums.ankiweb.net: true
 https://iq.opengenus.org: false
+https://kiwifarms.cc: true
 https://nobodyhasthe.biz: true
 https://old.reddit.com: false
 https://riot.im: true
@@ -76,10 +82,12 @@ settings:
 global: false
 downloads.location.suggestion:
 global: path
+downloads.open_dispatcher:
+global: rifle {}
 downloads.position:
 global: top
 downloads.remove_finished:
-global: 0
+global: 1000000
 editor.command:
 global:
 - st
@@ -117,19 +125,20 @@ settings:
 tabs.tabs_are_windows:
 global: true
 url.default_page:
-global: https://searx.lukesmith.xyz/
+global: https://searx.cedars.xyz/
 url.open_base_url:
 global: true
 url.searchengines:
 global:
-DEFAULT: https://searx.lukesmith.xyz/?q={}
+DEFAULT: https://searx.cedars.xyz/?q={}
 ddg: https://duckduckgo.com/?q={}
+duden: https://www.duden.de/rechtschreibung/{}
 gg: https://linggle.com/?q={}
 ling: https://www.linguee.com/english-german/search?source=auto&query={}
 oz: http://www.ozdic.com/collocation-dictionary/{}
-sx: https://searx.lukesmith.xyz/?q={}&categories=general&language=en-US
+sx: https://searx.cedars.xyz/?q={}&categories=general&language=en-US
 yt: https://www.youtube.com/results?search_query={}
 url.start_pages:
-global: https://searx.lukesmith.xyz/
+global: https://searx.cedars.xyz/
 zoom.default:
 global: 160%
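Note on the searchengine hunk above: in qutebrowser, a keyword typed before the query in the open prompt picks the matching url.searchengines entry, so ":open duden Beispiel" resolves to https://www.duden.de/rechtschreibung/Beispiel, while a bare query now falls through to the searx.cedars.xyz DEFAULT entry ("Beispiel" is only an arbitrary sample query for illustration).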
@@ -1,6 +1,3 @@
-https://traditionalcatholicprayers.com/2019/11/12/how-to-pray-the-rosary-in-latin/ How to Pray the Rosary in Latin | Traditional Catholic Prayers
-https://people.math.ethz.ch/~fdalio/ANALYSISIIIDMAVTDMATLHS20 Personal Homepage of Prof. Dr. Francesca Da Lio
 http://n.ethz.ch/~staerkr/ Roland Stärk
-https://www.kath-flims-trin.ch/Pfarramt Pfarrer, Katholische Kirchgemeinde Flims-Trin
 https://ostechnix.com/setup-japanese-language-environment-arch-linux/ How To Setup Japanese Language Environment In Arch Linux
-https://www.localizingjapan.com/blog/2012/01/20/regular-expressions-for-japanese-text/ Regular Expressions for Japanese Text - Localizing Japan
+https://fsspx.today/chapel/zaitzkofen/ Zaitzkofen - Chapel app
@@ -13,3 +13,4 @@ wuf2 https://video.ethz.ch/live/lectures/zentrum/eta/eta-f-5.html
 rt2 https://moodle-app2.let.ethz.ch/course/view.php?id=14186
 mech2live https://video.ethz.ch/live/lectures/zentrum/eta/eta-f-5.html
 mech2unterlagen https://n.ethz.ch/~bamertd/download/
+out https://boards.4channel.org/out/
@@ -84,6 +84,9 @@ ext x?html?, has w3m, terminal = w3m "$@"
 #-------------------------------------------
 # Misc
 #-------------------------------------------
+#Import calendar files into calcurse
+mime text/calendar, has calcurse, flag f = calendaradd "$@"
+
 # Define the "editor" for text files as first action
 mime ^text, label editor = ${VISUAL:-$EDITOR} -- "$@"
 mime ^text, label pager = "$PAGER" -- "$@"
@@ -1 +1,2 @@
 # vim: filetype=sh
+#alias
@@ -7,7 +7,7 @@ do
 [ -z "$destdir" ] && destdir="$(sed "s/\s.*#.*$//;/^\s*$/d" ${XDG_CONFIG_HOME:-$HOME/.config}/directories | awk '{print $2}' | dmenu -l 20 -i -p "Copy file(s) to where?" | sed "s|~|$HOME|g")"
 [ -z "$destdir" ] && exit
 [ ! -d "$destdir" ] && notify-send "$destdir is not a directory, cancelled." && exit
-cp "$file" "$destdir" && notify-send -i "$(readlink -f "$file")" "$file copied to $destdir." &
+cp "$file" "$destdir" && notify-send -i "$(readlink -f "$file")" "sxiv" "$file copied to $destdir." &
 ;;
 "m")
 [ -z "$destdir" ] && destdir="$(sed "s/\s.*#.*$//;/^\s*$/d" ${XDG_CONFIG_HOME:-$HOME/.config}/directories | awk '{print $2}' | dmenu -l 20 -i -p "Move file(s) to where?" | sed "s|~|$HOME|g")"
@@ -58,8 +58,8 @@
 "seed-queue-size": 10,
 "speed-limit-down": 100,
 "speed-limit-down-enabled": false,
-"speed-limit-up": 100,
-"speed-limit-up-enabled": false,
+"speed-limit-up": 0,
+"speed-limit-up-enabled": true,
 "start-added-torrents": true,
 "trash-original-torrent-files": false,
 "umask": 18,
@@ -18,7 +18,7 @@
 "dht-enabled": true,
 "download-dir": "/home/alex/dls",
 "download-queue-enabled": true,
-"download-queue-size": 5,
+"download-queue-size": 1,
 "encryption": 1,
 "idle-seeding-limit": 30,
 "idle-seeding-limit-enabled": false,
@@ -78,6 +78,14 @@ startup(){
 fi
 }

+
+if [ "$1" = "edit" ]; then
+case $2 in
+"pairlist")${EDITOR:-vim} "$PAIRLIST" && exit;;
+"blacklist")${EDITOR:-vim} "$BLACKLIST" && exit;;
+*)true;;
+esac
+fi
 #Sadly needs to be run without '&' for now, since it otherwise breaks start_scan variable
 startup &
 update_pair_list &
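A rough usage sketch of the new edit branch above; the script's own name does not appear in this hunk, so "watcher" is only a placeholder:

watcher edit pairlist     # opens $PAIRLIST in ${EDITOR:-vim}, then exits
watcher edit blacklist    # same for $BLACKLIST
watcher edit other        # falls through (*) and the script starts up as usual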
@@ -34,9 +34,9 @@ mountandroid() { \
 chosen="$(echo "$anddrives" | dmenu -i -p "Which Android device?")" || exit 1
 chosen="$(echo "$chosen" | cut -d : -f 1)"
 getmount "$HOME -maxdepth 3 -type d"
-mtpfs --device "$chosen" "$mp"
+simple-mtpfs --device "$chosen" "$mp"
 #echo "OK" | dmenu -i -p "Tap Allow on your phone if it asks for permission and then press enter" || exit 1
-mtpfs --device "$chosen" "$mp"
+simple-mtpfs --device "$chosen" "$mp"
 notify-send "🤖 Android Mounting" "Android device mounted to $mp."
 }

@@ -17,6 +17,6 @@ elif ping -c 1 -W 2 -q webprint.ethz.ch ;then
 fi
 else
 printf '%s\n%s\n' "$USER" "$(pass $PASS_DIR)" > /tmp/tmp_pass
-[ -n "$choice" ] && st -t "openVPN" sudo openvpn --config "${XDG_CONFIG_HOME}/openvpn/cfgs/${choice}.ovpn" --auth-user-pass /tmp/tmp_pass & pkill -RTMIN+9 dwmblocks; sleep 2; rm -f /tmp/tmp_pass
+[ -n "$choice" ] && st -t "openVPN" sudo openvpn --config "${XDG_CONFIG_HOME}/openvpn/cfgs/${choice}.ovpn" --mute-replay-warnings --auth-user-pass /tmp/tmp_pass & pkill -RTMIN+9 dwmblocks; sleep 2; rm -f /tmp/tmp_pass
 fi
 pkill -RTMIN+9 dwmblocks
@@ -1,4 +1,5 @@
 #!/bin/sh
+#gets called by newsboat if it finds a new article.
 CACHE="${XDG_DATA_HOME:-$HOME/.local/share}/newsboat/cache.db"
 ALREADY_NOTIFIED="${XDG_DATA_HOME:-$HOME/.local/share}/newsboat/notified"
 [ -f "$ALREADY_NOTIFIED" ] || touch "$ALREADY_NOTIFIED"
@@ -6,8 +7,11 @@ ALREADY_NOTIFIED="${XDG_DATA_HOME:-$HOME/.local/share}/newsboat/notified"
 unread="$(sqlite3 "$CACHE" "SELECT unread,pubDate,title FROM rss_item;" |
 grep -vE '^0' | sort -t'|' -k2 -gr |
 grep -vf "$ALREADY_NOTIFIED")"
+[ -z "$unread" ] && exit
 echo "$unread" >> "$ALREADY_NOTIFIED"
 [ "$( echo "$unread" | wc -l)" -gt 1 ] && plural=s
 formatted="$(echo "$unread" | sed 's/[0-9]*|[0-9]*|//')"
-notify-send "newsboat" "$(printf 'new article%s:\n %s' "$plural" "$formatted")"
+notify-send "new article${plural}" "$formatted"

+#update statusbar to account for new articles
+pkill -RTMIN+13 dwmblocks
@@ -26,13 +26,13 @@ BLACKLIST="${XDG_VIDEOS_DIR:-$HOME/Videos}/.blacklist"
 DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u)/bus
 export DBUS_SESSION_BUS_ADDRESS
 export DISPLAY=:0.0
-XDG_DATA_HOME=/home/alex/.local/share
+export XDG_DATA_HOME=/home/alex/.local/share
 # Required to have pass work if run as cronjob
 export PASSWORD_STORE_GPG_OPTS="--homedir=$XDG_DATA_HOME/gnupg"
 export GTK2_RC_FILES="${XDG_CONFIG_HOME:-$HOME/.config}/gtk-2.0/gtkrc-2.0"
 [ -d "$HOME/.local/share/password-store" ] && export PASSWORD_STORE_DIR="$HOME/.local/share/password-store"

-APIKEY="$(pass show Misc/Youtube\ Data\ API\ v3 | head -n1 )"
+APIKEY="$(pass show API/Youtube\ Data\ API\ v3 | head -n1 )"
 LEGACYMODE=$1 #set to anything nonzero to ignore YT API

 [ -n "$LEGACYMODE" ] && printf "Using YT Legacy fallback mode...\nThis is less reliable than the API requests.\nOnly expect to find the last 5 videos or so per channel\n"
@@ -77,7 +77,11 @@ for el in root.iter():
 sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
 rm -f "/tmp/${channel_id}.xml"
 else
-echo "$json" | jq '."items"[].id."videoId"' | tr -d '"' | grep -v '^null$'| sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' | grep -vf "$BLACKLIST" >> /tmp/todownload$$
+#Only try to download videos, not sheduled livestreams
+echo "$json" | jq '.items[] | select(.snippet.liveBroadcastContent != "upcoming") | .id.videoId' |
+tr -d '"' | grep -v '^null$'|
+sed 's/^/https:\/\/www\.youtube\.com\/watch\?v=/' |
+grep -vf "$BLACKLIST" >> /tmp/todownload$$
 fi
 done
 grep 'youtube' "$DLARCHIVE" | sed 's/youtube /https:\/\/www\.youtube\.com\/watch?v=/' > /tmp/alreadydownloaded$$
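A minimal, self-contained sketch of what the new jq filter does, using made-up sample data rather than a real API response: items whose snippet.liveBroadcastContent is "upcoming" are dropped before the IDs are rewritten into watch URLs.

printf '%s' '{"items":[{"id":{"videoId":"abc123"},"snippet":{"liveBroadcastContent":"none"}},{"id":{"videoId":"def456"},"snippet":{"liveBroadcastContent":"upcoming"}}]}' |
    jq '.items[] | select(.snippet.liveBroadcastContent != "upcoming") | .id.videoId' |
    tr -d '"' | sed 's/^/https:\/\/www.youtube.com\/watch?v=/'
# prints only https://www.youtube.com/watch?v=abc123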
@@ -16,6 +16,7 @@ if [ -n "$diff" ]; then
 else
 midnight=$(( $(date --date "$(date | sed 's/..:..:../23:59:59/; s/ PM//; s/ AM//')" +%s) + 1 ))
 if [ $midnight -gt "$time_appointment" ]; then
+#only print appointment if dunst-notifications are also shown -> "privacy screen"
 if [ ! -f /tmp/nodunst ]; then
 printf '%s %s' "$(date --date "@$time_appointment" +'%H:%M')" "$appointment"
 fi
@@ -1,10 +1,5 @@
 #!/bin/sh
-if mpc status | grep -q '\[paused\]' || [ "$(mpc status | wc -l)" -eq 1 ]; then
-printf '\n'
-exit
-fi
-
-if [ $( mpc status | grep '\[playing\]' | wc -l ) -eq 1 ]; then
+if [ "$( mpc status | grep -c '\[playing\]')" -eq 1 ]; then
 full_length="$(mpc status | sed "/^volume:/d" | tac | sed -e "s/\\[paused\\].*//g;s/\\[playing\\].*//g" | tr -d '\n' | sed -e "s/$/ /g")"
 #remove full directory structure if no title present and too long
 if [ ${#full_length} -gt 40 ]; then
@@ -12,5 +7,6 @@ if [ $( mpc status | grep '\[playing\]' | wc -l ) -eq 1 ]; then
 else
 echo "$full_length"
 fi
-exit
+else
+printf '\n'
 fi
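Taken together, the two hunks above collapse the module into a single if/else; a minimal sketch of the resulting flow, assuming mpd is running and mpc is installed:

if [ "$(mpc status | grep -c '\[playing\]')" -eq 1 ]; then
    echo "$full_length"   # trimmed track info while something is playing
else
    printf '\n'           # paused or stopped: print an empty line for the bar
fi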
@@ -5,6 +5,7 @@
 # second, gives network traffic per second.

 lasttime=${XDG_CACHE_HOME:-$HOME/.cache}/nettraf_time
+#lasttime="$NETTRAF_TIME"

 update() {
 sum=0
@@ -1,134 +0,0 @@
-#!/bin/bash
-#A script that interacts with 4chans API to checks for media to download out of threads.
-#It uses the file name used by the uploader.
-#(and adds post no. to distinguish possible duplicate file names)
-#consider using it in a cronjob intermittently with something like
-#*/10 * * * * /home/<yourname>/.local/bin/threadwatcher scan
-THREADWATCHER_DIR=${XDG_DATA_HOME:-$HOME/.local/share}/4chan_watcher
-URLFILE="$THREADWATCHER_DIR/threads"
-TMP_URLFILE=/tmp/4chan_thread_watcher_tmp$$
-
-[ -d "$THREADWATCHER_DIR" ] || mkdir -p "$THREADWATCHER_DIR"
-[ -f "$URLFILE" ] || touch "$URLFILE"
-
-#Cronjob Notifications
-DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u)/bus
-export DBUS_SESSION_BUS_ADDRESS
-export DISPLAY=:0.0
-
-getfilelist(){
-true
-}
-scan(){
-ping -q -c 1 1.1.1.1 > /dev/null || ping -q -c 1 1.0.0.1 > /dev/null || ping -q -c 1 example.org || { echo "No internet connection detected."; exit ;}
-if [ $(wc -l < "$URLFILE") -gt 0 ]; then
-echo "scanning threads..."
-else
-echo "no threads to watch over currently"
-exit
-fi
-while read -r line; do
-running_dls=0
-url="$(echo "$line" | cut -f1)"
-echo "scanning $url"
-dl_location="$(echo "$line" | cut -f2)"
-mkdir -p "$dl_location"
-echo "downloading to $dl_location"
-json_url="$(echo "$url" | sed -E 's/boards\.(4chan|4channel)/a.4cdn/; s/$/.json/')"
-curl -s -L "$json_url" | jq . > /tmp/content$$
-if [ -z "$(</tmp/content$$)" ]; then
-echo "Thread $url not found ($dl_location) deleting from cached list of threads to watch"
-notify-send "threadwatcher" "Thread downloading $(echo "$dl_location" | sed "s|$HOME|~|") is complete now."
-continue
-else
-echo "$line" >> "$TMP_URLFILE"
-mkdir -p "$dl_location"
-fi
-files_json="$(jq '.posts[] | if has("filename") then {filename: "\(.no)_\(.filename)\(.ext)", location: "\(.tim)\(.ext)", md5: .md5} else null end ' < /tmp/content$$ | grep -vE '^null$')"
-rm /tmp/content$$
-#pastes together a multiline var using process substitution with layout: filename location md5
-files="$(paste <(paste <(echo "$files_json" | jq '.filename' | tr -d '"') <(echo "$files_json" | jq '.location' | tr -d '"')) <(echo "$files_json" | jq '.md5' | tr -d '"'))"
-echo "$files" | while read -r file_line; do
-filename="$(echo "$file_line" | cut -f1 | tr ' ' '_')"
-master_location="$(echo "$file_line" | cut -f2 | tr -d '"')"
-filelocation="$dl_location/$filename"
-correct_md5="$(echo "$file_line" | cut -f3)"
-[ -f "$filelocation" ] && slave_md5="$(openssl dgst -md5 -binary "$filelocation" | openssl enc -base64)"
-board="$(echo "$url" | cut -d '/' -f4)"
-file_url="https://i.4cdn.org/$board/$master_location"
-if [ -f "$filelocation" ] && [ "$correct_md5" = "$slave_md5" ]; then
-true
-else
-if [ "$correct_md5" != "$slave_md5" ] && [ -f "$filelocation" ]; then
-rm "$filelocation"
-echo "removed $filename because of incorrect checksum, redownloading."
-fi
-[ -f "$filelocation" ] ||
-touch "$filelocation" #to keep atime order correct?
-#limit concurrent dls
-if [ $running_dls -gt 25 ]; then
-wait
-running_dls=0
-fi
-wget -q -O "$filelocation" "$file_url" &
-echo downloading "$filelocation"
-((running_dls=running_dls+1))
-fi
-done
-wait
-done<"$URLFILE"
-mv "$TMP_URLFILE" "$URLFILE"
-}
-
-makepathabsolute(){
-if echo "$1" | grep -qE '^/'; then
-dl_location="$1"
-else
-dl_location="$HOME/$1"
-fi
-echo "$dl_location"
-}
-case "$1" in
-"add") dl_location="$(makepathabsolute "$3")"
-if grep -qP "^$2\t" "$URLFILE"; then
-dl_location_already="$(grep -P "^$2\t" "$URLFILE" | cut -f2)"
-notify-send "threadwatcher" "Thread already being watched. currently downloads to $(echo "$dl_location_already" | sed "s|$HOME|~|")"
-prompt "Do you want to change download directory to $3?" &&
-new_location="$dl_location" ||
-exit 0
-[ -z "$new_location" ] && exit
-sed -i "s|$dl_location_already|$new_location|" "$URLFILE"
-
-## Move already downloaded files to new location
-mkdir -p "$new_location"
-url="$2"
-json_url="$(echo "$url" | sed -E 's/boards\.(4chan|4channel)/a.4cdn/; s/$/.json/')"
-curl -s -L "$json_url" | jq . > /tmp/content$$
-files_json="$(jq '.posts[] | if has("filename") then {filename: "\(.no)_\(.filename)\(.ext)", location: "\(.tim)\(.ext)"} else null end ' < /tmp/content$$ | grep -vE '^null$')"
-rm /tmp/content$$
-files="$(paste <(echo "$files_json" | jq '.filename' | tr -d '"') <(echo "$files_json" | jq '.location' | tr -d '"'))"
-echo "$files" | while read -r file_line; do
-filename="$(echo "$file_line" | cut -f1 | tr ' ' '_')"
-mv -v "$dl_location_already/$filename" "$new_location"
-done
-rmdir --ignore-fail-on-non-empty "$dl_location_already"
-notify-send "threadwatcher" "already downloaded files moved to $new_location. New files will also be downloaded there"
-
-else
-printf "%s\t%s\n" "$2" "$dl_location" | tee -ai "$URLFILE"
-echo "added $2 to threadwatcher list. Downloading to $dl_location"
-fi
-echo "dl_location:$dl_location"
-scan;;
-"scan") scan;;
-"list") printf "Thread:\t\t\t\t\t\tDownload location:\n"
-sed "s|$HOME|~|" "$URLFILE";;
-"clean")
-echo "Watchlist used up to now:"
-cat "$URLFILE"
-echo "Deleting..."
-rm "$URLFILE"
-touch "$URLFILE";;
-"edit") ${EDITOR:-vim} "$URLFILE";;
-*)printf "Incorrect usage.\n\tthreadwatcher [add URL DL_LOCATION] [list] [edit] [clean]\n"; exit 1;;
-esac
@@ -29,6 +29,7 @@ newwidth=$(( 2 * screenwidth / 5 ))
 borderwidth=2

 # see https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html for gravity specs
+# I do not believe that gravity is being respected in dwm.
 case "$1" in
 left) horizontal=$(( borderwidth ))
 vertical=$(( screenheight - newheight - 2 * borderwidth ))
@@ -1,90 +0,0 @@
-#!/usr/bin/env python3
-# matrix_decrypt - Download and decrypt an encrypted attachment
-# from a matrix server
-
-# Copyright © 2019 Damir Jelić <poljar@termina.org.uk>
-#
-# Permission to use, copy, modify, and/or distribute this software for
-# any purpose with or without fee is hereby granted, provided that the
-# above copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
-# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
-# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
-# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
-# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import argparse
-import requests
-import tempfile
-import subprocess
-
-from urllib.parse import urlparse, parse_qs
-from nio.crypto import decrypt_attachment
-
-
-def save_file(data):
-    """Save data to a temporary file and return its name."""
-    tmp_dir = tempfile.gettempdir()
-
-    with tempfile.NamedTemporaryFile(
-        prefix='plumber-',
-        dir=tmp_dir,
-        delete=False
-    ) as f:
-        f.write(data)
-        f.flush()
-        return f.name
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description='Download and decrypt matrix attachments'
-    )
-    parser.add_argument('url', help='the url of the attachment')
-    parser.add_argument('file', nargs='?', help='save attachment to <file>')
-    parser.add_argument('--plumber',
-                        help='program that gets called with the '
-                             'dowloaded file')
-
-    args = parser.parse_args()
-    url = urlparse(args.url)
-    query = parse_qs(url.query)
-
-    if not query["key"] or not query["iv"] or not query["hash"]:
-        print("Missing decryption argument")
-        return -1
-
-    key = query["key"][0]
-    iv = query["iv"][0]
-    hash = query["hash"][0]
-
-    http_url = "https://{}{}".format(url.netloc, url.path)
-
-    request = requests.get(http_url)
-
-    if not request.ok:
-        print("Error downloading file")
-        return -2
-
-    plumber = args.plumber
-    plaintext = decrypt_attachment(request.content, key, hash, iv)
-
-    if args.file is None:
-        file_name = save_file(plaintext)
-        if plumber is None:
-            plumber = "xdg-open"
-    else:
-        file_name = args.file
-        open(file_name, "wb").write(plaintext)
-
-    if plumber is not None:
-        subprocess.run([plumber, file_name])
-
-    return 0
-
-
-if __name__ == "__main__":
-    main()
@@ -1,318 +0,0 @@
-#!/usr/bin/env -S python3 -u
-# Copyright © 2018 Damir Jelić <poljar@termina.org.uk>
-#
-# Permission to use, copy, modify, and/or distribute this software for
-# any purpose with or without fee is hereby granted, provided that the
-# above copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
-# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
-# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
-# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
-# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-
-import os
-import json
-import magic
-import requests
-import argparse
-from urllib.parse import urlparse
-from itertools import zip_longest
-import urllib3
-
-from nio import Api, UploadResponse, UploadError
-from nio.crypto import encrypt_attachment
-
-from json.decoder import JSONDecodeError
-
-urllib3.disable_warnings()
-
-
-def to_stdout(message):
-    print(json.dumps(message), flush=True)
-
-
-def error(e):
-    message = {
-        "type": "status",
-        "status": "error",
-        "message": str(e)
-    }
-    to_stdout(message)
-    os.sys.exit()
-
-
-def mime_from_file(file):
-    try:
-        t = magic.from_file(file, mime=True)
-    except AttributeError:
-        try:
-            m = magic.open(magic.MIME)
-            m.load()
-            t, _ = m.file(file).split(';')
-        except AttributeError:
-            error('Your \'magic\' module is unsupported. '
-                  'Install either https://github.com/ahupp/python-magic '
-                  'or https://github.com/file/file/tree/master/python '
-                  '(official \'file\' python bindings, available as the '
-                  'python-magic package on many distros)')
-
-            raise SystemExit
-
-    return t
-
-
-class Upload(object):
-    def __init__(self, file, chunksize=1 << 13):
-        self.file = file
-        self.filename = os.path.basename(file)
-        self.chunksize = chunksize
-        self.totalsize = os.path.getsize(file)
-        self.mimetype = mime_from_file(file)
-        self.readsofar = 0
-
-    def send_progress(self):
-        message = {
-            "type": "progress",
-            "data": self.readsofar
-        }
-        to_stdout(message)
-
-    def __iter__(self):
-        with open(self.file, 'rb') as file:
-            while True:
-                data = file.read(self.chunksize)
-
-                if not data:
-                    break
-
-                self.readsofar += len(data)
-                self.send_progress()
-
-                yield data
-
-    def __len__(self):
-        return self.totalsize
-
-
-def chunk_bytes(iterable, n):
-    args = [iter(iterable)] * n
-    return (
-        bytes(
-            (filter(lambda x: x is not None, chunk))
-        ) for chunk in zip_longest(*args)
-    )
-
-
-class EncryptedUpload(Upload):
-    def __init__(self, file, chunksize=1 << 13):
-        super().__init__(file, chunksize)
-        self.source_mimetype = self.mimetype
-        self.mimetype = "application/octet-stream"
-
-        with open(self.filename, "rb") as file:
-            self.ciphertext, self.file_keys = encrypt_attachment(file.read())
-
-    def send_progress(self):
-        message = {
-            "type": "progress",
-            "data": self.readsofar
-        }
-        to_stdout(message)
-
-    def __iter__(self):
-        for chunk in chunk_bytes(self.ciphertext, self.chunksize):
-            self.readsofar += len(chunk)
-            self.send_progress()
-            yield chunk
-
-    def __len__(self):
-        return len(self.ciphertext)
-
-
-class IterableToFileAdapter(object):
-    def __init__(self, iterable):
-        self.iterator = iter(iterable)
-        self.length = len(iterable)
-
-    def read(self, size=-1):
-        return next(self.iterator, b'')
-
-    def __len__(self):
-        return self.length
-
-
-def upload_process(args):
-    file_path = os.path.expanduser(args.file)
-    thumbnail = None
-
-    try:
-        if args.encrypt:
-            upload = EncryptedUpload(file_path)
-
-            if upload.source_mimetype.startswith("image"):
-                # TODO create a thumbnail
-                thumbnail = None
-        else:
-            upload = Upload(file_path)
-
-    except (FileNotFoundError, OSError, IOError) as e:
-        error(e)
-
-    try:
-        url = urlparse(args.homeserver)
-    except ValueError as e:
-        error(e)
-
-    upload_url = ("https://{}".format(args.homeserver)
-                  if not url.scheme else args.homeserver)
-    _, api_path, _ = Api.upload(args.access_token, upload.filename)
-    upload_url += api_path
-
-    headers = {
-        "Content-type": upload.mimetype,
-    }
-
-    proxies = {}
-
-    if args.proxy_address:
-        user = args.proxy_user or ""
-
-        if args.proxy_password:
-            user += ":{}".format(args.proxy_password)
-
-        if user:
-            user += "@"
-
-        proxies = {
-            "https": "{}://{}{}:{}/".format(
-                args.proxy_type,
-                user,
-                args.proxy_address,
-                args.proxy_port
-            )
-        }
-
-    message = {
-        "type": "status",
-        "status": "started",
-        "total": upload.totalsize,
-        "file_name": upload.filename,
-    }
-
-    if isinstance(upload, EncryptedUpload):
-        message["mimetype"] = upload.source_mimetype
-    else:
-        message["mimetype"] = upload.mimetype
-
-    to_stdout(message)
-
-    session = requests.Session()
-    session.trust_env = False
-
-    try:
-        r = session.post(
-            url=upload_url,
-            auth=None,
-            headers=headers,
-            data=IterableToFileAdapter(upload),
-            verify=(not args.insecure),
-            proxies=proxies
-        )
-    except (requests.exceptions.RequestException, OSError) as e:
-        error(e)
-
-    try:
-        json_response = json.loads(r.content)
-    except JSONDecodeError:
-        error(r.content)
-
-    response = UploadResponse.from_dict(json_response)
-
-    if isinstance(response, UploadError):
-        error(str(response))
-
-    message = {
-        "type": "status",
-        "status": "done",
-        "url": response.content_uri
-    }
-
-    if isinstance(upload, EncryptedUpload):
-        message["file_keys"] = upload.file_keys
-
-    to_stdout(message)
-
-    return 0
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description="Encrypt and upload matrix attachments"
-    )
-    parser.add_argument("file", help="the file that will be uploaded")
-    parser.add_argument(
-        "homeserver",
-        type=str,
-        help="the address of the homeserver"
-    )
-    parser.add_argument(
-        "access_token",
-        type=str,
-        help="the access token to use for the upload"
-    )
-    parser.add_argument(
-        "--encrypt",
-        action="store_const",
-        const=True,
-        default=False,
-        help="encrypt the file before uploading it"
-    )
-    parser.add_argument(
-        "--insecure",
-        action="store_const",
-        const=True,
-        default=False,
-        help="disable SSL certificate verification"
-    )
-    parser.add_argument(
-        "--proxy-type",
-        choices=[
-            "http",
-            "socks4",
-            "socks5"
-        ],
-        default="http",
-        help="type of the proxy that will be used to establish a connection"
-    )
-    parser.add_argument(
-        "--proxy-address",
-        type=str,
-        help="address of the proxy that will be used to establish a connection"
-    )
-    parser.add_argument(
-        "--proxy-port",
-        type=int,
-        default=8080,
-        help="port of the proxy that will be used to establish a connection"
-    )
-    parser.add_argument(
-        "--proxy-user",
-        type=str,
-        help="user that will be used for authentication on the proxy"
-    )
-    parser.add_argument(
-        "--proxy-password",
-        type=str,
-        help="password that will be used for authentication on the proxy"
-    )
-
-    args = parser.parse_args()
-    upload_process(args)
-
-
-if __name__ == "__main__":
-    main()
@@ -12,7 +12,7 @@ else
 fi

 if [ "$choice" = "🔍" ]; then
-$browser "https://searx.bocken.org"
+$browser "https://searx.cedars.xyz"
 elif [ "$choice" = "📋" ]; then
 $browser "$(xclip -o -selection clipboard)"
 else
@@ -20,6 +20,6 @@ else
 $browser "$choice"
 else
 string="$(echo $choice | sed 's/\ /+/g')"
-$browser "https://searx.lukesmith.xyz/?q=$string&t=ffab&atb=v1-1"
+$browser "https://searx.cedars.xyz/?q=$string&t=ffab&atb=v1-1"
 fi
 fi
@@ -6,10 +6,11 @@ if echo "$1" | grep -qE 'youtube.*list'; then
 # The above mentioned script needs Youtube API access, (see the repo for more info on that)
 # I've saved my API key in my password manager, I reccomend you to do something similar.
 API_key="$(pass show 'Misc/Youtube Data API v3' | head -n1 )"
+index="$(echo "$1" | perl -pe "s|&index=([0-9]+)|\1|")"
 playlist_id="$( echo "$1" | perl -pe "s|^.*?(\?\|&)list=(.*?)|\2|; s|&index=[0-9]+||" )"
 notify-send "startmpv" "Searching for all videos in playlist..."
 curl "https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&maxResults=500&playlistId=$playlist_id&key=$API_key" |
-jq '."items"[]."snippet"."resourceId"."videoId"' |
+jq '.items[].snippet.resourceId.videoId' |
 sed 's/^/https:\/\/www.youtube.com\/watch?v=/' |
 xargs mpv --ytdl-format='bestvideo[ext=mp4][width<=1920][height<=1080]+bestaudio[ext=m4a]/best[ext=mp4]/best' ||
 notify-send "startmpv" "An error has occured with playlist $playlist_id"
.zprofile
@@ -47,7 +47,20 @@ export GOPATH="${XDG_DATA_HOME:-$HOME/.local/share}/go"
 export ANSIBLE_CONFIG="${XDG_CONFIG_HOME:-$HOME/.config}/ansible/ansible.cfg"
 export SSB_HOME="$XDG_DATA_HOME/zoom"
 export PASSWORD_STORE_GPG_OPTS='--homedir=/home/alex/.local/share/gnupg'
+export MAILCAPS="${XDG_CONFIG_HOME:-$HOME/.config}/mutt/mailcap"
+export CABAL_CONFIG="$XDG_CONFIG_HOME"/cabal/config
+export CABAL_DIR="$XDG_CACHE_HOME"/cabal
+export GRADLE_USER_HOME="$XDG_DATA_HOME"/gradle
+export IPYTHONDIR="$XDG_CONFIG_HOME"/jupyter
+export JUPYTER_CONFIG_DIR="$XDG_CONFIG_HOME"/jupyter
+export _JAVA_OPTIONS=-Djava.util.prefs.userRoot="$XDG_CONFIG_HOME"/java
+export JULIA_DEPOT_PATH="$XDG_DATA_HOME/julia:$JULIA_DEPOT_PATH"
+export NUGET_PACKAGES="$XDG_CACHE_HOME"/NuGetPackages
+export MAXIMA_USERDIR="$XDG_CONFIG_HOME"/maxima
+export WEECHAT_HOME="$XDG_CONFIG_HOME"/weechat
+export TEXMFHOME=$XDG_DATA_HOME/texmf
+export TEXMFVAR=$XDG_CACHE_HOME/texlive/texmf-var
+export TEXMFCONFIG=$XDG_CONFIG_HOME/texlive/texmf-config

 # XDG USER DIRECOTRIES
 export XDG_DESKTOP_DIR="$HOME"
@@ -152,3 +165,4 @@ ex=🎯:\

 # Switch escape and caps if tty and no passwd required:
 sudo -n loadkeys ${XDG_DATA_HOME:-$HOME/.local/share}/larbs/ttymaps.kmap 2>/dev/null
+source "/home/alex/.local/share/cargo/env"
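A quick spot check (not part of the commit) that the new XDG relocations from the .zprofile hunk are picked up after re-login:

env | grep -E 'CABAL_|GRADLE_USER_HOME|JUPYTER_CONFIG_DIR|TEXMF|WEECHAT_HOME'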