dotfiles/scripts/.local/bin/vidl

#!/usr/bin/env bash
# download a file if it does not already exist in the archive,
# otherwise just re-link it from the archive
show_help() {
    cat <<'EOF'
vidl: Video downloader
Simple wrapper for youtube-dl (or yt-dlp or similar).

Usage: vidl [OPTION] <link>

Point it at a link you want downloaded.

Options:
  -h  Display this help.
  -d  Directory to check for existing files and to archive to if needed.
      If this is passed, already archived files will not be downloaded again.
  -f  Directory to download to.
  -c  Clear the existing download queue.
  -p  Print the number of remaining items in the queue.
  -t  youtube-dl command to use. Can be a command name or an absolute path.
      Defaults to `yt-dlp`, a more up-to-date fork of youtube-dl.
EOF
}
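# Example usage (illustrative only; the urls and paths below are placeholders,
# not part of this script):
#   vidl 'https://example.com/watch?v=abc123'        # queue and download one link
#   vidl -f ~/videos/inbox -d ~/videos/archive LINK  # custom download and archive dirs
#   vidl -p                                          # print remaining queue length
#   vidl -c                                          # clear the download queue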
while getopts "t:f:d:hcp" opt; do
    case "$opt" in
    # v) verbose=1
    # ;;
    f)
        DL_FOLDER="$OPTARG"
        ;;
    t)
        YT_DL_CMD="$OPTARG"
        ;;
    d)
        ARCHIVE_FOLDER="$OPTARG"
        ;;
    c)
        ONLY_DO=clear
        ;;
    p)
        ONLY_DO=remaining
        ;;
    h | \? | *)
        show_help
        exit 0
        ;;
    esac
done
shift $((OPTIND - 1))
# collect the remaining (non-option) arguments as urls only after the options
# have been consumed, so that flags do not end up in the download queue
urls=("$@")
# resolve the output filename the downloader would use for this url
# (the template contains no extension, so neither does the result)
get_ytdl_fname() {
    yt_dl_fname=$("$YT_DL_CMD" --get-filename -o "$YT_DL_TITLE" "$*")
    if [ -z "$yt_dl_fname" ]; then
        echo "Could not get video filename, error with youtube-dl." >&2
        exit 1
    fi
}
_call_archive() { # 1=new_file, 2=archive_location
    command -v archive >/dev/null 2>&1 || {
        echo "archive command not found." >&2
        exit 1
    }
    archive -d "$2" -a "$1"
}
# video extensions we care about, as a (GNU) sed basic-regex alternation
vid_ext="\(mp4\|avi\|webm\|mkv\|mpe\?g\|3gp\|m4a\)"
_findfile() { # 1=directory, 2=file
    find "$1" -type f -name "$2.*" | sed -ne "/$2\.$vid_ext$/Ip"
}
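# For example, given yt_dl_fname "channel_video_id" the pipeline above keeps
# matches such as "channel_video_id.mp4" or "channel_video_id.webm"; the /I flag
# makes the extension match case-insensitive. (The name here is illustrative.)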
_alreadyexists() { # 1=video_regex
    # if the archive folder does not exist yet, nothing can already be archived
    if [ ! -d "$ARCHIVE_FOLDER" ]; then return 1; fi
    local found
    found=$(_findfile "$ARCHIVE_FOLDER" "$1")
    if [ -n "$found" ]; then
        return 0
    else
        return 1
    fi
}
_should_archive() {
    if [ -n "$ARCHIVE_FOLDER" ]; then
        return 0
    else
        return 1
    fi
}
_download_cmd() {
    "$YT_DL_CMD" \
        -o "$TEMP_FOLDER/$YT_DL_TITLE.%(ext)s" \
        "${YT_DL_OPTS[@]}" \
        "$*"
}
download() { # 1=url
    # download the video to the download folder
    if ! _alreadyexists "$yt_dl_fname" || ! _should_archive; then
        _download_cmd "$*"
        file=$(_findfile "$TEMP_FOLDER" "$yt_dl_fname")
        mv "$file" "$DL_FOLDER"
        if _should_archive; then
            # yt-dl never knows the exact filename in advance
            file=$(_findfile "$DL_FOLDER" "$yt_dl_fname")
            if [ -z "$file" ]; then
                echo "Could not find downloaded file for '$yt_dl_fname'." >&2
                exit 1
            fi
            _call_archive "$file" "$ARCHIVE_FOLDER"
        fi
    # only link the old file if one already exists in the archive
    elif _should_archive; then
        archive_file=$(_findfile "$ARCHIVE_FOLDER" "$yt_dl_fname")
        echo "$archive_file" | while read -r file; do
            echo "file $file exists, not downloading duplicate"
            _call_archive "$file" "$DL_FOLDER"
        done
    fi
}
setup() {
    TEMP_FOLDER="${TEMP_FOLDER:-${HOME}/downloads}"
    DL_FOLDER="${DL_FOLDER:-${XDG_VIDEOS_DIR:-$HOME/videos}/inbox}"
    ARCHIVE_FOLDER="${ARCHIVE_FOLDER:-${XDG_VIDEOS_DIR:-$HOME/videos}/archive}"
    YT_DL_CMD="${YT_DL_CMD:-yt-dlp}"
    yt_default_opts=(-f "best[height<=1080]" --retries 15 --embed-subs --sub-lang "en,de,es,fr")
    # allow overriding the downloader options via the environment (word-split),
    # otherwise fall back to the defaults above
    if [ -n "${YT_DL_OPTS:-}" ]; then
        read -r -a YT_DL_OPTS <<<"$YT_DL_OPTS"
    else
        YT_DL_OPTS=("${yt_default_opts[@]}")
    fi
    YT_DL_TITLE="${YT_DL_TITLE:-%(channel)s_%(title)s_%(id)s}" # this title needs to be without extension
    data_dir="${XDG_DATA_HOME:-$HOME/.local/share}/vidl"
    cache_dir="${XDG_CACHE_HOME:-$HOME/.cache}/vidl"
    mkdir -p "$data_dir" "$cache_dir" "$TEMP_FOLDER" "$DL_FOLDER"
    queue_file="${data_dir}/vidl_queue"
    lock_file="${cache_dir}/vidl_lock"
}
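# With the default YT_DL_TITLE above, downloads end up with names along the
# lines of "SomeChannel_Some Title_abc123.mp4" (purely an illustrative example).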
is_in_queue() { # 1=url
    [ -f "$queue_file" ] || return 1
    grep -qxF -- "$1" "$queue_file"
}
add_to_queue() { # 1=url
    if is_in_queue "$1"; then return; fi
    echo "$1" >>"$queue_file"
    echo "added $1 to queue."
}
remove_from_queue() { # 1=url
    # the url is used as a sed address here; '|' delimiters avoid clashing with '/'
    sed -i.bak -e "\|$1|d" "$queue_file"
}
clear_queue() {
    rm -f "$queue_file"
}
remove_lock() {
    if ! rmdir "$lock_file"; then
        echo "Failed to remove lock '$lock_file'. Please remove manually before next run." >&2
        exit 1
    fi
}
# mkdir is atomic, so the lock directory doubles as a mutex between instances
is_only_instance() {
    if mkdir "$lock_file" 2>/dev/null; then
        trap "remove_lock" EXIT
        return 0
    else
        return 1
    fi
}
print_queue_remaining() {
    if [ ! -f "$queue_file" ]; then
        echo 0
        return
    fi
    wc -l <"$queue_file" | tr -d ' '
}
main() {
    setup
    if [ "$ONLY_DO" = "clear" ]; then
        clear_queue
        exit
    elif [ "$ONLY_DO" = "remaining" ]; then
        print_queue_remaining
        exit
    fi
    for url in "$@"; do
        add_to_queue "$url"
    done
    if is_only_instance; then
        echo "Download starting..."
        # re-read the head of the queue each time, so urls queued by other
        # instances while this one is running are picked up as well
        while [ -s "$queue_file" ]; do
            line=$(head -n 1 "$queue_file")
            get_ytdl_fname "$line"
            download "$line"
            remove_from_queue "$line"
        done
        echo "Download done..."
    else
        echo "Download already running, only adding to queue."
    fi
}
main "${urls[@]}"