# NOTE: Cookie files at:
# ~/.config/yt-dlp/cookies-youtube.txt
# ~/.config/yt-dlp/cookies-bilibili.txt
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.programs.yt-dlp-custom;
in
{
options.programs.yt-dlp-custom = {
enable = mkEnableOption "yt-dlp video downloader configuration";
package = mkOption {
type = types.package;
default = pkgs.yt-dlp;
# literalExpression renders the example as Nix code in generated docs
# instead of a quoted string value.
example = literalExpression "pkgs.yt-dlp";
description = "yt-dlp package to use";
};
downloadDir = mkOption {
type = types.str;
default = "~/Downloads/Videos";
example = "/mnt/storage/videos";
description = "Base directory for downloaded videos";
};
};
config = mkIf cfg.enable {
# Install yt-dlp, deno, and ffmpeg
# Deno is required for YouTube downloads (GitHub issue #14404)
home.packages = with pkgs; [
cfg.package # yt-dlp package chosen via programs.yt-dlp-custom.package
deno # Required for YouTube downloads due to JS challenges
ffmpeg # Needed by yt-dlp to merge video+audio (--merge-output-format mp4)
jq # For JSON parsing in cleanup functions
python312Packages.bgutil-ytdlp-pot-provider # PO token provider for YouTube
];
# Create yt-dlp configuration file
home.file.".config/yt-dlp/config".text = ''
# Quality settings: prefer mp4 video + m4a audio up to 1080p,
# falling back to the best available single file
--format "bestvideo[ext=mp4][height<=1080]+bestaudio[ext=m4a]/best[ext=mp4][height<=1080]/best"
--merge-output-format mp4
# Download options
--no-playlist
--embed-thumbnail
# Sidecar files; the .info.json is also consumed by the shell helpers
# (_generate_jellyfin_nfo, dlv-remove-older)
--write-thumbnail
--write-description
--write-info-json
# Error handling: keep going when individual videos fail
--ignore-errors
--no-abort-on-error
# File naming and organization
# Allow unicode characters in filenames for Chinese/Japanese content
# Performance
--concurrent-fragments 4
--retries 10
--fragment-retries 10
# SponsorBlock for YouTube
--sponsorblock-mark all
# Remote components for JavaScript challenge solving (required for YouTube)
--remote-components ejs:npm
# Extractor arguments for format handling
--extractor-args "youtube:formats=missing_pot"
# User agent
--user-agent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
'';
programs.zsh.initContent = ''
# Base download directory (interpolated from programs.yt-dlp-custom.downloadDir)
DOWNLOAD_DIR="${cfg.downloadDir}"
# Expand a leading "~" to $HOME (tilde is not expanded inside quotes)
DOWNLOAD_DIR="''${DOWNLOAD_DIR/#\~/$HOME}"
# Retry configuration (used by _retry_download; dlv -r overrides MAX_RETRIES)
MAX_RETRIES=10
BASE_DELAY=10
# Retry wrapper with exponential backoff.
# $1: command string (run via eval).
# Returns 0 on the first success, 1 once all attempts are exhausted.
_retry_download() {
  local cmd="$1"
  # Always make at least one attempt: "-r 0" means "no retries", not
  # "never run the command" (previously MAX_RETRIES=0 skipped the loop
  # entirely and reported failure without downloading anything).
  local max_attempts=$MAX_RETRIES
  [[ $max_attempts -lt 1 ]] && max_attempts=1
  local attempt=1
  local delay=$BASE_DELAY
  while [[ $attempt -le $max_attempts ]]; do
    echo "Attempt $attempt/$max_attempts..."
    eval "$cmd"
    local result=$?
    if [[ $result -eq 0 ]]; then
      return 0
    fi
    if [[ $attempt -lt $max_attempts ]]; then
      echo "Download failed, retrying in ''${delay}s..."
      sleep $delay
      delay=$((delay * 2)) # Exponential backoff
    else
      echo "All retry attempts failed"
    fi
    ((attempt++))
  done
  return 1
}
# Generate Jellyfin-compatible NFO metadata files from yt-dlp's .info.json.
# $1: path to the downloaded video file.
# Expects the layout produced by dlv: <series>/Season <YYYY>/S<YYYY>E<MMDD> - Title.ext
# Writes <video>.nfo next to the video, plus tvshow.nfo / season.nfo and
# poster images when missing. Returns 1 if $1 is empty or no .info.json exists.
_generate_jellyfin_nfo() {
  local filepath="$1"
  [[ -z "$filepath" ]] && return 1
  local dir=$(dirname "$filepath")
  local basename=$(basename "$filepath")
  local name_noext="''${basename%.*}"
  local season_dir="$dir"
  local series_dir=$(dirname "$season_dir")
  local json_file="$dir/$name_noext.info.json"
  [[ ! -f "$json_file" ]] && return 1
  local title=$(jq -r '.title // "Unknown"' "$json_file")
  # Jellyfin truncates very long plots anyway; cap at 2000 bytes
  local description=$(jq -r '.description // ""' "$json_file" | head -c 2000)
  local upload_date=$(jq -r '.upload_date // ""' "$json_file")
  local uploader=$(jq -r '.uploader // "Unknown"' "$json_file")
  # Derive season/episode from the YYYYMMDD upload date:
  # season = year, episode = MMDD.
  local season_num=""
  local episode_num=""
  local aired_date=""
  if [[ ''${#upload_date} -eq 8 ]]; then
    season_num="''${upload_date:0:4}"
    episode_num="''${upload_date:4:4}"
    aired_date="''${upload_date:0:4}-''${upload_date:4:2}-''${upload_date:6:2}"
  fi
  # XML-escape the metadata. '&' must be escaped first so the entities we
  # introduce are not double-escaped; '\&' stops sed from expanding '&'
  # (the whole match) in the replacement text.
  description=$(echo "$description" | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g')
  title=$(echo "$title" | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g')
  uploader=$(echo "$uploader" | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g')
  local nfo_file="$dir/$name_noext.nfo"
  cat > "$nfo_file" << EPISODENFO
<?xml version="1.0" encoding="utf-8"?>
<episodedetails>
  <title>$title</title>
  <season>$season_num</season>
  <episode>$episode_num</episode>
  <aired>''${aired_date:-}</aired>
  <plot>$description</plot>
</episodedetails>
EPISODENFO
  # Series-level NFO: one per uploader directory, never overwritten
  if [[ ! -f "$series_dir/tvshow.nfo" ]]; then
    cat > "$series_dir/tvshow.nfo" << TVSHOWNFO
<?xml version="1.0" encoding="utf-8"?>
<tvshow>
  <title>$uploader</title>
  <plot>Videos from $uploader</plot>
</tvshow>
TVSHOWNFO
  fi
  # Season-level NFO: only when we could derive a season (year)
  if [[ ! -f "$season_dir/season.nfo" ]] && [[ -n "$season_num" ]]; then
    cat > "$season_dir/season.nfo" << SEASONNFO
<?xml version="1.0" encoding="utf-8"?>
<season>
  <title>Season $season_num</title>
  <seasonnumber>$season_num</seasonnumber>
</season>
SEASONNFO
  fi
  # Rename the yt-dlp thumbnail to Jellyfin's "-thumb" convention and
  # seed series/season posters from it when none exist yet.
  local thumb_file=""
  for ext in jpg webp png; do
    if [[ -f "$dir/$name_noext.$ext" ]]; then
      thumb_file="$dir/$name_noext.$ext"
      break
    fi
  done
  if [[ -n "$thumb_file" ]]; then
    local thumb_ext="''${thumb_file##*.}"
    mv "$thumb_file" "$dir/$name_noext-thumb.$thumb_ext" 2>/dev/null
    if [[ ! -f "$series_dir/poster.jpg" ]] && [[ ! -f "$series_dir/poster.webp" ]] && [[ ! -f "$series_dir/poster.png" ]]; then
      cp "$dir/$name_noext-thumb.$thumb_ext" "$series_dir/poster.$thumb_ext"
    fi
    if [[ ! -f "$season_dir/poster.jpg" ]] && [[ ! -f "$season_dir/poster.webp" ]] && [[ ! -f "$season_dir/poster.png" ]]; then
      cp "$dir/$name_noext-thumb.$thumb_ext" "$season_dir/poster.$thumb_ext"
    fi
  fi
}
# Unified video download function for YouTube and Bilibili.
# Usage: dlv [OPTIONS] <youtube|bilibili> <url>
# Downloads into $DOWNLOAD_DIR using a Jellyfin TV-show layout, with
# retry/backoff via _retry_download and NFO generation on success.
dlv() {
  local platform=""
  local playlist_mode=false
  local max_downloads=""
  local custom_retries=""
  local min_duration=""
  local max_duration=""
  local title_filter=""
  local days_filter=""
  local url=""
  # Parse arguments; unknown words accumulate into the URL
  while [[ $# -gt 0 ]]; do
    case "$1" in
      -p|--playlist)
        playlist_mode=true
        shift
        ;;
      -n|--count)
        max_downloads="$2"
        shift 2
        ;;
      -r|--retries)
        custom_retries="$2"
        shift 2
        ;;
      --min)
        min_duration="$2"
        shift 2
        ;;
      --max)
        max_duration="$2"
        shift 2
        ;;
      --title)
        title_filter="$2"
        shift 2
        ;;
      --days|--within-days)
        days_filter="$2"
        shift 2
        ;;
      youtube|bilibili)
        platform="$1"
        shift
        ;;
      *)
        url="$url $1"
        shift
        ;;
    esac
  done
  url="''${url## }" # Trim leading space
  # Validate inputs
  if [[ -z "$platform" ]] || [[ -z "$url" ]]; then
    echo "Usage: dlv [OPTIONS] <youtube|bilibili> <url>"
    echo ""
    echo "Arguments:"
    echo "  youtube|bilibili      Platform to download from"
    echo "  <url>                 Video or playlist URL"
    echo ""
    echo "Options:"
    echo "  -p, --playlist        Download as playlist"
    echo "  -n, --count <N>       Limit number of videos to process/download"
    echo "  -r, --retries <N>     Number of retry attempts (0 for no retries, default: 10)"
    echo "  --min <minutes>       Minimum video duration in minutes"
    echo "  --max <minutes>       Maximum video duration in minutes"
    echo "  --title <text>        Filter videos by title (case-insensitive)"
    echo "  --days <N>            Download videos uploaded within N days"
    echo ""
    echo "Examples:"
    echo "  dlv youtube <url>                        - Download single YouTube video"
    echo "  dlv youtube -p <url>                     - Download YouTube playlist"
    echo "  dlv youtube --min 5 --max 30 <url>       - Download videos between 5-30 minutes"
    echo "  dlv youtube --title \"tutorial\" <url>     - Download videos with 'tutorial' in title"
    echo "  dlv youtube --days 7 -p <url>            - Download playlist videos from last 7 days"
    echo "  dlv bilibili -p -n 10 <url>              - Download first 10 videos from playlist"
    return 1
  fi
  # Override MAX_RETRIES if specified; dynamic scoping makes this local
  # visible to _retry_download while it runs on dlv's behalf.
  [[ -n "$custom_retries" ]] && local MAX_RETRIES="$custom_retries"
  # Platform-specific configuration
  local cookies_file platform_name platform_flags
  case "$platform" in
    youtube)
      cookies_file="$HOME/.config/yt-dlp/cookies-youtube.txt"
      platform_name="YouTube"
      platform_flags=""
      ;;
    bilibili)
      cookies_file="$HOME/.config/yt-dlp/cookies-bilibili.txt"
      platform_name="Bilibili"
      platform_flags="--referer https://www.bilibili.com/"
      ;;
  esac
  # Build match filter (duration and/or title)
  local match_filter=""
  local filter_parts=()
  # Duration filter (user supplies minutes; yt-dlp compares seconds)
  if [[ -n "$min_duration" ]] || [[ -n "$max_duration" ]]; then
    local min_sec=""
    local max_sec=""
    [[ -n "$min_duration" ]] && min_sec=$((min_duration * 60))
    [[ -n "$max_duration" ]] && max_sec=$((max_duration * 60))
    if [[ -n "$min_sec" ]] && [[ -n "$max_sec" ]]; then
      filter_parts+=("duration >= $min_sec & duration <= $max_sec")
    elif [[ -n "$min_sec" ]]; then
      filter_parts+=("duration >= $min_sec")
    elif [[ -n "$max_sec" ]]; then
      filter_parts+=("duration <= $max_sec")
    fi
  fi
  # Title filter (case-insensitive regex match)
  if [[ -n "$title_filter" ]]; then
    filter_parts+=("title ~= '(?i).*$title_filter.*'")
  fi
  # Combine filters with yt-dlp's "&" conjunction
  if [[ ''${#filter_parts[@]} -gt 0 ]]; then
    local combined_filter
    combined_filter=$(IFS=" & "; echo "''${filter_parts[*]}")
    match_filter="--match-filter \"$combined_filter\""
  fi
  # Output template (Jellyfin TV show format):
  # <Platform>/<Uploader>[-<Playlist>]/Season <YYYY>/S<YYYY>E<MMDD> - Title.ext
  local output_template
  if [[ "$playlist_mode" == true ]]; then
    output_template="$DOWNLOAD_DIR/$platform_name/%(uploader|Unknown)s-%(playlist|)s/Season %(upload_date>%Y|0000)s/S%(upload_date>%Y|0000)sE%(upload_date>%m%d|0000)s - %(title)s.%(ext)s"
  else
    output_template="$DOWNLOAD_DIR/$platform_name/%(uploader|Unknown)s/Season %(upload_date>%Y|0000)s/S%(upload_date>%Y|0000)sE%(upload_date>%m%d|0000)s - %(title)s.%(ext)s"
  fi
  local archive_file="$DOWNLOAD_DIR/.archive.txt"
  # Setup and display info
  mkdir -p "$DOWNLOAD_DIR"
  if [[ "$playlist_mode" == true ]]; then
    echo "Downloading $platform_name playlist..."
    [[ -n "$max_downloads" ]] && echo "Limiting to $max_downloads videos"
  else
    echo "Downloading $platform_name video..."
    [[ -n "$max_downloads" ]] && echo "Processing max $max_downloads videos"
  fi
  echo "Output directory: $DOWNLOAD_DIR/$platform_name"
  # Build the command string (executed via eval in _retry_download,
  # hence the embedded single quotes around user-derived values)
  local cmd="yt-dlp $platform_flags $match_filter --no-write-playlist-metafiles"
  if [[ "$playlist_mode" == true ]]; then
    cmd="$cmd --yes-playlist"
  fi
  [[ -n "$max_downloads" ]] && cmd="$cmd --playlist-end '$max_downloads'"
  [[ -n "$days_filter" ]] && cmd="$cmd --dateafter 'today-''${days_filter}days'"
  # Use cookies only when the platform's cookie file exists
  [[ -f "$cookies_file" ]] && cmd="$cmd --cookies '$cookies_file'" || cmd="$cmd --no-cookies"
  cmd="$cmd --download-archive '$archive_file' -o '$output_template' '$url'"
  # Execute download with retry
  if _retry_download "$cmd"; then
    # Generate NFO files for any videos missing them
    local series_base="$DOWNLOAD_DIR/$platform_name"
    find "$series_base" -name "*.info.json" 2>/dev/null | while read -r json_file; do
      local base="''${json_file%.info.json}"
      local nfo_file="$base.nfo"
      if [[ ! -f "$nfo_file" ]]; then
        for ext in mp4 mkv webm; do
          [[ -f "$base.$ext" ]] && _generate_jellyfin_nfo "$base.$ext" && break
        done
      fi
    done
    # Build success message
    local success_msg="$platform_name download completed"
    [[ "$playlist_mode" == true ]] && success_msg="$platform_name playlist download completed"
    # Add filter info if any
    local filter_info=""
    if [[ -n "$min_duration" ]] || [[ -n "$max_duration" ]] || [[ -n "$title_filter" ]] || [[ -n "$days_filter" ]]; then
      filter_info=" (Filters:"
      [[ -n "$min_duration" ]] && filter_info="$filter_info min ''${min_duration}m"
      [[ -n "$max_duration" ]] && filter_info="$filter_info max ''${max_duration}m"
      [[ -n "$title_filter" ]] && filter_info="$filter_info title: \"$title_filter\""
      [[ -n "$days_filter" ]] && filter_info="$filter_info within ''${days_filter} days"
      filter_info="$filter_info)"
    fi
    [[ -n "$max_downloads" ]] && filter_info="''${filter_info} [max ''${max_downloads} videos]"
    success_msg="''${success_msg}''${filter_info}: $url"
    # Print the detailed message (it was previously built but never shown)
    echo "✓ ''${success_msg}"
    local result=0
  else
    # Build and print the failure message
    local fail_msg="$platform_name download failed after $MAX_RETRIES attempts"
    [[ "$playlist_mode" == true ]] && fail_msg="$platform_name playlist download failed after $MAX_RETRIES attempts"
    fail_msg="''${fail_msg}: $url"
    echo "✗ ''${fail_msg}"
    local result=1
  fi
  return $result
}
# Delete the shared download archive so previously-fetched videos
# can be downloaded again by dlv.
dl-clear-archive() {
  local archive="$DOWNLOAD_DIR/.archive.txt"
  if [[ ! -f "$archive" ]]; then
    echo "No archive file found at: $archive"
    return
  fi
  echo "Clearing download archive: $archive"
  rm -f "$archive"
  echo "✓ Archive cleared. Videos can now be re-downloaded."
}
# Alias for backward compatibility
alias dlv-clear-archive='dl-clear-archive'
# Remove downloaded videos older than N days, judged by the upload_date
# recorded in each video's yt-dlp .info.json sidecar file.
# Usage: dlv-remove-older --days <N> <directory>
dlv-remove-older() {
  local days=""
  local root_dir=""
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --days)
        days="$2"
        shift 2
        ;;
      *)
        root_dir="$1"
        shift
        ;;
    esac
  done
  if [[ -z "$days" ]] || [[ -z "$root_dir" ]]; then
    echo "Usage: dlv-remove-older --days <N> <directory>"
    echo "Remove videos older than N days (based on upload_date in .info.json)"
    return 1
  fi
  # Validate before handing $days to date/arithmetic
  if [[ ! "$days" =~ ^[0-9]+$ ]]; then
    echo "Invalid --days value: $days (expected a non-negative integer)"
    return 1
  fi
  root_dir="''${root_dir/#\~/$HOME}"
  if [[ ! -d "$root_dir" ]]; then
    echo "Directory not found: $root_dir"
    return 1
  fi
  # GNU date; upload_date is YYYYMMDD, so plain string comparison
  # orders dates correctly.
  local cutoff_date=$(date -d "$days days ago" +%Y%m%d)
  local removed=0
  echo "Scanning for videos older than $days days (before $cutoff_date)..."
  # Process substitution (not a pipe) keeps $removed in the current shell
  while IFS= read -r -d $'\0' info_file; do
    local upload_date=$(jq -r '.upload_date // empty' "$info_file" 2>/dev/null)
    if [[ -n "$upload_date" ]] && [[ "$upload_date" < "$cutoff_date" ]]; then
      local base="''${info_file%.info.json}"
      echo "Removing: $(basename "$base")"
      # Delete the video plus every sidecar yt-dlp wrote for it
      rm -f "$base".{mp4,webm,mkv,info.json,description,jpg,webp,png}
      ((removed++))
    fi
  done < <(find "$root_dir" -type f -name "*.info.json" -print0)
  echo "Removed $removed video(s)"
}
'';
};
}