#!/bin/bash

# Copyright (C) 2025 Pädagogisches Landesinstitut Rheinland-Pfalz
# Copyright (C) 2025 Daniel Teichmann <daniel.teichmann@das-netzwerkteam.de>
#
# This script is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.

# Fail fast: abort on uncaught command failures and on failures anywhere
# inside a pipeline.
set -eo pipefail

# Unset proxy variables to avoid unintended network interference
# (the rsync download below must reach the upstream mirror directly).
unset http_proxy
unset https_proxy
unset ftp_proxy

COMMON_FILE="/usr/share/debian-edu-router/debian-edu-router.common"
# Load common functions, variables, and logging routines.
# (Expected to provide notice_log/debug_log/warning_log/error_log and
# ensure_dir, which are used throughout this script.)
if [[ -s "${COMMON_FILE}" ]]; then
	source "${COMMON_FILE}"
else
	echo "Could not load common file at ${COMMON_FILE}."
	# NOTE(review): exits with status 0 (success) although the common file
	# is missing, so cron/systemd callers will not notice the failure —
	# confirm this is intentional.
	exit 0
fi

# EXIT trap handler: make sure the "work-in-progress" marker file does not
# survive this script, whether we finish normally or abort early (set -e).
function finish {
	# If we crash, do not keep file there, delete it.
	# NOTE(review): FILTERLISTS_PATH is assigned further below; if the
	# script exits before that assignment the expansion is empty and this
	# removes "/work-in-progress" — confirm that is harmless.
	rm -f "${FILTERLISTS_PATH}/work-in-progress"
}
trap finish EXIT

###############################################################################
# Global Variables
###############################################################################
# Directory holding the final symlinks referenced by the generated Squid ACLs.
LISTS_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/filterlists-toulouse.squid.d"
# Scratch area: holds the two alternating A/B output trees plus state/stamp
# files. Its existence also marks the feature as "enabled" (see main section).
WORK_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/filterlists-toulouse.squid.working_dir"
# Location of the "work-in-progress" marker removed by the EXIT trap.
FILTERLISTS_PATH="/var/lib/debian-edu-router/filterlists.d"
# Upstream mirror of the University of Toulouse blacklist collection.
RSYNC_SOURCE="rsync://ftp.ut-capitole.fr/blacklist"
# Kept as a plain string; intentionally word-split where rsync is invoked.
RSYNC_OPTIONS="-aht --info=NAME,STATS"

# Directories for selected categories
SELECTED_CATEGORIES_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/filterlists-toulouse.squid.selected_categories"
TEMPLATES_DIR="/usr/share/debian-edu-router/templates"

# Directory where the repository is downloaded (do not work directly here)
ORIG_DIR="/var/lib/debian-edu-router/d-e-r-p.c-f/filterlists-toulouse.squid.orig_dir"
# IMPORTANT: The downloaded repository holds the important files under the "dest" subdirectory.
SOURCE_DIR="${ORIG_DIR}/dest"

# Define two alternating intermediate working directories.
# (A/B double-buffering: each run rebuilds the inactive tree from scratch and
# the final symlinks are then flipped over to it — see the symlink stage.)
SQUID_DIR_A="${WORK_DIR}/blacklists.squid_A"
SQUID_DIR_B="${WORK_DIR}/blacklists.squid_B"

# Determine current SQUID directory based on a state file.
# The state file stores the letter used by the PREVIOUS run; this run then
# targets the other tree.
STATE_FILE="${WORK_DIR}/last_squid"
if [[ -f "${STATE_FILE}" ]]; then
	last_squid=$(cat "${STATE_FILE}")
	if [[ "${last_squid}" = "A" ]]; then
		CURRENT_SQUID="B"
	else
		CURRENT_SQUID="A"
	fi
else
	# First run (or state was removed): start with tree A.
	CURRENT_SQUID="A"
fi

if [[ "${CURRENT_SQUID}" = "A" ]]; then
	CURRENT_SQUID_DIR="${SQUID_DIR_A}"
else
	CURRENT_SQUID_DIR="${SQUID_DIR_B}"
fi

# Header template for each final .toulouse file
HEADER_TEMPLATE="${TEMPLATES_DIR}/ProxyHeader.toulouse"

# Path to Squid's snippets.d/.
SNIPPETS_PATH="/etc/debian-edu-router/squid-snippets.d"

###############################################################################
# Function: update_timestamp
# Records the current epoch time in WORK_DIR/last-updated; perform_rsync
# reads this stamp to rate-limit downloads.
# Globals:
#   WORK_DIR
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function update_timestamp() {
	local now
	now="$(date +%s)"
	printf '%s\n' "${now}" > "${WORK_DIR}/last-updated"
}

###############################################################################
# Function: perform_rsync
# Downloads the blacklist repository via rsync if allowed (once every 12 hours),
# unless D_E_R_DEBUG is set (in which case download is forced).
# Globals:
#   WORK_DIR, ORIG_DIR, RSYNC_SOURCE, RSYNC_OPTIONS, D_E_R_DEBUG,
#   FORCE_DOWNLOAD (may be set to "true" when D_E_R_DEBUG overrides the skip)
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function perform_rsync() {
	local stamp_file="${WORK_DIR}/last-updated"
	local last_update current_time date_diff

	if [[ -n "${FORCE_DOWNLOAD}" ]]; then
		notice_log "Forcing rsync download."
	elif [[ -s "${stamp_file}" ]]; then
		last_update="$(cat "${stamp_file}")"
		current_time="$(date +%s)"
		# Age of the last successful download, in whole hours.
		date_diff=$(( (current_time - last_update) / 3600 ))
		if (( date_diff < 12 )); then
			if [[ -n "${D_E_R_DEBUG}" ]]; then
				notice_log "Last rsync run was ${date_diff} hours ago, but \$D_E_R_DEBUG is set, forcing rsync download."
				FORCE_DOWNLOAD="true"
			else
				notice_log "Skipping rsync download (last update ${date_diff} hours ago)"
				return
			fi
		fi
	fi

	notice_log "Starting rsync download from ${RSYNC_SOURCE}"
	ensure_dir "${ORIG_DIR}"
	# RSYNC_OPTIONS is deliberately unquoted so it word-splits into flags.
	rsync ${RSYNC_OPTIONS} "${RSYNC_SOURCE}" "${ORIG_DIR}"
	update_timestamp
	notice_log "Rsync download completed and timestamp updated"
}

###############################################################################
# Function: remove_blacklist_files
# If REMOVE_FILTERLISTS is "true", remove all related blacklist files (except
# templates) and log each deletion using notice_log. With
# PURGE_FILTERLISTS="true" the persistent category selections, the downloaded
# repository and the (then empty) top-level directories are removed as well.
# Globals:
#   LISTS_DIR, WORK_DIR, ORIG_DIR, SELECTED_CATEGORIES_DIR, SNIPPETS_PATH,
#   PURGE_FILTERLISTS
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function remove_blacklist_files() {
	notice_log "Removing all filterlist files related to squid-based domain and URL filtering!"

	local file dir

	# Purge filtering list files that should normally persist removal (as they
	# are re-used if filterlists get disabled / re-enabled).
	if [[ "${PURGE_FILTERLISTS}" = "true" ]]; then
		if [[ -d "${SELECTED_CATEGORIES_DIR}" ]]; then
			# Bugfix: was "${SELECTED_CATEGORIES_DIR,}" — an accidental
			# case-modification expansion; plain expansion was meant.
			notice_log "Removing ${SELECTED_CATEGORIES_DIR}"
			rm -rf "${SELECTED_CATEGORIES_DIR}"
		fi
		if [[ -d "${ORIG_DIR}" ]]; then
			notice_log "Removing ${ORIG_DIR}"
			rm -rf "${ORIG_DIR}"
		fi
	elif [[ -d "${SELECTED_CATEGORIES_DIR}" ]]; then
		notice_log "Selection files for blacklist/whitelist categories won't be deleted. Please purge the content filter package to fully remove them."
	fi

	# Remove filtering lists symlinks from LISTS_DIR
	for file in "${LISTS_DIR}"/ProxyBlacklistSite*.toulouse.d "${LISTS_DIR}"/ProxyWhitelistSite*.toulouse.d; do
		# Unmatched globs remain literal, hence the existence check.
		if [[ -e "${file}" || -h "${file}" ]]; then
			notice_log "Removing ${file}"
			rm -rf "${file}"
		fi
	done

	# Remove SQUID directories (both the A and the B tree)
	for dir in "${WORK_DIR}"/blacklists.squid_*; do
		if [[ -d "${dir}" ]]; then
			notice_log "Removing ${dir}"
			rm -rf "${dir}"
		fi
	done
	if [[ -e "${WORK_DIR}/last_squid" ]]; then
		notice_log "Removing ${WORK_DIR}/last_squid"
		rm -rf "${WORK_DIR}/last_squid"
	fi

	if [[ -e "${WORK_DIR}/last-updated" ]]; then
		notice_log "Removing ${WORK_DIR}/last-updated"
		rm -rf "${WORK_DIR}/last-updated"
	fi

	if [[ "${PURGE_FILTERLISTS}" = "true" ]]; then
		# rmdir (not rm -rf) on purpose: only removes the directories when
		# they are empty, i.e. everything above succeeded.
		if [[ -d "${LISTS_DIR}" ]]; then
			notice_log "Removing directory ${LISTS_DIR}/"
			rmdir "${LISTS_DIR}"
		fi

		if [[ -d "${WORK_DIR}" ]]; then
			notice_log "Removing directory ${WORK_DIR}/"
			rmdir "${WORK_DIR}"
		fi
	fi

	# Remove the Squid ACLs configuration file
	local squid_snippet_conf="${SNIPPETS_PATH}/26_squid_acls.toulouse_d-e-r-p.c-f.conf"
	rm -fv "${squid_snippet_conf}" || true
}

###############################################################################
# Function: create_categories_shellinclude_file
# Creates (or refreshes) a shell-sourceable categories file that declares an
# associative array "categories" with one true/false entry per category found
# in SOURCE_DIR. Entries default to true only when listed in the optional
# (default) categories file.
# Globals:
#   SOURCE_DIR
# Arguments:
#   $1 - Path to the categories file to create
#   $2 - Type of categories ("black" or "white")
#   $3 - Path to the header template for this file
#   $4 - Path to the (default) categories file (optional)
# Returns:
#   None
###############################################################################
function create_categories_shellinclude_file() {
	local categories_file_shellinclude="$1"
	local category_type="$2"
	local header_template="$3"
	local categories_file="$4"
	local temp_file
	# Declaration and assignment split so a mktemp failure is not masked
	# ("local x=$(cmd)" would discard cmd's exit status).
	temp_file="$(mktemp)"

	# Now function-scoped (used to leak as an accidental global).
	local update_mode=false
	if [[ ! -e "${categories_file_shellinclude}" ]]; then
		notice_log "Creating new categories file: ${categories_file_shellinclude}"
	else
		update_mode=true
		notice_log "Updating categories file: ${categories_file_shellinclude}"
	fi

	# Get all current categories from source directory
	local current_categories=()
	local category usage cat_name

	for category in "${SOURCE_DIR}"/*; do
		if [[ -d "${category}" && -f "${category}/usage" ]]; then
			# First non-comment line of the usage file names the list type.
			# "|| true" keeps set -eo pipefail from aborting when the usage
			# file contains only comments (grep -v then exits non-zero).
			usage=$(grep -v '^[[:space:]]*#' "${category}/usage" | head -n 1 | tr -d '[:space:]' || true)
			if [[ "${usage}" = "${category_type}" ]]; then
				cat_name=$(basename "${category}")
				# Validate category name (only allow alphanumeric, underscore, hyphen)
				if [[ "${cat_name}" =~ ^[a-zA-Z0-9_-]+$ ]]; then
					current_categories+=("${cat_name}")
				else
					warning_log "Invalid characters in category name '${cat_name}', skipping..."
				fi
			fi
		fi
	done

	# Read (default) categories if file exists
	local default_categories=()
	local line
	if [[ -n "${categories_file}" && -s "${categories_file}" ]]; then
		while IFS= read -r line; do
			# Skip comments and empty lines
			if [[ "${line}" =~ ^[[:space:]]*# ]] || [[ -z "${line// /}" ]]; then
				continue
			fi

			# Remove any whitespace
			default_categories+=("$(echo "${line}" | tr -d '[:space:]')")
		done < "${categories_file}"

		notice_log "Using default categories from ${categories_file}"
	fi

	# Create the header for the file
	cat "${header_template}" > "${temp_file}"

	# Add the declaration of the associative array
	echo "declare -A categories" >> "${temp_file}"

	# Sort the category names deterministically; mapfile avoids the unquoted
	# word-splitting of the previous "for x in $(... | sort)" form.
	local sorted_categories=()
	if (( ${#current_categories[@]} > 0 )); then
		mapfile -t sorted_categories < <(printf '%s\n' "${current_categories[@]}" | sort)
	fi

	local is_default_on default
	for category in "${sorted_categories[@]}"; do
		is_default_on=false

		# Check if category is in the default list
		for default in "${default_categories[@]}"; do
			if [[ "${default}" == "${category}" ]]; then
				is_default_on=true
				break
			fi
		done

		# is_default_on is the literal string "true" or "false".
		echo "categories[\"${category}\"]=${is_default_on}" >> "${temp_file}"
	done

	# Move the temp file to the final destination (atomic replace).
	mv "${temp_file}" "${categories_file_shellinclude}"
	if [[ "${update_mode}" = "false" ]]; then
		notice_log "Created ${categories_file_shellinclude} with all available categories"
	else
		notice_log "Updated ${categories_file_shellinclude} with all available categories"
	fi
}


###############################################################################
# Function: handle_selected_categories_files
# Checks if the header templates and default category lists exist
# and creates or updates both blacklisted and whitelisted categories files.
# Globals:
#   SELECTED_CATEGORIES_DIR, TEMPLATES_DIR
# Arguments:
#   None
# Returns:
#   None (exits 1 if a required header template is missing)
###############################################################################
function handle_selected_categories_files() {
	ensure_dir "${SELECTED_CATEGORIES_DIR}"
	BLACKLISTED_FILE="${SELECTED_CATEGORIES_DIR}/blacklisted_categories"
	WHITELISTED_FILE="${SELECTED_CATEGORIES_DIR}/whitelisted_categories"
	BLACKLISTED_FILE_SHELLINCLUDE="${SELECTED_CATEGORIES_DIR}/blacklisted_categories.sh"
	WHITELISTED_FILE_SHELLINCLUDE="${SELECTED_CATEGORIES_DIR}/whitelisted_categories.sh"

	# Check if header templates exist
	if [[ ! -f "${TEMPLATES_DIR}/header_blacklisted_categories" ]]; then
		error_log "Header template for blacklisted categories not found at ${TEMPLATES_DIR}/header_blacklisted_categories"
		exit 1
	fi

	if [[ ! -f "${TEMPLATES_DIR}/header_whitelisted_categories" ]]; then
		error_log "Header template for whitelisted categories not found at ${TEMPLATES_DIR}/header_whitelisted_categories"
		exit 1
	fi

	# Check for default category files (paths derived from TEMPLATES_DIR for
	# consistency instead of repeating the absolute template path).
	local default_blacklist="${TEMPLATES_DIR}/blacklists-toulouse.squid.selected_categories/default_selected_blacklisted_categories"
	local default_whitelist="${TEMPLATES_DIR}/blacklists-toulouse.squid.selected_categories/default_selected_whitelisted_categories"

	if [[ ! -f "${default_blacklist}" ]]; then
		warning_log "Default blacklist categories file not found at ${default_blacklist}"
		default_blacklist=""
	fi

	if [[ ! -f "${default_whitelist}" ]]; then
		warning_log "Default whitelist categories file not found at ${default_whitelist}"
		default_whitelist=""
	fi

	# Handle blacklisted categories file
	if [[ ! -f "${BLACKLISTED_FILE}" ]]; then
		if [[ -n "${default_blacklist}" ]]; then
			# Create new file from the shipped default selection.
			cp "${default_blacklist}" "${BLACKLISTED_FILE}"
		else
			# Bugfix: previously 'cp "" ...' ran here and failed under set -e
			# when no default was shipped. Start with an empty selection.
			: > "${BLACKLISTED_FILE}"
		fi
	fi
	create_categories_shellinclude_file "${BLACKLISTED_FILE_SHELLINCLUDE}" "black" "${TEMPLATES_DIR}/header_blacklisted_categories" "${BLACKLISTED_FILE}"

	# Handle whitelisted categories file
	if [[ ! -f "${WHITELISTED_FILE}" ]]; then
		if [[ -n "${default_whitelist}" ]]; then
			# Create new file from the shipped default selection.
			cp "${default_whitelist}" "${WHITELISTED_FILE}"
		else
			# See above: no shipped default means an empty selection.
			: > "${WHITELISTED_FILE}"
		fi
	fi
	create_categories_shellinclude_file "${WHITELISTED_FILE_SHELLINCLUDE}" "white" "${TEMPLATES_DIR}/header_whitelisted_categories" "${WHITELISTED_FILE}"
}

###############################################################################
# Function: is_category_enabled
# Checks if a category is enabled in the appropriate categories file.
# Globals:
#   SELECTED_CATEGORIES_DIR
#   categories (associative array, repopulated by sourcing the file)
# Arguments:
#   $1 - Category name
#   $2 - List type ("black" or "white")
# Returns:
#   0 if category is enabled, 1 otherwise
###############################################################################
function is_category_enabled() {
	local category="$1"
	local list_type="$2"
	local categories_file_shellinclude=""

	if [[ "${list_type}" = "black" ]]; then
		categories_file_shellinclude="${SELECTED_CATEGORIES_DIR}/blacklisted_categories.sh"
	else
		categories_file_shellinclude="${SELECTED_CATEGORIES_DIR}/whitelisted_categories.sh"
	fi

	# Source the categories file to get the associative array
	if [[ ! -f "${categories_file_shellinclude}" ]]; then
		# Bugfix: this log line previously expanded a misspelled variable
		# name ("...shellinlcude") and therefore printed an empty file name.
		debug_log "Categories file ${categories_file_shellinclude} not found"
		return 1
	fi

	# Unset any existing categories array to avoid conflicts
	unset categories

	# Source the file to get the categories array
	source "${categories_file_shellinclude}"

	# Check if the category is enabled (set to true)
	if [[ "${categories["${category}"]}" == "true" ]]; then
		return 0
	else
		return 1
	fi
}

###############################################################################
# Function: process_category
# Reads a category's usage file (under SOURCE_DIR) to decide whether it is a
# "black" or "white" list and — if the category is enabled in the selected
# categories file — copies its domains/urls files into the intermediate
# working tree (CURRENT_SQUID_DIR) with the header template prepended.
# Globals:
#   SOURCE_DIR, CURRENT_SQUID_DIR, HEADER_TEMPLATE, WORK_DIR
# Arguments:
#   $1 - Category name (directory name below SOURCE_DIR)
# Returns:
#   None
###############################################################################
function process_category() {
	local category="${1}"
	local category_path="${SOURCE_DIR}/${category}"
	local usage_file="${category_path}/usage"
	local list_type=""
	local usage_line=""

	if [[ ! -s "${usage_file}" ]]; then
		warning_log "Usage file missing for category '${category}', skipping..."
		return
	fi

	# The first non-comment, non-blank line of the usage file names the type.
	while IFS= read -r usage_line; do
		if [[ "${usage_line}" =~ ^[[:space:]]*# ]] || [[ -z "${usage_line// }" ]]; then
			continue
		fi
		list_type="${usage_line}"
		break
	done < "${usage_file}"

	if [[ "${list_type}" != "black" && "${list_type}" != "white" ]]; then
		error_log "Invalid usage type '${list_type}' for category '${category}', skipping..."
		return
	fi

	# Check if the category is enabled
	if ! is_category_enabled "${category}" "${list_type}"; then
		debug_log "Category $(printf "%-28s" "\"${category}\"") is disabled in \$SELECTED_CATEGORIES_DIR/${list_type}listed_categories, skipping..."
		return
	fi

	debug_log "Processing category '${category}' as a '${list_type}' list"

	local domains_target_dir urls_target_dir
	case "${list_type}" in
		black)
			domains_target_dir="${CURRENT_SQUID_DIR}/ProxyBlacklistSite.toulouse.d"
			urls_target_dir="${CURRENT_SQUID_DIR}/ProxyBlacklistSiteURL.toulouse.d"
			;;
		*)
			domains_target_dir="${CURRENT_SQUID_DIR}/ProxyWhitelistSite.toulouse.d"
			urls_target_dir="${CURRENT_SQUID_DIR}/ProxyWhitelistSiteURL.toulouse.d"
			;;
	esac

	local header_content=""
	if [[ -s "${HEADER_TEMPLATE}" ]]; then
		header_content=$(<"${HEADER_TEMPLATE}")
	else
		warning_log "Header template not found at ${HEADER_TEMPLATE}"
	fi

	# Both list flavours are written the same way: header, then raw content.
	local kind source_file target_dir target_file debug_target_file
	for kind in domains urls; do
		source_file="${category_path}/${kind}"
		if [[ "${kind}" = "domains" ]]; then
			target_dir="${domains_target_dir}"
		else
			target_dir="${urls_target_dir}"
		fi

		if [[ -s "${source_file}" ]]; then
			target_file="${target_dir}/${category}.toulouse"
			# Shorten the path in debug output by substituting $WORK_DIR.
			debug_target_file="${target_file/#${WORK_DIR}/\$WORK_DIR}"
			debug_log "Processing ${kind} file for category '${category}' into '${debug_target_file}'"
			{
				printf '%s\n' "${header_content}"
				cat "${source_file}"
			} > "${target_file}"
		else
			debug_log "No ${kind} file found for category '${category}'"
		fi
	done
}

###############################################################################
# Function: generate_squid_acl_file
# Generates a Squid ACL configuration file by including all relevant files
# from the previously processed black/white lists.
# Globals:
#   SNIPPETS_PATH - Path to the snippets.d directory of Squid.
#   LISTS_DIR - Path to the directory containing filter lists.
# Arguments:
#   None
# Returns:
#   None
# Output:
#   Creates a new configuration file named
#   "26_squid_acls.toulouse_d-e-r-p.c-f.conf" in the $SNIPPETS_PATH directory.
###############################################################################
function generate_squid_acl_file() {
	local squid_snippet_conf="${SNIPPETS_PATH}/26_squid_acls.toulouse_d-e-r-p.c-f.conf"
	local current_date
	# Declaration and assignment split so a failure of date is not masked.
	current_date=$(date '+%A, %B %d, %Y, %I:%M %p %Z')

	# Mapping of list names to Squid ACL types: domain lists become
	# "dstdomain" ACLs, URL lists become "url_regex" ACLs.
	declare -A acl_types=(
	    ["ProxyWhitelistSite"]="dstdomain"
	    ["ProxyBlacklistSite"]="dstdomain"
	    ["ProxyWhitelistSiteURL"]="url_regex"
	    ["ProxyBlacklistSiteURL"]="url_regex"
	)

	# Iterate the ACL names in sorted order: "${!acl_types[@]}" has no
	# guaranteed ordering, which previously made the generated file
	# non-deterministic from run to run (noisy diffs for no content change).
	local sorted_acl_names=()
	mapfile -t sorted_acl_names < <(printf '%s\n' "${!acl_types[@]}" | sort)

	local acl_name acl_type filepath file

	# Start creating the Squid configuration file
	{
		echo "# Debian Edu Router Plugin: Content Filter"
		echo "# DO NOT MODIFY THIS FILE, EXCEPT YOU KNOW WHAT YOU ARE DOING."
		echo "# Automatically generated on ${current_date}"
		echo ""
		echo "# ACLs - University of Toulouse blacklists/whitelists ACLs by"
		echo "#        $0"
		echo ""

		# Generate one ACL entry per non-empty list file.
		for acl_name in "${sorted_acl_names[@]}"; do
			acl_type="${acl_types[$acl_name]}"
			filepath="${LISTS_DIR}/${acl_name}.toulouse.d"

			echo "###"
			if [[ -d "${filepath}" ]]; then
				for file in "${filepath}"/*; do
					# Only include files that actually have content.
					if [[ -s "${file}" ]]; then
						echo "acl ${acl_name} ${acl_type} \"${file}\""
					fi
				done
			else
				echo "# Directory not found: ${filepath}"
			fi
			echo "###"
			echo
		done

	} > "${squid_snippet_conf}"

	# Log the completion of the file generation
	notice_log "Squid ACL configuration file generated at: ${squid_snippet_conf}"
}

###############################################################################
# Function: ensure_symlink
# Makes sure the given path is a symlink pointing at the given source,
# replacing whatever (wrong symlink, regular file, directory) is in the way.
# Globals:
#   None
# Arguments:
#   $1 - final target path (the symlink itself)
#   $2 - source path (what the symlink must point to)
# Returns:
#   None
###############################################################################
function ensure_symlink() {
	local link_path="$1"
	local link_dest="$2"

	if [[ -L "${link_path}" ]]; then
		# Already a symlink — nothing to do if it points correctly.
		if [[ "$(readlink "${link_path}")" == "${link_dest}" ]]; then
			return
		fi
		rm -f "${link_path}"
	elif [[ -e "${link_path}" ]]; then
		# A regular file or directory is in the way — remove it.
		rm -rf "${link_path}"
	fi

	ln -s "${link_dest}" "${link_path}"
}

###############################################################################
# Function: print_usage
# Prints usage information for the script and exits.
# Globals:
#   None
# Arguments:
#   None
# Returns:
#   Does not return — always terminates the script with exit status 0.
#   NOTE(review): callers cannot use this function to signal an error;
#   it exits 0 unconditionally.
###############################################################################
function print_usage() {
	notice_log "Usage: $0 [[-e|--enable-filterlists]|[-r|--remove-filterlists]|[-p|--purge-filterlists]]"
	notice_log "Default behavior (without arguments) is to just download and update filterlists if already present."
	exit 0
}

###############################################################################
# Function: parse_arguments
# Parses command line arguments and sets global variables accordingly.
# Globals:
#   ENABLE_FILTERLISTS, REMOVE_FILTERLISTS, PURGE_FILTERLISTS (all written)
# Arguments:
#   Command line arguments ($@)
# Returns:
#   None; exits 0 via print_usage for -h/--help, exits 1 on unknown arguments.
###############################################################################
function parse_arguments() {
	# Parse command line arguments
	while [[ $# -gt 0 ]]; do
		case "$1" in
			-e|--enable-filterlists)
				ENABLE_FILTERLISTS="true"
				shift
				;;
			-p|--purge-filterlists)
				# Purging implies removing first.
				REMOVE_FILTERLISTS="true"
				PURGE_FILTERLISTS="true"
				shift
				;;
			-r|--remove-filterlists)
				REMOVE_FILTERLISTS="true"
				shift
				;;
			-h|--help)
				print_usage
				;;
			*)
				# Bugfix: this arm used to call print_usage (which exits 0),
				# making the subsequent "exit 1" unreachable — an unknown
				# argument reported success. Print the diagnostics to stderr
				# and fail explicitly instead.
				{
					echo "$0 Unknown argument: $1"
					echo "Usage: $0 [[-e|--enable-filterlists]|[-r|--remove-filterlists]|[-p|--purge-filterlists]]"
				} >&2
				exit 1
				;;
		esac
	done
}


###############################################################################
# Function: migrate_data
# Handle data created by previous version of this script.
# Globals:
#   SELECTED_CATEGORIES_DIR
# Arguments:
#   None
# Returns:
#   None
###############################################################################
function migrate_data() {

	## pre-2.13.0~beta8:

	# Legacy layout detection: the old working directory is checked as a
	# directory (-d), the old filterlists.d entries as symlinks (-h) —
	# the asymmetry is deliberate and mirrors what each path used to be.
	if [[ -d "/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.working" ]] ||
	   [[ -h "/var/lib/debian-edu-router/filterlists.d/ProxyBlacklistSite.toulouse.d" ]] ||
	   [[ -h "/var/lib/debian-edu-router/filterlists.d/ProxyBlacklistSiteURL.toulouse.d" ]] ||
	   [[ -h "/var/lib/debian-edu-router/filterlists.d/ProxyWhitelistSite.toulouse.d" ]] ||
	   [[ -h "/var/lib/debian-edu-router/filterlists.d/ProxyWhitelistSiteURL.toulouse.d" ]]; then
		debug_log "Found filterlists etc. at previously used paths, removing them:"
		# These paths are deleted (not migrated); the current run recreates
		# the equivalent data at the new locations.
		for old_path in "/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.working" \
		                "/var/lib/debian-edu-router/filterlists.d/ProxyBlacklistSite.toulouse.d" \
		                "/var/lib/debian-edu-router/filterlists.d/ProxyBlacklistSiteURL.toulouse.d" \
		                "/var/lib/debian-edu-router/filterlists.d/ProxyWhitelistSite.toulouse.d" \
		                "/var/lib/debian-edu-router/filterlists.d/ProxyWhitelistSiteURL.toulouse.d"; do
			if [[ -d "${old_path}" || -h "${old_path}" ]]; then
				rm -Rf "${old_path}"
				debug_log "  Deleting ${old_path}"
			fi
		done
	fi

	# User-made category selections are preserved: move the old selection
	# directory to the new SELECTED_CATEGORIES_DIR location.
	# NOTE(review): if SELECTED_CATEGORIES_DIR already exists, mv places the
	# old directory INSIDE it rather than replacing it — confirm intended.
	if [[ -d "/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.selected_categories" ]]; then
		debug_log "Found filterlists etc. at previously used paths, migrating them:"
		for old_path in "/var/lib/debian-edu-router/d-e-r-p.c-f/blacklists-toulouse.selected_categories"; do
			if [[ -d "${old_path}" ]]; then
				mv "${old_path}" "${SELECTED_CATEGORIES_DIR}"
				debug_log "  Moving ${old_path} -> ${SELECTED_CATEGORIES_DIR}"
			fi
		done
	fi
}


###############################################################################
# Main Script Execution
###############################################################################
parse_arguments "$@"

# If REMOVE_FILTERLISTS is set to "true", remove all related files and exit.
if [[ "${REMOVE_FILTERLISTS}" = "true" ]]; then
	remove_blacklist_files
	exit 0
fi

# Enabling creates WORK_DIR, which doubles as the "filterlists enabled"
# marker checked just below.
if [[ "${ENABLE_FILTERLISTS}" = "true" ]]; then
	migrate_data
	ensure_dir "${WORK_DIR}"
fi

if [[ ! -d "${WORK_DIR}" ]]; then
	notice_log "refresh-blacklists_squid: Squid-based filterlists are disabled. Skipping. (No WORK_DIR found)"
	exit 0
fi

ensure_dir "${LISTS_DIR}"
ensure_dir "${ORIG_DIR}"

debug_log "File paths in log messages are shortened using following variables, please copy into your terminal:"
debug_log "  - LISTS_DIR=\"${LISTS_DIR}\""
debug_log "  - WORK_DIR=\"${WORK_DIR}\""
debug_log "  - FILTERLISTS_PATH=\"${FILTERLISTS_PATH}\""
debug_log "  - SELECTED_CATEGORIES_DIR=\"$SELECTED_CATEGORIES_DIR\""

perform_rsync
# If the repository's "dest" subdirectory is still missing, the first rsync
# was skipped (rate limit) or incomplete — force an unconditional download.
# NOTE(review): in default (non-POSIX) bash mode the "VAR=value func" prefix
# before a shell *function* is scoped to the call — confirm FORCE_DOWNLOAD
# is not expected to persist afterwards.
if ! [[ -d "${SOURCE_DIR}" ]]; then
	warning_log "Source directory \$SOURCE_DIR does not exist. Forcing resync of blacklists files..."
	FORCE_DOWNLOAD="true" perform_rsync
fi

# Create or update selected categories files
handle_selected_categories_files

# Clear the current SQUID directory to start fresh.
rm -rf "${CURRENT_SQUID_DIR}"
ensure_dir "${CURRENT_SQUID_DIR}"

# Pre-create the four output trees that process_category writes into.
ensure_dir "${CURRENT_SQUID_DIR}/ProxyBlacklistSite.toulouse.d"
ensure_dir "${CURRENT_SQUID_DIR}/ProxyBlacklistSiteURL.toulouse.d"
ensure_dir "${CURRENT_SQUID_DIR}/ProxyWhitelistSite.toulouse.d"
ensure_dir "${CURRENT_SQUID_DIR}/ProxyWhitelistSiteURL.toulouse.d"

notice_log "Starting processing of categories..."
for category in "${SOURCE_DIR}"/*; do
	if [[ -d "${category}" ]]; then
		category_name="$(basename "${category}")"
		process_category "${category_name}"
	fi
done
notice_log "Category processing completed."

# Symlink stage: Create final symlinks instead of copying files.
# Re-pointing the symlinks at the freshly built A/B tree makes the
# switch-over a single fast step per list.
notice_log "Creating final symlinks in ${LISTS_DIR}"

debug_log "Creating symlink ${LISTS_DIR}/ProxyBlacklistSite.toulouse.d to ${WORK_DIR}/blacklists.squid_${CURRENT_SQUID}/ProxyBlacklistSite.toulouse.d"
ensure_symlink "${LISTS_DIR}/ProxyBlacklistSite.toulouse.d" "${CURRENT_SQUID_DIR}/ProxyBlacklistSite.toulouse.d"

debug_log "Creating symlink ${LISTS_DIR}/ProxyBlacklistSiteURL.toulouse.d to ${WORK_DIR}/blacklists.squid_${CURRENT_SQUID}/ProxyBlacklistSiteURL.toulouse.d"
ensure_symlink "${LISTS_DIR}/ProxyBlacklistSiteURL.toulouse.d" "${CURRENT_SQUID_DIR}/ProxyBlacklistSiteURL.toulouse.d"

debug_log "Creating symlink ${LISTS_DIR}/ProxyWhitelistSite.toulouse.d to ${WORK_DIR}/blacklists.squid_${CURRENT_SQUID}/ProxyWhitelistSite.toulouse.d"
ensure_symlink "${LISTS_DIR}/ProxyWhitelistSite.toulouse.d" "${CURRENT_SQUID_DIR}/ProxyWhitelistSite.toulouse.d"

debug_log "Creating symlink ${LISTS_DIR}/ProxyWhitelistSiteURL.toulouse.d to ${WORK_DIR}/blacklists.squid_${CURRENT_SQUID}/ProxyWhitelistSiteURL.toulouse.d"
ensure_symlink "${LISTS_DIR}/ProxyWhitelistSiteURL.toulouse.d" "${CURRENT_SQUID_DIR}/ProxyWhitelistSiteURL.toulouse.d"

notice_log "Symlink creation completed successfully."

# Squid ACL stage: Create ACL file for Squid, which includes all enabled categories.
generate_squid_acl_file

# Update state for next round (records which A/B tree is now live).
echo "${CURRENT_SQUID}" > "${STATE_FILE}"

# Remove WIP file. (Also handled by the EXIT trap; doing it here is benign.)
rm -f "${FILTERLISTS_PATH}/work-in-progress"

# End of script, finish() will be run now.
