Merge branch 'development' into development-v6-merge-development (resolved conflicts)

Signed-off-by: Adam Warner <me@adamwarner.co.uk>
Adam Warner 2024-02-09 19:22:53 +00:00
commit f193edd428
6 changed files with 446 additions and 415 deletions

@@ -63,7 +63,7 @@ gravityOLDfile="${gravityDIR}/gravity_old.db"
# Generate new SQLite3 file from schema template
generate_gravity_database() {
if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" < "${gravityDBschema}"; then
if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" <"${gravityDBschema}"; then
echo -e " ${CROSS} Unable to create ${gravityDBfile}"
return 1
fi
@@ -78,7 +78,7 @@ gravity_build_tree() {
echo -ne " ${INFO} ${str}..."
# The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -117,7 +117,7 @@ gravity_swap_databases() {
# Update timestamp when the gravity table was last updated successfully
update_gravity_timestamp() {
output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -171,19 +171,18 @@ database_table_from_file() {
# Loop over all domains in ${src} file
# Read file line by line
grep -v '^ *#' < "${src}" | while IFS= read -r domain
do
grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
# Only add non-empty lines
if [[ -n "${domain}" ]]; then
if [[ "${table}" == "domain_audit" ]]; then
# domain_audit table format (no enable or modified fields)
echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
echo "${rowid},\"${domain}\",${timestamp}" >>"${tmpFile}"
elif [[ "${table}" == "adlist" ]]; then
# Adlist table format
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >> "${tmpFile}"
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
else
# White-, black-, and regexlist table format
echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >> "${tmpFile}"
echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
fi
rowid+=1
fi
@@ -192,7 +191,7 @@ database_table_from_file() {
# Store domains in database table specified by ${table}
# Use printf as .mode and .import need to be on separate lines
# see https://unix.stackexchange.com/a/445615/83260
output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1 )
output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -202,17 +201,17 @@ database_table_from_file() {
# Move source file to backup directory, create directory if not existing
mkdir -p "${backup_path}"
mv "${src}" "${backup_file}" 2> /dev/null || \
mv "${src}" "${backup_file}" 2>/dev/null ||
echo -e " ${CROSS} Unable to backup ${src} to ${backup_path}"
# Delete tmpFile
rm "${tmpFile}" > /dev/null 2>&1 || \
rm "${tmpFile}" >/dev/null 2>&1 ||
echo -e " ${CROSS} Unable to remove ${tmpFile}"
}
# Check if a column with name ${2} exists in gravity table with name ${1}
gravity_column_exists() {
output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
if [[ "${output}" == "1" ]]; then
return 0 # Bash 0 is success
fi
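# For example, the "number" check in database_adlist_number() below boils down to:
#   SELECT EXISTS(SELECT * FROM pragma_table_info('adlist') WHERE name='number');
# which prints 1 when the column exists and 0 when it does not.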
@@ -224,10 +223,10 @@ gravity_column_exists() {
database_adlist_number() {
# Only try to set number of domains when this field exists in the gravity database
if ! gravity_column_exists "adlist" "number"; then
return;
return
fi
output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -240,10 +239,10 @@ database_adlist_number() {
database_adlist_status() {
# Only try to set the status when this field exists in the gravity database
if ! gravity_column_exists "adlist" "status"; then
return;
return
fi
output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -305,7 +304,7 @@ gravity_CheckDNSResolutionAvailable() {
fi
# Determine if $lookupDomain is resolvable
if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then
if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
# Print confirmation of resolvability if it had previously failed
if [[ -n "${secs:-}" ]]; then
echo -e "${OVER} ${TICK} DNS resolution is now available\\n"
@@ -319,7 +318,7 @@ gravity_CheckDNSResolutionAvailable() {
# If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
# This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
# So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then
if timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
if [[ -n "${secs:-}" ]]; then
echo -e "${OVER} ${TICK} DNS resolution is now available\\n"
fi
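# Hedged example of the situation described above: with an external nameserver
# listed first in /etc/resolv.conf, "getent hosts pi.hole" can fail with NXDOMAIN
# even while local resolution works, so "dig +short pi.hole" is tried as well.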
@@ -330,7 +329,7 @@ gravity_CheckDNSResolutionAvailable() {
fi
# Determine error output message
if pgrep pihole-FTL &> /dev/null; then
if pgrep pihole-FTL &>/dev/null; then
echo -e " ${CROSS} DNS resolution is currently unavailable"
else
echo -e " ${CROSS} DNS service is not running"
@@ -340,7 +339,7 @@ gravity_CheckDNSResolutionAvailable() {
# Ensure DNS server is given time to be resolvable
secs="120"
echo -ne " ${INFO} Time until retry: ${secs}"
until timeout 1 getent hosts "${lookupDomain}" &> /dev/null; do
until timeout 1 getent hosts "${lookupDomain}" &>/dev/null; do
[[ "${secs:-}" -eq 0 ]] && break
echo -ne "${OVER} ${INFO} Time until retry: ${secs}"
: $((secs--))
@@ -361,19 +360,19 @@ gravity_DownloadBlocklists() {
# Retrieve source URLs from gravity database
# We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
mapfile -t sources <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
mapfile -t sourceTypes <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2> /dev/null)"
mapfile -t sources <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"
# Parse source domains from $sources
mapfile -t sourceDomains <<< "$(
mapfile -t sourceDomains <<<"$(
# Logic: Split by folder/port
awk -F '[/:]' '{
# Remove URL protocol & optional username:password@
gsub(/(.*:\/\/|.*:.*@)/, "", $0)
if(length($1)>0){print $1}
else {print "local"}
}' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
}' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
)"
local str="Pulling blocklist source list into range"
@@ -391,8 +390,8 @@ gravity_DownloadBlocklists() {
# Prepare new gravity database
str="Preparing new gravity database"
echo -ne " ${INFO} ${str}..."
rm "${gravityTEMPfile}" > /dev/null 2>&1
output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
rm "${gravityTEMPfile}" >/dev/null 2>&1
output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -412,7 +411,7 @@ gravity_DownloadBlocklists() {
copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
fi
output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
status="$?"
if [[ "${status}" -ne 0 ]]; then
@@ -457,7 +456,7 @@ gravity_DownloadBlocklists() {
# this will remove first @ that is after schema and before domain
# \1 is optional schema, \2 is userinfo
check_url="$( sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<< "$url" )"
check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"
if [[ "${check_url}" =~ ${regex} ]]; then
echo -e " ${CROSS} Invalid Target"
@@ -477,7 +476,7 @@ compareLists() {
if [[ -s "${target}.sha1" ]]; then
if ! sha1sum --check --status --strict "${target}.sha1"; then
# The list changed upstream, we need to update the checksum
sha1sum "${target}" > "${target}.sha1"
sha1sum "${target}" >"${target}.sha1"
echo " ${INFO} List has been updated"
database_adlist_status "${adlistID}" "1"
else
@@ -486,7 +485,7 @@ compareLists() {
fi
else
# No checksum available, create one for comparing on the next run
sha1sum "${target}" > "${target}.sha1"
sha1sum "${target}" >"${target}.sha1"
# We assume here it was changed upstream
database_adlist_status "${adlistID}" "1"
fi
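# Minimal sketch of the checksum round trip used here:
#   sha1sum "${target}" >"${target}.sha1"               # record the current list checksum
#   sha1sum --check --status --strict "${target}.sha1"  # exits 0 only if the list is unchanged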
@@ -515,25 +514,29 @@ gravity_DownloadBlocklistFromUrl() {
echo -ne " ${INFO} ${str} Pending..."
blocked=false
case $(getFTLConfigValue dns.blocking.mode) in
"IP-NODATA-AAAA"|"IP")
# Get IP address of this domain
ip="$(dig "${domain}" +short)"
# Check if this IP matches any IP of the system
if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<< "$(ip a)") -gt 0 ]]; then
blocked=true
fi;;
"NXDOMAIN")
if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
blocked=true
fi;;
"NODATA")
if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
blocked=true
fi;;
"NULL"|*)
if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
blocked=true
fi;;
"IP-NODATA-AAAA" | "IP")
# Get IP address of this domain
ip="$(dig "${domain}" +short)"
# Check if this IP matches any IP of the system
if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<<"$(ip a)") -gt 0 ]]; then
blocked=true
fi
;;
"NXDOMAIN")
if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
blocked=true
fi
;;
"NODATA")
if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
blocked=true
fi
;;
"NULL" | *)
if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
blocked=true
fi
;;
esac
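# Summary of what the dig checks above expect for a blocked test domain:
#   IP / IP-NODATA-AAAA : the answer matches one of this host's own addresses
#   NXDOMAIN            : the reply status is NXDOMAIN
#   NODATA              : status NOERROR but an empty answer section
#   NULL (default)      : the answer contains 0.0.0.0
# e.g. in NULL mode (hypothetical domain):  dig +short blocked.example  ->  0.0.0.0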
if [[ "${blocked}" == true ]]; then
@@ -545,43 +548,53 @@ gravity_DownloadBlocklistFromUrl() {
fi
ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
port=443;
else port=80
port=443
else
port=80
fi
bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}"
echo -ne " ${INFO} ${str} Pending..."
cmd_ext="--resolve $domain:$port:$ip"
fi
# shellcheck disable=SC2086
httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2> /dev/null)
httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2>/dev/null)
case $url in
# Did we "download" a local file?
"file"*)
if [[ -s "${listCurlBuffer}" ]]; then
echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true
else
echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
fi;;
# Did we "download" a remote file?
*)
# Determine "Status:" output based on HTTP response
case "${httpCode}" in
"200") echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true;;
"304") echo -e "${OVER} ${TICK} ${str} No changes detected"; success=true;;
"000") echo -e "${OVER} ${CROSS} ${str} Connection Refused";;
"403") echo -e "${OVER} ${CROSS} ${str} Forbidden";;
"404") echo -e "${OVER} ${CROSS} ${str} Not found";;
"408") echo -e "${OVER} ${CROSS} ${str} Time-out";;
"451") echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons";;
"500") echo -e "${OVER} ${CROSS} ${str} Internal Server Error";;
"504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)";;
"521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)";;
"522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)";;
* ) echo -e "${OVER} ${CROSS} ${str} ${url} (${httpCode})";;
esac;;
# Did we "download" a local file?
"file"*)
if [[ -s "${listCurlBuffer}" ]]; then
echo -e "${OVER} ${TICK} ${str} Retrieval successful"
success=true
else
echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
fi
;;
# Did we "download" a remote file?
*)
# Determine "Status:" output based on HTTP response
case "${httpCode}" in
"200")
echo -e "${OVER} ${TICK} ${str} Retrieval successful"
success=true
;;
"304")
echo -e "${OVER} ${TICK} ${str} No changes detected"
success=true
;;
"000") echo -e "${OVER} ${CROSS} ${str} Connection Refused" ;;
"403") echo -e "${OVER} ${CROSS} ${str} Forbidden" ;;
"404") echo -e "${OVER} ${CROSS} ${str} Not found" ;;
"408") echo -e "${OVER} ${CROSS} ${str} Time-out" ;;
"451") echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons" ;;
"500") echo -e "${OVER} ${CROSS} ${str} Internal Server Error" ;;
"504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)" ;;
"521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)" ;;
"522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)" ;;
*) echo -e "${OVER} ${CROSS} ${str} ${url} (${httpCode})" ;;
esac
;;
esac
local done="false"
@@ -635,7 +648,7 @@ gravity_ParseFileIntoDomains() {
# This helps with that and makes it easier to read
# It also helps with debugging so each stage of the script can be researched more in depth
# 1) Convert all characters to lowercase
tr '[:upper:]' '[:lower:]' < "${src}" > "${destination}"
tr '[:upper:]' '[:lower:]' <"${src}" >"${destination}"
# 2) Remove carriage returns
# 3) Remove lines starting with ! (ABP Comments)
@@ -645,7 +658,7 @@ gravity_ParseFileIntoDomains() {
# 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
# 8) Remove empty lines
sed -i -r \
sed -i -r \
-e 's/\r$//' \
-e 's/\s*!.*//g' \
-e 's/\s*\[.*//g' \
@@ -662,12 +675,12 @@ gravity_Table_Count() {
local table="${1}"
local str="${2}"
local num
num="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
num="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
if [[ "${table}" == "gravity" ]]; then
local unique
unique="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
unique="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
else
echo -e " ${INFO} Number of ${str}: ${num}"
fi
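# The two queries run here, with ${table} = gravity (a domain carried by several
# adlists counts once per adlist in the first, once overall in the second):
#   SELECT COUNT(*) FROM gravity;
#   SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM gravity);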
@@ -687,12 +700,12 @@ gravity_ShowCount() {
# Create "localhost" entries into hosts format
gravity_generateLocalList() {
# Empty $localList if it already exists, otherwise, create it
echo "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
echo "### Do not modify this file, it will be overwritten by pihole -g" >"${localList}"
chmod 644 "${localList}"
# Add additional LAN hosts provided by OpenVPN (if available)
if [[ -f "${VPNList}" ]]; then
awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >>"${localList}"
fi
}
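# e.g. a hypothetical ${VPNList} line "laptop,10.8.0.2" ends up in ${localList} as
#   10.8.0.2	laptop.vpn
# (tab separated, standard hosts-file layout)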
@@ -709,12 +722,12 @@ gravity_Cleanup() {
echo -ne " ${INFO} ${str}..."
# Delete tmp content generated by Gravity
rm ${piholeDir}/pihole.*.txt 2> /dev/null
rm ${piholeDir}/*.tmp 2> /dev/null
rm ${piholeDir}/pihole.*.txt 2>/dev/null
rm ${piholeDir}/*.tmp 2>/dev/null
# listCurlBuffer location
rm "${GRAVITY_TMPDIR}"/*.phgpb 2> /dev/null
rm "${GRAVITY_TMPDIR}"/*.phgpb 2>/dev/null
# invalid_domains location
rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2> /dev/null
rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2>/dev/null
# Ensure this function only runs when gravity_SetDownloadOptions() has completed
if [[ "${gravity_Blackbody:-}" == true ]]; then
@@ -722,7 +735,7 @@ gravity_Cleanup() {
for file in "${piholeDir}"/*."${domainsExtension}"; do
# If list is not in active array, then remove it
if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
rm -f "${file}" 2> /dev/null || \
rm -f "${file}" 2>/dev/null ||
echo -e " ${CROSS} Failed to remove ${file##*/}"
fi
done
@@ -764,17 +777,17 @@ database_recovery() {
fi
else
echo -e "${OVER} ${CROSS} ${str} - errors found:"
while IFS= read -r line ; do echo " - $line"; done <<< "$result"
while IFS= read -r line; do echo " - $line"; done <<<"$result"
fi
else
echo -e "${OVER} ${CROSS} ${str} - errors found:"
while IFS= read -r line ; do echo " - $line"; done <<< "$result"
while IFS= read -r line; do echo " - $line"; done <<<"$result"
fi
str="Trying to recover existing gravity database"
echo -ne " ${INFO} ${str}..."
# We have to remove any possibly existing recovery database or this will fail
rm -f "${gravityDBfile}.recovered" > /dev/null 2>&1
rm -f "${gravityDBfile}.recovered" >/dev/null 2>&1
if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then
echo -e "${OVER} ${TICK} ${str} - success"
mv "${gravityDBfile}" "${gravityDBfile}.old"
@@ -783,7 +796,7 @@ database_recovery() {
echo -ne " ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
else
echo -e "${OVER} ${CROSS} ${str} - the following errors happened:"
while IFS= read -r line ; do echo " - $line"; done <<< "$result"
while IFS= read -r line; do echo " - $line"; done <<<"$result"
echo -e " ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
exit 1
fi
@@ -802,9 +815,10 @@ Options:
repairSelector() {
case "$1" in
"recover") recover_database=true;;
"recreate") recreate_database=true;;
*) echo "Usage: pihole -g -r {recover,recreate}
"recover") recover_database=true ;;
"recreate") recreate_database=true ;;
*)
echo "Usage: pihole -g -r {recover,recreate}
Attempt to repair gravity database
Available options:
@@ -823,15 +837,16 @@ Available options:
and create a new file from scratch. If you still
have the migration backup created when migrating
to Pi-hole v5.0, Pi-hole will import these files."
exit 0;;
exit 0
;;
esac
}
for var in "$@"; do
case "${var}" in
"-f" | "--force" ) forceDelete=true;;
"-r" | "--repair" ) repairSelector "$3";;
"-h" | "--help" ) helpFunc;;
"-f" | "--force") forceDelete=true ;;
"-r" | "--repair") repairSelector "$3" ;;
"-h" | "--help") helpFunc ;;
esac
done
@@ -847,9 +862,9 @@ if [[ "${recreate_database:-}" == true ]]; then
str="Recreating gravity database from migration backup"
echo -ne "${INFO} ${str}..."
rm "${gravityDBfile}"
pushd "${piholeDir}" > /dev/null || exit
pushd "${piholeDir}" >/dev/null || exit
cp migration_backup/* .
popd > /dev/null || exit
popd >/dev/null || exit
echo -e "${OVER} ${TICK} ${str}"
fi
@@ -867,7 +882,7 @@ if [[ "${forceDelete:-}" == true ]]; then
str="Deleting existing list cache"
echo -ne "${INFO} ${str}..."
rm /etc/pihole/list.* 2> /dev/null || true
rm /etc/pihole/list.* 2>/dev/null || true
echo -e "${OVER} ${TICK} ${str}"
fi