#!/usr/bin/env bash
# shellcheck disable=SC1090

# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Usage: "pihole -g"
# Compiles a list of ad-serving domains by downloading them from multiple sources
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# Force a byte-wise, locale-independent behavior of sort/grep/etc.
export LC_ALL=C

PI_HOLE_SCRIPT_DIR="/opt/pihole"
# Source utils.sh for getFTLConfigValue
utilsfile="${PI_HOLE_SCRIPT_DIR}/utils.sh"
# shellcheck disable=SC1090
. "${utilsfile}"

# Source the color table used for INFO/TICK/CROSS/OVER markers
coltable="${PI_HOLE_SCRIPT_DIR}/COL_TABLE"
# shellcheck disable=SC1090
. "${coltable}"

# Source the gravity database migration helpers (provides upgrade_gravityDB)
# shellcheck disable=SC1091
. "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"

basename="pihole"
PIHOLE_COMMAND="/usr/local/bin/${basename}"

piholeDir="/etc/${basename}"

# Legacy (pre v5.0) list file locations
whitelistFile="${piholeDir}/whitelist.txt"
blacklistFile="${piholeDir}/blacklist.txt"
regexFile="${piholeDir}/regex.list"
adListFile="${piholeDir}/adlists.list"

piholeGitDir="/etc/.pihole"
# Gravity database locations are read from FTL's configuration
GRAVITYDB=$(getFTLConfigValue files.gravity)
GRAVITY_TMPDIR=$(getFTLConfigValue files.gravity_tmp)
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"

domainsExtension="domains"
curl_connect_timeout=10

# Check gravity temp directory: it must exist and be writeable,
# otherwise fall back to /tmp
if [ ! -d "${GRAVITY_TMPDIR}" ] || [ ! -w "${GRAVITY_TMPDIR}" ]; then
    echo -e "  ${COL_LIGHT_RED}Gravity temporary directory does not exist or is not a writeable directory, falling back to /tmp. ${COL_NC}"
    GRAVITY_TMPDIR="/tmp"
fi

# Set this only after sourcing pihole-FTL.conf as the gravity database path may
# have changed
gravityDBfile="${GRAVITYDB}"
gravityDBfile_default="/etc/pihole/gravity.db"
gravityTEMPfile="${GRAVITYDB}_temp"
gravityDIR="$(dirname -- "${gravityDBfile}")"
gravityOLDfile="${gravityDIR}/gravity_old.db"
fix_owner_permissions() {
    # Fix ownership and permissions for the specified file
    # User and group are set to pihole:pihole
    # Permissions are set to 664 (rw-rw-r--)
    chown pihole:pihole "${1}"
    chmod 664 "${1}"

    # Ensure the containing directory is group writable
    chmod g+w "$(dirname -- "${1}")"
}
# Generate new SQLite3 file from schema template.
# Reads globals: gravityDBfile (target path), gravityDBschema (SQL template).
# Returns 1 when the database could not be created.
generate_gravity_database() {
    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" <"${gravityDBschema}"; then
        echo -e "   ${CROSS} Unable to create ${gravityDBfile}"
        return 1
    fi

    # Make the new database readable/writeable by the pihole user
    fix_owner_permissions "${gravityDBfile}"
}
# Build gravity tree: create the lookup index on the gravity table of the
# temporary database. Returns 1 when index creation fails.
gravity_build_tree() {
    local str
    str="Building tree"
    echo -ne "  ${INFO} ${str}..."

    # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"
}
# Copy data from old to new database file and swap them.
# The old database is kept as gravity_old.db only when there is enough free
# disk space; otherwise it is removed before the swap.
gravity_swap_databases() {
    str="Swapping databases"
    echo -ne "  ${INFO} ${str}..."

    # Swap databases and remove or conditionally rename old database
    # Number of available blocks on disk
    availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
    # Number of blocks, used by gravity.db
    gravityBlocks=$(stat --format "%b" "${gravityDBfile}")
    # Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
    # Better be safe than sorry...
    oldAvail=false
    if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${gravityDBfile}" ]; then
        oldAvail=true
        mv "${gravityDBfile}" "${gravityOLDfile}"
    else
        rm "${gravityDBfile}"
    fi
    mv "${gravityTEMPfile}" "${gravityDBfile}"
    echo -e "${OVER}  ${TICK} ${str}"

    if $oldAvail; then
        echo -e "  ${TICK} The old database remains available"
    fi
}
# Update timestamp when the gravity table was last updated successfully.
# Writes the current Unix epoch into the info table of the temporary database.
# Returns 0 on success, 1 on failure.
update_gravity_timestamp() {
    output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    return 0
}
# Import domains from file and store them in the specified database table.
# Arguments: $1 - table name ("adlist", "whitelist", "blacklist" or "regex")
#            $2 - path of the legacy list file to import
# The source file is moved to a backup directory after a successful import.
database_table_from_file() {
    # Define locals
    local table src backup_path backup_file tmpFile list_type
    table="${1}"
    src="${2}"
    backup_path="${piholeDir}/migration_backup"
    backup_file="${backup_path}/$(basename "${2}")"

    # Create a temporary file. We don't use '--suffix' here because not all
    # implementations of mktemp support it, e.g. on Alpine
    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}")"
    mv "${tmpFile}" "${tmpFile%.*}.gravity"
    tmpFile="${tmpFile%.*}.gravity"

    local timestamp
    timestamp="$(date --utc +'%s')"

    local rowid
    declare -i rowid
    rowid=1

    # Special handling for domains to be imported into the common domainlist table
    if [[ "${table}" == "whitelist" ]]; then
        list_type="0"
        table="domainlist"
    elif [[ "${table}" == "blacklist" ]]; then
        list_type="1"
        table="domainlist"
    elif [[ "${table}" == "regex" ]]; then
        list_type="3"
        table="domainlist"
    fi

    # Get MAX(id) from domainlist when INSERTing into this table
    if [[ "${table}" == "domainlist" ]]; then
        rowid="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
        if [[ -z "$rowid" ]]; then
            rowid=0
        fi
        rowid+=1
    fi

    # Loop over all domains in ${src} file
    # Read file line by line, skipping comment lines
    grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
        # Only add non-empty lines
        if [[ -n "${domain}" ]]; then
            if [[ "${table}" == "adlist" ]]; then
                # Adlist table format
                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
            else
                # White-, black-, and regexlist table format
                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
            fi
            rowid+=1
        fi
    done

    # Store domains in database table specified by ${table}
    # Use printf as .mode and .import need to be on separate lines
    # see https://unix.stackexchange.com/a/445615/83260
    output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to fill table ${table}${list_type} in database ${gravityDBfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi

    # Move source file to backup directory, create directory if not existing
    mkdir -p "${backup_path}"
    mv "${src}" "${backup_file}" 2>/dev/null ||
        echo -e "  ${CROSS} Unable to backup ${src} to ${backup_path}"

    # Delete tmpFile
    rm "${tmpFile}" >/dev/null 2>&1 ||
        echo -e "  ${CROSS} Unable to remove ${tmpFile}"
}
# Check if a column with name ${2} exists in gravity table with name ${1}
gravity_column_exists() {
    output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    if [[ "${output}" == "1" ]]; then
        return 0 # Bash 0 is success
    fi

    return 1 # Bash non-0 is failure
}
# Update number of domain on this list. We store this in the "old" database as all values in the new database will later be overwritten
# Arguments: $1 - adlist ID, $2 - number of domains, $3 - number of invalid domains
database_adlist_number() {
    # Only try to set number of domains when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "number"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update number of domains in adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Update status of this list. We store this in the "old" database as all values in the new database will later be overwritten
# Arguments: $1 - adlist ID, $2 - status code
database_adlist_status() {
    # Only try to set the status when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "status"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update status of adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Migrate pre-v5.0 list files to database-based Pi-hole versions.
# Creates the gravity database if missing, imports any legacy list files,
# and runs pending schema upgrades. Returns 1 if database creation fails.
migrate_to_database() {
    # Create database file only if not present
    if [ ! -e "${gravityDBfile}" ]; then
        # Create new database file - note that this will be created in version 1
        echo -e "  ${INFO} Creating new gravity database"
        if ! generate_gravity_database; then
            echo -e "   ${CROSS} Error creating new gravity database. Please contact support."
            return 1
        fi

        # Check if gravity database needs to be updated
        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"

        # Migrate list files to new database
        if [ -e "${adListFile}" ]; then
            # Store adlist domains in database
            echo -e "  ${INFO} Migrating content of ${adListFile} into new database"
            database_table_from_file "adlist" "${adListFile}"
        fi
        if [ -e "${blacklistFile}" ]; then
            # Store blacklisted domains in database
            echo -e "  ${INFO} Migrating content of ${blacklistFile} into new database"
            database_table_from_file "blacklist" "${blacklistFile}"
        fi
        if [ -e "${whitelistFile}" ]; then
            # Store whitelisted domains in database
            echo -e "  ${INFO} Migrating content of ${whitelistFile} into new database"
            database_table_from_file "whitelist" "${whitelistFile}"
        fi
        if [ -e "${regexFile}" ]; then
            # Store regex domains in database
            # Important note: We need to add the domains to the "regex" table
            # as it will only later be renamed to "regex_blacklist"!
            echo -e "  ${INFO} Migrating content of ${regexFile} into new database"
            database_table_from_file "regex" "${regexFile}"
        fi
    fi

    # Check if gravity database needs to be updated
    upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
}
# Determine if DNS resolution is available before proceeding.
# Tries a single lookup first; when that fails, polls once per second until
# resolution works (appending a progress dot for each second waited).
gravity_CheckDNSResolutionAvailable() {
    local lookupDomain="raw.githubusercontent.com"

    # Determine if $lookupDomain is resolvable
    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
        echo -e "${OVER}  ${TICK} DNS resolution is available\\n"
        return 0
    else
        echo -e "  ${CROSS} DNS resolution is currently unavailable"
    fi

    str="Waiting until DNS resolution is available..."
    echo -ne "  ${INFO} ${str}"
    until getent hosts github.com &>/dev/null; do
        # Append one dot for each second waiting
        str="${str}."
        echo -ne "  ${OVER}  ${INFO} ${str}"
        sleep 1
    done

    # If we reach this point, DNS resolution is available
    echo -e "${OVER}  ${TICK} DNS resolution is available"
}
# Retrieve blocklist URLs and parse domains from adlist.list.
# Prepares the temporary gravity database, copies persistent data into it and
# downloads every enabled adlist via gravity_DownloadBlocklistFromUrl.
gravity_DownloadBlocklists() {
    echo -e "  ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."

    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        echo -e "  ${INFO} Storing gravity database in ${COL_BOLD}${gravityDBfile}${COL_NC}"
    fi

    # Retrieve source URLs from gravity database
    # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
    mapfile -t sources <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"

    # Parse source domains from $sources
    mapfile -t sourceDomains <<<"$(
        # Logic: Split by folder/port
        awk -F '[/:]' '{
            # Remove URL protocol & optional username:password@
            gsub(/(.*:\/\/|.*:.*@)/, "", $0)
            if (length($1) > 0) { print $1 }
            else { print "local" }
        }' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
    )"

    local str="Pulling blocklist source list into range"
    echo -e "${OVER}  ${TICK} ${str}"

    if [[ -z "${sources[*]}" ]] || [[ -z "${sourceDomains[*]}" ]]; then
        echo -e "  ${INFO} No source list found, or it is empty"
        echo ""
        unset sources
    fi

    local url domain str target compression adlist_type
    echo ""

    # Prepare new gravity database
    str="Preparing new gravity database"
    echo -ne "  ${INFO} ${str}..."
    rm "${gravityTEMPfile}" >/dev/null 2>&1
    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to create new database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    else
        echo -e "${OVER}  ${TICK} ${str}"
    fi

    str="Creating new gravity databases"
    echo -ne "  ${INFO} ${str}..."

    # Gravity copying SQL script
    copyGravity="$(cat "${gravityDBcopy}")"
    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        # Replace default gravity script location by custom location
        copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
    fi

    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"

    # Use compression to reduce the amount of data that is transferred
    # between the Pi-hole and the ad list provider. Use this feature
    # only if it is supported by the locally available version of curl
    if curl -V | grep -q "Features:.* libz"; then
        compression="--compressed"
        echo -e "  ${INFO} Using libz compression\n"
    else
        compression=""
        echo -e "  ${INFO} Libz compression not available\n"
    fi

    # Loop through $sources and download each one
    for ((i = 0; i < "${#sources[@]}"; i++)); do
        url="${sources[$i]}"
        domain="${sourceDomains[$i]}"
        id="${sourceIDs[$i]}"
        if [[ "${sourceTypes[$i]}" -eq "0" ]]; then
            # Gravity list
            str="blocklist"
            adlist_type="gravity"
        else
            # AntiGravity list
            str="allowlist"
            adlist_type="antigravity"
        fi

        # Save the file as list.#.domain
        saveLocation="${piholeDir}/list.${id}.${domain}.${domainsExtension}"
        activeDomains[$i]="${saveLocation}"

        # Check if we can write to the save location file
        if ! touch "${saveLocation}" 2>/dev/null; then
            echo -e "  ${CROSS} Unable to write to ${saveLocation}"
            echo "      Please run pihole -g as root"
            echo ""
            continue
        fi

        # Chown the file to the pihole user
        # This is necessary for the FTL to be able to update the file
        # when gravity is run from the web interface
        fix_owner_permissions "${saveLocation}"

        echo -e "  ${INFO} Target: ${url}"
        local regex check_url
        # Check for characters NOT allowed in URLs
        regex="[^a-zA-Z0-9:/?&%=~._()-;]"

        # this will remove first @ that is after schema and before domain
        # \1 is optional schema, \2 is userinfo
        check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"

        if [[ "${check_url}" =~ ${regex} ]]; then
            echo -e "  ${CROSS} Invalid Target"
        else
            timeit gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}" "${adlist_type}" "${domain}"
        fi
        echo ""
    done

    gravity_Blackbody=true
}
# Compare a downloaded list against the checksum of its previous version and
# record the resulting adlist status in the database.
# Arguments: $1 - adlist ID, $2 - path of the downloaded list file
compareLists() {
    local adlistID="${1}" target="${2}"

    # Verify checksum when an older checksum exists
    if [[ -s "${target}.sha1" ]]; then
        if ! sha1sum --check --status --strict "${target}.sha1"; then
            # The list changed upstream, we need to update the checksum
            sha1sum "${target}" >"${target}.sha1"
            echo "  ${INFO} List has been updated"
            database_adlist_status "${adlistID}" "1"
        else
            echo "  ${INFO} List stayed unchanged"
            database_adlist_status "${adlistID}" "2"
        fi
    else
        # No checksum available, create one for comparing on the next run
        sha1sum "${target}" >"${target}.sha1"
        # We assume here it was changed upstream
        database_adlist_status "${adlistID}" "1"
    fi
}
2020-02-21 18:56:48 +00:00
2017-09-14 10:23:49 +00:00
# Download specified URL and perform checks on HTTP status and file content
2018-01-14 20:38:39 +00:00
gravity_DownloadBlocklistFromUrl( ) {
2023-11-22 19:56:23 +00:00
local url = " ${ 1 } " adlistID = " ${ 2 } " saveLocation = " ${ 3 } " target = " ${ 4 } " compression = " ${ 5 } " gravity_type = " ${ 6 } " domain = " ${ 7 } "
2023-04-07 10:36:50 +00:00
local heisenbergCompensator = "" listCurlBuffer str httpCode success = "" ip cmd_ext
2024-03-28 14:41:45 +00:00
local file_path permissions ip_addr port blocked = false download = true
2017-07-27 02:34:35 +00:00
2017-09-15 12:39:17 +00:00
# Create temp file to store content on disk instead of RAM
2023-05-10 04:52:51 +00:00
# We don't use '--suffix' here because not all implementations of mktemp support it, e.g. on Alpine
listCurlBuffer = " $( mktemp -p " ${ GRAVITY_TMPDIR } " ) "
mv " ${ listCurlBuffer } " " ${ listCurlBuffer %.* } .phgpb "
2023-10-23 19:36:18 +00:00
listCurlBuffer = " ${ listCurlBuffer %.* } .phgpb "
2017-08-28 01:36:02 +00:00
2017-09-15 12:39:17 +00:00
# Determine if $saveLocation has read permission
2017-11-21 17:35:58 +00:00
if [ [ -r " ${ saveLocation } " && $url != "file" * ] ] ; then
2017-09-15 12:39:17 +00:00
# Have curl determine if a remote file has been modified since last retrieval
# Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls)
2017-11-21 17:35:58 +00:00
# Note: Don't do this for local files, always download them
2017-07-27 02:34:35 +00:00
heisenbergCompensator = " -z ${ saveLocation } "
fi
str = "Status:"
echo -ne " ${ INFO } ${ str } Pending... "
2018-08-11 23:15:42 +00:00
blocked = false
2023-02-11 12:34:12 +00:00
case $( getFTLConfigValue dns.blocking.mode) in
2024-02-09 19:22:53 +00:00
"IP-NODATA-AAAA" | "IP" )
# Get IP address of this domain
ip = " $( dig " ${ domain } " +short) "
# Check if this IP matches any IP of the system
if [ [ -n " ${ ip } " && $( grep -Ec " inet(|6) ${ ip } " <<< " $( ip a) " ) -gt 0 ] ] ; then
blocked = true
fi
; ;
"NXDOMAIN" )
if [ [ $( dig " ${ domain } " | grep "NXDOMAIN" -c) -ge 1 ] ] ; then
blocked = true
fi
; ;
"NODATA" )
if [ [ $( dig " ${ domain } " | grep "NOERROR" -c) -ge 1 ] ] && [ [ -z $( dig +short " ${ domain } " ) ] ] ; then
blocked = true
fi
; ;
"NULL" | *)
if [ [ $( dig " ${ domain } " +short | grep "0.0.0.0" -c) -ge 1 ] ] ; then
blocked = true
fi
; ;
2021-11-25 06:41:40 +00:00
esac
2023-11-22 20:04:46 +00:00
2023-11-22 20:10:22 +00:00
# Check if this domain is blocked by Pi-hole but only if the domain is not a
# local file or empty
if [ [ $url != "file" * ] ] && [ [ -n " ${ domain } " ] ] ; then
case $( getFTLConfigValue dns.blocking.mode) in
2024-05-08 20:25:26 +00:00
"IP-NODATA-AAAA" | "IP" )
2021-11-25 06:41:40 +00:00
# Get IP address of this domain
ip = " $( dig " ${ domain } " +short) "
# Check if this IP matches any IP of the system
2024-05-08 20:25:26 +00:00
if [ [ -n " ${ ip } " && $( grep -Ec " inet(|6) ${ ip } " <<< " $( ip a) " ) -gt 0 ] ] ; then
2021-11-25 06:41:40 +00:00
blocked = true
2024-05-08 20:25:26 +00:00
fi
; ;
2018-08-11 23:15:42 +00:00
"NXDOMAIN" )
2021-11-25 06:41:40 +00:00
if [ [ $( dig " ${ domain } " | grep "NXDOMAIN" -c) -ge 1 ] ] ; then
blocked = true
2024-05-08 20:25:26 +00:00
fi
; ;
2021-12-03 08:17:19 +00:00
"NODATA" )
2021-12-21 21:01:34 +00:00
if [ [ $( dig " ${ domain } " | grep "NOERROR" -c) -ge 1 ] ] && [ [ -z $( dig +short " ${ domain } " ) ] ] ; then
2024-05-08 20:25:26 +00:00
blocked = true
fi
; ;
"NULL" | *)
2021-11-25 06:41:40 +00:00
if [ [ $( dig " ${ domain } " +short | grep "0.0.0.0" -c) -ge 1 ] ] ; then
blocked = true
2024-05-08 20:25:26 +00:00
fi
; ;
2023-11-22 20:10:22 +00:00
esac
if [ [ " ${ blocked } " = = true ] ] ; then
2024-05-08 20:25:26 +00:00
# Get first defined upstream server
local upstream
upstream = " $( getFTLConfigValue dns.upstreams) "
# Isolate first upstream server from a string like
# [ 1.2.3.4#1234, 5.6.7.8#5678, ... ]
upstream = " ${ upstream %%,* } "
upstream = " ${ upstream ##*[ } "
upstream = " ${ upstream %%]* } "
# Trim leading and trailing spaces and tabs
upstream = " ${ upstream # " ${ upstream %%[![ : space : ]]* } " } "
upstream = " ${ upstream % " ${ upstream ##*[![ : space : ]] } " } "
# Get IP address and port of this upstream server
local ip_addr port
printf -v ip_addr "%s" " ${ upstream %#* } "
if [ [ ${ upstream } != *"#" * ] ] ; then
port = 53
else
printf -v port "%s" " ${ upstream #*# } "
fi
ip = $( dig " @ ${ ip_addr } " -p " ${ port } " +short " ${ domain } " | tail -1)
if [ [ $( echo " ${ url } " | awk -F '://' '{print $1}' ) = "https" ] ] ; then
port = 443
else
port = 80
fi
echo -e " ${ OVER } ${ CROSS } ${ str } ${ domain } is blocked by one of your lists. Using DNS server ${ upstream } instead "
echo -ne " ${ INFO } ${ str } Pending... "
cmd_ext = " --resolve $domain : $port : $ip "
2018-08-11 12:33:33 +00:00
fi
fi
2019-08-10 11:33:30 +00:00
2024-03-28 14:41:45 +00:00
# If we are going to "download" a local file, we first check if the target
# file has a+r permission. We explicitly check for all+read because we want
# to make sure that the file is readable by everyone and not just the user
# running the script.
if [ [ $url = = "file://" * ] ] ; then
# Get the file path
2024-03-04 18:38:13 +00:00
file_path = $( echo " $url " | cut -d'/' -f3-)
2024-03-27 21:10:12 +00:00
# Check if the file exists and is a regular file (i.e. not a socket, fifo, tty, block). Might still be a symlink.
if [ [ ! -f $file_path ] ] ; then
2024-03-28 14:41:45 +00:00
# Output that the file does not exist
echo -e " ${ OVER } ${ CROSS } ${ file_path } does not exist "
download = false
else
2024-03-27 21:10:12 +00:00
# Check if the file or a file referenced by the symlink has a+r permissions
permissions = $( stat -L -c "%a" " $file_path " )
2024-03-28 14:41:45 +00:00
if [ [ $permissions = = *4 || $permissions = = *5 || $permissions = = *6 || $permissions = = *7 ] ] ; then
# Output that we are using the local file
echo -e " ${ OVER } ${ INFO } Using local file ${ file_path } "
else
# Output that the file does not have the correct permissions
2024-03-04 18:38:13 +00:00
echo -e " ${ OVER } ${ CROSS } Cannot read file (file needs to have a+r permission) "
2024-03-28 14:41:45 +00:00
download = false
fi
fi
fi
2024-04-30 13:47:57 +00:00
# Check for allowed protocols
if [ [ $url != "http" * && $url != "https" * && $url != "file" * && $url != "ftp" * && $url != "ftps" * && $url != "sftp" * ] ] ; then
2024-10-24 05:05:13 +00:00
echo -e " ${ OVER } ${ CROSS } ${ str } Invalid protocol specified. Ignoring list. "
2024-10-24 17:03:02 +00:00
echo -e "Ensure your URL starts with a valid protocol like http:// , https:// or file:// ."
2024-04-30 13:47:57 +00:00
download = false
fi
2024-03-28 14:41:45 +00:00
if [ [ " ${ download } " = = true ] ] ; then
# shellcheck disable=SC2086
2024-05-08 20:25:26 +00:00
httpCode = $( curl --connect-timeout ${ curl_connect_timeout } -s -L ${ compression } ${ cmd_ext } ${ heisenbergCompensator } -w "%{http_code}" " ${ url } " -o " ${ listCurlBuffer } " 2>/dev/null)
2024-03-28 14:41:45 +00:00
fi
2017-07-27 02:34:35 +00:00
2017-11-21 17:30:40 +00:00
case $url in
2024-02-09 19:22:53 +00:00
# Did we "download" a local file?
"file" *)
if [ [ -s " ${ listCurlBuffer } " ] ] ; then
echo -e " ${ OVER } ${ TICK } ${ str } Retrieval successful "
success = true
else
2024-03-28 14:41:45 +00:00
echo -e " ${ OVER } ${ CROSS } ${ str } Retrieval failed / empty list "
2024-02-09 19:22:53 +00:00
fi
; ;
# Did we "download" a remote file?
*)
# Determine "Status:" output based on HTTP response
case " ${ httpCode } " in
"200" )
echo -e " ${ OVER } ${ TICK } ${ str } Retrieval successful "
success = true
; ;
"304" )
echo -e " ${ OVER } ${ TICK } ${ str } No changes detected "
success = true
; ;
"000" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Refused " ; ;
"403" ) echo -e " ${ OVER } ${ CROSS } ${ str } Forbidden " ; ;
"404" ) echo -e " ${ OVER } ${ CROSS } ${ str } Not found " ; ;
"408" ) echo -e " ${ OVER } ${ CROSS } ${ str } Time-out " ; ;
"451" ) echo -e " ${ OVER } ${ CROSS } ${ str } Unavailable For Legal Reasons " ; ;
"500" ) echo -e " ${ OVER } ${ CROSS } ${ str } Internal Server Error " ; ;
"504" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Timed Out (Gateway) " ; ;
"521" ) echo -e " ${ OVER } ${ CROSS } ${ str } Web Server Is Down (Cloudflare) " ; ;
"522" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Timed Out (Cloudflare) " ; ;
*) echo -e " ${ OVER } ${ CROSS } ${ str } ${ url } ( ${ httpCode } ) " ; ;
esac
; ;
2017-07-27 02:34:35 +00:00
esac
2020-12-27 18:14:52 +00:00
local done = "false"
2017-07-27 02:34:35 +00:00
# Determine if the blocklist was downloaded and saved correctly
2017-09-15 12:39:17 +00:00
if [ [ " ${ success } " = = true ] ] ; then
2017-07-27 02:34:35 +00:00
if [ [ " ${ httpCode } " = = "304" ] ] ; then
2020-01-24 17:39:13 +00:00
# Add domains to database table file
2023-11-22 20:06:09 +00:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-27 18:14:52 +00:00
database_adlist_status " ${ adlistID } " "2"
done = "true"
2023-03-24 23:15:49 +00:00
# Check if $listCurlBuffer is a non-zero length file
elif [ [ -s " ${ listCurlBuffer } " ] ] ; then
2017-07-27 02:34:35 +00:00
# Determine if blocklist is non-standard and parse as appropriate
2023-03-24 23:15:49 +00:00
gravity_ParseFileIntoDomains " ${ listCurlBuffer } " " ${ saveLocation } "
# Remove curl buffer file after its use
rm " ${ listCurlBuffer } "
2020-02-21 18:56:48 +00:00
# Add domains to database table file
2023-11-22 20:06:09 +00:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-29 08:54:25 +00:00
# Compare lists, are they identical?
compareLists " ${ adlistID } " " ${ saveLocation } "
2020-12-27 18:14:52 +00:00
done = "true"
2017-07-27 02:34:35 +00:00
else
2023-03-24 23:15:49 +00:00
# Fall back to previously cached list if $listCurlBuffer is empty
2020-12-27 18:14:52 +00:00
echo -e " ${ INFO } Received empty file "
2017-07-27 02:34:35 +00:00
fi
2020-12-27 18:14:52 +00:00
fi
# Do we need to fall back to a cached list (if available)?
if [ [ " ${ done } " != "true" ] ] ; then
2017-09-15 12:39:17 +00:00
# Determine if cached list has read permission
2017-07-27 02:34:35 +00:00
if [ [ -r " ${ saveLocation } " ] ] ; then
echo -e " ${ CROSS } List download failed: ${ COL_LIGHT_GREEN } using previously cached list ${ COL_NC } "
2020-02-21 18:56:48 +00:00
# Add domains to database table file
2023-11-22 20:06:09 +00:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-27 18:14:52 +00:00
database_adlist_status " ${ adlistID } " "3"
2017-07-27 02:34:35 +00:00
else
echo -e " ${ CROSS } List download failed: ${ COL_LIGHT_RED } no cached list available ${ COL_NC } "
2020-12-29 19:35:48 +00:00
# Manually reset these two numbers because we do not call parseList here
2023-05-15 17:25:56 +00:00
database_adlist_number " ${ adlistID } " 0 0
2020-12-27 18:14:52 +00:00
database_adlist_status " ${ adlistID } " "4"
2017-07-27 02:34:35 +00:00
fi
fi
}
2017-09-15 12:39:17 +00:00
# Parse a downloaded source list into plain domains-only format.
# Arguments:
#   $1 - path of the raw downloaded list (read only)
#   $2 - destination path for the parsed, domains-only output
# Outputs: writes the parsed list to $2 and makes it world-readable (644).
gravity_ParseFileIntoDomains() {
  local src="${1}" destination="${2}"

  # Remove comments and print only the domain name
  # Most of the lists downloaded are already in hosts file format but the
  # spacing/formatting is not contiguous. This helps with that and makes it
  # easier to read. It also helps with debugging so each stage of the script
  # can be researched more in depth.

  # 1) Convert all characters to lowercase
  tr '[:upper:]' '[:lower:]' <"${src}" >"${destination}"

  # 2) Remove carriage returns
  # 3) Remove lines starting with ! (ABP Comments)
  # 4) Remove lines starting with [ (ABP Header)
  # 5) Remove lines containing ABP extended CSS selectors ("##", "#$#",
  #    "#@#", "#?#") and Adguard JavaScript (#%#) preceded by a letter
  # 6) Remove comments (text starting with "#", include possible spaces
  #    before the hash sign)
  # 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
  # 8) Remove empty lines
  sed -i -r \
    -e 's/\r$//' \
    -e 's/\s*!.*//g' \
    -e 's/\s*\[.*//g' \
    -e '/[a-z]\#[$?@%]{0,3}\#/d' \
    -e 's/\s*#.*//g' \
    -e 's/^.*\s+//g' \
    -e '/^$/d' "${destination}"

  chmod 644 "${destination}"
}
2015-11-23 07:49:38 +00:00
2019-04-25 09:18:54 +00:00
# Report number of entries in a table of the (temporary) gravity database.
# Arguments:
#   $1 - table/view name to count
#   $2 - human-readable label used in the status line
# Globals read: gravityTEMPfile, INFO, COL_BOLD, COL_NC
# Outputs: one status line to stdout; for the "gravity" table it also stores
#          the unique-domain count in the database's info table.
gravity_Table_Count() {
  local table="${1}"
  local str="${2}"
  local num
  num="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
  if [[ "${table}" == "gravity" ]]; then
    # The gravity table may contain the same domain from several adlists, so
    # additionally report (and persist) the number of distinct domains.
    local unique
    unique="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
    echo -e "  ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
    pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count', ${unique});"
  else
    echo -e "  ${INFO} Number of ${str}: ${num}"
  fi
}
2017-06-21 11:49:05 +00:00
2019-04-25 09:18:54 +00:00
# Output count of blocked domains and regex filters for all relevant tables.
# Globals: none directly; delegates to gravity_Table_Count.
gravity_ShowCount() {
  # Here we use the table "gravity" instead of the view "vw_gravity" for speed.
  # It's safe to replace it here, because right after a gravity run both will
  # show the exactly same number of domains.
  gravity_Table_Count "gravity" "gravity domains" ""
  gravity_Table_Count "vw_blacklist" "exact denied domains"
  gravity_Table_Count "vw_regex_blacklist" "regex denied filters"
  gravity_Table_Count "vw_whitelist" "exact allowed domains"
  gravity_Table_Count "vw_regex_whitelist" "regex allowed filters"
}
2015-11-26 23:48:52 +00:00
2017-07-24 11:24:34 +00:00
# Trap Ctrl-C (SIGINT): report the user abort and clean up before exiting.
gravity_Trap() {
  trap '{ echo -e "\\n\\n  ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
}
2015-12-05 03:41:37 +00:00
2017-09-15 12:39:17 +00:00
# Clean up after Gravity upon exit or cancellation.
# Arguments:
#   $1 - optional; any non-empty value signals an error, printing the
#        Pi-hole status and exiting with code 1 after cleanup.
# Globals read: piholeDir, GRAVITY_TMPDIR, gravity_Blackbody, activeDomains,
#               domainsExtension, PIHOLE_COMMAND, INFO/OVER/TICK/CROSS
gravity_Cleanup() {
  local error="${1:-}"

  str="Cleaning up stray matter"
  echo -ne "  ${INFO} ${str}..."

  # Delete tmp content generated by Gravity
  # shellcheck disable=SC2086 -- intentional globbing of temp-file patterns
  rm ${piholeDir}/pihole.*.txt 2>/dev/null
  # shellcheck disable=SC2086
  rm ${piholeDir}/*.tmp 2>/dev/null
  # listCurlBuffer location
  rm "${GRAVITY_TMPDIR}"/*.phgpb 2>/dev/null
  # invalid_domains location
  rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2>/dev/null

  # Ensure this function only runs when gravity_SetDownloadOptions() has completed
  if [[ "${gravity_Blackbody:-}" == true ]]; then
    # Remove any unused .domains files
    for file in "${piholeDir}"/*."${domainsExtension}"; do
      # If list is not in active array, then remove it
      if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
        rm -f "${file}" 2>/dev/null ||
          echo -e "  ${CROSS} Failed to remove ${file##*/}"
      fi
    done
  fi

  echo -e "${OVER}  ${TICK} ${str}"

  # Print Pi-hole status if an error occurred
  if [[ -n "${error}" ]]; then
    "${PIHOLE_COMMAND}" status
    exit 1
  fi
}
2015-08-23 04:44:41 +00:00
2021-08-20 18:48:57 +00:00
# Check the gravity database for corruption and, if needed (or forced),
# attempt to recover it via SQLite's .recover command.
# Arguments:
#   $1 - optional; "force" runs recovery even when no damage is detected
# Globals read: gravityDBfile, INFO/OVER/TICK/CROSS
# Side effects: on recovery, moves the damaged DB to "${gravityDBfile}.old"
#               and installs the recovered copy; exits 1 on recovery failure.
database_recovery() {
  local result
  local str="Checking integrity of existing gravity database (this can take a while)"
  local option="${1}"
  echo -ne "  ${INFO} ${str}..."
  result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA integrity_check" 2>&1)"

  if [[ ${result} = "ok" ]]; then
    echo -e "${OVER}  ${TICK} ${str} - no errors found"

    str="Checking foreign keys of existing gravity database (this can take a while)"
    echo -ne "  ${INFO} ${str}..."
    unset result
    result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA foreign_key_check" 2>&1)"
    if [[ -z ${result} ]]; then
      echo -e "${OVER}  ${TICK} ${str} - no errors found"
      # Database is healthy; only continue into recovery when forced
      if [[ "${option}" != "force" ]]; then
        return
      fi
    else
      echo -e "${OVER}  ${CROSS} ${str} - errors found:"
      while IFS= read -r line; do echo "  - $line"; done <<<"$result"
    fi
  else
    echo -e "${OVER}  ${CROSS} ${str} - errors found:"
    while IFS= read -r line; do echo "  - $line"; done <<<"$result"
  fi

  str="Trying to recover existing gravity database"
  echo -ne "  ${INFO} ${str}..."
  # We have to remove any possibly existing recovery database or this will fail
  rm -f "${gravityDBfile}.recovered" >/dev/null 2>&1
  if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then
    echo -e "${OVER}  ${TICK} ${str} - success"
    mv "${gravityDBfile}" "${gravityDBfile}.old"
    mv "${gravityDBfile}.recovered" "${gravityDBfile}"
    echo -ne "  ${INFO} ${gravityDBfile} has been recovered"
    echo -ne "  ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
  else
    echo -e "${OVER}  ${CROSS} ${str} - the following errors happened:"
    while IFS= read -r line; do echo "  - $line"; done <<<"$result"
    echo -e "  ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
    exit 1
  fi
  echo ""
}
2024-01-01 07:42:31 +00:00
# Run ANALYZE on the freshly built gravity database to refresh the query
# planner's statistics. On failure, reports the error and aborts via
# gravity_Cleanup "error" (which exits 1).
# Globals read: gravityTEMPfile, INFO/OVER/TICK/CROSS
gravity_optimize() {
  # The ANALYZE command gathers statistics about tables and indices and stores
  # the collected information in internal tables of the database where the
  # query optimizer can access the information and use it to help make better
  # query planning choices
  local str="Optimizing database"
  # Declared local so the helper does not leak globals into the script
  local output status
  echo -ne "  ${INFO} ${str}..."
  output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "PRAGMA analysis_limit=0; ANALYZE" 2>&1; } 2>&1)
  status="$?"
  if [[ "${status}" -ne 0 ]]; then
    echo -e "\\n  ${CROSS} Unable to optimize database ${gravityTEMPfile}\\n  ${output}"
    gravity_Cleanup "error"
  else
    echo -e "${OVER}  ${TICK} ${str}"
  fi
}
2024-09-17 18:23:09 +00:00
# Function: timeit
# Description: Measures the execution time of a given command.
#
# Usage:
#   timeit <command>
#
# Parameters:
#   <command> - The command to be executed and timed.
#
# Returns:
#   The exit status of the executed command.
#
# Output:
#   If the 'timed' variable is set to true, prints the elapsed time in seconds
#   with millisecond precision.
#
# Example:
#   timeit ls -l
#
timeit() {
  local start_time end_time elapsed_time ret
  # Capture the start time (milliseconds since epoch; %3N requires GNU date)
  start_time=$(date +%s%3N)
  # Execute the command passed as arguments
  "$@"
  ret=$?
  if [[ "${timed:-}" != true ]]; then
    return $ret
  fi
  # Capture the end time
  end_time=$(date +%s%3N)
  # Calculate the elapsed time
  elapsed_time=$((end_time - start_time))
  # Display the elapsed time. The color variables are quoted: unquoted empty
  # expansions would vanish and shift the printf arguments into the %b slots.
  printf "  %b--> took %d.%03d seconds%b\n" "${COL_BLUE}" $((elapsed_time / 1000)) $((elapsed_time % 1000)) "${COL_NC}"
  return $ret
}
2017-07-27 02:34:35 +00:00
# Print usage information for "pihole -g" and exit successfully.
helpFunc() {
  echo "Usage: pihole -g
Update domains from blocklists specified in adlists.list

Options:
  -f, --force          Force the download of all specified blocklists
  -t, --timeit         Time the gravity update process
  -h, --help           Show this help dialog"
  exit 0
}
2021-08-20 18:48:57 +00:00
# Select the gravity database repair mode from the command line.
# Arguments:
#   $1 - "recover" or "recreate"; anything else prints usage and exits 0.
# Globals written: recover_database or recreate_database (set to true)
repairSelector() {
  case "$1" in
    "recover") recover_database=true ;;
    "recreate") recreate_database=true ;;
    *)
      echo "Usage: pihole -g -r {recover,recreate}
Attempt to repair gravity database

Available options:
  pihole -g -r recover        Try to recover a damaged gravity database file.
                              Pi-hole tries to restore as much as possible
                              from a corrupted gravity database.

  pihole -g -r recover force  Pi-hole will run the recovery process even when
                              no damage is detected. This option is meant to
                              be a last resort. Recovery is a fragile task
                              consuming a lot of resources and shouldn't be
                              performed unnecessarily.

  pihole -g -r recreate       Create a new gravity database file from scratch.
                              This will remove your existing gravity database
                              and create a new file from scratch. If you still
                              have the migration backup created when migrating
                              to Pi-hole v5.0, Pi-hole will import these files."
      exit 0
      ;;
  esac
}
2016-10-22 06:02:45 +00:00
# Parse command-line flags passed to "pihole -g".
for var in "$@"; do
  case "${var}" in
    "-f" | "--force") forceDelete=true ;;
    "-t" | "--timeit") timed=true ;;
    "-r" | "--repair") repairSelector "$3" ;;
    "-u" | "--upgrade")
      # Upgrade the gravity database schema and exit without running gravity
      upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
      exit 0
      ;;
    "-h" | "--help") helpFunc ;;
  esac
done
2021-06-14 18:27:10 +00:00
# Remove OLD (backup) gravity file, if it exists
if [[ -f "${gravityOLDfile}" ]]; then
  rm "${gravityOLDfile}"
fi

# Trap Ctrl-C
gravity_Trap

# Recreate the gravity database from the v5.0 migration backup when requested
if [[ "${recreate_database:-}" == true ]]; then
  str="Recreating gravity database from migration backup"
  echo -ne "  ${INFO} ${str}..."
  rm "${gravityDBfile}"
  pushd "${piholeDir}" >/dev/null || exit
  cp migration_backup/* .
  popd >/dev/null || exit
  echo -e "${OVER}  ${TICK} ${str}"
fi

# Run the database recovery routine when requested ($4 may be "force")
if [[ "${recover_database:-}" == true ]]; then
  timeit database_recovery "$4"
fi
2019-04-24 17:55:05 +00:00
# Move possibly existing legacy files to the gravity database
if ! timeit migrate_to_database; then
  echo -e "  ${CROSS} Unable to migrate to database. Please contact support."
  exit 1
fi

# Drop the cached lists so every source is downloaded afresh
if [[ "${forceDelete:-}" == true ]]; then
  str="Deleting existing list cache"
  echo -ne "${INFO} ${str}..."
  rm /etc/pihole/list.* 2>/dev/null || true
  echo -e "${OVER}  ${TICK} ${str}"
fi

# Gravity downloads blocklists next
if ! timeit gravity_CheckDNSResolutionAvailable; then
  echo -e "  ${CROSS} Can not complete gravity update, no DNS is available. Please contact support."
  exit 1
fi

if ! gravity_DownloadBlocklists; then
  echo -e "  ${CROSS} Unable to create gravity database. Please try again later. If the problem persists, please contact support."
  exit 1
fi
2017-07-24 11:24:34 +00:00
2020-02-12 18:26:25 +00:00
# Update gravity timestamp
update_gravity_timestamp

# Ensure proper permissions are set for the database
fix_owner_permissions "${gravityTEMPfile}"

# Build the tree
timeit gravity_build_tree

# Compute numbers to be displayed (do this after building the tree to get the
# numbers quickly from the tree instead of having to scan the whole database)
timeit gravity_ShowCount

# Optimize the database
timeit gravity_optimize

# Migrate rest of the data from old to new database
# IMPORTANT: Swapping the databases must be the last step before the cleanup
if ! timeit gravity_swap_databases; then
  echo -e "  ${CROSS} Unable to create database. Please contact support."
  exit 1
fi

timeit gravity_Cleanup
echo ""

echo "  ${TICK} Done."

# "${PIHOLE_COMMAND}" status