Merge devel into 4.3.2 and resolve merge conflicts

Signed-off-by: Adam Warner <me@adamwarner.co.uk>
Adam Warner 2019-09-22 01:16:44 +01:00
commit 61a40c1b43
No known key found for this signature in database
GPG key ID: 872950F3ECF2B173
21 changed files with 1150 additions and 567 deletions


@@ -17,37 +17,35 @@ coltable="/opt/pihole/COL_TABLE"
source "${coltable}"
regexconverter="/opt/pihole/wildcard_regex_converter.sh"
source "${regexconverter}"
# shellcheck disable=SC1091
source "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"
basename="pihole"
PIHOLE_COMMAND="/usr/local/bin/${basename}"
piholeDir="/etc/${basename}"
adListFile="${piholeDir}/adlists.list"
adListDefault="${piholeDir}/adlists.default"
# Legacy (pre v5.0) list file locations
whitelistFile="${piholeDir}/whitelist.txt"
blacklistFile="${piholeDir}/blacklist.txt"
regexFile="${piholeDir}/regex.list"
adListFile="${piholeDir}/adlists.list"
adList="${piholeDir}/gravity.list"
blackList="${piholeDir}/black.list"
localList="${piholeDir}/local.list"
VPNList="/etc/openvpn/ipp.txt"
piholeGitDir="/etc/.pihole"
gravityDBfile="${piholeDir}/gravity.db"
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
optimize_database=false
domainsExtension="domains"
matterAndLight="${basename}.0.matterandlight.txt"
parsedMatter="${basename}.1.parsedmatter.txt"
whitelistMatter="${basename}.2.whitelistmatter.txt"
accretionDisc="${basename}.3.accretionDisc.txt"
preEventHorizon="list.preEventHorizon"
skipDownload="false"
resolver="pihole-FTL"
haveSourceUrls=true
# Source setupVars from install script
setupVars="${piholeDir}/setupVars.conf"
if [[ -f "${setupVars}" ]];then
@@ -83,26 +81,137 @@ if [[ -r "${piholeDir}/pihole.conf" ]]; then
echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
fi
# Determine if Pi-hole blocking is disabled
# If this is the case, we want to update
# gravity.list.bck and black.list.bck instead of
# gravity.list and black.list
detect_pihole_blocking_status() {
if [[ "${BLOCKING_ENABLED}" == false ]]; then
echo -e " ${INFO} Pi-hole blocking is disabled"
adList="${adList}.bck"
blackList="${blackList}.bck"
else
echo -e " ${INFO} Pi-hole blocking is enabled"
# Generate new sqlite3 file from schema template
generate_gravity_database() {
sqlite3 "${gravityDBfile}" < "${gravityDBschema}"
# Ensure proper permissions are set for the newly created database
chown pihole:pihole "${gravityDBfile}"
chmod g+w "${piholeDir}" "${gravityDBfile}"
}
update_gravity_timestamp() {
# Update timestamp when the gravity table was last updated successfully
output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values (\"updated\",cast(strftime('%s', 'now') as int));"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}"
fi
}
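For reference, the epoch value written by this statement can be read back in human-readable form with SQLite's datetime() conversion. A minimal sketch, assuming the default /etc/pihole/gravity.db path defined above:

# Hypothetical demo (not part of this commit): read the last-updated timestamp back
sqlite3 /etc/pihole/gravity.db \
  "SELECT datetime(value, 'unixepoch') FROM info WHERE property = 'updated';"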
# Import domains from file and store them in the specified database table
database_table_from_file() {
# Define locals
local table source backup_path backup_file
table="${1}"
source="${2}"
backup_path="${piholeDir}/migration_backup"
backup_file="${backup_path}/$(basename "${2}")"
# Truncate table
output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}"
gravity_Cleanup "error"
fi
local tmpFile
tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"
local timestamp
timestamp="$(date --utc +'%s')"
local inputfile
if [[ "${table}" == "gravity" ]]; then
# No need to modify the input data for the gravity table
inputfile="${source}"
else
# Apply format for white-, blacklist, regex, and adlist tables
# Read file line by line
local rowid
declare -i rowid
rowid=1
grep -v '^ *#' < "${source}" | while IFS= read -r domain
do
# Only add non-empty lines
if [[ -n "${domain}" ]]; then
if [[ "${table}" == "domain_audit" ]]; then
# domain_audit table format (no enable or modified fields)
echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
else
# White-, black-, and regexlist format
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
fi
rowid+=1
fi
done
inputfile="${tmpFile}"
fi
# Store domains in database table specified by ${table}
# Use printf as .mode and .import need to be on separate lines
# see https://unix.stackexchange.com/a/445615/83260
output=$( { printf ".timeout 10000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to fill table ${table} in database ${gravityDBfile}\\n ${output}"
gravity_Cleanup "error"
fi
# Delete tmpfile
rm "${tmpFile}" > /dev/null 2>&1 || \
echo -e " ${CROSS} Unable to remove ${tmpFile}"
# Move source file to backup directory, create directory if not existing
mkdir -p "${backup_path}"
mv "${source}" "${backup_file}" 2> /dev/null || \
echo -e " ${CROSS} Unable to backup ${source} to ${backup_path}"
}
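The printf | sqlite3 construction above exists because sqlite3 dot-commands such as .mode and .import must each sit on their own input line, as the linked answer explains. A minimal standalone sketch of the same import pattern, using throwaway temp files and a hypothetical demo table rather than the real gravity schema:

# Hypothetical demo (not part of this commit): CSV import via sqlite3 dot-commands
db="$(mktemp --suffix=".db")"
csv="$(mktemp --suffix=".csv")"
# Table shaped like the "rowid,domain,enabled,added,modified,comment" rows built above
sqlite3 "${db}" "CREATE TABLE demo (id INTEGER PRIMARY KEY, domain TEXT, enabled INTEGER, date_added INTEGER, date_modified INTEGER, comment TEXT);"
printf '1,"example.com",1,%s,%s,"demo row"\n' "$(date +%s)" "$(date +%s)" > "${csv}"
# .mode and .import must be on separate lines, hence printf rather than a single command string
printf ".timeout 10000\n.mode csv\n.import \"%s\" demo\n" "${csv}" | sqlite3 "${db}"
sqlite3 "${db}" "SELECT domain, comment FROM demo;"
rm -f "${db}" "${csv}"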
# Migrate pre-v5.0 list files to database-based Pi-hole versions
migrate_to_database() {
# Create database file only if not present
if [ ! -e "${gravityDBfile}" ]; then
# Create new database file - note that this will be created in version 1
echo -e " ${INFO} Creating new gravity database"
generate_gravity_database
# Migrate list files to new database
if [ -e "${adListFile}" ]; then
# Store adlist domains in database
echo -e " ${INFO} Migrating content of ${adListFile} into new database"
database_table_from_file "adlist" "${adListFile}"
fi
if [ -e "${blacklistFile}" ]; then
# Store blacklisted domains in database
echo -e " ${INFO} Migrating content of ${blacklistFile} into new database"
database_table_from_file "blacklist" "${blacklistFile}"
fi
if [ -e "${whitelistFile}" ]; then
# Store whitelisted domains in database
echo -e " ${INFO} Migrating content of ${whitelistFile} into new database"
database_table_from_file "whitelist" "${whitelistFile}"
fi
if [ -e "${regexFile}" ]; then
# Store regex domains in database
# Important note: We need to add the domains to the "regex" table
# as it will only later be renamed to "regex_blacklist"!
echo -e " ${INFO} Migrating content of ${regexFile} into new database"
database_table_from_file "regex" "${regexFile}"
fi
fi
# Check if gravity database needs to be updated
upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
}
# Determine if DNS resolution is available before proceeding
gravity_CheckDNSResolutionAvailable() {
local lookupDomain="pi.hole"
# Determine if $localList does not exist
if [[ ! -e "${localList}" ]]; then
# Determine if $localList does not exist, and ensure it is not empty
if [[ ! -e "${localList}" ]] || [[ -s "${localList}" ]]; then
lookupDomain="raw.githubusercontent.com"
fi
@@ -153,19 +262,13 @@ gravity_CheckDNSResolutionAvailable() {
gravity_CheckDNSResolutionAvailable
}
# Retrieve blocklist URLs and parse domains from adlists.list
# Retrieve blocklist URLs and parse domains from adlist.list
gravity_GetBlocklistUrls() {
echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."
if [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then
# Remove superseded $adListDefault file
rm "${adListDefault}" 2> /dev/null || \
echo -e " ${CROSS} Unable to remove ${adListDefault}"
fi
# Retrieve source URLs from $adListFile
# Logic: Remove comments and empty lines
mapfile -t sources <<< "$(grep -v -E "^(#|$)" "${adListFile}" 2> /dev/null)"
# Retrieve source URLs from gravity database
# We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true)
mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
# Parse source domains from $sources
mapfile -t sourceDomains <<< "$(
@@ -182,11 +285,12 @@ gravity_GetBlocklistUrls() {
if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
echo -e "${OVER} ${TICK} ${str}"
return 0
else
echo -e "${OVER} ${CROSS} ${str}"
echo -e " ${INFO} No source list found, or it is empty"
echo ""
haveSourceUrls=false
return 1
fi
}
@@ -214,11 +318,9 @@ gravity_SetDownloadOptions() {
*) cmd_ext="";;
esac
if [[ "${skipDownload}" == false ]]; then
echo -e " ${INFO} Target: ${domain} (${url##*/})"
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}"
echo ""
fi
echo -e " ${INFO} Target: ${domain} (${url##*/})"
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}"
echo ""
done
gravity_Blackbody=true
}
@@ -335,14 +437,17 @@ gravity_ParseFileIntoDomains() {
# Most of the lists downloaded are already in hosts file format but the spacing/formatting is not contiguous
# This helps with that and makes it easier to read
# It also helps with debugging so each stage of the script can be researched more in depth
# Awk -F splits on given IFS; we grab the right hand side (chops trailing #comments and /'s to grab the domain only).
# Last awk command takes non-commented lines and if they have 2 fields, take the right field (the domain) and leave
# the left (IP address), otherwise grab the single field.
< ${source} awk -F '#' '{print $1}' | \
awk -F '/' '{print $1}' | \
awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \
sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${destination}
# 1) Remove carriage returns
# 2) Convert all characters to lowercase
# 3) Remove lines containing "#" or "/"
# 4) Remove leading tabs, spaces, etc.
# 5) Delete lines not matching domain names
< "${source}" tr -d '\r' | \
tr '[:upper:]' '[:lower:]' | \
sed -r '/(\/|#).*$/d' | \
sed -r 's/^.*\s+//g' | \
sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}"
chmod 644 "${destination}"
return 0
fi
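To illustrate the five steps listed above, here is a hypothetical run of the same pipeline over a few made-up sample lines; only properly formed domains survive the final filter:

# Hypothetical demo (not part of this commit); expected output:
#   ads.example.com
#   doubleclick.net
printf '0.0.0.0 Ads.EXAMPLE.com\r\n127.0.0.1 localhost\n# comment line\ndoubleclick.net\n' | \
  tr -d '\r' | \
  tr '[:upper:]' '[:lower:]' | \
  sed -r '/(\/|#).*$/d' | \
  sed -r 's/^.*\s+//g' | \
  sed -r '/([^\.]+\.)+[^\.]{2,}/!d'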
@@ -374,11 +479,13 @@ gravity_ParseFileIntoDomains() {
# Print if nonempty
length { print }
' "${source}" 2> /dev/null > "${destination}"
chmod 644 "${destination}"
echo -e "${OVER} ${TICK} Format: URL"
else
# Default: Keep hosts/domains file in same format as it was downloaded
output=$( { mv "${source}" "${destination}"; } 2>&1 )
chmod 644 "${destination}"
if [[ ! -e "${destination}" ]]; then
echo -e "\\n ${CROSS} Unable to move tmp file to ${piholeDir}
@@ -393,12 +500,11 @@ gravity_ConsolidateDownloadedBlocklists() {
local str lastLine
str="Consolidating blocklists"
if [[ "${haveSourceUrls}" == true ]]; then
echo -ne " ${INFO} ${str}..."
fi
echo -ne " ${INFO} ${str}..."
# Empty $matterAndLight if it already exists, otherwise, create it
: > "${piholeDir}/${matterAndLight}"
chmod 644 "${piholeDir}/${matterAndLight}"
# Loop through each *.domains file
for i in "${activeDomains[@]}"; do
@@ -414,9 +520,8 @@ gravity_ConsolidateDownloadedBlocklists() {
fi
fi
done
if [[ "${haveSourceUrls}" == true ]]; then
echo -e "${OVER} ${TICK} ${str}"
fi
echo -e "${OVER} ${TICK} ${str}"
}
# Parse consolidated list into (filtered, unique) domains-only format
@@ -424,67 +529,46 @@ gravity_SortAndFilterConsolidatedList() {
local str num
str="Extracting domains from blocklists"
if [[ "${haveSourceUrls}" == true ]]; then
echo -ne " ${INFO} ${str}..."
fi
echo -ne " ${INFO} ${str}..."
# Parse into hosts file
# Parse into file
gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}"
# Format $parsedMatter line total as currency
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")")
if [[ "${haveSourceUrls}" == true ]]; then
echo -e "${OVER} ${TICK} ${str}"
fi
echo -e " ${INFO} Number of domains being pulled in by gravity: ${COL_BLUE}${num}${COL_NC}"
echo -e "${OVER} ${TICK} ${str}"
echo -e " ${INFO} Gravity pulled in ${COL_BLUE}${num}${COL_NC} domains"
str="Removing duplicate domains"
if [[ "${haveSourceUrls}" == true ]]; then
echo -ne " ${INFO} ${str}..."
fi
echo -ne " ${INFO} ${str}..."
sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}"
chmod 644 "${piholeDir}/${preEventHorizon}"
echo -e "${OVER} ${TICK} ${str}"
if [[ "${haveSourceUrls}" == true ]]; then
echo -e "${OVER} ${TICK} ${str}"
# Format $preEventHorizon line total as currency
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
echo -e " ${INFO} Number of unique domains trapped in the Event Horizon: ${COL_BLUE}${num}${COL_NC}"
fi
# Format $preEventHorizon line total as currency
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
str="Storing ${COL_BLUE}${num}${COL_NC} unique blocking domains in database"
echo -ne " ${INFO} ${str}..."
database_table_from_file "gravity" "${piholeDir}/${preEventHorizon}"
echo -e "${OVER} ${TICK} ${str}"
}
# Whitelist user-defined domains
gravity_Whitelist() {
local num str
if [[ ! -f "${whitelistFile}" ]]; then
echo -e " ${INFO} Nothing to whitelist!"
return 0
fi
num=$(wc -l < "${whitelistFile}")
str="Number of whitelisted domains: ${num}"
echo -ne " ${INFO} ${str}..."
# Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile
comm -23 "${piholeDir}/${preEventHorizon}" <(sort "${whitelistFile}") > "${piholeDir}/${whitelistMatter}"
echo -e "${OVER} ${INFO} ${str}"
# Report number of entries in a table
gravity_Table_Count() {
local table="${1}"
local str="${2}"
local num
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table} WHERE enabled = 1;")"
echo -e " ${INFO} Number of ${str}: ${num}"
}
# Output count of blacklisted domains and regex filters
gravity_ShowBlockCount() {
local num
if [[ -f "${blacklistFile}" ]]; then
num=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")")
echo -e " ${INFO} Number of blacklisted domains: ${num}"
fi
if [[ -f "${regexFile}" ]]; then
num=$(grep -cv "^#" "${regexFile}")
echo -e " ${INFO} Number of regex filters: ${num}"
fi
gravity_ShowCount() {
gravity_Table_Count "blacklist" "exact blacklisted domains"
gravity_Table_Count "regex_blacklist" "regex blacklist filters"
gravity_Table_Count "whitelist" "exact whitelisted domains"
gravity_Table_Count "regex_whitelist" "regex whitelist filters"
}
# Parse list of domains into hosts format
@@ -504,7 +588,7 @@ gravity_ParseDomainsIntoHosts() {
}
# Create "localhost" entries into hosts format
gravity_ParseLocalDomains() {
gravity_generateLocalList() {
local hostname
if [[ -s "/etc/hostname" ]]; then
@@ -520,6 +604,7 @@ gravity_ParseLocalDomains() {
# Empty $localList if it already exists, otherwise, create it
: > "${localList}"
chmod 644 "${localList}"
gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}"
@@ -529,40 +614,6 @@ gravity_ParseLocalDomains() {
fi
}
# Create primary blacklist entries
gravity_ParseBlacklistDomains() {
local output status
# Empty $accretionDisc if it already exists, otherwise, create it
: > "${piholeDir}/${accretionDisc}"
if [[ -f "${piholeDir}/${whitelistMatter}" ]]; then
mv "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}"
else
# There was no whitelist file, so use preEventHorizon instead of whitelistMatter.
cp "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}"
fi
# Move the file over as /etc/pihole/gravity.list so dnsmasq can use it
output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}"
gravity_Cleanup "error"
fi
}
# Create user-added blacklist entries
gravity_ParseUserDomains() {
if [[ ! -f "${blacklistFile}" ]]; then
return 0
fi
# Copy the file over as /etc/pihole/black.list so dnsmasq can use it
cp "${blacklistFile}" "${blackList}" 2> /dev/null || \
echo -e "\\n ${CROSS} Unable to move ${blacklistFile##*/} to ${piholeDir}"
}
# Trap Ctrl-C
gravity_Trap() {
trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
@@ -594,6 +645,21 @@ gravity_Cleanup() {
echo -e "${OVER} ${TICK} ${str}"
if ${optimize_database} ; then
str="Optimizing domains database"
echo -ne " ${INFO} ${str}..."
# Run VACUUM command on database to optimize it
output=$( { sqlite3 "${gravityDBfile}" "VACUUM;"; } 2>&1 )
status="$?"
if [[ "${status}" -ne 0 ]]; then
echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}"
error="error"
else
echo -e "${OVER} ${TICK} ${str}"
fi
fi
# Only restart DNS service if offline
if ! pidof ${resolver} &> /dev/null; then
"${PIHOLE_COMMAND}" restartdns
@@ -620,17 +686,17 @@ Options:
for var in "$@"; do
case "${var}" in
"-f" | "--force" ) forceDelete=true;;
"-o" | "--optimize" ) optimize_database=true;;
"-h" | "--help" ) helpFunc;;
"-sd" | "--skip-download" ) skipDownload=true;;
"-b" | "--blacklist-only" ) listType="blacklist";;
"-w" | "--whitelist-only" ) listType="whitelist";;
"-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";;
esac
done
# Trap Ctrl-C
gravity_Trap
# Move possibly existing legacy files to the gravity database
migrate_to_database
if [[ "${forceDelete:-}" == true ]]; then
str="Deleting existing list cache"
echo -ne "${INFO} ${str}..."
@@ -639,56 +705,26 @@ if [[ "${forceDelete:-}" == true ]]; then
echo -e "${OVER} ${TICK} ${str}"
fi
detect_pihole_blocking_status
# Determine which functions to run
if [[ "${skipDownload}" == false ]]; then
# Gravity needs to download blocklists
gravity_CheckDNSResolutionAvailable
gravity_GetBlocklistUrls
if [[ "${haveSourceUrls}" == true ]]; then
gravity_SetDownloadOptions
fi
# Gravity downloads blocklists next
gravity_CheckDNSResolutionAvailable
if gravity_GetBlocklistUrls; then
gravity_SetDownloadOptions
# Build preEventHorizon
gravity_ConsolidateDownloadedBlocklists
gravity_SortAndFilterConsolidatedList
else
# Gravity needs to modify Blacklist/Whitelist/Wildcards
echo -e " ${INFO} Using cached Event Horizon list..."
numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
echo -e " ${INFO} ${COL_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon"
fi
# Perform when downloading blocklists, or modifying the whitelist
if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "whitelist" ]]; then
gravity_Whitelist
fi
# Create local.list
gravity_generateLocalList
gravity_ShowCount
convert_wildcard_to_regex
gravity_ShowBlockCount
# Perform when downloading blocklists, or modifying the white/blacklist (not wildcards)
if [[ "${skipDownload}" == false ]] || [[ "${listType}" == *"list" ]]; then
str="Parsing domains into hosts format"
echo -ne " ${INFO} ${str}..."
gravity_ParseUserDomains
# Perform when downloading blocklists
if [[ ! "${listType:-}" == "blacklist" ]]; then
gravity_ParseLocalDomains
gravity_ParseBlacklistDomains
fi
echo -e "${OVER} ${TICK} ${str}"
gravity_Cleanup
fi
update_gravity_timestamp
gravity_Cleanup
echo ""
# Determine if DNS has been restarted by this instance of gravity
if [[ -z "${dnsWasOffline:-}" ]]; then
# Use "force-reload" when restarting dnsmasq for everything but Wildcards
"${PIHOLE_COMMAND}" restartdns "${dnsRestartType:-force-reload}"
"${PIHOLE_COMMAND}" restartdns reload
fi
"${PIHOLE_COMMAND}" status