mirror of https://github.com/pi-hole/pi-hole.git
synced 2024-11-15 02:42:58 +00:00

Release v5.16 (#5220)

This commit is contained in: commit c6d1137eb0
19 changed files with 283 additions and 119 deletions
.github/workflows/codeql-analysis.yml (vendored): 2 changes

@@ -25,7 +25,7 @@ jobs:
     steps:
       -
         name: Checkout repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v3.4.0

       # Initializes the CodeQL tools for scanning.
       -
         name: Initialize CodeQL
.github/workflows/sync-back-to-dev.yml (vendored): 24 changes

@@ -5,13 +5,35 @@ on:
     branches:
       - master

+# The section is needed to drop the default write-all permissions for all jobs
+# that are granted on `push` event. By specifying any permission explicitly
+# all others are set to none. By using the principle of least privilege the damage a compromised
+# workflow can do (because of an injection or compromised third party tool or
+# action) is restricted. Adding labels to issues, commenting
+# on pull-requests, etc. may need additional permissions:
+#
+# Syntax for this section:
+# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
+#
+# Reference for how to assign permissions on a job-by-job basis:
+# https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs
+#
+# Reference for available permissions that we can enable if needed:
+# https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token
+permissions: {}

 jobs:
   sync-branches:
+    # The job needs to be able to pull the code and create a pull request.
+    permissions:
+      contents: read # for actions/checkout
+      pull-requests: write # to create pull request

     runs-on: ubuntu-latest
     name: Syncing branches
     steps:
       - name: Checkout
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v3.4.0
       - name: Opening pull request
         run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal'
         env:
.github/workflows/test.yml (vendored): 4 changes

@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v3.4.0

       - name: Check scripts in repository are executable
         run: |

@@ -62,7 +62,7 @@ jobs:
       DISTRO: ${{matrix.distro}}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v3.4.0

       - name: Set up Python 3.10
         uses: actions/setup-python@v4.5.0
@@ -230,7 +230,7 @@ initialize_debug() {

 # This is a function for visually displaying the current test that is being run.
 # Accepts one variable: the name of what is being diagnosed
-# Colors do not show in the dasboard, but the icons do: [i], [✓], and [✗]
+# Colors do not show in the dashboard, but the icons do: [i], [✓], and [✗]
 echo_current_diagnostic() {
     # Colors are used for visually distinguishing each test in the output
     # These colors do not show in the GUI, but the formatting will
@@ -30,33 +30,6 @@ gravityDBfile="${GRAVITYDB}"
 colfile="/opt/pihole/COL_TABLE"
 source "${colfile}"

-# Scan an array of files for matching strings
-scanList(){
-    # Escape full stops
-    local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" list_type="${3:-}"
-
-    # Prevent grep from printing file path
-    cd "$piholeDir" || exit 1
-
-    # Prevent grep -i matching slowly: https://bit.ly/2xFXtUX
-    export LC_CTYPE=C
-
-    # /dev/null forces filename to be printed when only one list has been generated
-    case "${list_type}" in
-        "exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
-        # Iterate through each regexp and check whether it matches the domainQuery
-        # If it does, print the matching regexp and continue looping
-        # Input 1 - regexps | Input 2 - domainQuery
-        "regex" )
-            for list in ${lists}; do
-                if [[ "${domain}" =~ ${list} ]]; then
-                    printf "%b\n" "${list}";
-                fi
-            done;;
-        * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;;
-    esac
-}
-
 if [[ "${options}" == "-h" ]] || [[ "${options}" == "--help" ]]; then
     echo "Usage: pihole -q [option] <domain>
 Example: 'pihole -q -exact domain.com'
@@ -84,17 +57,47 @@ options=$(sed -E 's/ ?-(all|exact) ?//g' <<< "${options}")
 case "${options}" in
     "" ) str="No domain specified";;
     *" "* ) str="Unknown query option specified";;
-    *[![:ascii:]]* ) domainQuery=$(idn2 "${options}");;
-    * ) domainQuery="${options}";;
+    *[![:ascii:]]* ) rawDomainQuery=$(idn2 "${options}");;
+    * ) rawDomainQuery="${options}";;
 esac

+# convert the domain to lowercase
+domainQuery=$(echo "${rawDomainQuery}" | tr '[:upper:]' '[:lower:]')

 if [[ -n "${str:-}" ]]; then
     echo -e "${str}${COL_NC}\\nTry 'pihole -q --help' for more information."
     exit 1
 fi

+# Scan an array of files for matching strings
+scanList(){
+    # Escape full stops
+    local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" list_type="${3:-}"
+
+    # Prevent grep from printing file path
+    cd "$piholeDir" || exit 1
+
+    # Prevent grep -i matching slowly: https://bit.ly/2xFXtUX
+    export LC_CTYPE=C
+
+    # /dev/null forces filename to be printed when only one list has been generated
+    case "${list_type}" in
+        "exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" "${lists}" /dev/null 2>/dev/null;;
+        # Iterate through each regexp and check whether it matches the domainQuery
+        # If it does, print the matching regexp and continue looping
+        # Input 1 - regexps | Input 2 - domainQuery
+        "regex" )
+            for list in ${lists}; do
+                if [[ "${domain}" =~ ${list} ]]; then
+                    printf "%b\n" "${list}";
+                fi
+            done;;
+        * ) grep -i "${esc_domain}" "${lists}" /dev/null 2>/dev/null;;
+    esac
+}

 scanDatabaseTable() {
-    local domain table list_type querystr result extra
+    local domain table list_type querystr result extra abpquerystr abpfound abpentry searchstr
     domain="$(printf "%q" "${1}")"
     table="${2}"
     list_type="${3:-}"
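The relocated scanList() helper above drives both the exact and the regex lookups. A minimal stand-alone sketch of its "regex" branch (the query domain and filter list below are made up for illustration, not taken from the commit):

    #!/usr/bin/env bash
    # Check one query domain against a space-separated set of regex filters,
    # printing every filter that matches, just as scanList()'s "regex" case does.
    domain="ads.example.com"
    lists="^ads\. ^tracker\. doubleclick"
    for list in ${lists}; do
        if [[ "${domain}" =~ ${list} ]]; then
            printf "%b\n" "${list}"
        fi
    done
    # Prints the matching filter: ^ads\.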
@@ -104,9 +107,34 @@ scanDatabaseTable() {
     # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched
     # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores.
     if [[ "${table}" == "gravity" ]]; then
+
+        # Are there ABP entries on gravity?
+        # Return 1 if abp_domain=1 or Zero if abp_domain=0 or not set
+        abpquerystr="SELECT EXISTS (SELECT 1 FROM info WHERE property='abp_domains' and value='1')"
+        abpfound="$(pihole-FTL sqlite3 "${gravityDBfile}" "${abpquerystr}")" 2> /dev/null
+
+        # Create search string for ABP entries only if needed
+        if [ "${abpfound}" -eq 1 ]; then
+            abpentry="${domain}"
+
+            searchstr="'||${abpentry}^'"
+
+            # While a dot is found ...
+            while [ "${abpentry}" != "${abpentry/./}" ]
+            do
+                # ... remove text before the dot (including the dot) and append the result to $searchstr
+                abpentry=$(echo "${abpentry}" | cut -f 2- -d '.')
+                searchstr="$searchstr, '||${abpentry}^'"
+            done
+
+            # The final search string will look like:
+            # "domain IN ('||sub2.sub1.domain.com^', '||sub1.domain.com^', '||domain.com^', '||com^') OR"
+            searchstr="domain IN (${searchstr}) OR "
+        fi
+
         case "${exact}" in
             "exact" ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";;
-            * ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
+            * ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE ${searchstr} domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
         esac
     else
         case "${exact}" in
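The while-loop in the hunk above expands one query domain into every AdBlock-Plus-style ancestor entry that could block it. A minimal stand-alone sketch of that expansion (sample domain only, not part of the commit):

    #!/usr/bin/env bash
    # Build the ABP search string for a sample domain by stripping one leading
    # label per iteration, mirroring the loop in scanDatabaseTable().
    domain="sub2.sub1.domain.com"
    abpentry="${domain}"
    searchstr="'||${abpentry}^'"
    while [ "${abpentry}" != "${abpentry/./}" ]; do
        abpentry="${abpentry#*.}"                 # same effect as: cut -f 2- -d '.'
        searchstr="${searchstr}, '||${abpentry}^'"
    done
    echo "domain IN (${searchstr}) OR "
    # Prints: domain IN ('||sub2.sub1.domain.com^', '||sub1.domain.com^', '||domain.com^', '||com^') OR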
@@ -116,7 +144,7 @@ scanDatabaseTable() {
     fi

     # Send prepared query to gravity database
-    result="$(pihole-FTL sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null
+    result="$(pihole-FTL sqlite3 -separator ',' "${gravityDBfile}" "${querystr}")" 2> /dev/null
     if [[ -z "${result}" ]]; then
         # Return early when there are no matches in this table
         return

@@ -136,8 +164,8 @@ scanDatabaseTable() {
     # Loop over results and print them
     mapfile -t results <<< "${result}"
     for result in "${results[@]}"; do
-        domain="${result/|*}"
-        if [[ "${result#*|}" == "0" ]]; then
+        domain="${result/,*}"
+        if [[ "${result#*,}" == "0" ]]; then
             extra=" (disabled)"
         else
             extra=""
@@ -212,10 +240,10 @@ if [[ -n "${exact}" ]]; then
 fi

 for result in "${results[@]}"; do
-    match="${result/|*/}"
-    extra="${result#*|}"
-    adlistAddress="${extra/|*/}"
-    extra="${extra#*|}"
+    match="${result/,*/}"
+    extra="${result#*,}"
+    adlistAddress="${extra/,*/}"
+    extra="${extra#*,}"
     if [[ "${extra}" == "0" ]]; then
         extra=" (disabled)"
     else
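With sqlite3 now invoked with -separator ',', each result row arrives as a single comma-joined string, and the parameter expansions above split it field by field. A minimal stand-alone sketch of that parsing (the sample row, including the adlist URL, is fabricated for illustration):

    #!/usr/bin/env bash
    # Split one "domain,address,enabled" row the same way the query script does.
    result="doubleclick.net,https://example.com/hosts.txt,1"
    match="${result/,*/}"           # everything before the first comma  -> matched domain
    extra="${result#*,}"            # everything after the first comma   -> "address,enabled"
    adlistAddress="${extra/,*/}"    # first remaining field              -> adlist address
    extra="${extra#*,}"             # last field                         -> enabled flag (1/0)
    echo "${match} | ${adlistAddress} | ${extra}"
    # Prints: doubleclick.net | https://example.com/hosts.txt | 1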
@@ -44,7 +44,7 @@ addOrEditKeyValPair() {
 }

 #######################
-# Takes two arguments: file, and key.
+# Takes two arguments: file and key.
 # Adds a key to target file
 #
 # Example usage:

@@ -57,14 +57,18 @@ addKey(){
     # touch file to prevent grep error if file does not exist yet
     touch "${file}"

-    if ! grep -q "^${key}" "${file}"; then
+    # Match key against entire line, using both anchors. We assume
+    # that the file's keys never have bounding whitespace. Anchors
+    # are necessary to ensure the key is considered absent when it
+    # is a substring of another key present in the file.
+    if ! grep -q "^${key}$" "${file}"; then
         # Key does not exist, add it.
         echo "${key}" >> "${file}"
     fi
 }

 #######################
-# Takes two arguments: file, and key.
+# Takes two arguments: file and key.
 # Deletes a key or key/value pair from target file
 #
 # Example usage:
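The added '$' anchor is what the new test_key_addition_substr test exercises: without it, a key that is a prefix of an existing key is wrongly treated as present. A minimal stand-alone sketch of the difference (throwaway file, not part of the commit):

    #!/usr/bin/env bash
    # Demonstrate why addKey() now anchors the grep on both ends of the line.
    file="$(mktemp)"
    echo "KEY_ONE" > "${file}"
    grep -q "^KEY_O" "${file}"  && echo "unanchored: KEY_O looks present (would never be added)"
    grep -q "^KEY_O$" "${file}" || echo "anchored:   KEY_O is correctly reported absent"
    rm -f "${file}"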
@@ -76,6 +80,24 @@ removeKey() {
     sed -i "/^${key}/d" "${file}"
 }

+#######################
+# Takes two arguments: file and key.
+# Returns the value of a given key from target file
+# - ignores all commented lines
+# - only returns the first value if multiple identical keys exist
+#
+#
+# Example usage:
+# getVal "/etc/pihole/setupVars.conf" "PIHOLE_DNS_1"
+#######################
+getVal() {
+    local file="${1}"
+    local key="${2}"
+    local value
+    value=$(sed -e '/^[[:blank:]]*#/d' "${file}" | grep "${key}" | awk -F "=" 'NR==1{printf$2}')
+    printf "%s" "$value"
+}


 #######################
 # returns FTL's current telnet API port based on the setting in /etc/pihole-FTL.conf
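The new getVal() strips comments first, then prints only the first matching key's value. A minimal stand-alone sketch of that pipeline using the same sample data as the new test_get_value_works test (the temp file is throwaway, not part of the commit):

    #!/usr/bin/env bash
    # Reproduce the sed | grep | awk pipeline from getVal() on a sample file.
    file="$(mktemp)"
    printf '%s\n' "Somekey=xxx" "#Testkey=1234" "Testkey=5678" "Testkey=abcd" > "${file}"
    # Commented lines are dropped, then only the first "Testkey" value is printed.
    sed -e '/^[[:blank:]]*#/d' "${file}" | grep "Testkey" | awk -F "=" 'NR==1{printf$2}'
    echo    # trailing newline only; the pipeline itself prints: 5678
    rm -f "${file}"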
@@ -2612,7 +2612,8 @@ main() {

     # Get the privacy level if it exists (default is 0)
     if [[ -f "${FTL_CONFIG_FILE}" ]]; then
-        PRIVACY_LEVEL=$(sed -ne 's/PRIVACYLEVEL=\(.*\)/\1/p' "${FTL_CONFIG_FILE}")
+        # use getVal from utils.sh to get PRIVACYLEVEL
+        PRIVACY_LEVEL=$(getVal "${FTL_CONFIG_FILE}" "PRIVACYLEVEL")

         # If no setting was found, default to 0
         PRIVACY_LEVEL="${PRIVACY_LEVEL:-0}"
@@ -193,6 +193,18 @@ removeNoPurge() {
         else
             service pihole-FTL stop
         fi
         ${SUDO} rm -f /etc/systemd/system/pihole-FTL.service
+        if [[ -d '/etc/systemd/system/pihole-FTL.service.d' ]]; then
+            read -rp " ${QST} FTL service override directory /etc/systemd/system/pihole-FTL.service.d detected. Do you wish to remove this from your system? [y/N] " answer
+            case $answer in
+                [yY]*)
+                    echo -ne " ${INFO} Removing /etc/systemd/system/pihole-FTL.service.d..."
+                    ${SUDO} rm -R /etc/systemd/system/pihole-FTL.service.d
+                    echo -e "${OVER} ${INFO} Removed /etc/systemd/system/pihole-FTL.service.d"
+                    ;;
+                *) echo -e " ${INFO} Leaving /etc/systemd/system/pihole-FTL.service.d in place.";;
+            esac
+        fi
         ${SUDO} rm -f /etc/init.d/pihole-FTL
         ${SUDO} rm -f /usr/bin/pihole-FTL
     echo -e "${OVER} ${TICK} Removed pihole-FTL"
gravity.sh: 165 changes

@@ -52,6 +52,14 @@ else
     exit 1
 fi

+# Set up tmp dir variable in case it's not configured
+: "${GRAVITY_TMPDIR:=/tmp}"
+
+if [ ! -d "${GRAVITY_TMPDIR}" ] || [ ! -w "${GRAVITY_TMPDIR}" ]; then
+    echo -e " ${COL_LIGHT_RED}Gravity temporary directory does not exist or is not a writeable directory, falling back to /tmp. ${COL_NC}"
+    GRAVITY_TMPDIR="/tmp"
+fi

 # Source pihole-FTL from install script
 pihole_FTL="${piholeDir}/pihole-FTL.conf"
 if [[ -f "${pihole_FTL}" ]]; then
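The ':' line above uses bash's assign-default expansion, so GRAVITY_TMPDIR only falls back to /tmp when it was never configured. A minimal stand-alone sketch of that idiom (not part of the commit):

    #!/usr/bin/env bash
    # ':' is a no-op command; the ${VAR:=default} expansion inside it assigns
    # the default only when the variable is unset or empty.
    unset GRAVITY_TMPDIR
    : "${GRAVITY_TMPDIR:=/tmp}"
    echo "${GRAVITY_TMPDIR}"      # Prints: /tmp
    GRAVITY_TMPDIR="/dev/shm"
    : "${GRAVITY_TMPDIR:=/tmp}"
    echo "${GRAVITY_TMPDIR}"      # Prints: /dev/shm (an existing value is kept)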
@@ -137,6 +145,18 @@ update_gravity_timestamp() {
     return 0
 }

+# Update timestamp when the gravity table was last updated successfully
+set_abp_info() {
+    pihole-FTL sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('abp_domains',${abp_domains});"
+    status="$?"
+
+    if [[ "${status}" -ne 0 ]]; then
+        echo -e "\\n ${CROSS} Unable to update ABP domain status in database ${gravityDBfile}\\n ${output}"
+        return 1
+    fi
+    return 0
+}

 # Import domains from file and store them in the specified database table
 database_table_from_file() {
     # Define locals
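set_abp_info() records whether any AdBlock Plus style entries were imported by upserting a single row into gravity's info table. A minimal stand-alone sketch of that upsert against a throwaway SQLite database (it assumes a plain sqlite3 binary is available; the commit itself goes through pihole-FTL sqlite3):

    #!/usr/bin/env bash
    # INSERT OR REPLACE overwrites the existing row because 'property' is the key column.
    db="$(mktemp --suffix=".db")"
    sqlite3 "${db}" "CREATE TABLE info (property TEXT PRIMARY KEY, value TEXT);"
    sqlite3 "${db}" "INSERT OR REPLACE INTO info (property,value) VALUES ('abp_domains',0);"
    sqlite3 "${db}" "INSERT OR REPLACE INTO info (property,value) VALUES ('abp_domains',1);"
    sqlite3 "${db}" "SELECT value FROM info WHERE property='abp_domains';"   # Prints: 1
    rm -f "${db}"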
@@ -145,7 +165,7 @@ database_table_from_file() {
     src="${2}"
     backup_path="${piholeDir}/migration_backup"
     backup_file="${backup_path}/$(basename "${2}")"
-    tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"
+    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".gravity")"

     local timestamp
     timestamp="$(date --utc +'%s')"

@@ -418,7 +438,7 @@ gravity_DownloadBlocklists() {
         echo -e "${OVER} ${TICK} ${str}"
     fi

-    target="$(mktemp -p "/tmp" --suffix=".gravity")"
+    target="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".gravity")"

     # Use compression to reduce the amount of data that is transferred
     # between the Pi-hole and the ad list provider. Use this feature
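Throughout the script, scratch files now land in the configurable GRAVITY_TMPDIR rather than a hard-coded /tmp. A minimal sketch of the mktemp call (not part of the commit; GNU mktemp assumed):

    #!/usr/bin/env bash
    # Create a scratch file in the configured gravity temp dir, keeping the .gravity suffix.
    GRAVITY_TMPDIR="${GRAVITY_TMPDIR:-/tmp}"
    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".gravity")"
    echo "${tmpFile}"     # e.g. /tmp/tmp.k3XyZ1.gravity
    rm -f "${tmpFile}"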
@@ -519,64 +539,69 @@ gravity_DownloadBlocklists() {
     gravity_Blackbody=true
 }

-# num_total_imported_domains increases for each list processed
-num_total_imported_domains=0
 num_domains=0
 num_non_domains=0
-parseList() {
-    local adlistID="${1}" src="${2}" target="${3}" non_domains sample_non_domains tmp_non_domains_str false_positive
-    # This sed does the following things:
-    # 1. Remove all lines containing no domains
-    # 2. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_)
-    # 3. Append ,adlistID to every line
-    # 4. Remove trailing period (see https://github.com/pi-hole/pi-hole/issues/4701)
-    # 5. Ensures there is a newline on the last line
-    sed -r "/([^\.]+\.)+[^\.]{2,}/!d;/[^a-zA-Z0-9.\_-]/d;s/\.$//;s/$/,${adlistID}/;/.$/a\\" "${src}" >> "${target}"
-
-    # Find lines containing no domains or with invalid characters (see above)
+# global variable to indicate if we found ABP style domains during the gravity run
+# is saved in gravtiy's info table to signal FTL if such domains are available
+abp_domains=0
+parseList() {
+    local adlistID="${1}" src="${2}" target="${3}" temp_file temp_file_base non_domains sample_non_domains valid_domain_pattern abp_domain_pattern
+
+    # Create a temporary file for the sed magic instead of using "${target}" directly
+    # this allows to split the sed commands to improve readability
+    # we use a file handle here and remove the temporary file immediately so the content will be deleted in any case
+    # when the script stops
+    temp_file_base="$(mktemp -p "/tmp" --suffix=".gravity")"
+    exec 3>"$temp_file_base"
+    rm "${temp_file_base}"
+    temp_file="/proc/$$/fd/3"
+
+    # define valid domain patterns
+    # no need to include uppercase letters, as we convert to lowercase in gravity_ParseFileIntoDomains() already
+    # adapted from https://stackoverflow.com/a/30007882
+    # supported ABP style: ||subdomain.domain.tlp^
+
+    valid_domain_pattern="([a-z0-9]([a-z0-9_-]{0,61}[a-z0-9]){0,1}\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]"
+    abp_domain_pattern="\|\|${valid_domain_pattern}\^"
+
+
+    # 1. Add all valid domains
+    sed -r "/^${valid_domain_pattern}$/!d" "${src}" > "${temp_file}"
+
+    # 2. Add valid ABP style domains if there is at least one such domain
+    if grep -E "^${abp_domain_pattern}$" -m 1 -q "${src}"; then
+        echo " ${INFO} List contained AdBlock Plus style domains"
+        abp_domains=1
+        sed -r "/^${abp_domain_pattern}$/!d" "${src}" >> "${temp_file}"
+    fi
+
+    # Find lines containing no domains or with invalid characters (not matching regex above)
+    # This is simply everything that is not in $temp_file compared to $src
     # Remove duplicates from the list
-    mapfile -t non_domains <<< "$(sed -r "/([^\.]+\.)+[^\.]{2,}/d" < "${src}")"
-    mapfile -t -O "${#non_domains[@]}" non_domains <<< "$(sed -r "/[^a-zA-Z0-9.\_-]/!d" < "${src}")"
-    IFS=" " read -r -a non_domains <<< "$(tr ' ' '\n' <<< "${non_domains[@]}" | sort -u | tr '\n' ' ')"
+    mapfile -t non_domains < <(grep -Fvf "${temp_file}" "${src}" | sort -u )
+
+    # 3. Remove trailing period (see https://github.com/pi-hole/pi-hole/issues/4701)
+    # 4. Append ,adlistID to every line
+    # 5. Ensures there is a newline on the last line
+    # and write everything to the target file
+    sed "s/\.$//;s/$/,${adlistID}/;/.$/a\\" "${temp_file}" >> "${target}"

     # A list of items of common local hostnames not to report as unusable
     # Some lists (i.e StevenBlack's) contain these as they are supposed to be used as HOST files
     # but flagging them as unusable causes more confusion than it's worth - so we suppress them from the output
-    false_positives=(
-        "localhost"
-        "localhost.localdomain"
-        "local"
-        "broadcasthost"
-        "localhost"
-        "ip6-localhost"
-        "ip6-loopback"
-        "lo0 localhost"
-        "ip6-localnet"
-        "ip6-mcastprefix"
-        "ip6-allnodes"
-        "ip6-allrouters"
-        "ip6-allhosts"
-    )
+    false_positives="localhost|localhost.localdomain|local|broadcasthost|localhost|ip6-localhost|ip6-loopback|lo0 localhost|ip6-localnet|ip6-mcastprefix|ip6-allnodes|ip6-allrouters|ip6-allhosts"

-    # Read the unusable lines into a string
-    tmp_non_domains_str=" ${non_domains[*]} "
-    for false_positive in "${false_positives[@]}"; do
-        # Remove false positives from tmp_non_domains_str
-        tmp_non_domains_str="${tmp_non_domains_str/ ${false_positive} / }"
-    done
-    # Read the string back into an array
-    IFS=" " read -r -a non_domains <<< "${tmp_non_domains_str}"
+    # if there are any non-domains, filter the array for false-positives
+    # Credit: https://stackoverflow.com/a/40264051
+    if [[ "${#non_domains[@]}" -gt 0 ]]; then
+        mapfile -d $'\0' -t non_domains < <(printf '%s\0' "${non_domains[@]}" | grep -Ezv "^${false_positives}")
+    fi

     # Get a sample of non-domain entries, limited to 5 (the list should already have been de-duplicated)
     IFS=" " read -r -a sample_non_domains <<< "$(tr ' ' '\n' <<< "${non_domains[@]}" | head -n 5 | tr '\n' ' ')"

-    local tmp_new_imported_total
-    # Get the new number of domains in destination file
-    tmp_new_imported_total="$(grep -c "^" "${target}")"
-    # Number of imported lines for this file is the difference between the new total and the old total. (Or, the number of domains we just added.)
-    num_domains="$(( tmp_new_imported_total-num_total_imported_domains ))"
-    # Replace the running total with the new total.
-    num_total_imported_domains="$tmp_new_imported_total"
+    # Get the number of domains added
+    num_domains="$(grep -c "^" "${temp_file}")"
+    # Get the number of non_domains (this is the number of entries left after stripping the source of comments/duplicates/false positives/domains)
+    num_non_domains="${#non_domains[@]}"
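The rewritten parseList() keeps its scratch data in a file that is unlinked the moment it is created, reachable only through an open file descriptor, so nothing is left behind in the temp dir even if gravity dies mid-run. A minimal stand-alone sketch of that trick (Linux-specific because of /proc; not part of the commit):

    #!/usr/bin/env bash
    # Open fd 3 on a fresh temp file, delete the directory entry immediately,
    # then keep reading and writing through /proc/$$/fd/3 until the fd is closed.
    temp_file_base="$(mktemp --suffix=".sketch")"
    exec 3>"${temp_file_base}"
    rm "${temp_file_base}"            # the inode survives while fd 3 stays open
    temp_file="/proc/$$/fd/3"
    echo "hello gravity" > "${temp_file}"
    cat "${temp_file}"                # Prints: hello gravity
    exec 3<&-                         # closing the fd releases the storage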
@@ -591,6 +616,9 @@ parseList() {
     else
         echo " ${INFO} Imported ${num_domains} domains"
     fi
+
+    # close file handle
+    exec 3<&-
 }

 compareLists() {

@@ -623,7 +651,7 @@ gravity_DownloadBlocklistFromUrl() {
     local heisenbergCompensator="" patternBuffer str httpCode success="" ip

     # Create temp file to store content on disk instead of RAM
-    patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")
+    patternBuffer=$(mktemp -p "${GRAVITY_TMPDIR}" --suffix=".phgpb")

     # Determine if $saveLocation has read permission
     if [[ -r "${saveLocation}" && $url != "file"* ]]; then
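Inside parseList(), the unusable entries are filtered against the pipe-separated false-positive pattern using NUL-delimited records, so entries that contain spaces (such as "lo0 localhost") survive the trip through grep as single array elements. A minimal stand-alone sketch of that filter with made-up sample data (not from the commit):

    #!/usr/bin/env bash
    # Drop known host-file noise from the non-domain list, keeping everything else.
    false_positives="localhost|localhost.localdomain|broadcasthost"
    non_domains=("localhost" "broadcasthost" "this line is not a domain")
    mapfile -d $'\0' -t non_domains < <(printf '%s\0' "${non_domains[@]}" | grep -Ezv "^${false_positives}")
    printf '%s\n' "${non_domains[@]}"
    # Prints only: this line is not a domain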
@@ -761,18 +789,30 @@ gravity_ParseFileIntoDomains() {
     # Most of the lists downloaded are already in hosts file format but the spacing/formatting is not contiguous
     # This helps with that and makes it easier to read
     # It also helps with debugging so each stage of the script can be researched more in depth
-    # 1) Remove carriage returns
-    # 2) Convert all characters to lowercase
-    # 3) Remove comments (text starting with "#", include possible spaces before the hash sign)
+    # 1) Convert all characters to lowercase
+    tr '[:upper:]' '[:lower:]' < "${src}" > "${destination}"
+
+    # 2) Remove carriage returns
+    sed -i 's/\r$//' "${destination}"
+
+    # 3a) Remove comments (text starting with "#", include possible spaces before the hash sign)
+    sed -i 's/\s*#.*//g' "${destination}"
+
+    # 3b) Remove lines starting with ! (ABP Comments)
+    sed -i 's/\s*!.*//g' "${destination}"
+
+    # 3c) Remove lines starting with [ (ABP Header)
+    sed -i 's/\s*\[.*//g' "${destination}"
+
     # 4) Remove lines containing "/"
-    # 5) Remove leading tabs, spaces, etc.
+    sed -i -r '/(\/).*$/d' "${destination}"
+
+    # 5) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
+    sed -i -r 's/^.*\s+//g' "${destination}"
+
     # 6) Remove empty lines
-    < "${src}" tr -d '\r' | \
-        tr '[:upper:]' '[:lower:]' | \
-        sed 's/\s*#.*//g' | \
-        sed -r '/(\/).*$/d' | \
-        sed -r 's/^.*\s+//g' | \
-        sed '/^$/d'> "${destination}"
+    sed -i '/^$/d' "${destination}"

     chmod 644 "${destination}"
 }
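Splitting the old single pipeline into per-step sed -i passes makes each stage inspectable on its own. A minimal stand-alone sketch that runs the same steps over a fabricated four-line list so the effect of each pass is visible (GNU sed assumed for -i without a suffix; the sample entries are made up, not from the commit):

    #!/usr/bin/env bash
    src="$(mktemp)"; dst="$(mktemp)"
    printf '%s\n' \
        '! an ABP comment' \
        '[Adblock Plus 2.0]' \
        '0.0.0.0 Ads.Example.COM   # trailing comment' \
        '||tracker.example.net^' > "${src}"
    tr '[:upper:]' '[:lower:]' < "${src}" > "${dst}"   # 1) lowercase
    sed -i 's/\r$//' "${dst}"                          # 2) strip carriage returns
    sed -i 's/\s*#.*//g' "${dst}"                      # 3a) strip "#" comments
    sed -i 's/\s*!.*//g' "${dst}"                      # 3b) strip ABP comments
    sed -i 's/\s*\[.*//g' "${dst}"                     # 3c) strip ABP headers
    sed -i -r '/(\/).*$/d' "${dst}"                    # 4) drop lines containing "/"
    sed -i -r 's/^.*\s+//g' "${dst}"                   # 5) drop leading IPs/whitespace
    sed -i '/^$/d' "${dst}"                            # 6) drop empty lines
    cat "${dst}"    # Prints: ads.example.com and ||tracker.example.net^ (one per line)
    rm -f "${src}" "${dst}"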
@@ -828,7 +868,7 @@ gravity_Cleanup() {
     # Delete tmp content generated by Gravity
     rm ${piholeDir}/pihole.*.txt 2> /dev/null
     rm ${piholeDir}/*.tmp 2> /dev/null
-    rm /tmp/*.phgpb 2> /dev/null
+    rm "${GRAVITY_TMPDIR}"/*.phgpb 2> /dev/null

     # Ensure this function only runs when gravity_SetDownloadOptions() has completed
     if [[ "${gravity_Blackbody:-}" == true ]]; then

@@ -1005,6 +1045,9 @@ fi
 # Update gravity timestamp
 update_gravity_timestamp

+# Set abp_domain info field
+set_abp_info

 # Ensure proper permissions are set for the database
 chown pihole:pihole "${gravityDBfile}"
 chmod g+w "${piholeDir}" "${gravityDBfile}"
@@ -1,6 +1,6 @@
 docker-compose == 1.29.2
-pytest == 7.2.1
-pytest-xdist == 3.1.0
+pytest == 7.2.2
+pytest-xdist == 3.2.1
 pytest-testinfra == 7.0.0
-tox == 4.4.4
+tox == 4.4.7
@@ -40,6 +40,26 @@ def test_key_addition_works(host):
     assert expected_stdout == output.stdout


+def test_key_addition_substr(host):
+    """Confirms addKey adds substring keys (no value) to a file"""
+    host.run(
+        """
+        source /opt/pihole/utils.sh
+        addKey "./testoutput" "KEY_ONE"
+        addKey "./testoutput" "KEY_O"
+        addKey "./testoutput" "KEY_TWO"
+        addKey "./testoutput" "Y_TWO"
+        """
+    )
+    output = host.run(
+        """
+        cat ./testoutput
+        """
+    )
+    expected_stdout = "KEY_ONE\nKEY_O\nKEY_TWO\nY_TWO\n"
+    assert expected_stdout == output.stdout
+
+
 def test_key_removal_works(host):
     """Confirms removeKey removes a key or key/value pair"""
     host.run(

@@ -62,6 +82,22 @@ def test_key_removal_works(host):
     assert expected_stdout == output.stdout


+def test_get_value_works(host):
+    """Confirms getVal returns the correct value for a given key"""
+    output = host.run(
+        """
+        source /opt/pihole/utils.sh
+        echo "Somekey=xxx" >> /tmp/testfile
+        echo "#Testkey=1234" >> /tmp/testfile
+        echo "Testkey=5678" >> /tmp/testfile
+        echo "Testkey=abcd" >> /tmp/testfile
+        getVal "/tmp/testfile" "Testkey"
+        """
+    )
+    expected_stdout = "5678"
+    assert expected_stdout == output.stdout
+
+
 def test_getFTLAPIPort_default(host):
     """Confirms getFTLAPIPort returns the default API port"""
     output = host.run(
@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _centos_8.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _centos_8.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _centos_9.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _centos_9.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_centos_common_support.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _debian_10.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _debian_10.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _debian_11.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _debian_11.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _fedora_36.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _fedora_36.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _fedora_37.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _fedora_37.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py ./test_fedora_support.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _ubuntu_20.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _ubuntu_20.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py

@@ -4,5 +4,5 @@ envlist = py3
 [testenv:py3]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker build -f _ubuntu_22.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _ubuntu_22.Dockerfile -t pytest_pihole:test_container ../
           pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py