Commit f193edd428 in pi-hole/pi-hole (https://github.com/pi-hole/pi-hole.git)
Merge branch 'development' into development-v6-merge-development (resolved conflicts)
Signed-off-by: Adam Warner <me@adamwarner.co.uk>
6 changed files with 446 additions and 415 deletions
.github/workflows/stale.yml (vendored): 11 changes
@@ -23,14 +23,17 @@ jobs:
           days-before-stale: 30
           days-before-close: 5
           stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Please comment or update this issue or it will be closed in 5 days.'
-          stale-issue-label: $stale_label
+          stale-issue-label: '${{ env.stale_label }}'
           exempt-issue-labels: 'Internal, Fixed in next release, Bug: Confirmed, Documentation Needed'
           exempt-all-issue-assignees: true
           operations-per-run: 300
           close-issue-reason: 'not_planned'
 
-  remove_stale: # trigger "stale" removal immediately when stale issues are commented on
-    if: github.event_name == 'issue_comment'
+  remove_stale:
+    # trigger "stale" removal immediately when stale issues are commented on
+    # we need to explicitly check that the trigger does not run on comment on a PR as
+    # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment-on-issues-only-or-pull-requests-only
+    if: ${{ !github.event.issue.pull_request && github.event_name != 'schedule' }}
     permissions:
       contents: read # for actions/checkout
       issues: write # to edit issues label
@@ -39,7 +42,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4.1.1
       - name: Remove 'stale' label
-        run: gh issue edit ${{ github.event.issue.number }} --remove-label $stale_label
+        run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
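Both hunks replace a bare $stale_label with the runner expression ${{ env.stale_label }}. Inputs under with: never pass through a shell, so $stale_label was handed to actions/stale as a literal string; in the run: step the change is mainly consistency, since the runner substitutes ${{ ... }} expressions before the step's shell ever starts. A minimal sketch of the same label removal done by hand with the gh CLI (the issue number is a placeholder):

# remove the stale label from one issue; assumes an authenticated gh session
stale_label="stale"
gh issue edit 1234 --repo pi-hole/pi-hole --remove-label "${stale_label}"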
advanced/Scripts/query.sh
@@ -27,7 +27,7 @@ colfile="/opt/pihole/COL_TABLE"
 # Source api functions
 . "${PI_HOLE_INSTALL_DIR}/api.sh"
 
-Help(){
+Help() {
     echo "Usage: pihole -q [option] <domain>
 Example: 'pihole -q --partial domain.com'
 Query the adlists for a specified domain
@@ -36,11 +36,10 @@ Options:
   --partial            Search the adlists for partially matching domains
   --all                Return all query matches within the adlists
   -h, --help           Show this help dialog"
     exit 0
 }
 
-GenerateOutput(){
+GenerateOutput() {
     local data gravity_data lists_data num_gravity num_lists search_type_str
     local gravity_data_csv lists_data_csv line current_domain
     data="${1}"
@@ -52,13 +51,13 @@ GenerateOutput(){
     gravity_data=$(printf %s "${data}" | jq '.search.gravity | group_by(.address) | map({ address: (.[0].address), domains: [.[] | .domain] })')
 
     # number of objects in each json
-    num_gravity=$(printf %s "${gravity_data}" | jq length )
-    num_lists=$(printf %s "${lists_data}" | jq length )
+    num_gravity=$(printf %s "${gravity_data}" | jq length)
+    num_lists=$(printf %s "${lists_data}" | jq length)
 
     if [ "${partial}" = true ]; then
         search_type_str="partially"
     else
         search_type_str="exactly"
     fi
 
     # Results from allow/deny list
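The jq filter above does the heavy lifting: it groups FTL's flat match list by adlist address and collapses each group into one object with a domains array. Its effect on a made-up payload of the shape this script expects (not real FTL output):

printf '%s' '{"search":{"gravity":[
  {"address":"https://example.com/list.txt","domain":"ads.example"},
  {"address":"https://example.com/list.txt","domain":"tracker.example"}]}}' |
    jq '.search.gravity | group_by(.address) | map({ address: (.[0].address), domains: [.[] | .domain] })'
# -> [ { "address": "https://example.com/list.txt", "domains": ["ads.example","tracker.example"] } ]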
@@ -66,7 +65,7 @@ GenerateOutput(){
     if [ "${num_lists}" -gt 0 ]; then
         # Convert the data to a csv, each line is a "domain,type" string
         # not using jq's @csv here as it quotes each value individually
-        lists_data_csv=$(printf %s "${lists_data}" | jq --raw-output '.[] | [.domain, .type] | join(",")' )
+        lists_data_csv=$(printf %s "${lists_data}" | jq --raw-output '.[] | [.domain, .type] | join(",")')
 
         # Generate output for each csv line, separating line in a domain and type substring at the ','
         echo "${lists_data_csv}" | while read -r line; do
@@ -79,7 +78,7 @@ GenerateOutput(){
     if [ "${num_gravity}" -gt 0 ]; then
         # Convert the data to a csv, each line is a "URL,domain,domain,...." string
         # not using jq's @csv here as it quotes each value individually
-        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")' )
+        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")')
 
         # Generate line-by-line output for each csv line
         echo "${gravity_data_csv}" | while read -r line; do
@@ -90,7 +89,7 @@ GenerateOutput(){
             # cut off URL, leaving "domain,domain,...."
             line=${line#*,}
             # print each domain and remove it from the string until nothing is left
             while [ ${#line} -gt 0 ]; do
                 current_domain=${line%%,*}
                 printf '  - %s\n' "${COL_GREEN}${current_domain}${COL_NC}"
                 # we need to remove the current_domain and the comma in two steps because
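The surrounding loop walks a comma-separated string using nothing but POSIX parameter expansion, and, as the truncated comment notes, removal happens in two steps (the domain, then its comma). The idiom stand-alone, with invented input:

line="ads.example,tracker.example,junk.example"
while [ ${#line} -gt 0 ]; do
    current_domain=${line%%,*}        # everything before the first comma
    printf '  - %s\n' "${current_domain}"
    line=${line#"${current_domain}"}  # step 1: drop the domain itself
    line=${line#,}                    # step 2: drop the separating comma, if any
done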
@@ -103,17 +102,17 @@ GenerateOutput(){
     fi
 }
 
-Main(){
+Main() {
     local data
 
     if [ -z "${domain}" ]; then
-        echo "No domain specified"; exit 1
+        echo "No domain specified"
+        exit 1
     fi
     # domains are lowercased and converted to punycode by FTL since
     # https://github.com/pi-hole/FTL/pull/1715
     # no need to do it here
 
 
     # Test if the authentication endpoint is available
     TestAPIAvailability
@@ -137,13 +136,13 @@ Main(){
 
 # Process all options (if present)
 while [ "$#" -gt 0 ]; do
     case "$1" in
-        "-h" | "--help" ) Help;;
-        "--partial" ) partial="true";;
-        "--all" ) max_results=10000;; # hard-coded FTL limit
-        * ) domain=$1;;
+        "-h" | "--help") Help ;;
+        "--partial") partial="true" ;;
+        "--all") max_results=10000 ;; # hard-coded FTL limit
+        *) domain=$1 ;;
     esac
     shift
 done
 
 Main "${domain}"
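The option loop above is what makes the invocations described in the help text work; for reference (results depend on the configured adlists):

pihole -q example.com         # exact query
pihole -q --partial example   # substring match
pihole -q --all example.com   # every match, up to FTL's hard-coded 10000-result cap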
advanced/Scripts/updatecheck.sh
@@ -10,32 +10,31 @@
 
 function get_local_branch() {
     # Return active branch
-    cd "${1}" 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
     git rev-parse --abbrev-ref HEAD || return 1
 }
 
 function get_local_version() {
     # Return active version
-    cd "${1}" 2> /dev/null || return 1
-    git describe --tags --always 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
+    git describe --tags --always 2>/dev/null || return 1
 }
 
 function get_local_hash() {
-    cd "${1}" 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
     git rev-parse --short=8 HEAD || return 1
 }
 
 function get_remote_version() {
     # if ${2} is = "master" we need to use the "latest" endpoint, otherwise, we simply return null
     if [[ "${2}" == "master" ]]; then
-        curl -s "https://api.github.com/repos/pi-hole/${1}/releases/latest" 2> /dev/null | jq --raw-output .tag_name || return 1
+        curl -s "https://api.github.com/repos/pi-hole/${1}/releases/latest" 2>/dev/null | jq --raw-output .tag_name || return 1
     else
         echo "null"
     fi
 }
 
-function get_remote_hash(){
+function get_remote_hash() {
     git ls-remote "https://github.com/pi-hole/${1}" --tags "${2}" | awk '{print substr($0, 1,8);}' || return 1
 }
 
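get_remote_version wraps a GitHub releases lookup; only the master branch maps to a published release, anything else reports null. The underlying call can be reproduced directly (unauthenticated, so subject to GitHub API rate limits):

# latest tagged release of the FTL component
curl -s "https://api.github.com/repos/pi-hole/FTL/releases/latest" | jq --raw-output .tag_name
# prints a tag such as v5.25 on success, or "null" if the response carries no tag_name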
@@ -57,16 +56,15 @@ chmod 644 "${VERSION_FILE}"
 DOCKER_TAG=$(cat /pihole.docker.tag 2>/dev/null)
 regex='^([0-9]+\.){1,2}(\*|[0-9]+)(-.*)?$|(^nightly$)|(^dev.*$)'
 if [[ ! "${DOCKER_TAG}" =~ $regex ]]; then
     # DOCKER_TAG does not match the pattern (see https://regex101.com/r/RsENuz/1), so unset it.
     unset DOCKER_TAG
 fi
 
 # used in cronjob
 if [[ "$1" == "reboot" ]]; then
     sleep 30
 fi
 
-
 # get Core versions
 
 CORE_VERSION="$(get_local_version /etc/.pihole)"
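The guard regex accepts dotted version tags (optionally with a suffix) plus the nightly and dev channels, and unsets anything else. Checking it by hand with invented tag values:

regex='^([0-9]+\.){1,2}(\*|[0-9]+)(-.*)?$|(^nightly$)|(^dev.*$)'
for tag in 2024.05.0 5.8.1 nightly dev-custom latest; do
    if [[ "${tag}" =~ $regex ]]; then echo "${tag}: keep"; else echo "${tag}: unset"; fi
done
# 2024.05.0, 5.8.1, nightly and dev-custom match; "latest" does not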
@@ -84,7 +82,6 @@ addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_CORE_VERSION" "${GITHUB_CORE_VERSI
 GITHUB_CORE_HASH="$(get_remote_hash pi-hole "${CORE_BRANCH}")"
 addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_CORE_HASH" "${GITHUB_CORE_HASH}"
 
-
 # get Web versions
 
 WEB_VERSION="$(get_local_version /var/www/html/admin)"
@@ -119,7 +116,6 @@ addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_FTL_VERSION" "${GITHUB_FTL_VERSION
 GITHUB_FTL_HASH="$(get_remote_hash FTL "${FTL_BRANCH}")"
 addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_FTL_HASH" "${GITHUB_FTL_HASH}"
 
-
 # get Docker versions
 
 if [[ "${DOCKER_TAG}" ]]; then
advanced/Scripts/version.sh
@@ -31,7 +31,7 @@ main() {
 
     # Automatically show detailed information if
     # at least one of the components is not on master branch
-    if [ ! "${CORE_BRANCH}" = "master" ] || [ ! "${WEB_BRANCH}" = "master" ] || [ ! "${FTL_BRANCH}" = "master" ] ; then
+    if [ ! "${CORE_BRANCH}" = "master" ] || [ ! "${WEB_BRANCH}" = "master" ] || [ ! "${FTL_BRANCH}" = "master" ]; then
         details=true
     fi
 
File diff suppressed because it is too large.
gravity.sh: 223 changes
@@ -63,7 +63,7 @@ gravityOLDfile="${gravityDIR}/gravity_old.db"
 
 # Generate new SQLite3 file from schema template
 generate_gravity_database() {
-    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" < "${gravityDBschema}"; then
+    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" <"${gravityDBschema}"; then
         echo -e "  ${CROSS} Unable to create ${gravityDBfile}"
         return 1
     fi
@@ -78,7 +78,7 @@ gravity_build_tree() {
     echo -ne "  ${INFO} ${str}..."
 
     # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
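The whitespace change above is part of a formatter pass ($( { ... } 2>&1 ) becomes $({ ... } 2>&1)), but the construct itself is worth spelling out: it captures both stdout and stderr of a command group into a variable while $? still reflects the command's exit status. A stand-alone sketch:

# capture both streams of a failing command; the assignment preserves its exit status
output=$({ ls /nonexistent; } 2>&1)
status="$?"
echo "exit=${status}"     # nonzero
echo "output=${output}"   # the ls error text, which would otherwise go to stderr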
@@ -117,7 +117,7 @@ gravity_swap_databases() {
 
 # Update timestamp when the gravity table was last updated successfully
 update_gravity_timestamp() {
-    output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -171,19 +171,18 @@ database_table_from_file() {
 
     # Loop over all domains in ${src} file
     # Read file line by line
-    grep -v '^ *#' < "${src}" | while IFS= read -r domain
-    do
+    grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
         # Only add non-empty lines
         if [[ -n "${domain}" ]]; then
             if [[ "${table}" == "domain_audit" ]]; then
                 # domain_audit table format (no enable or modified fields)
-                echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
+                echo "${rowid},\"${domain}\",${timestamp}" >>"${tmpFile}"
             elif [[ "${table}" == "adlist" ]]; then
                 # Adlist table format
-                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >> "${tmpFile}"
+                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
             else
                 # White-, black-, and regexlist table format
-                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >> "${tmpFile}"
+                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
             fi
             rowid+=1
         fi
@@ -192,7 +191,7 @@ database_table_from_file() {
     # Store domains in database table specified by ${table}
     # Use printf as .mode and .import need to be on separate lines
     # see https://unix.stackexchange.com/a/445615/83260
-    output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
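As the comment says, sqlite3 dot-commands have to arrive one per line, hence printf rather than a single command string. The import in isolation, against plain sqlite3 with a hypothetical database and table (demo.db and demo are stand-ins):

# assumes demo.db already contains a table "demo" whose columns match rows.csv
printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "rows.csv" "demo" | sqlite3 demo.db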
@@ -202,17 +201,17 @@ database_table_from_file() {
 
     # Move source file to backup directory, create directory if not existing
     mkdir -p "${backup_path}"
-    mv "${src}" "${backup_file}" 2> /dev/null || \
+    mv "${src}" "${backup_file}" 2>/dev/null ||
         echo -e "  ${CROSS} Unable to backup ${src} to ${backup_path}"
 
     # Delete tmpFile
-    rm "${tmpFile}" > /dev/null 2>&1 || \
+    rm "${tmpFile}" >/dev/null 2>&1 ||
         echo -e "  ${CROSS} Unable to remove ${tmpFile}"
 }
 
 # Check if a column with name ${2} exists in gravity table with name ${1}
 gravity_column_exists() {
-    output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     if [[ "${output}" == "1" ]]; then
         return 0 # Bash 0 is success
     fi
@@ -224,10 +223,10 @@ gravity_column_exists() {
 database_adlist_number() {
     # Only try to set number of domains when this field exists in the gravity database
     if ! gravity_column_exists "adlist" "number"; then
-        return;
+        return
     fi
 
-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -240,10 +239,10 @@ database_adlist_number() {
 database_adlist_status() {
     # Only try to set the status when this field exists in the gravity database
     if ! gravity_column_exists "adlist" "status"; then
-        return;
+        return
     fi
 
-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -305,7 +304,7 @@ gravity_CheckDNSResolutionAvailable() {
     fi
 
     # Determine if $lookupDomain is resolvable
-    if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then
+    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
         # Print confirmation of resolvability if it had previously failed
         if [[ -n "${secs:-}" ]]; then
             echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
@@ -319,7 +318,7 @@ gravity_CheckDNSResolutionAvailable() {
     # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
     # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
     # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
-    if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then
+    if timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
         if [[ -n "${secs:-}" ]]; then
             echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
         fi
@@ -330,7 +329,7 @@ gravity_CheckDNSResolutionAvailable() {
     fi
 
     # Determine error output message
-    if pgrep pihole-FTL &> /dev/null; then
+    if pgrep pihole-FTL &>/dev/null; then
         echo -e "  ${CROSS} DNS resolution is currently unavailable"
     else
         echo -e "  ${CROSS} DNS service is not running"
@@ -340,7 +339,7 @@ gravity_CheckDNSResolutionAvailable() {
     # Ensure DNS server is given time to be resolvable
     secs="120"
     echo -ne "  ${INFO} Time until retry: ${secs}"
-    until timeout 1 getent hosts "${lookupDomain}" &> /dev/null; do
+    until timeout 1 getent hosts "${lookupDomain}" &>/dev/null; do
         [[ "${secs:-}" -eq 0 ]] && break
         echo -ne "${OVER}  ${INFO} Time until retry: ${secs}"
         : $((secs--))
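Two details here are easy to miss. getent hosts resolves through the system's nsswitch stack while dig queries the servers in /etc/resolv.conf directly, which is why the fallback described in the comments is needed at all; and : $((secs--)) decrements the counter through the no-op : builtin so the arithmetic result can never become a failing exit status. The two probes, tried by hand (pi.hole as the lookup name is an assumption; the script derives its own):

timeout 4 getent hosts pi.hole && echo "system resolver path OK"
# dig exits 0 whenever it gets any response, even NXDOMAIN, so this checks reachability
timeout 4 dig +short pi.hole && echo "direct DNS path reachable"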
@@ -361,19 +360,19 @@ gravity_DownloadBlocklists() {
 
     # Retrieve source URLs from gravity database
     # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
-    mapfile -t sources <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
-    mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
-    mapfile -t sourceTypes <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2> /dev/null)"
+    mapfile -t sources <<<"$(pihole-FTL sqlite3 -ni -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
+    mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 -ni -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
+    mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"
 
     # Parse source domains from $sources
-    mapfile -t sourceDomains <<< "$(
+    mapfile -t sourceDomains <<<"$(
         # Logic: Split by folder/port
         awk -F '[/:]' '{
             # Remove URL protocol & optional username:password@
             gsub(/(.*:\/\/|.*:.*@)/, "", $0)
             if(length($1)>0){print $1}
             else {print "local"}
-        }' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
+        }' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
     )"
 
     local str="Pulling blocklist source list into range"
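mapfile (a bash builtin, also spelled readarray) turns one line of SQL output into one array element, so the three SELECTs land in three parallel arrays. A self-contained sketch of the load plus the awk host extraction used just below it (the URLs are invented):

mapfile -t sources <<<"$(printf '%s\n' \
    'https://example.com/a.txt' \
    'https://user:pass@example.org:8080/b.txt' \
    'file:///var/lib/lists/c.txt')"
echo "${#sources[@]} sources"    # 3

awk -F '[/:]' '{
    gsub(/(.*:\/\/|.*:.*@)/, "", $0)
    if(length($1)>0){print $1}
    else {print "local"}
}' <<<"$(printf '%s\n' "${sources[@]}")"
# -> example.com, example.org, local (a file:// URL has no host part)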
@@ -391,8 +390,8 @@ gravity_DownloadBlocklists() {
     # Prepare new gravity database
     str="Preparing new gravity database"
     echo -ne "  ${INFO} ${str}..."
-    rm "${gravityTEMPfile}" > /dev/null 2>&1
-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
+    rm "${gravityTEMPfile}" >/dev/null 2>&1
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -412,7 +411,7 @@ gravity_DownloadBlocklists() {
         copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
     fi
 
-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
     status="$?"
 
     if [[ "${status}" -ne 0 ]]; then
@@ -457,7 +456,7 @@ gravity_DownloadBlocklists() {
 
     # this will remove first @ that is after schema and before domain
     # \1 is optional schema, \2 is userinfo
-    check_url="$( sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<< "$url" )"
+    check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"
 
     if [[ "${check_url}" =~ ${regex} ]]; then
         echo -e "  ${CROSS} Invalid Target"
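The sed expression deletes only the first @ separating userinfo from host, keeping the scheme (\1) and userinfo (\2), so credentialed URLs survive the validity regex. For instance (credentials invented):

sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"https://user:pass@example.com/list.txt"
# -> https://user:passexample.com/list.txt  (used only for validation, not for the download)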
@@ -477,7 +476,7 @@ compareLists() {
     if [[ -s "${target}.sha1" ]]; then
         if ! sha1sum --check --status --strict "${target}.sha1"; then
             # The list changed upstream, we need to update the checksum
-            sha1sum "${target}" > "${target}.sha1"
+            sha1sum "${target}" >"${target}.sha1"
             echo "  ${INFO} List has been updated"
             database_adlist_status "${adlistID}" "1"
         else
@@ -486,7 +485,7 @@ compareLists() {
         fi
     else
         # No checksum available, create one for comparing on the next run
-        sha1sum "${target}" > "${target}.sha1"
+        sha1sum "${target}" >"${target}.sha1"
         # We assume here it was changed upstream
         database_adlist_status "${adlistID}" "1"
     fi
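compareLists never diffs list contents; it keeps a .sha1 file next to each list and lets sha1sum decide whether anything changed since the last run. The mechanism in isolation:

echo "0.0.0.0 ads.example" > list.txt
sha1sum list.txt > list.txt.sha1                               # record current state
sha1sum --check --status --strict list.txt.sha1 && echo "unchanged"
echo "0.0.0.0 more.example" >> list.txt
sha1sum --check --status --strict list.txt.sha1 || echo "changed upstream"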
@@ -515,25 +514,29 @@ gravity_DownloadBlocklistFromUrl() {
     echo -ne "  ${INFO} ${str} Pending..."
     blocked=false
     case $(getFTLConfigValue dns.blocking.mode) in
-        "IP-NODATA-AAAA"|"IP")
+        "IP-NODATA-AAAA" | "IP")
             # Get IP address of this domain
             ip="$(dig "${domain}" +short)"
             # Check if this IP matches any IP of the system
-            if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<< "$(ip a)") -gt 0 ]]; then
+            if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<<"$(ip a)") -gt 0 ]]; then
                 blocked=true
-            fi;;
-        "NXDOMAIN")
-            if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
-                blocked=true
-            fi;;
-        "NODATA")
-            if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
-                blocked=true
-            fi;;
-        "NULL"|*)
-            if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
-                blocked=true
-            fi;;
+            fi
+            ;;
+        "NXDOMAIN")
+            if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
+                blocked=true
+            fi
+            ;;
+        "NODATA")
+            if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
+                blocked=true
+            fi
+            ;;
+        "NULL" | *)
+            if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
+                blocked=true
+            fi
+            ;;
     esac
 
     if [[ "${blocked}" == true ]]; then
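Context for this hunk: before fetching a list, the script asks whether Pi-hole itself is blocking the adlist's host, and the probe differs per blocking mode. The NULL-mode branch, runnable on its own (example.com stands in for the adlist host):

domain="example.com"
if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
    echo "${domain} resolves to 0.0.0.0, i.e. blocked in NULL mode"
fi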
@@ -545,43 +548,53 @@ gravity_DownloadBlocklistFromUrl() {
         fi
         ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
         if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
-            port=443;
-        else port=80
+            port=443
+        else
+            port=80
         fi
         bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
-        echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
+        echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}"
         echo -ne "  ${INFO} ${str} Pending..."
         cmd_ext="--resolve $domain:$port:$ip"
     fi
 
     # shellcheck disable=SC2086
-    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2> /dev/null)
+    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2>/dev/null)
 
     case $url in
         # Did we "download" a local file?
         "file"*)
             if [[ -s "${listCurlBuffer}" ]]; then
-                echo -e "${OVER}  ${TICK} ${str} Retrieval successful"; success=true
+                echo -e "${OVER}  ${TICK} ${str} Retrieval successful"
+                success=true
             else
                 echo -e "${OVER}  ${CROSS} ${str} Not found / empty list"
-            fi;;
+            fi
+            ;;
         # Did we "download" a remote file?
         *)
             # Determine "Status:" output based on HTTP response
             case "${httpCode}" in
-                "200") echo -e "${OVER}  ${TICK} ${str} Retrieval successful"; success=true;;
-                "304") echo -e "${OVER}  ${TICK} ${str} No changes detected"; success=true;;
-                "000") echo -e "${OVER}  ${CROSS} ${str} Connection Refused";;
-                "403") echo -e "${OVER}  ${CROSS} ${str} Forbidden";;
-                "404") echo -e "${OVER}  ${CROSS} ${str} Not found";;
-                "408") echo -e "${OVER}  ${CROSS} ${str} Time-out";;
-                "451") echo -e "${OVER}  ${CROSS} ${str} Unavailable For Legal Reasons";;
-                "500") echo -e "${OVER}  ${CROSS} ${str} Internal Server Error";;
-                "504") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Gateway)";;
-                "521") echo -e "${OVER}  ${CROSS} ${str} Web Server Is Down (Cloudflare)";;
-                "522") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Cloudflare)";;
-                * ) echo -e "${OVER}  ${CROSS} ${str} ${url} (${httpCode})";;
-            esac;;
+                "200")
+                    echo -e "${OVER}  ${TICK} ${str} Retrieval successful"
+                    success=true
+                    ;;
+                "304")
+                    echo -e "${OVER}  ${TICK} ${str} No changes detected"
+                    success=true
+                    ;;
+                "000") echo -e "${OVER}  ${CROSS} ${str} Connection Refused" ;;
+                "403") echo -e "${OVER}  ${CROSS} ${str} Forbidden" ;;
+                "404") echo -e "${OVER}  ${CROSS} ${str} Not found" ;;
+                "408") echo -e "${OVER}  ${CROSS} ${str} Time-out" ;;
+                "451") echo -e "${OVER}  ${CROSS} ${str} Unavailable For Legal Reasons" ;;
+                "500") echo -e "${OVER}  ${CROSS} ${str} Internal Server Error" ;;
+                "504") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Gateway)" ;;
+                "521") echo -e "${OVER}  ${CROSS} ${str} Web Server Is Down (Cloudflare)" ;;
+                "522") echo -e "${OVER}  ${CROSS} ${str} Connection Timed Out (Cloudflare)" ;;
+                *) echo -e "${OVER}  ${CROSS} ${str} ${url} (${httpCode})" ;;
+            esac
+            ;;
     esac
 
     local done="false"
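The status dispatch works because of curl's -w "%{http_code}": the body is written to the buffer file by -o, so the command substitution captures nothing but the three-digit status, and curl reports 000 when no HTTP exchange happened at all. Stripped to its essentials:

httpCode=$(curl -s -L -w "%{http_code}" "https://example.com/list.txt" -o /tmp/list.buffer 2>/dev/null)
echo "HTTP status: ${httpCode}"   # e.g. 200, 404, or 000 on connection failure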
@@ -635,7 +648,7 @@ gravity_ParseFileIntoDomains() {
     # This helps with that and makes it easier to read
     # It also helps with debugging so each stage of the script can be researched more in depth
     # 1) Convert all characters to lowercase
-    tr '[:upper:]' '[:lower:]' < "${src}" > "${destination}"
+    tr '[:upper:]' '[:lower:]' <"${src}" >"${destination}"
 
     # 2) Remove carriage returns
     # 3) Remove lines starting with ! (ABP Comments)
@@ -645,7 +658,7 @@ gravity_ParseFileIntoDomains() {
     # 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
     # 8) Remove empty lines
 
     sed -i -r \
         -e 's/\r$//' \
        -e 's/\s*!.*//g' \
        -e 's/\s*\[.*//g' \
@@ -662,12 +675,12 @@ gravity_Table_Count() {
     local table="${1}"
     local str="${2}"
     local num
-    num="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
+    num="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
     if [[ "${table}" == "gravity" ]]; then
         local unique
-        unique="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
+        unique="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
         echo -e "  ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
-        pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
+        pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
     else
         echo -e "  ${INFO} Number of ${str}: ${num}"
     fi
@@ -687,12 +700,12 @@ gravity_ShowCount() {
 # Create "localhost" entries into hosts format
 gravity_generateLocalList() {
     # Empty $localList if it already exists, otherwise, create it
-    echo "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
+    echo "### Do not modify this file, it will be overwritten by pihole -g" >"${localList}"
     chmod 644 "${localList}"
 
     # Add additional LAN hosts provided by OpenVPN (if available)
     if [[ -f "${VPNList}" ]]; then
-        awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
+        awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >>"${localList}"
     fi
 }
 
|
||||||
echo -ne " ${INFO} ${str}..."
|
echo -ne " ${INFO} ${str}..."
|
||||||
|
|
||||||
# Delete tmp content generated by Gravity
|
# Delete tmp content generated by Gravity
|
||||||
rm ${piholeDir}/pihole.*.txt 2> /dev/null
|
rm ${piholeDir}/pihole.*.txt 2>/dev/null
|
||||||
rm ${piholeDir}/*.tmp 2> /dev/null
|
rm ${piholeDir}/*.tmp 2>/dev/null
|
||||||
# listCurlBuffer location
|
# listCurlBuffer location
|
||||||
rm "${GRAVITY_TMPDIR}"/*.phgpb 2> /dev/null
|
rm "${GRAVITY_TMPDIR}"/*.phgpb 2>/dev/null
|
||||||
# invalid_domains location
|
# invalid_domains location
|
||||||
rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2> /dev/null
|
rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2>/dev/null
|
||||||
|
|
||||||
# Ensure this function only runs when gravity_SetDownloadOptions() has completed
|
# Ensure this function only runs when gravity_SetDownloadOptions() has completed
|
||||||
if [[ "${gravity_Blackbody:-}" == true ]]; then
|
if [[ "${gravity_Blackbody:-}" == true ]]; then
|
||||||
|
@ -722,7 +735,7 @@ gravity_Cleanup() {
|
||||||
for file in "${piholeDir}"/*."${domainsExtension}"; do
|
for file in "${piholeDir}"/*."${domainsExtension}"; do
|
||||||
# If list is not in active array, then remove it
|
# If list is not in active array, then remove it
|
||||||
if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
|
if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
|
||||||
rm -f "${file}" 2> /dev/null || \
|
rm -f "${file}" 2>/dev/null ||
|
||||||
echo -e " ${CROSS} Failed to remove ${file##*/}"
|
echo -e " ${CROSS} Failed to remove ${file##*/}"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
@@ -764,17 +777,17 @@ database_recovery() {
             fi
         else
             echo -e "${OVER}  ${CROSS} ${str} - errors found:"
-            while IFS= read -r line ; do echo "  - $line"; done <<< "$result"
+            while IFS= read -r line; do echo "  - $line"; done <<<"$result"
         fi
     else
         echo -e "${OVER}  ${CROSS} ${str} - errors found:"
-        while IFS= read -r line ; do echo "  - $line"; done <<< "$result"
+        while IFS= read -r line; do echo "  - $line"; done <<<"$result"
     fi
 
     str="Trying to recover existing gravity database"
     echo -ne "  ${INFO} ${str}..."
     # We have to remove any possibly existing recovery database or this will fail
-    rm -f "${gravityDBfile}.recovered" > /dev/null 2>&1
+    rm -f "${gravityDBfile}.recovered" >/dev/null 2>&1
     if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then
         echo -e "${OVER}  ${TICK} ${str} - success"
         mv "${gravityDBfile}" "${gravityDBfile}.old"
@@ -783,7 +796,7 @@ database_recovery() {
         echo -ne "  ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
     else
         echo -e "${OVER}  ${CROSS} ${str} - the following errors happened:"
-        while IFS= read -r line ; do echo "  - $line"; done <<< "$result"
+        while IFS= read -r line; do echo "  - $line"; done <<<"$result"
         echo -e "  ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
         exit 1
     fi
@@ -802,9 +815,10 @@ Options:
 
 repairSelector() {
     case "$1" in
-        "recover") recover_database=true;;
-        "recreate") recreate_database=true;;
-        *) echo "Usage: pihole -g -r {recover,recreate}
+        "recover") recover_database=true ;;
+        "recreate") recreate_database=true ;;
+        *)
+            echo "Usage: pihole -g -r {recover,recreate}
 Attempt to repair gravity database
 
 Available options:
@@ -823,15 +837,16 @@ Available options:
   and create a new file from scratch. If you still
   have the migration backup created when migrating
   to Pi-hole v5.0, Pi-hole will import these files."
-            exit 0;;
+            exit 0
+            ;;
     esac
 }
 
 for var in "$@"; do
     case "${var}" in
-        "-f" | "--force" ) forceDelete=true;;
-        "-r" | "--repair" ) repairSelector "$3";;
-        "-h" | "--help" ) helpFunc;;
+        "-f" | "--force") forceDelete=true ;;
+        "-r" | "--repair") repairSelector "$3" ;;
+        "-h" | "--help") helpFunc ;;
     esac
 done
 
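Taken together with the option loop, the usage text above corresponds to these two invocations:

pihole -g -r recover     # run sqlite3's .recover against the damaged database
pihole -g -r recreate    # start from a fresh schema, re-importing v5.0 migration backups if present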
@@ -847,9 +862,9 @@ if [[ "${recreate_database:-}" == true ]]; then
     str="Recreating gravity database from migration backup"
     echo -ne "${INFO} ${str}..."
     rm "${gravityDBfile}"
-    pushd "${piholeDir}" > /dev/null || exit
+    pushd "${piholeDir}" >/dev/null || exit
     cp migration_backup/* .
-    popd > /dev/null || exit
+    popd >/dev/null || exit
     echo -e "${OVER}  ${TICK} ${str}"
 fi
 
@@ -867,7 +882,7 @@ if [[ "${forceDelete:-}" == true ]]; then
     str="Deleting existing list cache"
     echo -ne "${INFO} ${str}..."
 
-    rm /etc/pihole/list.* 2> /dev/null || true
+    rm /etc/pihole/list.* 2>/dev/null || true
     echo -e "${OVER}  ${TICK} ${str}"
 fi