Mirror of https://github.com/pi-hole/pi-hole.git (synced 2024-11-21 21:53:43 +00:00)

Merge branch 'development' into development-v6-merge-development (resolved conflicts)

Signed-off-by: Adam Warner <me@adamwarner.co.uk>

Commit f193edd428: 6 changed files with 446 additions and 415 deletions
.github/workflows/stale.yml (vendored): 11 changes
@@ -23,14 +23,17 @@ jobs:
           days-before-stale: 30
           days-before-close: 5
           stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Please comment or update this issue or it will be closed in 5 days.'
-          stale-issue-label: $stale_label
+          stale-issue-label: '${{ env.stale_label }}'
           exempt-issue-labels: 'Internal, Fixed in next release, Bug: Confirmed, Documentation Needed'
           exempt-all-issue-assignees: true
           operations-per-run: 300
           close-issue-reason: 'not_planned'

-  remove_stale: # trigger "stale" removal immediately when stale issues are commented on
-    if: github.event_name == 'issue_comment'
+  remove_stale:
+    # trigger "stale" removal immediately when stale issues are commented on
+    # we need to explicitly check that the trigger does not run on comment on a PR as
+    # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment-on-issues-only-or-pull-requests-only
+    if: ${{ !github.event.issue.pull_request && github.event_name != 'schedule' }}
     permissions:
       contents: read # for actions/checkout
       issues: write # to edit issues label

@@ -39,7 +42,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4.1.1
       - name: Remove 'stale' label
-        run: gh issue edit ${{ github.event.issue.number }} --remove-label $stale_label
+        run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
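Both hunks swap the bare `$stale_label` for the expression `${{ env.stale_label }}`. In a `with:` input there is no shell, so only the expression syntax is evaluated; in a `run:` step either spelling would work because the runner exports workflow `env:` values to the step's shell. A rough local sketch of the `run:` step, assuming a workflow-level `env: stale_label` and a hypothetical issue number 123:

    # The runner exports workflow env vars before the step's shell runs:
    export stale_label="stale"

    # gh issue edit supports removing a label by name:
    gh issue edit 123 --remove-label "$stale_label"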
advanced/Scripts/query.sh

@@ -27,7 +27,7 @@ colfile="/opt/pihole/COL_TABLE"
 # Source api functions
 . "${PI_HOLE_INSTALL_DIR}/api.sh"

-Help(){
+Help() {
     echo "Usage: pihole -q [option] <domain>
 Example: 'pihole -q --partial domain.com'
 Query the adlists for a specified domain

@@ -36,11 +36,10 @@ Options:
   --partial            Search the adlists for partially matching domains
   --all                Return all query matches within the adlists
   -h, --help           Show this help dialog"
-  exit 0
+    exit 0
 }

-
-GenerateOutput(){
+GenerateOutput() {
     local data gravity_data lists_data num_gravity num_lists search_type_str
     local gravity_data_csv lists_data_csv line current_domain
     data="${1}"

@@ -52,13 +51,13 @@ GenerateOutput(){
     gravity_data=$(printf %s "${data}" | jq '.search.gravity | group_by(.address) | map({ address: (.[0].address), domains: [.[] | .domain] })')

     # number of objects in each json
-    num_gravity=$(printf %s "${gravity_data}" | jq length )
-    num_lists=$(printf %s "${lists_data}" | jq length )
+    num_gravity=$(printf %s "${gravity_data}" | jq length)
+    num_lists=$(printf %s "${lists_data}" | jq length)

     if [ "${partial}" = true ]; then
-      search_type_str="partially"
+        search_type_str="partially"
     else
-      search_type_str="exactly"
+        search_type_str="exactly"
     fi

     # Results from allow/deny list

@@ -66,7 +65,7 @@ GenerateOutput(){
     if [ "${num_lists}" -gt 0 ]; then
         # Convert the data to a csv, each line is a "domain,type" string
         # not using jq's @csv here as it quotes each value individually
-        lists_data_csv=$(printf %s "${lists_data}" | jq --raw-output '.[] | [.domain, .type] | join(",")' )
+        lists_data_csv=$(printf %s "${lists_data}" | jq --raw-output '.[] | [.domain, .type] | join(",")')

         # Generate output for each csv line, separating line in a domain and type substring at the ','
         echo "${lists_data_csv}" | while read -r line; do
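The comment about avoiding jq's `@csv` refers to its quoting: `@csv` emits "domain","type" with each field quoted, while `join(",")` emits the bare domain,type pairs the read loop expects. A quick comparison, assuming a two-entry sample document shaped like the lists payload:

    data='[{"domain":"ads.example.com","type":"deny"},{"domain":"cdn.example.net","type":"allow"}]'

    # @csv quotes each value individually:
    printf %s "$data" | jq --raw-output '.[] | [.domain, .type] | @csv'
    #   "ads.example.com","deny"
    #   "cdn.example.net","allow"

    # join(",") keeps the values bare, which is what the loop below splits on:
    printf %s "$data" | jq --raw-output '.[] | [.domain, .type] | join(",")'
    #   ads.example.com,deny
    #   cdn.example.net,allow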
@@ -79,7 +78,7 @@ GenerateOutput(){
     if [ "${num_gravity}" -gt 0 ]; then
         # Convert the data to a csv, each line is a "URL,domain,domain,...." string
         # not using jq's @csv here as it quotes each value individually
-        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")' )
+        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")')

         # Generate line-by-line output for each csv line
         echo "${gravity_data_csv}" | while read -r line; do

@@ -90,7 +89,7 @@ GenerateOutput(){
             # cut off URL, leaving "domain,domain,...."
             line=${line#*,}
             # print each domain and remove it from the string until nothing is left
-           while [ ${#line} -gt 0 ]; do
+            while [ ${#line} -gt 0 ]; do
                 current_domain=${line%%,*}
                 printf ' - %s\n' "${COL_GREEN}${current_domain}${COL_NC}"
                 # we need to remove the current_domain and the comma in two steps because
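The loop above walks a comma-separated string using only POSIX parameter expansion: `${line%%,*}` keeps everything before the first comma, then the matched domain and the separator are stripped in two steps because the final domain has no trailing comma. A standalone sketch of the same idiom, with made-up domains:

    #!/bin/sh
    # Walk "domain,domain,..." without forking cut/awk.
    line="ads.example.com,cdn.example.net,tracker.example.org"
    while [ ${#line} -gt 0 ]; do
        current_domain=${line%%,*}          # text before the first comma (or the whole rest)
        printf ' - %s\n' "${current_domain}"
        line=${line#"${current_domain}"}    # step 1: strip the domain itself
        line=${line#,}                      # step 2: strip the separator, if any
    done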
@@ -103,17 +102,17 @@ GenerateOutput(){
     fi
 }

-Main(){
+Main() {
     local data

     if [ -z "${domain}" ]; then
-        echo "No domain specified"; exit 1
+        echo "No domain specified"
+        exit 1
     fi
     # domains are lowercased and converted to punycode by FTL since
     # https://github.com/pi-hole/FTL/pull/1715
     # no need to do it here

-
     # Test if the authentication endpoint is available
     TestAPIAvailability

@@ -137,13 +136,13 @@ Main(){

 # Process all options (if present)
 while [ "$#" -gt 0 ]; do
-  case "$1" in
-    "-h" | "--help" ) Help;;
-    "--partial" ) partial="true";;
-    "--all" ) max_results=10000;; # hard-coded FTL limit
-    * ) domain=$1;;
-  esac
-  shift
+    case "$1" in
+        "-h" | "--help") Help ;;
+        "--partial") partial="true" ;;
+        "--all") max_results=10000 ;; # hard-coded FTL limit
+        *) domain=$1 ;;
+    esac
+    shift
 done

 Main "${domain}"
advanced/Scripts/updatecheck.sh

@@ -10,32 +10,31 @@

 function get_local_branch() {
     # Return active branch
-    cd "${1}" 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
     git rev-parse --abbrev-ref HEAD || return 1
 }

 function get_local_version() {
     # Return active version
-    cd "${1}" 2> /dev/null || return 1
-    git describe --tags --always 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
+    git describe --tags --always 2>/dev/null || return 1
 }

 function get_local_hash() {
-    cd "${1}" 2> /dev/null || return 1
+    cd "${1}" 2>/dev/null || return 1
     git rev-parse --short=8 HEAD || return 1
 }

 function get_remote_version() {
     # if ${2} is = "master" we need to use the "latest" endpoint, otherwise, we simply return null
     if [[ "${2}" == "master" ]]; then
-        curl -s "https://api.github.com/repos/pi-hole/${1}/releases/latest" 2> /dev/null | jq --raw-output .tag_name || return 1
+        curl -s "https://api.github.com/repos/pi-hole/${1}/releases/latest" 2>/dev/null | jq --raw-output .tag_name || return 1
     else
         echo "null"
     fi
 }

-
-function get_remote_hash(){
+function get_remote_hash() {
     git ls-remote "https://github.com/pi-hole/${1}" --tags "${2}" | awk '{print substr($0, 1,8);}' || return 1
 }
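get_remote_version only queries the GitHub releases API for master; every other branch yields the literal "null", since only released code maps to a tag. The underlying call can be checked by hand (network access and an installed jq assumed):

    # Latest released Pi-hole core tag, printed as a bare tag name:
    curl -s "https://api.github.com/repos/pi-hole/pi-hole/releases/latest" | jq --raw-output .tag_name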
@@ -57,16 +56,15 @@ chmod 644 "${VERSION_FILE}"
 DOCKER_TAG=$(cat /pihole.docker.tag 2>/dev/null)
 regex='^([0-9]+\.){1,2}(\*|[0-9]+)(-.*)?$|(^nightly$)|(^dev.*$)'
 if [[ ! "${DOCKER_TAG}" =~ $regex ]]; then
-  # DOCKER_TAG does not match the pattern (see https://regex101.com/r/RsENuz/1), so unset it.
-  unset DOCKER_TAG
+    # DOCKER_TAG does not match the pattern (see https://regex101.com/r/RsENuz/1), so unset it.
+    unset DOCKER_TAG
 fi

 # used in cronjob
 if [[ "$1" == "reboot" ]]; then
-  sleep 30
+    sleep 30
 fi

-
 # get Core versions

 CORE_VERSION="$(get_local_version /etc/.pihole)"
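The regex accepts dotted version tags (optionally with a suffix), the literal nightly, and anything starting with dev; everything else unsets DOCKER_TAG. A small sketch to see which sample tags survive:

    regex='^([0-9]+\.){1,2}(\*|[0-9]+)(-.*)?$|(^nightly$)|(^dev.*$)'
    for tag in 2024.07.0 5.8-1 nightly dev-custom latest ""; do
        if [[ "${tag}" =~ $regex ]]; then
            echo "keep:  '${tag}'"
        else
            echo "unset: '${tag}'"
        fi
    done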
@@ -84,7 +82,6 @@ addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_CORE_VERSION" "${GITHUB_CORE_VERSI
 GITHUB_CORE_HASH="$(get_remote_hash pi-hole "${CORE_BRANCH}")"
 addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_CORE_HASH" "${GITHUB_CORE_HASH}"

-
 # get Web versions

 WEB_VERSION="$(get_local_version /var/www/html/admin)"

@@ -119,7 +116,6 @@ addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_FTL_VERSION" "${GITHUB_FTL_VERSION
 GITHUB_FTL_HASH="$(get_remote_hash FTL "${FTL_BRANCH}")"
 addOrEditKeyValPair "${VERSION_FILE}" "GITHUB_FTL_HASH" "${GITHUB_FTL_HASH}"

-
 # get Docker versions

 if [[ "${DOCKER_TAG}" ]]; then

advanced/Scripts/version.sh
@@ -31,7 +31,7 @@ main() {

     # Automatically show detailed information if
     # at least one of the components is not on master branch
-    if [ ! "${CORE_BRANCH}" = "master" ] || [ ! "${WEB_BRANCH}" = "master" ] || [ ! "${FTL_BRANCH}" = "master" ] ; then
+    if [ ! "${CORE_BRANCH}" = "master" ] || [ ! "${WEB_BRANCH}" = "master" ] || [ ! "${FTL_BRANCH}" = "master" ]; then
         details=true
     fi

(One further file's diff is suppressed because it is too large.)
gravity.sh: 223 changes
@@ -63,7 +63,7 @@ gravityOLDfile="${gravityDIR}/gravity_old.db"

 # Generate new SQLite3 file from schema template
 generate_gravity_database() {
-    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" < "${gravityDBschema}"; then
+    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" <"${gravityDBschema}"; then
         echo -e " ${CROSS} Unable to create ${gravityDBfile}"
         return 1
     fi

@@ -78,7 +78,7 @@ gravity_build_tree() {
     echo -ne " ${INFO} ${str}..."

     # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -117,7 +117,7 @@ gravity_swap_databases() {

 # Update timestamp when the gravity table was last updated successfully
 update_gravity_timestamp() {
-    output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -171,19 +171,18 @@ database_table_from_file() {

     # Loop over all domains in ${src} file
     # Read file line by line
-    grep -v '^ *#' < "${src}" | while IFS= read -r domain
-    do
+    grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
         # Only add non-empty lines
         if [[ -n "${domain}" ]]; then
             if [[ "${table}" == "domain_audit" ]]; then
                 # domain_audit table format (no enable or modified fields)
-                echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
+                echo "${rowid},\"${domain}\",${timestamp}" >>"${tmpFile}"
             elif [[ "${table}" == "adlist" ]]; then
                 # Adlist table format
-                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >> "${tmpFile}"
+                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
             else
                 # White-, black-, and regexlist table format
-                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >> "${tmpFile}"
+                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
             fi
             rowid+=1
         fi

@@ -192,7 +191,7 @@ database_table_from_file() {
     # Store domains in database table specified by ${table}
     # Use printf as .mode and .import need to be on separate lines
     # see https://unix.stackexchange.com/a/445615/83260
-    output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then
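The printf trick in the last hunk matters because the sqlite3 shell parses dot-commands line by line: .mode csv and .import cannot share a line, and printf is a simple way to emit the embedded newlines. A minimal standalone version, assuming plain sqlite3, a hypothetical domains.csv, and a matching domainlist table:

    # Import a CSV into a table; each dot-command must be on its own line.
    printf ".timeout 30000\n.mode csv\n.import \"%s\" %s\n" "domains.csv" "domainlist" | sqlite3 my.db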
@@ -202,17 +201,17 @@ database_table_from_file() {

     # Move source file to backup directory, create directory if not existing
     mkdir -p "${backup_path}"
-    mv "${src}" "${backup_file}" 2> /dev/null || \
+    mv "${src}" "${backup_file}" 2>/dev/null ||
         echo -e " ${CROSS} Unable to backup ${src} to ${backup_path}"

     # Delete tmpFile
-    rm "${tmpFile}" > /dev/null 2>&1 || \
+    rm "${tmpFile}" >/dev/null 2>&1 ||
         echo -e " ${CROSS} Unable to remove ${tmpFile}"
 }

 # Check if a column with name ${2} exists in gravity table with name ${1}
 gravity_column_exists() {
-    output=$( { printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     if [[ "${output}" == "1" ]]; then
         return 0 # Bash 0 is success
     fi

@@ -224,10 +223,10 @@ gravity_column_exists() {
 database_adlist_number() {
     # Only try to set number of domains when this field exists in the gravity database
     if ! gravity_column_exists "adlist" "number"; then
-        return;
+        return
     fi

-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -240,10 +239,10 @@ database_adlist_number() {
 database_adlist_status() {
     # Only try to set the status when this field exists in the gravity database
     if ! gravity_column_exists "adlist" "status"; then
-        return;
+        return
     fi

-    output=$( { printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1 )
+    output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -305,7 +304,7 @@ gravity_CheckDNSResolutionAvailable() {
     fi

     # Determine if $lookupDomain is resolvable
-    if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then
+    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
         # Print confirmation of resolvability if it had previously failed
         if [[ -n "${secs:-}" ]]; then
             echo -e "${OVER} ${TICK} DNS resolution is now available\\n"

@@ -319,7 +318,7 @@ gravity_CheckDNSResolutionAvailable() {
     # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
     # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
     # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
-    if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then
+    if timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
         if [[ -n "${secs:-}" ]]; then
             echo -e "${OVER} ${TICK} DNS resolution is now available\\n"
         fi
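The fallback exists because getent hosts consults /etc/resolv.conf, which may point at upstream resolvers where pi.hole is NXDOMAIN, while dig can still confirm that resolution as such works. A condensed sketch of the same two-step probe, with pi.hole as the assumed lookup domain:

    lookupDomain="pi.hole"
    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
        echo "resolvable via system resolver"
    elif timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
        # getent failed but dig ran: resolution works, just not for pi.hole
        echo "resolvable via dig fallback"
    else
        echo "DNS resolution unavailable"
    fi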
@@ -330,7 +329,7 @@ gravity_CheckDNSResolutionAvailable() {
     fi

     # Determine error output message
-    if pgrep pihole-FTL &> /dev/null; then
+    if pgrep pihole-FTL &>/dev/null; then
         echo -e " ${CROSS} DNS resolution is currently unavailable"
     else
         echo -e " ${CROSS} DNS service is not running"

@@ -340,7 +339,7 @@ gravity_CheckDNSResolutionAvailable() {
     # Ensure DNS server is given time to be resolvable
     secs="120"
     echo -ne " ${INFO} Time until retry: ${secs}"
-    until timeout 1 getent hosts "${lookupDomain}" &> /dev/null; do
+    until timeout 1 getent hosts "${lookupDomain}" &>/dev/null; do
         [[ "${secs:-}" -eq 0 ]] && break
         echo -ne "${OVER} ${INFO} Time until retry: ${secs}"
         : $((secs--))

@@ -361,19 +360,19 @@ gravity_DownloadBlocklists() {

     # Retrieve source URLs from gravity database
     # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
-    mapfile -t sources <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
-    mapfile -t sourceIDs <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
-    mapfile -t sourceTypes <<< "$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2> /dev/null)"
+    mapfile -t sources <<<"$(pihole-FTL sqlite3 -ni -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
+    mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 -ni -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
+    mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"

     # Parse source domains from $sources
-    mapfile -t sourceDomains <<< "$(
+    mapfile -t sourceDomains <<<"$(
         # Logic: Split by folder/port
         awk -F '[/:]' '{
             # Remove URL protocol & optional username:password@
             gsub(/(.*:\/\/|.*:.*@)/, "", $0)
             if(length($1)>0){print $1}
             else {print "local"}
-        }' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
+        }' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
     )"

     local str="Pulling blocklist source list into range"
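The embedded awk program reduces each adlist URL to its bare host: it splits on / and :, strips the protocol and any user:password@ userinfo, and falls back to the literal "local" for plain file paths. For example, with made-up sources:

    sources=$'https://hosts.example.com/ads.txt\nhttp://user:pass@mirror.example.net:8080/list\n/etc/pihole/local.list'
    awk -F '[/:]' '{
        gsub(/(.*:\/\/|.*:.*@)/, "", $0)
        if(length($1)>0){print $1}
        else {print "local"}
    }' <<<"${sources}"
    # hosts.example.com
    # mirror.example.net
    # local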
@@ -391,8 +390,8 @@ gravity_DownloadBlocklists() {
     # Prepare new gravity database
     str="Preparing new gravity database"
     echo -ne " ${INFO} ${str}..."
-    rm "${gravityTEMPfile}" > /dev/null 2>&1
-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
+    rm "${gravityTEMPfile}" >/dev/null 2>&1
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -412,7 +411,7 @@ gravity_DownloadBlocklists() {
         copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
     fi

-    output=$( { pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<< "${copyGravity}"; } 2>&1 )
+    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
     status="$?"

     if [[ "${status}" -ne 0 ]]; then

@@ -457,7 +456,7 @@ gravity_DownloadBlocklists() {

     # this will remove first @ that is after schema and before domain
     # \1 is optional schema, \2 is userinfo
-    check_url="$( sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<< "$url" )"
+    check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"

     if [[ "${check_url}" =~ ${regex} ]]; then
         echo -e " ${CROSS} Invalid Target"
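The sed expression keeps the scheme (group 1) and the userinfo (group 2) but drops the @ between userinfo and host, so credentials no longer look like part of the hostname when the URL is validated. For instance, with a made-up URL:

    url="https://user:pass@hosts.example.com/ads.txt"
    sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url"
    # https://user:passhosts.example.com/ads.txt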
@@ -477,7 +476,7 @@ compareLists() {
     if [[ -s "${target}.sha1" ]]; then
         if ! sha1sum --check --status --strict "${target}.sha1"; then
             # The list changed upstream, we need to update the checksum
-            sha1sum "${target}" > "${target}.sha1"
+            sha1sum "${target}" >"${target}.sha1"
             echo " ${INFO} List has been updated"
             database_adlist_status "${adlistID}" "1"
         else

@@ -486,7 +485,7 @@ compareLists() {
         fi
     else
         # No checksum available, create one for comparing on the next run
-        sha1sum "${target}" > "${target}.sha1"
+        sha1sum "${target}" >"${target}.sha1"
         # We assume here it was changed upstream
         database_adlist_status "${adlistID}" "1"
     fi
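compareLists leans on sha1sum's check mode: a stored "<hash>  <file>" line either still matches (list unchanged) or fails (list updated, so the stored hash is refreshed). The same pattern in isolation, with a hypothetical list.txt:

    target="list.txt"
    if [[ -s "${target}.sha1" ]] && sha1sum --check --status --strict "${target}.sha1"; then
        echo "unchanged since last run"
    else
        sha1sum "${target}" >"${target}.sha1"   # (re)create the stored checksum
        echo "changed, or first run"
    fi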
@@ -515,25 +514,29 @@ gravity_DownloadBlocklistFromUrl() {
     echo -ne " ${INFO} ${str} Pending..."
     blocked=false
     case $(getFTLConfigValue dns.blocking.mode) in
-        "IP-NODATA-AAAA"|"IP")
-            # Get IP address of this domain
-            ip="$(dig "${domain}" +short)"
-            # Check if this IP matches any IP of the system
-            if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<< "$(ip a)") -gt 0 ]]; then
-                blocked=true
-            fi;;
-        "NXDOMAIN")
-            if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
-                blocked=true
-            fi;;
-        "NODATA")
-            if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
-                blocked=true
-            fi;;
-        "NULL"|*)
-            if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
-                blocked=true
-            fi;;
+    "IP-NODATA-AAAA" | "IP")
+        # Get IP address of this domain
+        ip="$(dig "${domain}" +short)"
+        # Check if this IP matches any IP of the system
+        if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<<"$(ip a)") -gt 0 ]]; then
+            blocked=true
+        fi
+        ;;
+    "NXDOMAIN")
+        if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
+            blocked=true
+        fi
+        ;;
+    "NODATA")
+        if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
+            blocked=true
+        fi
+        ;;
+    "NULL" | *)
+        if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
+            blocked=true
+        fi
+        ;;
     esac

     if [[ "${blocked}" == true ]]; then
@@ -545,43 +548,53 @@ gravity_DownloadBlocklistFromUrl() {
         fi
         ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
         if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
-            port=443;
-        else port=80
+            port=443
+        else
+            port=80
         fi
         bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
-        echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
+        echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}"
         echo -ne " ${INFO} ${str} Pending..."
         cmd_ext="--resolve $domain:$port:$ip"
     fi

     # shellcheck disable=SC2086
-    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2> /dev/null)
+    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2>/dev/null)

     case $url in
-        # Did we "download" a local file?
-        "file"*)
-            if [[ -s "${listCurlBuffer}" ]]; then
-                echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true
-            else
-                echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
-            fi;;
-        # Did we "download" a remote file?
-        *)
-            # Determine "Status:" output based on HTTP response
-            case "${httpCode}" in
-                "200") echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true;;
-                "304") echo -e "${OVER} ${TICK} ${str} No changes detected"; success=true;;
-                "000") echo -e "${OVER} ${CROSS} ${str} Connection Refused";;
-                "403") echo -e "${OVER} ${CROSS} ${str} Forbidden";;
-                "404") echo -e "${OVER} ${CROSS} ${str} Not found";;
-                "408") echo -e "${OVER} ${CROSS} ${str} Time-out";;
-                "451") echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons";;
-                "500") echo -e "${OVER} ${CROSS} ${str} Internal Server Error";;
-                "504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)";;
-                "521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)";;
-                "522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)";;
-                * ) echo -e "${OVER} ${CROSS} ${str} ${url} (${httpCode})";;
-            esac;;
+    # Did we "download" a local file?
+    "file"*)
+        if [[ -s "${listCurlBuffer}" ]]; then
+            echo -e "${OVER} ${TICK} ${str} Retrieval successful"
+            success=true
+        else
+            echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
+        fi
+        ;;
+    # Did we "download" a remote file?
+    *)
+        # Determine "Status:" output based on HTTP response
+        case "${httpCode}" in
+        "200")
+            echo -e "${OVER} ${TICK} ${str} Retrieval successful"
+            success=true
+            ;;
+        "304")
+            echo -e "${OVER} ${TICK} ${str} No changes detected"
+            success=true
+            ;;
+        "000") echo -e "${OVER} ${CROSS} ${str} Connection Refused" ;;
+        "403") echo -e "${OVER} ${CROSS} ${str} Forbidden" ;;
+        "404") echo -e "${OVER} ${CROSS} ${str} Not found" ;;
+        "408") echo -e "${OVER} ${CROSS} ${str} Time-out" ;;
+        "451") echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons" ;;
+        "500") echo -e "${OVER} ${CROSS} ${str} Internal Server Error" ;;
+        "504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)" ;;
+        "521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)" ;;
+        "522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)" ;;
+        *) echo -e "${OVER} ${CROSS} ${str} ${url} (${httpCode})" ;;
+        esac
+        ;;
     esac

     local done="false"
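The cmd_ext="--resolve $domain:$port:$ip" above is the escape hatch for the self-blocking case: the list's host resolves to a blocked answer through Pi-hole itself, so the script asks the upstream DNS directly and pins curl to that address instead of letting it re-resolve. A hedged standalone equivalent, with assumed values for the host and the address obtained from the upstream resolver:

    domain="hosts.example.com"
    port=443
    ip="203.0.113.10"

    # --resolve pre-seeds curl's DNS cache for this host:port pair,
    # bypassing the local (blocking) resolver for exactly this transfer.
    curl --resolve "$domain:$port:$ip" -s -L -o list.txt "https://$domain/ads.txt"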
@@ -635,7 +648,7 @@ gravity_ParseFileIntoDomains() {
     # This helps with that and makes it easier to read
     # It also helps with debugging so each stage of the script can be researched more in depth
     # 1) Convert all characters to lowercase
-    tr '[:upper:]' '[:lower:]' < "${src}" > "${destination}"
+    tr '[:upper:]' '[:lower:]' <"${src}" >"${destination}"

     # 2) Remove carriage returns
     # 3) Remove lines starting with ! (ABP Comments)

@@ -645,7 +658,7 @@ gravity_ParseFileIntoDomains() {
     # 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
     # 8) Remove empty lines

-   sed -i -r \
+    sed -i -r \
         -e 's/\r$//' \
         -e 's/\s*!.*//g' \
         -e 's/\s*\[.*//g' \

@@ -662,12 +675,12 @@ gravity_Table_Count() {
     local table="${1}"
     local str="${2}"
     local num
-    num="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
+    num="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
     if [[ "${table}" == "gravity" ]]; then
         local unique
-        unique="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
+        unique="$(pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
         echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
-        pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
+        pihole-FTL sqlite3 -ni -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
     else
         echo -e " ${INFO} Number of ${str}: ${num}"
     fi
@@ -687,12 +700,12 @@ gravity_ShowCount() {
 # Create "localhost" entries into hosts format
 gravity_generateLocalList() {
     # Empty $localList if it already exists, otherwise, create it
-    echo "### Do not modify this file, it will be overwritten by pihole -g" > "${localList}"
+    echo "### Do not modify this file, it will be overwritten by pihole -g" >"${localList}"
     chmod 644 "${localList}"

     # Add additional LAN hosts provided by OpenVPN (if available)
     if [[ -f "${VPNList}" ]]; then
-        awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}"
+        awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >>"${localList}"
     fi
 }

@@ -709,12 +722,12 @@ gravity_Cleanup() {
     echo -ne " ${INFO} ${str}..."

     # Delete tmp content generated by Gravity
-    rm ${piholeDir}/pihole.*.txt 2> /dev/null
-    rm ${piholeDir}/*.tmp 2> /dev/null
+    rm ${piholeDir}/pihole.*.txt 2>/dev/null
+    rm ${piholeDir}/*.tmp 2>/dev/null
     # listCurlBuffer location
-    rm "${GRAVITY_TMPDIR}"/*.phgpb 2> /dev/null
+    rm "${GRAVITY_TMPDIR}"/*.phgpb 2>/dev/null
     # invalid_domains location
-    rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2> /dev/null
+    rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2>/dev/null

     # Ensure this function only runs when gravity_SetDownloadOptions() has completed
     if [[ "${gravity_Blackbody:-}" == true ]]; then

@@ -722,7 +735,7 @@ gravity_Cleanup() {
         for file in "${piholeDir}"/*."${domainsExtension}"; do
             # If list is not in active array, then remove it
             if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
-                rm -f "${file}" 2> /dev/null || \
+                rm -f "${file}" 2>/dev/null ||
                     echo -e " ${CROSS} Failed to remove ${file##*/}"
             fi
         done

@@ -764,17 +777,17 @@ database_recovery() {
         fi
     else
         echo -e "${OVER} ${CROSS} ${str} - errors found:"
-        while IFS= read -r line ; do echo " - $line"; done <<< "$result"
+        while IFS= read -r line; do echo " - $line"; done <<<"$result"
     fi
     else
         echo -e "${OVER} ${CROSS} ${str} - errors found:"
-        while IFS= read -r line ; do echo " - $line"; done <<< "$result"
+        while IFS= read -r line; do echo " - $line"; done <<<"$result"
     fi

     str="Trying to recover existing gravity database"
     echo -ne " ${INFO} ${str}..."
     # We have to remove any possibly existing recovery database or this will fail
-    rm -f "${gravityDBfile}.recovered" > /dev/null 2>&1
+    rm -f "${gravityDBfile}.recovered" >/dev/null 2>&1
     if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then
         echo -e "${OVER} ${TICK} ${str} - success"
         mv "${gravityDBfile}" "${gravityDBfile}.old"

@@ -783,7 +796,7 @@ database_recovery() {
         echo -ne " ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
     else
         echo -e "${OVER} ${CROSS} ${str} - the following errors happened:"
-        while IFS= read -r line ; do echo " - $line"; done <<< "$result"
+        while IFS= read -r line; do echo " - $line"; done <<<"$result"
         echo -e " ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
         exit 1
     fi
|
@ -802,9 +815,10 @@ Options:
|
|||
|
||||
repairSelector() {
|
||||
case "$1" in
|
||||
"recover") recover_database=true;;
|
||||
"recreate") recreate_database=true;;
|
||||
*) echo "Usage: pihole -g -r {recover,recreate}
|
||||
"recover") recover_database=true ;;
|
||||
"recreate") recreate_database=true ;;
|
||||
*)
|
||||
echo "Usage: pihole -g -r {recover,recreate}
|
||||
Attempt to repair gravity database
|
||||
|
||||
Available options:
|
||||
|
@ -823,15 +837,16 @@ Available options:
|
|||
and create a new file from scratch. If you still
|
||||
have the migration backup created when migrating
|
||||
to Pi-hole v5.0, Pi-hole will import these files."
|
||||
exit 0;;
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
for var in "$@"; do
|
||||
case "${var}" in
|
||||
"-f" | "--force" ) forceDelete=true;;
|
||||
"-r" | "--repair" ) repairSelector "$3";;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"-f" | "--force") forceDelete=true ;;
|
||||
"-r" | "--repair") repairSelector "$3" ;;
|
||||
"-h" | "--help") helpFunc ;;
|
||||
esac
|
||||
done
|
||||
|
||||
|
@ -847,9 +862,9 @@ if [[ "${recreate_database:-}" == true ]]; then
|
|||
str="Recreating gravity database from migration backup"
|
||||
echo -ne "${INFO} ${str}..."
|
||||
rm "${gravityDBfile}"
|
||||
pushd "${piholeDir}" > /dev/null || exit
|
||||
pushd "${piholeDir}" >/dev/null || exit
|
||||
cp migration_backup/* .
|
||||
popd > /dev/null || exit
|
||||
popd >/dev/null || exit
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
|
@ -867,7 +882,7 @@ if [[ "${forceDelete:-}" == true ]]; then
|
|||
str="Deleting existing list cache"
|
||||
echo -ne "${INFO} ${str}..."
|
||||
|
||||
rm /etc/pihole/list.* 2> /dev/null || true
|
||||
rm /etc/pihole/list.* 2>/dev/null || true
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
|