Repository: https://github.com/pi-hole/pi-hole.git
Merge branch 'development-v6' into new/migrate_dnsmasq_conf
Commit 7bf97cf02a

15 changed files with 219 additions and 92 deletions
.github/workflows/codeql-analysis.yml (vendored, 2 changes)

@@ -25,7 +25,7 @@ jobs:
     steps:
       -
         name: Checkout repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.5
       # Initializes the CodeQL tools for scanning.
       -
         name: Initialize CodeQL
.github/workflows/merge-conflict.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check if PRs are have merge conflicts
-        uses: eps1lon/actions-label-merge-conflict@v2.1.0
+        uses: eps1lon/actions-label-merge-conflict@v3.0.1
        with:
          dirtyLabel: "PR: Merge Conflict"
          repoToken: "${{ secrets.GITHUB_TOKEN }}"
.github/workflows/stale.yml (vendored, 2 changes)

@@ -40,7 +40,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.5
      - name: Remove 'stale' label
        run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }}
        env:
.github/workflows/sync-back-to-dev.yml (vendored, 2 changes)

@@ -33,7 +33,7 @@ jobs:
     name: Syncing branches
     steps:
       - name: Checkout
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.5
      - name: Opening pull request
        run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal'
        env:
.github/workflows/test.yml (vendored, 9 changes)

@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.5

       - name: Check scripts in repository are executable
         run: |

@@ -62,19 +62,20 @@ jobs:
           ubuntu_20,
           ubuntu_22,
           ubuntu_23,
+          ubuntu_24,
           centos_8,
           centos_9,
-          fedora_38,
           fedora_39,
+          fedora_40,
         ]
     env:
       DISTRO: ${{matrix.distro}}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.1.5

       - name: Set up Python 3.10
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.1.0
        with:
          python-version: "3.10"
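Each matrix value lands in the DISTRO environment variable, which selects the matching tox config and Dockerfile, so the new ubuntu_24/fedora_40 entries only work together with the tox.*.ini and Dockerfile changes further down. A hedged sketch of reproducing one matrix cell locally (the exact invocation in test.yml may differ):

# Hypothetical local run of one matrix cell; config names follow the
# test/tox.<distro>.ini pattern added later in this commit.
DISTRO=ubuntu_24
(cd test && tox -c "tox.${DISTRO}.ini")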
advanced/Scripts/piholeDebug.sh (3 changes)

@@ -865,8 +865,6 @@ make_array_from_file() {
     local limit=${2}
     # A local iterator for testing if we are at the limit above
     local i=0
-    # Set the array to be empty so we can start fresh when the function is used
-    local file_content=()
     # If the file is a directory
     if [[ -d "${filename}" ]]; then
         # do nothing since it cannot be parsed

@@ -878,11 +876,14 @@ make_array_from_file() {
             new_line=$(echo "${line}" | sed -e 's/^\s*#.*$//' -e '/^$/d')
             # If the line still has content (a non-zero value)
             if [[ -n "${new_line}" ]]; then
-                # Put it into the array
-                file_content+=("${new_line}")
-            else
-                # Otherwise, it's a blank line or comment, so do nothing
-                :
+                # If the string contains "### CHANGED", highlight this part in red
+                if [[ "${new_line}" == *"### CHANGED"* ]]; then
+                    new_line="${new_line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
+                fi
+
+                # Finally, write this line to the log
+                log_write "    ${new_line}"
             fi
             # Increment the iterator +1
             i=$((i+1))

@@ -894,12 +895,6 @@ make_array_from_file() {
             break
         fi
     done < "${filename}"
-    # Now the we have made an array of the file's content
-    for each_line in "${file_content[@]}"; do
-        # Print each line
-        # At some point, we may want to check the file line-by-line, so that's the reason for an array
-        log_write "    ${each_line}"
-    done
     fi
 }
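For illustration (not part of the diff): the new substitution wraps every "### CHANGED" marker found in a logged line in red before writing it. The sample line and the literal color codes below are invented; in the script COL_RED/COL_NC come from the sourced COL_TABLE.

COL_RED='\e[1;91m'; COL_NC='\e[0m'   # illustrative; normally sourced from COL_TABLE
new_line='port = 5353 ### CHANGED, default = 53'
new_line="${new_line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
echo -e "    ${new_line}"            # the marker now prints in red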
advanced/Scripts/query.sh (2 changes)

@@ -41,14 +41,14 @@ Options:

 GenerateOutput() {
     local data gravity_data lists_data num_gravity num_lists search_type_str
-    local gravity_data_csv lists_data_csv line current_domain
+    local gravity_data_csv lists_data_csv line current_domain url type color
     data="${1}"

     # construct a new json for the list results where each object contains the domain and the related type
     lists_data=$(printf %s "${data}" | jq '.search.domains | [.[] | {domain: .domain, type: .type}]')

     # construct a new json for the gravity results where each object contains the adlist URL and the related domains
-    gravity_data=$(printf %s "${data}" | jq '.search.gravity | group_by(.address) | map({ address: (.[0].address), domains: [.[] | .domain] })')
+    gravity_data=$(printf %s "${data}" | jq '.search.gravity | group_by(.address,.type) | map({ address: (.[0].address), type: (.[0].type), domains: [.[] | .domain] })')

     # number of objects in each json
     num_gravity=$(printf %s "${gravity_data}" | jq length)

@@ -78,15 +78,27 @@ GenerateOutput() {
     if [ "${num_gravity}" -gt 0 ]; then
         # Convert the data to a csv, each line is a "URL,domain,domain,...." string
         # not using jq's @csv here as it quotes each value individually
-        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .domains[]] | join(",")')
+        gravity_data_csv=$(printf %s "${gravity_data}" | jq --raw-output '.[] | [.address, .type, .domains[]] | join(",")')

         # Generate line-by-line output for each csv line
         echo "${gravity_data_csv}" | while read -r line; do
+            # Get first part of the line, the URL
+            url=${line%%,*}
+
+            # cut off URL, leaving "type,domain,domain,...."
+            line=${line#*,}
+            type=${line%%,*}
+            # type == "block" -> red, type == "allow" -> green
+            if [ "${type}" = "block" ]; then
+                color="${COL_RED}"
+            else
+                color="${COL_GREEN}"
+            fi
+
             # print adlist URL
-            printf "%s\n\n" "  - ${COL_BLUE}${line%%,*}${COL_NC}"
+            printf "%s (%s)\n\n" "  - ${COL_BLUE}${url}${COL_NC}" "${color}${type}${COL_NC}"

-            # cut off URL, leaving "domain,domain,...."
+            # cut off type, leaving "domain,domain,...."
             line=${line#*,}
             # print each domain and remove it from the string until nothing is left
             while [ ${#line} -gt 0 ]; do
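To see what the new grouping does (sample data invented): grouping by both .address and .type splits a list that matches as allow and as block into two result objects, one per type.

printf '%s' '[{"address":"https://example.com/list","type":"allow","domain":"good.example"},
              {"address":"https://example.com/list","type":"block","domain":"ads.example"},
              {"address":"https://example.com/list","type":"block","domain":"track.example"}]' |
jq 'group_by(.address,.type) | map({ address: (.[0].address), type: (.[0].type), domains: [.[] | .domain] })'
# -> one object with type "allow" (1 domain) and one with type "block" (2 domains)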
advanced/Templates/gravity.db.sql (2 changes)

@@ -27,7 +27,7 @@ CREATE TABLE domainlist
 CREATE TABLE adlist
 (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    address TEXT UNIQUE NOT NULL,
+    address TEXT NOT NULL,
     enabled BOOLEAN NOT NULL DEFAULT 1,
     date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
     date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),

@@ -37,7 +37,8 @@ CREATE TABLE adlist
     invalid_domains INTEGER NOT NULL DEFAULT 0,
     status INTEGER NOT NULL DEFAULT 0,
     abp_entries INTEGER NOT NULL DEFAULT 0,
-    type INTEGER NOT NULL DEFAULT 0
+    type INTEGER NOT NULL DEFAULT 0,
+    UNIQUE(address, type)
 );

 CREATE TABLE adlist_by_group
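A quick demonstration of the relaxed constraint (scratch database, values invented): the same address may now be stored once per type, matching the allow/block split shown in query.sh above. The third INSERT fails with "UNIQUE constraint failed: adlist.address, adlist.type".

sqlite3 /tmp/adlist_demo.db <<'EOF'
CREATE TABLE adlist (id INTEGER PRIMARY KEY AUTOINCREMENT,
                     address TEXT NOT NULL,
                     type INTEGER NOT NULL DEFAULT 0,
                     UNIQUE(address, type));
INSERT INTO adlist (address, type) VALUES ('https://example.com/list', 0);
INSERT INTO adlist (address, type) VALUES ('https://example.com/list', 1);  -- ok, different type
INSERT INTO adlist (address, type) VALUES ('https://example.com/list', 0);  -- rejected as duplicate
EOF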
automated install/basic-install.sh (setDNS; FTLcheckUpdate changes follow)

@@ -826,12 +826,12 @@ If you want to specify a port other than 53, separate it with a hash.\
         printf -v PIHOLE_DNS_1 "%s" "${piholeDNS%%,*}"
         printf -v PIHOLE_DNS_2 "%s" "${piholeDNS##*,}"

-        # If the first DNS value is invalid or empty, this if statement will be true and we will set PIHOLE_DNS_1="Invalid"
-        if ! valid_ip "${PIHOLE_DNS_1}" || [[ ! "${PIHOLE_DNS_1}" ]]; then
+        # If the first DNS value is invalid (neither IPv4 nor IPv6) or empty, set PIHOLE_DNS_1="Invalid"
+        if ! valid_ip "${PIHOLE_DNS_1}" && ! valid_ip6 "${PIHOLE_DNS_1}" || [[ -z "${PIHOLE_DNS_1}" ]]; then
             PIHOLE_DNS_1=${strInvalid}
         fi
-        # If the second DNS value is invalid or empty, this if statement will be true and we will set PIHOLE_DNS_2="Invalid"
-        if ! valid_ip "${PIHOLE_DNS_2}" && [[ "${PIHOLE_DNS_2}" ]]; then
+        # If the second DNS value is invalid but not empty, set PIHOLE_DNS_2="Invalid"
+        if ! valid_ip "${PIHOLE_DNS_2}" && ! valid_ip6 "${PIHOLE_DNS_2}" && [[ -n "${PIHOLE_DNS_2}" ]]; then
             PIHOLE_DNS_2=${strInvalid}
         fi
         # If either of the DNS servers are invalid,
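Sketch of the combined validator (valid_ip and valid_ip6 are the helpers already defined in basic-install.sh, so this only runs with the script sourced; addresses invented): a value now only has to pass one of the two checks, which lets IPv6 upstreams through.

for candidate in "8.8.8.8" "2606:4700:4700::1111" "not-an-ip" ""; do
    if ! valid_ip "${candidate}" && ! valid_ip6 "${candidate}" || [[ -z "${candidate}" ]]; then
        echo "'${candidate}' -> Invalid"
    else
        echo "'${candidate}' -> accepted"
    fi
done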
@@ -2005,9 +2005,11 @@ FTLcheckUpdate() {
     local localSha1

     if [[ ! "${ftlBranch}" == "master" ]]; then
-        # Check whether or not the binary for this FTL branch actually exists. If not, then there is no update!
+        # This is not the master branch
         local path
         path="${ftlBranch}/${binary}"

+        # Check whether or not the binary for this FTL branch actually exists. If not, then there is no update!
         # shellcheck disable=SC1090
         check_download_exists "$path"
         local ret=$?

@@ -2026,23 +2028,22 @@ FTLcheckUpdate() {
         fi

         if [[ ${ftlLoc} ]]; then
-            # We already have a pihole-FTL binary downloaded.
-            # Alt branches don't have a tagged version against them, so just confirm the checksum of the local vs remote to decide whether we download or not
-            remoteSha1=$(curl -sSL --fail "https://ftl.pi-hole.net/${ftlBranch}/${binary}.sha1" | cut -d ' ' -f 1)
-            localSha1=$(sha1sum "$(command -v pihole-FTL)" | cut -d ' ' -f 1)
-
-            if [[ "${remoteSha1}" != "${localSha1}" ]]; then
-                printf "  %b Checksums do not match, downloading from ftl.pi-hole.net.\\n" "${INFO}"
-                return 0
-            else
-                printf "  %b Checksum of installed binary matches remote. No need to download!\\n" "${INFO}"
-                return 1
-            fi
+            # We already have a pihole-FTL binary installed, check if it's the
+            # same as the remote one
+            # Alt branches don't have a tagged version against them, so just
+            # confirm the checksum of the local vs remote to decide whether we
+            # download or not
+            printf "  %b FTL binary already installed, verifying integrity...\\n" "${INFO}"
+            checkSumFile="https://ftl.pi-hole.net/${ftlBranch}/${binary}.sha1"
+            # Continue further down...
         else
             return 0
         fi
     else
+        # This is the master branch
         if [[ ${ftlLoc} ]]; then
+            # We already have a pihole-FTL binary installed, check if it's the
+            # same as the remote one
             local FTLversion
             FTLversion=$(/usr/bin/pihole-FTL tag)
             local FTLlatesttag

@@ -2056,25 +2057,39 @@ FTLcheckUpdate() {

             # Check if the installed version matches the latest version
             if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then
+                # If the installed version does not match the latest version,
+                # then download
                 return 0
             else
-                printf "  %b Latest FTL Binary already installed (%s). Confirming Checksum...\\n" "${INFO}" "${FTLlatesttag}"
-                remoteSha1=$(curl -sSL --fail "https://github.com/pi-hole/FTL/releases/download/${FTLversion%$'\r'}/${binary}.sha1" | cut -d ' ' -f 1)
-                localSha1=$(sha1sum "$(command -v pihole-FTL)" | cut -d ' ' -f 1)
-
-                if [[ "${remoteSha1}" != "${localSha1}" ]]; then
-                    printf "  %b Corruption detected...\\n" "${INFO}"
-                    return 0
-                else
-                    printf "  %b Checksum correct. No need to download!\\n" "${INFO}"
-                    return 1
-                fi
+                # If the installed version matches the latest version, then
+                # check the installed sha1sum of the binary vs the remote
+                # sha1sum. If they do not match, then download
+                printf "  %b Latest FTL binary already installed (%s), verifying integrity...\\n" "${INFO}" "${FTLlatesttag}"
+                checkSumFile="https://github.com/pi-hole/FTL/releases/download/${FTLversion%$'\r'}/${binary}.sha1"
+                # Continue further down...
             fi
         else
             return 0
         fi
     fi
+
+    # If we reach this point, we need to check the checksum of the local vs
+    # remote to decide whether we download or not
+    remoteSha1=$(curl -sSL --fail "${checkSumFile}" | cut -d ' ' -f 1)
+    localSha1=$(sha1sum "${ftlLoc}" | cut -d ' ' -f 1)
+
+    # Check we downloaded a valid checksum (no 404 or other error like
+    # no DNS resolution)
+    if [[ ! "${remoteSha1}" =~ ^[a-f0-9]{40}$ ]]; then
+        printf "  %b Remote checksum not available, trying to redownload...\\n" "${CROSS}"
+        return 0
+    elif [[ "${remoteSha1}" != "${localSha1}" ]]; then
+        printf "  %b Remote binary is different, downloading...\\n" "${CROSS}"
+        return 0
+    fi
+
+    printf "  %b Local binary up-to-date. No need to download!\\n" "${INFO}"
+    return 1
 }

 # Detect suitable FTL binary platform
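The 40-hex-character test in the new common tail is what keeps a failed download from being compared as if it were a checksum. A standalone sketch with invented values:

remoteSha1="<html>404 Not Found</html>"            # what a failed fetch might yield
localSha1="3f786850e387550fdab836ed7e6dc881de23001b"
if [[ ! "${remoteSha1}" =~ ^[a-f0-9]{40}$ ]]; then
    echo "remote checksum unusable -> return 0 (re-download)"
elif [[ "${remoteSha1}" != "${localSha1}" ]]; then
    echo "binaries differ -> return 0 (download)"
else
    echo "up-to-date -> return 1"
fi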
gravity.sh (103 changes)

@@ -54,6 +54,7 @@ fi
 # Set this only after sourcing pihole-FTL.conf as the gravity database path may
 # have changed
 gravityDBfile="${GRAVITYDB}"
+gravityDBfile_default="/etc/pihole/gravity.db"
 gravityTEMPfile="${GRAVITYDB}_temp"
 gravityDIR="$(dirname -- "${gravityDBfile}")"
 gravityOLDfile="${gravityDIR}/gravity_old.db"

@@ -94,7 +95,7 @@ gravity_swap_databases() {
     # Number of available blocks on disk
     availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
     # Number of blocks, used by gravity.db
-    gravityBlocks=$(stat --format "%b" ${gravityDBfile})
+    gravityBlocks=$(stat --format "%b" "${gravityDBfile}")
     # Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
     # Better be safe than sorry...
     oldAvail=false

@@ -453,7 +454,7 @@ gravity_DownloadBlocklists() {
         if [[ "${check_url}" =~ ${regex} ]]; then
             echo -e "  ${CROSS} Invalid Target"
         else
-            gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}" "${adlist_type}"
+            gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}" "${adlist_type}" "${domain}"
         fi
         echo ""
     done

@@ -485,8 +486,9 @@ compareLists() {

 # Download specified URL and perform checks on HTTP status and file content
 gravity_DownloadBlocklistFromUrl() {
-    local url="${1}" adlistID="${2}" saveLocation="${3}" target="${4}" compression="${5}" gravity_type="${6}"
+    local url="${1}" adlistID="${2}" saveLocation="${3}" target="${4}" compression="${5}" gravity_type="${6}" domain="${7}"
     local heisenbergCompensator="" listCurlBuffer str httpCode success="" ip cmd_ext
+    local file_path permissions ip_addr port blocked=false download=true

     # Create temp file to store content on disk instead of RAM
     # We don't use '--suffix' here because not all implementations of mktemp support it, e.g. on Alpine

@@ -531,12 +533,56 @@ gravity_DownloadBlocklistFromUrl() {
             ;;
     esac

+    # Check if this domain is blocked by Pi-hole but only if the domain is not a
+    # local file or empty
+    if [[ $url != "file"* ]] && [[ -n "${domain}" ]]; then
+        case $(getFTLConfigValue dns.blocking.mode) in
+            "IP-NODATA-AAAA" | "IP")
+                # Get IP address of this domain
+                ip="$(dig "${domain}" +short)"
+                # Check if this IP matches any IP of the system
+                if [[ -n "${ip}" && $(grep -Ec "inet(|6) ${ip}" <<<"$(ip a)") -gt 0 ]]; then
+                    blocked=true
+                fi
+                ;;
+            "NXDOMAIN")
+                if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
+                    blocked=true
+                fi
+                ;;
+            "NODATA")
+                if [[ $(dig "${domain}" | grep "NOERROR" -c) -ge 1 ]] && [[ -z $(dig +short "${domain}") ]]; then
+                    blocked=true
+                fi
+                ;;
+            "NULL" | *)
+                if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
+                    blocked=true
+                fi
+                ;;
+        esac
+
         if [[ "${blocked}" == true ]]; then
-            printf -v ip_addr "%s" "${PIHOLE_DNS_1%#*}"
-            if [[ ${PIHOLE_DNS_1} != *"#"* ]]; then
+            # Get first defined upstream server
+            local upstream
+            upstream="$(getFTLConfigValue dns.upstreams)"
+
+            # Isolate first upstream server from a string like
+            # [ 1.2.3.4#1234, 5.6.7.8#5678, ... ]
+            upstream="${upstream%%,*}"
+            upstream="${upstream##*[}"
+            upstream="${upstream%%]*}"
+            # Trim leading and trailing spaces and tabs
+            upstream="${upstream#"${upstream%%[![:space:]]*}"}"
+            upstream="${upstream%"${upstream##*[![:space:]]}"}"
+
+            # Get IP address and port of this upstream server
+            local ip_addr port
+            printf -v ip_addr "%s" "${upstream%#*}"
+            if [[ ${upstream} != *"#"* ]]; then
                 port=53
             else
-                printf -v port "%s" "${PIHOLE_DNS_1#*#}"
+                printf -v port "%s" "${upstream#*#}"
             fi
             ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
             if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
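The ip resolved here feeds curl's --resolve option (assembled a few lines below as cmd_ext="--resolve $domain:$port:$ip"), which pins the list's hostname to the upstream-resolved address so the download bypasses Pi-hole's own blocking. A standalone sketch with invented values:

domain="blocked-adlist.example"; port=443; ip="203.0.113.7"
# --resolve injects a fixed host:port:address mapping into curl's resolver cache
curl --resolve "${domain}:${port}:${ip}" -sSL "https://${domain}/list.txt" -o /tmp/list.txt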
@@ -544,14 +590,42 @@ gravity_DownloadBlocklistFromUrl() {
             else
                 port=80
             fi
-            bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
-            echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}"
+            echo -e "${OVER}  ${CROSS} ${str} ${domain} is blocked by one of your lists. Using DNS server ${upstream} instead"
             echo -ne "  ${INFO} ${str} Pending..."
             cmd_ext="--resolve $domain:$port:$ip"
         fi
+    fi
+
+    # If we are going to "download" a local file, we first check if the target
+    # file has a+r permission. We explicitly check for all+read because we want
+    # to make sure that the file is readable by everyone and not just the user
+    # running the script.
+    if [[ $url == "file://"* ]]; then
+        # Get the file path
+        file_path=$(echo "$url" | cut -d'/' -f3-)
+        # Check if the file exists and is a regular file (i.e. not a socket, fifo, tty, block). Might still be a symlink.
+        if [[ ! -f $file_path ]]; then
+            # Output that the file does not exist
+            echo -e "${OVER}  ${CROSS} ${file_path} does not exist"
+            download=false
+        else
+            # Check if the file or a file referenced by the symlink has a+r permissions
+            permissions=$(stat -L -c "%a" "$file_path")
+            if [[ $permissions == *4 || $permissions == *5 || $permissions == *6 || $permissions == *7 ]]; then
+                # Output that we are using the local file
+                echo -e "${OVER}  ${INFO} Using local file ${file_path}"
+            else
+                # Output that the file does not have the correct permissions
+                echo -e "${OVER}  ${CROSS} Cannot read file (file needs to have a+r permission)"
+                download=false
+            fi
+        fi
+    fi
+
+    if [[ "${download}" == true ]]; then
         # shellcheck disable=SC2086
         httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2>/dev/null)
+    fi

     case $url in
         # Did we "download" a local file?
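The a+r test above only inspects the final octal digit of the mode (the "others" permissions), where the read bit is 4, so any mode ending in 4, 5, 6, or 7 passes. A quick table over sample modes:

for mode in 600 640 644 750 755; do
    if [[ $mode == *4 || $mode == *5 || $mode == *6 || $mode == *7 ]]; then
        echo "${mode}: world-readable, file would be used"
    else
        echo "${mode}: not world-readable, download=false"
    fi
done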
@@ -560,7 +634,7 @@ gravity_DownloadBlocklistFromUrl() {
             echo -e "${OVER}  ${TICK} ${str} Retrieval successful"
             success=true
         else
-            echo -e "${OVER}  ${CROSS} ${str} Not found / empty list"
+            echo -e "${OVER}  ${CROSS} ${str} Retrieval failed / empty list"
         fi
         ;;
     # Did we "download" a remote file?

@@ -594,7 +668,7 @@ gravity_DownloadBlocklistFromUrl() {
     if [[ "${success}" == true ]]; then
         if [[ "${httpCode}" == "304" ]]; then
             # Add domains to database table file
-            pihole-FTL ${gravity_type} parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
+            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
             database_adlist_status "${adlistID}" "2"
             done="true"
         # Check if $listCurlBuffer is a non-zero length file

@@ -604,7 +678,7 @@ gravity_DownloadBlocklistFromUrl() {
             # Remove curl buffer file after its use
             rm "${listCurlBuffer}"
             # Add domains to database table file
-            pihole-FTL ${gravity_type} parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
+            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
             # Compare lists, are they identical?
             compareLists "${adlistID}" "${saveLocation}"
             done="true"

@@ -620,7 +694,7 @@ gravity_DownloadBlocklistFromUrl() {
         if [[ -r "${saveLocation}" ]]; then
             echo -e "  ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
             # Add domains to database table file
-            pihole-FTL ${gravity_type} parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
+            pihole-FTL "${gravity_type}" parseList "${saveLocation}" "${gravityTEMPfile}" "${adlistID}"
             database_adlist_status "${adlistID}" "3"
         else
             echo -e "  ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"

@@ -826,7 +900,10 @@ for var in "$@"; do
     case "${var}" in
     "-f" | "--force") forceDelete=true ;;
     "-r" | "--repair") repairSelector "$3" ;;
-    "-u" | "--upgrade" ) upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"; exit 0;;
+    "-u" | "--upgrade")
+        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
+        exit 0
+        ;;
     "-h" | "--help") helpFunc ;;
     esac
 done
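For reference (sample value invented), the chain of parameter expansions in the blocked-domain branch above reduces FTL's dns.upstreams notation to a single host/port pair:

upstream='[ 1.2.3.4#5335, 5.6.7.8 ]'                  # sample getFTLConfigValue output
upstream="${upstream%%,*}"                            # '[ 1.2.3.4#5335'
upstream="${upstream##*[}"                            # ' 1.2.3.4#5335'
upstream="${upstream%%]*}"                            # drops trailing ']' when only one entry exists
upstream="${upstream#"${upstream%%[![:space:]]*}"}"   # trim leading whitespace
upstream="${upstream%"${upstream##*[![:space:]]}"}"   # trim trailing whitespace
echo "host=${upstream%#*} port=${upstream#*#}"        # host=1.2.3.4 port=5335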
test/_fedora_40.Dockerfile (renamed from _fedora_38.Dockerfile)

@@ -1,4 +1,4 @@
-FROM fedora:38
+FROM fedora:40
 RUN dnf install -y git initscripts

 ENV GITDIR /etc/.pihole
test/_ubuntu_24.Dockerfile (new file, 18 lines)

@@ -0,0 +1,18 @@
+FROM buildpack-deps:24.04-scm
+
+ENV GITDIR /etc/.pihole
+ENV SCRIPTDIR /opt/pihole
+
+RUN mkdir -p $GITDIR $SCRIPTDIR /etc/pihole
+ADD . $GITDIR
+RUN cp $GITDIR/advanced/Scripts/*.sh $GITDIR/gravity.sh $GITDIR/pihole $GITDIR/automated\ install/*.sh $SCRIPTDIR/
+ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$SCRIPTDIR
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN true && \
+    chmod +x $SCRIPTDIR/*
+
+ENV SKIP_INSTALL true
+ENV OS_CHECK_DOMAIN_NAME dev-supportedos.pi-hole.net
+
+#sed '/# Start the installer/Q' /opt/pihole/basic-install.sh > /opt/pihole/stub_basic-install.sh && \
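This image is built exactly as the new tox config below drives it; to build it by hand from the test/ directory:

docker buildx build --load --progress plain -f _ubuntu_24.Dockerfile -t pytest_pihole:test_container ../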
test/requirements.txt

@@ -1,6 +1,6 @@
 pyyaml == 6.0.1
-pytest == 8.0.0
-pytest-xdist == 3.5.0
-pytest-testinfra == 10.0.0
-tox == 4.12.1
+pytest == 8.2.0
+pytest-xdist == 3.6.1
+pytest-testinfra == 10.1.0
+tox == 4.15.0
test/tox.fedora_40.ini (renamed from tox.fedora_38.ini)

@@ -4,5 +4,5 @@ envlist = py3

 [testenv]
 allowlist_externals = docker
 deps = -rrequirements.txt
-commands = docker buildx build --load --progress plain -f _fedora_38.Dockerfile -t pytest_pihole:test_container ../
+commands = docker buildx build --load --progress plain -f _fedora_40.Dockerfile -t pytest_pihole:test_container ../
     pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py
test/tox.ubuntu_24.ini (new file, 8 lines)

@@ -0,0 +1,8 @@
+[tox]
+envlist = py3
+
+[testenv:py3]
+allowlist_externals = docker
+deps = -rrequirements.txt
+commands = docker buildx build --load --progress plain -f _ubuntu_24.Dockerfile -t pytest_pihole:test_container ../
+    pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py