diff --git a/advanced/01-pihole.conf b/advanced/01-pihole.conf index 8b772ae8..f7b78ab0 100644 --- a/advanced/01-pihole.conf +++ b/advanced/01-pihole.conf @@ -42,6 +42,6 @@ cache-size=10000 log-queries log-facility=/var/log/pihole.log -local-ttl=300 +local-ttl=2 log-async diff --git a/advanced/Scripts/list.sh b/advanced/Scripts/list.sh index 86589083..a3f3261a 100755 --- a/advanced/Scripts/list.sh +++ b/advanced/Scripts/list.sh @@ -66,15 +66,15 @@ HandleOther() { domain="${1,,}" # Check validity of domain - validDomain=$(perl -lne 'print if /^((-|_)*[a-z\d]((-|_)*[a-z\d])*(-|_)*)(\.(-|_)*([a-z\d]((-|_)*[a-z\d])*))*$/' <<< "${domain}") # Valid chars check - validDomain=$(perl -lne 'print if /^.{1,253}$/' <<< "${validDomain}") # Overall length check - validDomain=$(perl -lne 'print if /^[^\.]{1,63}(\.[^\.]{1,63})*$/' <<< "${validDomain}") # Length of each label + if [[ "${#domain}" -le 253 ]]; then + validDomain=$(grep -P "^((-|_)*[a-z\d]((-|_)*[a-z\d])*(-|_)*)(\.(-|_)*([a-z\d]((-|_)*[a-z\d])*))*$" <<< "${domain}") # Valid chars check + validDomain=$(grep -P "^[^\.]{1,63}(\.[^\.]{1,63})*$" <<< "${validDomain}") # Length of each label + fi - if [[ -z "${validDomain}" ]]; then - echo -e " ${CROSS} $1 is not a valid argument or domain name!" - else - echo -e " ${TICK} $1 is a valid domain name!" + if [[ -n "${validDomain}" ]]; then domList=("${domList[@]}" ${validDomain}) + else + echo -e " ${CROSS} ${domain} is not a valid argument or domain name!" fi } @@ -107,6 +107,8 @@ AddDomain() { [[ "${list}" == "${wildcardlist}" ]] && listname="wildcard blacklist" if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then + [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only" + [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only" bool=true # Is the domain in the list we want to add it to? grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false @@ -129,7 +131,7 @@ AddDomain() { # Remove the /* from the end of the IP addresses IPV4_ADDRESS=${IPV4_ADDRESS%/*} IPV6_ADDRESS=${IPV6_ADDRESS%/*} - + [[ -z "${type}" ]] && type="--wildcard-only" bool=true # Is the domain in the list? grep -e "address=\/${domain}\/" "${wildcardlist}" > /dev/null 2>&1 || bool=false @@ -138,7 +140,7 @@ AddDomain() { if [[ "${verbose}" == true ]]; then echo -e " ${INFO} Adding $1 to wildcard blacklist..." fi - reload=true + reload="restart" echo "address=/$1/${IPV4_ADDRESS}" >> "${wildcardlist}" if [[ "${#IPV6_ADDRESS}" > 0 ]]; then echo "address=/$1/${IPV6_ADDRESS}" >> "${wildcardlist}" @@ -161,6 +163,8 @@ RemoveDomain() { if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then bool=true + [[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only" + [[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only" # Is it in the list? Logic follows that if its whitelisted it should not be blacklisted and vice versa grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false if [[ "${bool}" == true ]]; then @@ -175,6 +179,7 @@ RemoveDomain() { fi fi elif [[ "${list}" == "${wildcardlist}" ]]; then + [[ -z "${type}" ]] && type="--wildcard-only" bool=true # Is it in the list? grep -e "address=\/${domain}\/" "${wildcardlist}" > /dev/null 2>&1 || bool=false @@ -192,12 +197,10 @@ RemoveDomain() { fi } +# Update Gravity Reload() { - # Reload hosts file echo "" - echo -e " ${INFO} Updating gravity..." 
- echo "" - pihole -g -sd + pihole -g --skip-download "${type:-}" } Displaylist() { @@ -243,6 +246,7 @@ fi PoplistFile -if ${reload}; then - Reload +if [[ "${reload}" != false ]]; then + # Ensure that "restart" is used for Wildcard updates + Reload "${reload}" fi diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index fbba3f74..b0957ab4 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash +# shellcheck disable=SC1090 + # Pi-hole: A black hole for Internet advertisements # (c) 2017 Pi-hole, LLC (https://pi-hole.net) # Network-wide ad blocking via your own hardware. @@ -30,6 +32,7 @@ Options: -f, fahrenheit Set Fahrenheit as preferred temperature unit -k, kelvin Set Kelvin as preferred temperature unit -r, hostrecord Add a name to the DNS associated to an IPv4/IPv6 address + -e, email Set an administrative contact address for the Block Page -h, --help Show this help dialog -i, interface Specify dnsmasq's interface listening behavior Add '-h' for more info on interface usage" @@ -226,20 +229,7 @@ Reboot() { } RestartDNS() { - local str="Restarting DNS service" - [[ -t 1 ]] && echo -ne " ${INFO} ${str}" - if command -v systemctl &> /dev/null; then - output=$( { systemctl restart dnsmasq; } 2>&1 ) - else - output=$( { service dnsmasq restart; } 2>&1 ) - fi - - if [[ -z "${output}" ]]; then - [[ -t 1 ]] && echo -e "${OVER} ${TICK} ${str}" - else - [[ ! -t 1 ]] && OVER="" - echo -e "${OVER} ${CROSS} ${output}" - fi + /usr/local/bin/pihole restartdns } SetQueryLogOptions() { @@ -427,6 +417,27 @@ Options: RestartDNS } +SetAdminEmail() { + if [[ "${1}" == *"-h"* ]]; then + echo "Usage: pihole -a email
+Example: 'pihole -a email admin@address.com' +Set an administrative contact address for the Block Page + +Options: + \"\" Empty: Remove admin contact + -h, --help Show this help dialog" + exit 0 + fi + + if [[ -n "${args[2]}" ]]; then + change_setting "ADMIN_EMAIL" "${args[2]}" + echo -e " ${TICK} Setting admin contact to ${args[2]}" + else + change_setting "ADMIN_EMAIL" "" + echo -e " ${TICK} Removing admin contact" + fi +} + SetListeningMode() { source "${setupVars}" @@ -497,6 +508,7 @@ main() { "addstaticdhcp" ) AddDHCPStaticAddress;; "removestaticdhcp" ) RemoveDHCPStaticAddress;; "-r" | "hostrecord" ) SetHostRecord "$3";; + "-e" | "email" ) SetAdminEmail "$3";; "-i" | "interface" ) SetListeningMode "$@";; "-t" | "teleporter" ) Teleporter;; "adlist" ) CustomizeAdLists;; diff --git a/advanced/blockingpage.css b/advanced/blockingpage.css index cf379eea..e74844d1 100644 --- a/advanced/blockingpage.css +++ b/advanced/blockingpage.css @@ -228,7 +228,6 @@ header #bpAlt label { .aboutImg { background: url("/admin/img/logo.svg") no-repeat center; background-size: 90px 90px; - border: 3px solid rgba(255,255,255,0.2); height: 90px; margin: 0 auto; padding: 2px; diff --git a/advanced/index.js b/advanced/index.js deleted file mode 100644 index 9e153e96..00000000 --- a/advanced/index.js +++ /dev/null @@ -1 +0,0 @@ -var x = "" diff --git a/advanced/index.php b/advanced/index.php index a44423e3..5c2f250d 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -9,32 +9,30 @@ // Sanitise HTTP_HOST output $serverName = htmlspecialchars($_SERVER["HTTP_HOST"]); +if (!is_file("/etc/pihole/setupVars.conf")) + die("[ERROR] File not found: /etc/pihole/setupVars.conf"); + // Get values from setupVars.conf -if (is_file("/etc/pihole/setupVars.conf")) { - $setupVars = parse_ini_file("/etc/pihole/setupVars.conf"); - $svFQDN = $setupVars["FQDN"]; - $svPasswd = !empty($setupVars["WEBPASSWORD"]); - $svEmail = (!empty($setupVars["ADMIN_EMAIL"]) && filter_var($setupVars["ADMIN_EMAIL"], FILTER_VALIDATE_EMAIL)) ? $setupVars["ADMIN_EMAIL"] : ""; - unset($setupVars); -} else { - die("[ERROR] File not found: /etc/pihole/setupVars.conf"); -} +$setupVars = parse_ini_file("/etc/pihole/setupVars.conf"); +$svPasswd = !empty($setupVars["WEBPASSWORD"]); +$svEmail = (!empty($setupVars["ADMIN_EMAIL"]) && filter_var($setupVars["ADMIN_EMAIL"], FILTER_VALIDATE_EMAIL)) ? 
$setupVars["ADMIN_EMAIL"] : ""; +unset($setupVars); // Set landing page location, found within /var/www/html/ $landPage = "../landing.php"; -// Set empty array for hostnames to be accepted as self address for splash page +// Define array for hostnames to be accepted as self address for splash page $authorizedHosts = []; - -// Append FQDN to $authorizedHosts -if (!empty($svFQDN)) array_push($authorizedHosts, $svFQDN); - -// Append virtual hostname to $authorizedHosts -if (!empty($_SERVER["VIRTUAL_HOST"])) { +if (!empty($_SERVER["FQDN"])) { + // If setenv.add-environment = ("fqdn" => "true") is configured in lighttpd, + // append $serverName to $authorizedHosts + array_push($authorizedHosts, $serverName); +} else if (!empty($_SERVER["VIRTUAL_HOST"])) { + // Append virtual hostname to $authorizedHosts array_push($authorizedHosts, $_SERVER["VIRTUAL_HOST"]); } -// Set which extension types render as Block Page (Including "" for index.wxyz) +// Set which extension types render as Block Page (Including "" for index.ext) $validExtTypes = array("asp", "htm", "html", "php", "rss", "xml", ""); // Get extension of current URL @@ -56,8 +54,9 @@ function setHeader($type = "x") { if (isset($type) && $type === "js") header("Content-Type: application/javascript"); } -// Determine block page redirect type +// Determine block page type if ($serverName === "pi.hole") { + // Redirect to Web Interface exit(header("Location: /admin")); } elseif (filter_var($serverName, FILTER_VALIDATE_IP) || in_array($serverName, $authorizedHosts)) { // Set Splash Page output @@ -68,21 +67,28 @@ if ($serverName === "pi.hole") {
Pi-hole: Your black hole for Internet advertisements "; - // Render splash page or landing page when directly browsing via IP or auth'd hostname + // Set splash/landing page based off presence of $landPage $renderPage = is_file(getcwd()."/$landPage") ? include $landPage : "$splashPage"; - unset($serverName, $svFQDN, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort); + + // Unset variables so as to not be included in $landPage + unset($serverName, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort); + + // Render splash/landing page when directly browsing via IP or authorised hostname exit($renderPage); } elseif ($currentUrlExt === "js") { - // Serve dummy Javascript for blocked domains + // Serve Pi-hole Javascript for blocked domains requesting JS exit(setHeader("js").'var x = "Pi-hole: A black hole for Internet advertisements."'); } elseif (strpos($_SERVER["REQUEST_URI"], "?") !== FALSE && isset($_SERVER["HTTP_REFERER"])) { - // Serve blank image upon receiving REQUEST_URI w/ query string & HTTP_REFERRER (e.g: an iframe of a blocked domain) + // Serve blank image upon receiving REQUEST_URI w/ query string & HTTP_REFERRER + // e.g: An iframe of a blocked domain exit(setHeader().' '); } elseif (!in_array($currentUrlExt, $validExtTypes) || substr_count($_SERVER["REQUEST_URI"], "?")) { - // Serve SVG upon receiving non $validExtTypes URL extension or query string (e.g: not an iframe of a blocked domain) + // Serve SVG upon receiving non $validExtTypes URL extension or query string + // e.g: Not an iframe of a blocked domain, such as when browsing to a file/query directly + // QoL addition: Allow the SVG to be clicked on in order to quickly show the full Block Page $blockImg = 'Blocked by Pi-hole'; exit(setHeader()." $viewPort @@ -95,7 +101,7 @@ if ($serverName === "pi.hole") { // Determine placeholder text based off $svPasswd presence $wlPlaceHolder = empty($svPasswd) ? "No admin password set" : "Javascript disabled"; -// Define admin email address text +// Define admin email address text based off $svEmail presence $bpAskAdmin = !empty($svEmail) ? 
'' : ""; // Determine if at least one block list has been generated @@ -120,8 +126,10 @@ if (empty($adlistsUrls)) // Get total number of blocklists (Including Whitelist, Blacklist & Wildcard lists) $adlistsCount = count($adlistsUrls) + 3; -// Get results of queryads.php exact search +// Set query timeout ini_set("default_socket_timeout", 3); + +// Logic for querying blocklists function queryAds($serverName) { // Determine the time it takes while querying adlists $preQueryTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]; @@ -131,32 +139,39 @@ function queryAds($serverName) { // Exception Handling try { - if ($queryTime >= ini_get("default_socket_timeout")) { + // Define Exceptions + if (strpos($queryAds[0], "No exact results") !== FALSE) { + // Return "none" into $queryAds array + return array("0" => "none"); + } else if ($queryTime >= ini_get("default_socket_timeout")) { + // Connection Timeout throw new Exception ("Connection timeout (".ini_get("default_socket_timeout")."s)"); } elseif (!strpos($queryAds[0], ".") !== false) { - if (strpos($queryAds[0], "No exact results") !== FALSE) return array("0" => "none"); + // Unknown $queryAds output throw new Exception ("Unhandled error message ($queryAds[0])"); } return $queryAds; } catch (Exception $e) { + // Return exception as array return array("0" => "error", "1" => $e->getMessage()); } - } +// Get results of queryads.php exact search $queryAds = queryAds($serverName); -if ($queryAds[0] === "error") { +// Pass error through to Block Page +if ($queryAds[0] === "error") die("[ERROR]: Unable to parse results from queryads.php: ".$queryAds[1].""); -} else { - $featuredTotal = count($queryAds); - // Place results into key => value array - $queryResults = null; - foreach ($queryAds as $str) { - $value = explode(" ", $str); - @$queryResults[$value[0]] .= "$value[1]"; - } +// Count total number of matching blocklists +$featuredTotal = count($queryAds); + +// Place results into key => value array +$queryResults = null; +foreach ($queryAds as $str) { + $value = explode(" ", $str); + @$queryResults[$value[0]] .= "$value[1]"; } // Determine if domain has been blacklisted, whitelisted, wildcarded or CNAME blocked @@ -174,7 +189,8 @@ if (strpos($queryAds[0], "blacklist") !== FALSE) { $featuredTotal = "0"; $notableFlagClass = "noblock"; - // Determine appropriate info message if CNAME exists + // QoL addition: Determine appropriate info message if CNAME exists + // Suggests to the user that $serverName has a CNAME (alias) that may be blocked $dnsRecord = dns_get_record("$serverName")[0]; if (array_key_exists("target", $dnsRecord)) { $wlInfo = $dnsRecord['target']; @@ -191,9 +207,12 @@ $wlOutput = (isset($wlInfo) && $wlInfo !== "recentwl") ? 
" /dev/null; then # These programs are stored in an array so they can be looped through later INSTALLER_DEPS=(apt-utils dialog debconf dhcpcd5 git ${iproute_pkg} whiptail) # Pi-hole itself has several dependencies that also need to be installed - PIHOLE_DEPS=(bc cron curl dnsmasq dnsutils iputils-ping lsof netcat sudo unzip wget) + PIHOLE_DEPS=(bc cron curl dnsmasq dnsutils iputils-ping lsof netcat sudo unzip wget idn2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code PIHOLE_WEB_DEPS=(lighttpd ${phpVer}-common ${phpVer}-cgi ${phpVer}-${phpSqlite}) @@ -208,7 +208,7 @@ elif command -v rpm &> /dev/null; then PKG_INSTALL=(${PKG_MANAGER} install -y) PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(dialog git iproute net-tools newt procps-ng) - PIHOLE_DEPS=(bc bind-utils cronie curl dnsmasq findutils nmap-ncat sudo unzip wget) + PIHOLE_DEPS=(bc bind-utils cronie curl dnsmasq findutils nmap-ncat sudo unzip wget idn2) PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php php-common php-cli php-pdo) if ! grep -q 'Fedora' /etc/redhat-release; then INSTALLER_DEPS=("${INSTALLER_DEPS[@]}" "epel-release"); @@ -1304,6 +1304,12 @@ installPiholeWeb() { install -d /var/www/html/pihole # and the blockpage install -D ${PI_HOLE_LOCAL_REPO}/advanced/{index,blockingpage}.* /var/www/html/pihole/ + + # Remove superseded file + if [[ -e "/var/www/html/pihole/index.js" ]]; then + rm "/var/www/html/pihole/index.js" + fi + echo -e "${OVER} ${TICK} ${str}" local str="Backing up index.lighttpd.html" @@ -1450,7 +1456,7 @@ finalExports() { # If the setup variable file exists, if [[ -e "${setupVars}" ]]; then # update the variables in the file - sed -i.update.bak '/PIHOLE_INTERFACE/d;/IPV4_ADDRESS/d;/IPV6_ADDRESS/d;/PIHOLE_DNS_1/d;/PIHOLE_DNS_2/d;/QUERY_LOGGING/d;/INSTALL_WEB/d;' "${setupVars}" + sed -i.update.bak '/PIHOLE_INTERFACE/d;/IPV4_ADDRESS/d;/IPV6_ADDRESS/d;/PIHOLE_DNS_1/d;/PIHOLE_DNS_2/d;/QUERY_LOGGING/d;/INSTALL_WEB/d;/LIGHTTPD_ENABLED/d;' "${setupVars}" fi # echo the information to the user { @@ -2064,13 +2070,13 @@ main() { fi fi - # Download and compile the aggregated block list - runGravity - # Enable FTL start_service pihole-FTL enable_service pihole-FTL + # Download and compile the aggregated block list + runGravity + # if [[ "${useUpdateVars}" == false ]]; then displayFinalMessage "${pw}" diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index 08869b2e..2f4f4f9f 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -36,16 +36,29 @@ else fi fi +readonly PI_HOLE_FILES_DIR="/etc/.pihole" +PH_TEST="true" +source "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh" +# setupVars set in basic-install.sh +source "${setupVars}" + +# distro_check() sourced from basic-install.sh +distro_check + +# Install packages used by the Pi-hole +if [[ "${INSTALL_WEB}" == true ]]; then + # Install the Web dependencies + DEPS=("${INSTALLER_DEPS[@]}" "${PIHOLE_DEPS[@]}" "${PIHOLE_WEB_DEPS[@]}") +# Otherwise, +else + # just install the Core dependencies + DEPS=("${INSTALLER_DEPS[@]}" "${PIHOLE_DEPS[@]}") +fi + # Compatability if [ -x "$(command -v rpm)" ]; then # Fedora Family - if [ -x "$(command -v dnf)" ]; then - PKG_MANAGER="dnf" - else - PKG_MANAGER="yum" - fi PKG_REMOVE="${PKG_MANAGER} remove -y" - PIHOLE_DEPS=( bind-utils bc dnsmasq lighttpd lighttpd-fastcgi php-common php-pdo git curl unzip wget findutils ) 
package_check() { rpm -qa | grep ^$1- > /dev/null } @@ -54,9 +67,7 @@ if [ -x "$(command -v rpm)" ]; then } elif [ -x "$(command -v apt-get)" ]; then # Debian Family - PKG_MANAGER="apt-get" PKG_REMOVE="${PKG_MANAGER} -y remove --purge" - PIHOLE_DEPS=( dnsutils bc dnsmasq lighttpd php5-common php5-sqlite git curl unzip wget ) package_check() { dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -c "ok installed" } @@ -72,7 +83,7 @@ fi removeAndPurge() { # Purge dependencies echo "" - for i in "${PIHOLE_DEPS[@]}"; do + for i in "${DEPS[@]}"; do package_check ${i} > /dev/null if [[ "$?" -eq 0 ]]; then while true; do @@ -92,7 +103,7 @@ removeAndPurge() { done # Remove dnsmasq config files - ${SUDO} rm /etc/dnsmasq.conf /etc/dnsmasq.conf.orig /etc/dnsmasq.d/01-pihole.conf &> /dev/null + ${SUDO} rm -f /etc/dnsmasq.conf /etc/dnsmasq.conf.orig /etc/dnsmasq.d/01-pihole.conf &> /dev/null echo -e " ${TICK} Removing dnsmasq config files" # Take care of any additional package cleaning @@ -109,7 +120,7 @@ removeNoPurge() { echo -ne " ${INFO} Removing Web Interface..." ${SUDO} rm -rf /var/www/html/admin &> /dev/null ${SUDO} rm -rf /var/www/html/pihole &> /dev/null - ${SUDO} rm /var/www/html/index.lighttpd.orig &> /dev/null + ${SUDO} rm -f /var/www/html/index.lighttpd.orig &> /dev/null # If the web directory is empty after removing these files, then the parent html folder can be removed. if [ -d "/var/www/html" ]; then @@ -132,7 +143,7 @@ removeNoPurge() { # Attempt to preserve backwards compatibility with older versions if [[ -f /etc/cron.d/pihole ]];then - ${SUDO} rm /etc/cron.d/pihole &> /dev/null + ${SUDO} rm -f /etc/cron.d/pihole &> /dev/null echo -e " ${TICK} Removed /etc/cron.d/pihole" fi @@ -146,15 +157,15 @@ removeNoPurge() { fi fi - ${SUDO} rm /etc/dnsmasq.d/adList.conf &> /dev/null - ${SUDO} rm /etc/dnsmasq.d/01-pihole.conf &> /dev/null + ${SUDO} rm -f /etc/dnsmasq.d/adList.conf &> /dev/null + ${SUDO} rm -f /etc/dnsmasq.d/01-pihole.conf &> /dev/null ${SUDO} rm -rf /var/log/*pihole* &> /dev/null ${SUDO} rm -rf /etc/pihole/ &> /dev/null ${SUDO} rm -rf /etc/.pihole/ &> /dev/null ${SUDO} rm -rf /opt/pihole/ &> /dev/null - ${SUDO} rm /usr/local/bin/pihole &> /dev/null - ${SUDO} rm /etc/bash_completion.d/pihole &> /dev/null - ${SUDO} rm /etc/sudoers.d/pihole &> /dev/null + ${SUDO} rm -f /usr/local/bin/pihole &> /dev/null + ${SUDO} rm -f /etc/bash_completion.d/pihole &> /dev/null + ${SUDO} rm -f /etc/sudoers.d/pihole &> /dev/null echo -e " ${TICK} Removed config files" # Remove FTL @@ -167,9 +178,8 @@ removeNoPurge() { service pihole-FTL stop fi - ${SUDO} rm /etc/init.d/pihole-FTL - ${SUDO} rm /usr/bin/pihole-FTL - + ${SUDO} rm -f /etc/init.d/pihole-FTL + ${SUDO} rm -f /usr/bin/pihole-FTL echo -e "${OVER} ${TICK} Removed pihole-FTL" fi @@ -198,7 +208,13 @@ else echo -e " ${INFO} Be sure to confirm if any dependencies should not be removed" fi while true; do - read -rp " ${QST} Do you wish to go through each dependency for removal? [Y/n] " yn + echo -e " ${INFO} ${COL_YELLOW}The following dependencies may have been added by the Pi-hole install:" + echo -n " " + for i in "${DEPS[@]}"; do + echo -n "${i} " + done + echo "${COL_NC}" + read -rp " ${QST} Do you wish to go through each dependency for removal? 
(Choosing No will leave all dependencies installed) [Y/n] " yn case ${yn} in [Yy]* ) removeAndPurge; break;; [Nn]* ) removeNoPurge; break;; diff --git a/gravity.sh b/gravity.sh index c399eb53..f4b5fc36 100755 --- a/gravity.sh +++ b/gravity.sh @@ -1,17 +1,586 @@ #!/usr/bin/env bash +# shellcheck disable=SC1090 + # Pi-hole: A black hole for Internet advertisements # (c) 2017 Pi-hole, LLC (https://pi-hole.net) # Network-wide ad blocking via your own hardware. # +# Usage: "pihole -g" # Compiles a list of ad-serving domains by downloading them from multiple sources # # This file is copyright under the latest version of the EUPL. # Please see LICENSE file for your rights under this license. -# Run this script as root or under sudo - coltable="/opt/pihole/COL_TABLE" -source ${coltable} +source "${coltable}" + +basename="pihole" +PIHOLE_COMMAND="/usr/local/bin/${basename}" + +piholeDir="/etc/${basename}" +piholeRepo="/etc/.${basename}" + +adListFile="${piholeDir}/adlists.list" +adListDefault="${piholeDir}/adlists.default" +adListRepoDefault="${piholeRepo}/adlists.default" + +whitelistFile="${piholeDir}/whitelist.txt" +blacklistFile="${piholeDir}/blacklist.txt" +wildcardFile="/etc/dnsmasq.d/03-pihole-wildcard.conf" + +adList="${piholeDir}/gravity.list" +blackList="${piholeDir}/black.list" +localList="${piholeDir}/local.list" +VPNList="/etc/openvpn/ipp.txt" + +domainsExtension="domains" +matterAndLight="${basename}.0.matterandlight.txt" +parsedMatter="${basename}.1.parsedmatter.txt" +whitelistMatter="${basename}.2.whitelistmatter.txt" +accretionDisc="${basename}.3.accretionDisc.txt" +preEventHorizon="list.preEventHorizon" + +skipDownload="false" + +# Source setupVars from install script +setupVars="${piholeDir}/setupVars.conf" +if [[ -f "${setupVars}" ]];then + source "${setupVars}" + + # Remove CIDR mask from IPv4/6 addresses + IPV4_ADDRESS="${IPV4_ADDRESS%/*}" + IPV6_ADDRESS="${IPV6_ADDRESS%/*}" + + # Determine if IPv4/6 addresses exist + if [[ -z "${IPV4_ADDRESS}" ]] && [[ -z "${IPV6_ADDRESS}" ]]; then + echo -e " ${COL_LIGHT_RED}No IP addresses found! Please run 'pihole -r' to reconfigure${COL_NC}" + exit 1 + fi +else + echo -e " ${COL_LIGHT_RED}Installation Failure: ${setupVars} does not exist! ${COL_NC} + Please run 'pihole -r', and choose the 'reconfigure' option to fix." + exit 1 +fi + +# Determine if superseded pihole.conf exists +if [[ -r "${piholeDir}/pihole.conf" ]]; then + echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}" +fi + +# Determine if DNS resolution is available before proceeding +gravity_DNSLookup() { + local lookupDomain="pi.hole" plural="" + + # Determine if $localList does not exist + if [[ ! -e "${localList}" ]]; then + lookupDomain="raw.githubusercontent.com" + fi + + # Determine if $lookupDomain is resolvable + if timeout 1 getent hosts "${lookupDomain}" &> /dev/null; then + # Print confirmation of resolvability if it had previously failed + if [[ -n "${secs:-}" ]]; then + echo -e "${OVER} ${TICK} DNS resolution is now available\\n" + fi + return 0 + elif [[ -n "${secs:-}" ]]; then + echo -e "${OVER} ${CROSS} DNS resolution is not available" + exit 1 + fi + + # Determine error output message + if pidof dnsmasq &> /dev/null; then + echo -e " ${CROSS} DNS resolution is currently unavailable" + else + echo -e " ${CROSS} DNS service is not running" + "${PIHOLE_COMMAND}" restartdns + fi + + # Ensure DNS server is given time to be resolvable + secs="120" + echo -ne " ${INFO} Waiting up to ${secs} seconds before continuing..." 
+ until timeout 1 getent hosts "${lookupDomain}" &> /dev/null; do + [[ "${secs:-}" -eq 0 ]] && break + [[ "${secs:-}" -ne 1 ]] && plural="s" + echo -ne "${OVER} ${INFO} Waiting up to ${secs} second${plural} before continuing..." + : $((secs--)) + sleep 1 + done + + # Try again + gravity_DNSLookup +} + +# Retrieve blocklist URLs and parse domains from adlists.list +gravity_Collapse() { + echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..." + + # Determine if adlists file needs handling + if [[ ! -f "${adListFile}" ]]; then + # Create "adlists.list" by copying "adlists.default" from internal core repo + cp "${adListRepoDefault}" "${adListFile}" 2> /dev/null || \ + echo -e " ${CROSS} Unable to copy ${adListFile##*/} from ${piholeRepo}" + elif [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then + # Remove superceded $adListDefault file + rm "${adListDefault}" 2> /dev/null || \ + echo -e " ${CROSS} Unable to remove ${adListDefault}" + fi + + local str="Pulling blocklist source list into range" + echo -ne " ${INFO} ${str}..." + + # Retrieve source URLs from $adListFile + # Logic: Remove comments and empty lines + mapfile -t sources <<< "$(grep -v -E "^(#|$)" "${adListFile}" 2> /dev/null)" + + # Parse source domains from $sources + mapfile -t sourceDomains <<< "$( + # Logic: Split by folder/port + awk -F '[/:]' '{ + # Remove URL protocol & optional username:password@ + gsub(/(.*:\/\/|.*:.*@)/, "", $0) + print $1 + }' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null + )" + + if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then + echo -e "${OVER} ${TICK} ${str}" + else + echo -e "${OVER} ${CROSS} ${str}" + gravity_Cleanup "error" + fi +} + +# Define options for when retrieving blocklists +gravity_Supernova() { + local url domain agent cmd_ext str + + echo "" + + # Loop through $sources and download each one + for ((i = 0; i < "${#sources[@]}"; i++)); do + url="${sources[$i]}" + domain="${sourceDomains[$i]}" + + # Save the file as list.#.domain + saveLocation="${piholeDir}/list.${i}.${domain}.${domainsExtension}" + activeDomains[$i]="${saveLocation}" + + # Default user-agent (for Cloudflare's Browser Integrity Check: https://support.cloudflare.com/hc/en-us/articles/200170086-What-does-the-Browser-Integrity-Check-do-) + agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36" + + # Provide special commands for blocklists which may need them + case "${domain}" in + "pgl.yoyo.org") cmd_ext="-d mimetype=plaintext -d hostformat=hosts";; + *) cmd_ext="";; + esac + + if [[ "${skipDownload}" == false ]]; then + echo -e " ${INFO} Target: ${domain} (${url##*/})" + gravity_Pull "${url}" "${cmd_ext}" "${agent}" + echo "" + fi + done + gravity_Blackbody=true +} + +# Download specified URL and perform checks on HTTP status and file content +gravity_Pull() { + local url="${1}" cmd_ext="${2}" agent="${3}" heisenbergCompensator="" patternBuffer str httpCode success="" + + # Create temp file to store content on disk instead of RAM + patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb") + + # Determine if $saveLocation has read permission + if [[ -r "${saveLocation}" ]]; then + # Have curl determine if a remote file has been modified since last retrieval + # Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls) + heisenbergCompensator="-z ${saveLocation}" + fi + + str="Status:" + echo -ne " ${INFO} ${str} Pending..." 
+ # shellcheck disable=SC2086 + httpCode=$(curl -s -L ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${patternBuffer}" 2> /dev/null) + + # Determine "Status:" output based on HTTP response + case "${httpCode}" in + "200") echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true;; + "304") echo -e "${OVER} ${TICK} ${str} No changes detected"; success=true;; + "000") echo -e "${OVER} ${CROSS} ${str} Connection Refused";; + "403") echo -e "${OVER} ${CROSS} ${str} Forbidden";; + "404") echo -e "${OVER} ${CROSS} ${str} Not found";; + "408") echo -e "${OVER} ${CROSS} ${str} Time-out";; + "451") echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons";; + "500") echo -e "${OVER} ${CROSS} ${str} Internal Server Error";; + "504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)";; + "521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)";; + "522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)";; + * ) echo -e "${OVER} ${CROSS} ${str} ${httpCode}";; + esac + + # Determine if the blocklist was downloaded and saved correctly + if [[ "${success}" == true ]]; then + if [[ "${httpCode}" == "304" ]]; then + : # Do not attempt to re-parse file + # Check if $patternbuffer is a non-zero length file + elif [[ -s "${patternBuffer}" ]]; then + # Determine if blocklist is non-standard and parse as appropriate + gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" + else + # Fall back to previously cached list if $patternBuffer is empty + echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" + fi + else + # Determine if cached list has read permission + if [[ -r "${saveLocation}" ]]; then + echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" + else + echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" + fi + fi +} + +# Parse source files into domains format +gravity_ParseFileIntoDomains() { + local source="${1}" destination="${2}" commentPattern firstLine abpFilter + + # Determine if we are parsing a consolidated list + if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then + # Define symbols used as comments: #;@![/ + commentPattern="[#;@![\\/]" + + # Parse Domains/Hosts files by removing comments & host IPs + # Logic: Ignore lines which begin with comments + awk '!/^'"${commentPattern}"'/ { + # Determine if there are multiple words seperated by a space + if(NF>1) { + # Remove comments (including prefixed spaces/tabs) + if($0 ~ /'"${commentPattern}"'/) { gsub("( |\t)'"${commentPattern}"'.*", "", $0) } + # Determine if there are aliased domains + if($3) { + # Remove IP address + $1="" + # Remove space which is left in $0 when removing $1 + gsub("^ ", "", $0) + print $0 + } else if($2) { + # Print single domain without IP + print $2 + } + # If there are no words seperated by space + } else if($1) { + print $1 + } + }' "${source}" 2> /dev/null > "${destination}" + return 0 + fi + + # Individual file parsing: Keep comments, while parsing domains from each line + # We keep comments to respect the list maintainer's licensing + read -r firstLine < "${source}" + + # Determine how to parse individual source file formats + if [[ "${firstLine,,}" =~ (adblock|ublock|^!) ]]; then + # Compare $firstLine against lower case words found in Adblock lists + echo -ne " ${INFO} Format: Adblock" + + # Define symbols used as comments: [! 
+ # "||.*^" includes the "Example 2" domains we can extract + # https://adblockplus.org/filter-cheatsheet + abpFilter="/^(\\[|!)|^(\\|\\|.*\\^)/" + + # Parse Adblock lists by extracting "Example 2" domains + # Logic: Ignore lines which do not include comments or domain name anchor + awk ''"${abpFilter}"' { + # Remove valid adblock type options + gsub(/\$?~?(important|third-party|popup|subdocument|websocket),?/, "", $0) + # Remove starting domain name anchor "||" and ending seperator "^" + gsub(/^(\|\|)|(\^)/, "", $0) + # Remove invalid characters (*/,=$) + if($0 ~ /[*\/,=\$]/) { $0="" } + # Remove lines which are only IPv4 addresses + if($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" } + if($0) { print $0 } + }' "${source}" > "${destination}" + + # Determine if there are Adblock exception rules + # https://adblockplus.org/filters + if grep -q "^@@||" "${source}" &> /dev/null; then + # Parse Adblock lists by extracting exception rules + # Logic: Ignore lines which do not include exception format "@@||example.com^" + awk -F "[|^]" '/^@@\|\|.*\^/ { + # Remove valid adblock type options + gsub(/\$?~?(third-party)/, "", $0) + # Remove invalid characters (*/,=$) + if($0 ~ /[*\/,=\$]/) { $0="" } + if($3) { print $3 } + }' "${source}" > "${destination}.exceptionsFile.tmp" + + # Remove exceptions + grep -F -x -v -f "${destination}.exceptionsFile.tmp" "${destination}" > "${source}" + mv "${source}" "${destination}" + fi + + echo -e "${OVER} ${TICK} Format: Adblock" + elif grep -q "^address=/" "${source}" &> /dev/null; then + # Parse Dnsmasq format lists + echo -e " ${CROSS} Format: Dnsmasq (list type not supported)" + elif grep -q -E "^https?://" "${source}" &> /dev/null; then + # Parse URL list if source file contains "http://" or "https://" + # Scanning for "^IPv4$" is too slow with large (1M) lists on low-end hardware + echo -ne " ${INFO} Format: URL" + + awk '{ + # Remove URL protocol, optional "username:password@", and ":?/;" + if ($0 ~ /[:?\/;]/) { gsub(/(^.*:\/\/(.*:.*@)?|[:?\/;].*)/, "", $0) } + # Remove lines which are only IPv4 addresses + if ($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" } + if ($0) { print $0 } + }' "${source}" 2> /dev/null > "${destination}" + + echo -e "${OVER} ${TICK} Format: URL" + else + # Default: Keep hosts/domains file in same format as it was downloaded + output=$( { mv "${source}" "${destination}"; } 2>&1 ) + + if [[ ! -e "${destination}" ]]; then + echo -e "\\n ${CROSS} Unable to move tmp file to ${piholeDir} + ${output}" + gravity_Cleanup "error" + fi + fi +} + +# Create (unfiltered) "Matter and Light" consolidated list +gravity_Schwarzschild() { + local str lastLine + + str="Consolidating blocklists" + echo -ne " ${INFO} ${str}..." 
+ + # Empty $matterAndLight if it already exists, otherwise, create it + : > "${piholeDir}/${matterAndLight}" + + # Loop through each *.domains file + for i in "${activeDomains[@]}"; do + # Determine if file has read permissions, as download might have failed + if [[ -r "${i}" ]]; then + # Remove windows CRs from file, convert list to lower case, and append into $matterAndLight + tr -d '\r' < "${i}" | tr '[:upper:]' '[:lower:]' >> "${piholeDir}/${matterAndLight}" + + # Ensure that the first line of a new list is on a new line + lastLine=$(tail -1 "${piholeDir}/${matterAndLight}") + if [[ "${#lastLine}" -gt 0 ]]; then + echo "" >> "${piholeDir}/${matterAndLight}" + fi + fi + done + + echo -e "${OVER} ${TICK} ${str}" +} + +# Parse consolidated list into (filtered, unique) domains-only format +gravity_Filter() { + local str num + + str="Extracting domains from blocklists" + echo -ne " ${INFO} ${str}..." + + # Parse into hosts file + gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}" + + # Format $parsedMatter line total as currency + num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")") + echo -e "${OVER} ${TICK} ${str} + ${INFO} ${COL_BLUE}${num}${COL_NC} domains being pulled in by gravity" + + str="Removing duplicate domains" + echo -ne " ${INFO} ${str}..." + sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}" + echo -e "${OVER} ${TICK} ${str}" + + # Format $preEventHorizon line total as currency + num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") + echo -e " ${INFO} ${COL_BLUE}${num}${COL_NC} unique domains trapped in the Event Horizon" +} + +# Whitelist unique blocklist domain sources +gravity_WhitelistBLD() { + local uniqDomains plural="" str + + echo "" + + # Create array of unique $sourceDomains + mapfile -t uniqDomains <<< "$(awk '{ if(!a[$1]++) { print $1 } }' <<< "$(printf '%s\n' "${sourceDomains[@]}")")" + [[ "${#uniqDomains[@]}" -ne 1 ]] && plural="s" + + str="Adding ${#uniqDomains[@]} blocklist source domain${plural} to the whitelist" + echo -ne " ${INFO} ${str}..." + + # Whitelist $uniqDomains + "${PIHOLE_COMMAND}" -w -nr -q "${uniqDomains[*]}" &> /dev/null + + echo -e "${OVER} ${TICK} ${str}" +} + +# Whitelist user-defined domains +gravity_Whitelist() { + local num plural="" str + + if [[ ! -f "${whitelistFile}" ]]; then + echo -e " ${INFO} Nothing to whitelist!" + return 0 + fi + + num=$(wc -l < "${whitelistFile}") + [[ "${num}" -ne 1 ]] && plural="s" + str="Whitelisting ${num} domain${plural}" + echo -ne " ${INFO} ${str}..." 
+ + # Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile + grep -F -x -v -f "${whitelistFile}" "${piholeDir}/${preEventHorizon}" > "${piholeDir}/${whitelistMatter}" + + echo -e "${OVER} ${TICK} ${str}" +} + +# Output count of blacklisted domains and wildcards +gravity_ShowBlockCount() { + local num plural + + if [[ -f "${blacklistFile}" ]]; then + num=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")") + plural=; [[ "${num}" -ne 1 ]] && plural="s" + echo -e " ${INFO} Blacklisted ${num} domain${plural}" + fi + + if [[ -f "${wildcardFile}" ]]; then + num=$(grep -c "^" "${wildcardFile}") + # If IPv4 and IPv6 is used, divide total wildcard count by 2 + if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then + num=$(( num/2 )) + fi + plural=; [[ "${num}" -ne 1 ]] && plural="s" + echo -e " ${INFO} Wildcard blocked ${num} domain${plural}" + fi +} + +# Parse list of domains into hosts format +gravity_ParseDomainsIntoHosts() { + awk -v ipv4="$IPV4_ADDRESS" -v ipv6="$IPV6_ADDRESS" '{ + # Remove windows CR line endings + sub(/\r$/, "") + # Parse each line as "ipaddr domain" + if(ipv6 && ipv4) { + print ipv4" "$0"\n"ipv6" "$0 + } else if(!ipv6) { + print ipv4" "$0 + } else { + print ipv6" "$0 + } + }' >> "${2}" < "${1}" +} + +# Create "localhost" entries into hosts format +gravity_ParseLocalDomains() { + local hostname + + if [[ -s "/etc/hostname" ]]; then + hostname=$(< "/etc/hostname") + elif command -v hostname &> /dev/null; then + hostname=$(hostname -f) + else + echo -e " ${CROSS} Unable to determine fully qualified domain name of host" + return 0 + fi + + echo -e "${hostname}\\npi.hole" > "${localList}.tmp" + + # Empty $localList if it already exists, otherwise, create it + : > "${localList}" + + gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}" + + # Add additional LAN hosts provided by OpenVPN (if available) + if [[ -f "${VPNList}" ]]; then + awk -F, '{printf $2"\t"$1".vpn\n"}' "${VPNList}" >> "${localList}" + fi +} + +# Create primary blacklist entries +gravity_ParseBlacklistDomains() { + local output status + + # Empty $accretionDisc if it already exists, otherwise, create it + : > "${piholeDir}/${accretionDisc}" + + gravity_ParseDomainsIntoHosts "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}" + + # Move the file over as /etc/pihole/gravity.list so dnsmasq can use it + output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}" + gravity_Cleanup "error" + fi +} + +# Create user-added blacklist entries +gravity_ParseUserDomains() { + if [[ ! -f "${blacklistFile}" ]]; then + return 0 + fi + + gravity_ParseDomainsIntoHosts "${blacklistFile}" "${blackList}.tmp" + # Copy the file over as /etc/pihole/black.list so dnsmasq can use it + mv "${blackList}.tmp" "${blackList}" 2> /dev/null || \ + echo -e "\\n ${CROSS} Unable to move ${blackList##*/}.tmp to ${piholeDir}" +} + +# Trap Ctrl-C +gravity_Trap() { + trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT +} + +# Clean up after Gravity upon exit or cancellation +gravity_Cleanup() { + local error="${1:-}" + + str="Cleaning up stray matter" + echo -ne " ${INFO} ${str}..." 
+ + # Delete tmp content generated by Gravity + rm ${piholeDir}/pihole.*.txt 2> /dev/null + rm ${piholeDir}/*.tmp 2> /dev/null + rm /tmp/*.phgpb 2> /dev/null + + # Ensure this function only runs when gravity_Supernova() has completed + if [[ "${gravity_Blackbody:-}" == true ]]; then + # Remove any unused .domains files + for file in ${piholeDir}/*.${domainsExtension}; do + # If list is not in active array, then remove it + if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then + rm -f "${file}" 2> /dev/null || \ + echo -e " ${CROSS} Failed to remove ${file##*/}" + fi + done + fi + + echo -e "${OVER} ${TICK} ${str}" + + # Only restart DNS service if offline + if ! pidof dnsmasq &> /dev/null; then + "${PIHOLE_COMMAND}" restartdns + dnsWasOffline=true + fi + + # Print Pi-hole status if an error occured + if [[ -n "${error}" ]]; then + "${PIHOLE_COMMAND}" status + exit 1 + fi +} helpFunc() { echo "Usage: pihole -g @@ -23,460 +592,74 @@ Options: exit 0 } -PIHOLE_COMMAND="/usr/local/bin/pihole" - -adListFile=/etc/pihole/adlists.list -adListDefault=/etc/pihole/adlists.default # Deprecated -adListRepoDefault=/etc/.pihole/adlists.default -whitelistScript="${PIHOLE_COMMAND} -w" -whitelistFile=/etc/pihole/whitelist.txt -blacklistFile=/etc/pihole/blacklist.txt -readonly wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf" - -# Source the setupVars from install script for the IP -setupVars=/etc/pihole/setupVars.conf -if [[ -f "${setupVars}" ]];then - . /etc/pihole/setupVars.conf -else - echo -e " ${COL_LIGHT_RED}Error: /etc/pihole/setupVars.conf missing. Possible installation failure.${COL_NC} - Please run 'pihole -r', and choose the 'reconfigure' option to reconfigure." - exit 1 -fi - -# Remove the /* from the end of the IP addresses -IPV4_ADDRESS=${IPV4_ADDRESS%/*} -IPV6_ADDRESS=${IPV6_ADDRESS%/*} - -# Variables for various stages of downloading and formatting the list -basename=pihole -piholeDir=/etc/${basename} -adList=${piholeDir}/gravity.list -blackList=${piholeDir}/black.list -localList=${piholeDir}/local.list -VPNList=/etc/openvpn/ipp.txt -justDomainsExtension=domains -matterAndLight=${basename}.0.matterandlight.txt -supernova=${basename}.1.supernova.txt -preEventHorizon=list.preEventHorizon -eventHorizon=${basename}.2.supernova.txt -accretionDisc=${basename}.3.accretionDisc.txt - -skipDownload=false - -# Warn users still using pihole.conf that it no longer has any effect -if [[ -r ${piholeDir}/pihole.conf ]]; then -echo -e " ${COL_LIGHT_RED}pihole.conf file no longer supported. Overrides in this file are ignored.${COL_NC}" -fi - -########################### -# Collapse - begin formation of pihole -gravity_collapse() { - - #New Logic: - # Does /etc/pihole/adlists.list exist? If so leave it alone - # If not, cp /etc/.pihole/adlists.default /etc/pihole/adlists.list - # Read from adlists.list - - # The following two blocks will sort out any missing adlists in the /etc/pihole directory, and remove legacy adlists.default - if [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then - rm "${adListDefault}" - fi - - if [ ! -f "${adListFile}" ]; then - cp "${adListRepoDefault}" "${adListFile}" - fi - - echo -e " ${INFO} Neutrino emissions detected..." - echo "" - local str="Pulling source lists into range" - echo -ne " ${INFO} ${str}..." - - sources=() - while IFS= read -r line || [[ -n "$line" ]]; do - # Do not read commented out or blank lines - if [[ ${line} = \#* ]] || [[ ! 
${line} ]]; then - echo "" > /dev/null - else - sources+=(${line}) - fi - done < ${adListFile} - - echo -e "${OVER} ${TICK} ${str}" -} - -# patternCheck - check to see if curl downloaded any new files. -gravity_patternCheck() { - patternBuffer=$1 - success=$2 - error=$3 - if [[ "${success}" = true ]]; then - # Check if download was successful but list has not been modified - if [[ "${error}" == "304" ]]; then - echo -e " ${TICK} No changes detected, transport skipped!" - # Check if the patternbuffer is a non-zero length file - elif [[ -s "${patternBuffer}" ]]; then - # Some blocklists are copyright, they need to be downloaded and stored - # as is. They can be processed for content after they have been saved. - mv "${patternBuffer}" "${saveLocation}" - echo -e " ${TICK} List updated, transport successful!" - else - # Empty file -> use previously downloaded list - echo -e " ${INFO} Received empty file, ${COL_LIGHT_GREEN}using cached one${COL_NC} (list not updated!)" - fi - else - # Check if cached list exists - if [[ -r "${saveLocation}" ]]; then - echo -e " ${CROSS} List download failed, using cached list (list not updated!)" - else - echo -e " ${CROSS} Download failed and no cached list available (list will not be considered)" - fi - fi -} - -# transport - curl the specified url with any needed command extentions -gravity_transport() { - url=$1 - cmd_ext=$2 - agent=$3 - - # tmp file, so we don't have to store the (long!) lists in RAM - patternBuffer=$(mktemp) - heisenbergCompensator="" - if [[ -r ${saveLocation} ]]; then - # If domain has been saved, add file for date check to only download newer - heisenbergCompensator="-z ${saveLocation}" - fi - - # Silently curl url - echo -e "${OVER} ${TICK} ${str}" - local str="Status:" - echo -ne " ${INFO} ${str} Pending" - err=$(curl -s -L ${cmd_ext} ${heisenbergCompensator} -w %{http_code} -A "${agent}" ${url} -o ${patternBuffer}) - - # Analyze http response - case "$err" in - "200" ) echo -e "${OVER} ${TICK} ${str} Success (OK)"; success=true;; - "304" ) echo -e "${OVER} ${TICK} ${str} Not modified"; success=true;; - "403" ) echo -e "${OVER} ${CROSS} ${str} Forbidden"; success=false;; - "404" ) echo -e "${OVER} ${CROSS} ${str} Not found"; success=false;; - "408" ) echo -e "${OVER} ${CROSS} ${str} Time-out"; success=false;; - "451" ) echo -e "${OVER} ${CROSS} ${str} Unavailable For Legal Reasons"; success=false;; - "521" ) echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)"; success=false;; - "522" ) echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)"; success=false;; - "500" ) echo -e "${OVER} ${CROSS} ${str} Internal Server Error"; success=false;; - * ) echo -e "${OVER} ${CROSS} ${str} Status $err"; success=false;; - esac - - # Process result - gravity_patternCheck "${patternBuffer}" "${success}" "${err}" - - # Delete temp file if it hasn't been moved - if [[ -f "${patternBuffer}" ]]; then - rm "${patternBuffer}" - fi -} - -# spinup - main gravity function -gravity_spinup() { - echo "" - # Loop through domain list. 
Download each one and remove commented lines (lines beginning with '# 'or '/') and # blank lines - for ((i = 0; i < "${#sources[@]}"; i++)); do - url=${sources[$i]} - # Get just the domain from the URL - domain=$(cut -d'/' -f3 <<< "${url}") - - # Save the file as list.#.domain - saveLocation=${piholeDir}/list.${i}.${domain}.${justDomainsExtension} - activeDomains[$i]=${saveLocation} - - agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36" - - # Use a case statement to download lists that need special cURL commands - # to complete properly and reset the user agent when required - case "${domain}" in - "pgl.yoyo.org") - cmd_ext="-d mimetype=plaintext -d hostformat=hosts" - ;; - - # Default is a simple request - *) cmd_ext="" - esac - - if [[ "${skipDownload}" == false ]]; then - local str="Aiming tractor beam at $domain" - echo -ne " ${INFO} ${str}..." - - gravity_transport "$url" "$cmd_ext" "$agent" "$str" - echo "" - fi - done -} - -# Schwarzchild - aggregate domains to one list and add blacklisted domains -gravity_Schwarzchild() { - echo "" - # Find all active domains and compile them into one file and remove CRs - local str="Aggregating list of domains" - echo -ne " ${INFO} ${str}..." - - truncate -s 0 ${piholeDir}/${matterAndLight} - for i in "${activeDomains[@]}"; do - # Only assimilate list if it is available (download might have failed permanently) - if [[ -r "${i}" ]]; then - cat "${i}" | tr -d '\r' >> ${piholeDir}/${matterAndLight} - fi - done - - echo -e "${OVER} ${TICK} ${str}" -} - -gravity_Blacklist() { - # Append blacklist entries to eventHorizon if they exist - if [[ -f "${blacklistFile}" ]]; then - numBlacklisted=$(wc -l < "${blacklistFile}") - plural=; [[ "$numBlacklisted" != "1" ]] && plural=s - local str="Exact blocked domain${plural}: $numBlacklisted" - echo -e " ${INFO} ${str}" - else - echo -e " ${INFO} Nothing to blacklist!" - fi -} - -gravity_Wildcard() { - # Return number of wildcards in output - don't actually handle wildcards - if [[ -f "${wildcardlist}" ]]; then - numWildcards=$(grep -c ^ "${wildcardlist}") - if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then - let numWildcards/=2 - fi - plural=; [[ "$numWildcards" != "1" ]] && plural=s - echo -e " ${INFO} Wildcard blocked domain${plural}: $numWildcards" - else - echo -e " ${INFO} No wildcards used!" - fi - -} - -gravity_Whitelist() { - echo "" - # Prevent our sources from being pulled into the hole - plural=; [[ "${sources[@]}" != "1" ]] && plural=s - local str="Adding adlist source${plural} to the whitelist" - echo -ne " ${INFO} ${str}..." - - urls=() - for url in "${sources[@]}"; do - tmp=$(awk -F '/' '{print $3}' <<< "${url}") - urls=("${urls[@]}" ${tmp}) - done - - echo -e "${OVER} ${TICK} ${str}" - - # Ensure adlist domains are in whitelist.txt - ${whitelistScript} -nr -q "${urls[@]}" > /dev/null - - # Check whitelist.txt exists. - if [[ -f "${whitelistFile}" ]]; then - # Remove anything in whitelist.txt from the Event Horizon - numWhitelisted=$(wc -l < "${whitelistFile}") - plural=; [[ "$numWhitelisted" != "1" ]] && plural=s - local str="Whitelisting $numWhitelisted domain${plural}" - echo -ne " ${INFO} ${str}..." - - # Print everything from preEventHorizon into eventHorizon EXCEPT domains in whitelist.txt - grep -F -x -v -f ${whitelistFile} ${piholeDir}/${preEventHorizon} > ${piholeDir}/${eventHorizon} - - echo -e "${OVER} ${TICK} ${str}" - else - echo -e " ${INFO} Nothing to whitelist!" 
- fi -} - -gravity_unique() { - # Sort and remove duplicates - local str="Removing duplicate domains" - echo -ne " ${INFO} ${str}..." - - sort -u -f ${piholeDir}/${supernova} > ${piholeDir}/${preEventHorizon} - - echo -e "${OVER} ${TICK} ${str}" - numberOf=$(wc -l < ${piholeDir}/${preEventHorizon}) - echo -e " ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} unique domains trapped in the event horizon." -} - -gravity_doHostFormat() { - # Check vars from setupVars.conf to see if we're using IPv4, IPv6, Or both. - if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then - # Both IPv4 and IPv6 - awk -v ipv4addr="$IPV4_ADDRESS" -v ipv6addr="$IPV6_ADDRESS" '{sub(/\r$/,""); print ipv4addr" "$0"\n"ipv6addr" "$0}' >> "${2}" < "${1}" - elif [[ -n "${IPV4_ADDRESS}" ]] && [[ -z "${IPV6_ADDRESS}" ]];then - # Only IPv4 - awk -v ipv4addr="$IPV4_ADDRESS" '{sub(/\r$/,""); print ipv4addr" "$0}' >> "${2}" < "${1}" - elif [[ -z "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then - # Only IPv6 - awk -v ipv6addr="$IPV6_ADDRESS" '{sub(/\r$/,""); print ipv6addr" "$0}' >> "${2}" < "${1}" - elif [[ -z "${IPV4_ADDRESS}" ]] &&[[ -z "${IPV6_ADDRESS}" ]];then - echo -e "${OVER} ${CROSS} ${str}" - echo -e " ${COL_LIGHT_RED}No IP Values found! Please run 'pihole -r' and choose reconfigure to restore values${COL_NC}" - exit 1 - fi -} - -gravity_hostFormatLocal() { - # Format domain list as "192.168.x.x domain.com" - - if [[ -f "/etc/hostname" ]]; then - hostname=$(< /etc/hostname) - elif [ -x "$(command -v hostname)" ]; then - hostname=$(hostname -f) - else - echo -e " ${CROSS} Unable to determine fully qualified domain name of host" - fi - - echo -e "${hostname}\npi.hole" > "${localList}.tmp" - # Copy the file over as /etc/pihole/local.list so dnsmasq can use it - rm "${localList}" - gravity_doHostFormat "${localList}.tmp" "${localList}" - rm "${localList}.tmp" - - # Generate local HOSTS list with information obtained from OpenVPN (if available) - if [[ -f ${VPNList} ]]; then - awk -F, '{printf $2"\t"$1"\n"}' "${VPNList}" >> "${localList}" - fi -} - -gravity_hostFormatGravity() { - # Format domain list as "192.168.x.x domain.com" - echo "" > "${piholeDir}/${accretionDisc}" - gravity_doHostFormat "${piholeDir}/${eventHorizon}" "${piholeDir}/${accretionDisc}" - # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it - mv "${piholeDir}/${accretionDisc}" "${adList}" - -} - -gravity_hostFormatBlack() { - if [[ -f "${blacklistFile}" ]]; then - numBlacklisted=$(wc -l < "${blacklistFile}") - # Format domain list as "192.168.x.x domain.com" - gravity_doHostFormat "${blacklistFile}" "${blackList}.tmp" - # Copy the file over as /etc/pihole/black.list so dnsmasq can use it - mv "${blackList}.tmp" "${blackList}" - else - echo -e " ${INFO} Nothing to blacklist!" 
- fi -} - -# blackbody - remove any remnant files from script processes -gravity_blackbody() { - # Loop through list files - for file in ${piholeDir}/*.${justDomainsExtension}; do - # If list is in active array then leave it (noop) else rm the list - if [[ " ${activeDomains[@]} " =~ ${file} ]]; then - : - else - rm -f "${file}" - fi - done -} - -gravity_advanced() { - # Remove comments and print only the domain name - # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious - # This helps with that and makes it easier to read - # It also helps with debugging so each stage of the script can be researched more in depth - local str="Formatting list of domains to remove comments" - echo -ne " ${INFO} ${str}..." - - #awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' ${piholeDir}/${matterAndLight} | sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${piholeDir}/${supernova} - #Above line does not correctly grab domains where comment is on the same line (e.g 'addomain.com #comment') - #Awk -F splits on given IFS, we grab the right hand side (chops trailing #coments and /'s to grab the domain only. - #Last awk command takes non-commented lines and if they have 2 fields, take the left field (the domain) and leave - #+ the right (IP address), otherwise grab the single field. - cat ${piholeDir}/${matterAndLight} | \ - awk -F '#' '{print $1}' | \ - awk -F '/' '{print $1}' | \ - awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \ - sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${piholeDir}/${supernova} - - echo -e "${OVER} ${TICK} ${str}" - - numberOf=$(wc -l < ${piholeDir}/${supernova}) - echo -e " ${INFO} ${COL_LIGHT_BLUE}${numberOf}${COL_NC} domains being pulled in by gravity" - - gravity_unique -} - -gravity_reload() { - # Reload hosts file - echo "" - local str="Refreshing lists in dnsmasq" - echo -e " ${INFO} ${str}..." - - # Ensure /etc/dnsmasq.d/01-pihole.conf is pointing at the correct list! - # First escape forward slashes in the path: - adList=${adList//\//\\\/} - # Now replace the line in dnsmasq file - # sed -i "s/^addn-hosts.*/addn-hosts=$adList/" /etc/dnsmasq.d/01-pihole.conf - - "${PIHOLE_COMMAND}" restartdns -} - for var in "$@"; do case "${var}" in - "-f" | "--force" ) forceGrav=true;; - "-h" | "--help" ) helpFunc;; - "-sd" | "--skip-download" ) skipDownload=true;; - "-b" | "--blacklist-only" ) blackListOnly=true;; + "-f" | "--force" ) forceDelete=true;; + "-h" | "--help" ) helpFunc;; + "-sd" | "--skip-download" ) skipDownload=true;; + "-b" | "--blacklist-only" ) listType="blacklist";; + "-w" | "--whitelist-only" ) listType="whitelist";; + "-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";; esac done -if [[ "${forceGrav}" == true ]]; then - str="Deleting exising list cache" +# Trap Ctrl-C +gravity_Trap + +if [[ "${forceDelete:-}" == true ]]; then + str="Deleting existing list cache" echo -ne "${INFO} ${str}..." - if rm /etc/pihole/list.* 2> /dev/null; then - echo -e "${OVER} ${TICK} ${str}" - else - echo -e "${OVER} ${CROSS} ${str}" - fi -fi - -if [[ ! "${blackListOnly}" == true ]]; then - gravity_collapse - gravity_spinup - if [[ "${skipDownload}" == false ]]; then - gravity_Schwarzchild - gravity_advanced - else - echo -e " ${INFO} Using cached Event Horizon list..." - numberOf=$(wc -l < ${piholeDir}/${preEventHorizon}) - echo -e " ${INFO} ${COL_LIGHT_BLUE}$numberOf${COL_NC} unique domains trapped in the event horizon." 
- fi - gravity_Whitelist -fi -gravity_Blacklist -gravity_Wildcard - -str="Formatting domains into a HOSTS file" -echo -ne " ${INFO} ${str}..." -if [[ ! "${blackListOnly}" == true ]]; then - gravity_hostFormatLocal - gravity_hostFormatGravity -fi -gravity_hostFormatBlack -echo -e "${OVER} ${TICK} ${str}" - -gravity_blackbody - -if [[ ! "${blackListOnly}" == true ]]; then - # Clear no longer needed files... - str="Cleaning up un-needed files" - echo -ne " ${INFO} ${str}..." - - rm ${piholeDir}/pihole.*.txt 2> /dev/null - + rm /etc/pihole/list.* 2> /dev/null || true echo -e "${OVER} ${TICK} ${str}" fi -gravity_reload +# Determine which functions to run +if [[ "${skipDownload}" == false ]]; then + # Gravity needs to download blocklists + gravity_DNSLookup + gravity_Collapse + gravity_Supernova + gravity_Schwarzschild + gravity_Filter + gravity_WhitelistBLD +else + # Gravity needs to modify Blacklist/Whitelist/Wildcards + echo -e " ${INFO} Using cached Event Horizon list..." + numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")") + echo -e " ${INFO} ${COL_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon" +fi + +# Perform when downloading blocklists, or modifying the whitelist +if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "whitelist" ]]; then + gravity_Whitelist +fi + +gravity_ShowBlockCount + +# Perform when downloading blocklists, or modifying the white/blacklist (not wildcards) +if [[ "${skipDownload}" == false ]] || [[ "${listType}" == *"list" ]]; then + str="Parsing domains into hosts format" + echo -ne " ${INFO} ${str}..." + + gravity_ParseUserDomains + + # Perform when downloading blocklists + if [[ ! "${listType:-}" == "blacklist" ]]; then + gravity_ParseLocalDomains + gravity_ParseBlacklistDomains + fi + + echo -e "${OVER} ${TICK} ${str}" + + gravity_Cleanup +fi + +echo "" + +# Determine if DNS has been restarted by this instance of gravity +if [[ -z "${dnsWasOffline:-}" ]]; then + # Use "force-reload" when restarting dnsmasq for everything but Wildcards + "${PIHOLE_COMMAND}" restartdns "${dnsRestartType:-force-reload}" +fi "${PIHOLE_COMMAND}" status diff --git a/pihole b/pihole index ded79f71..45f7ea92 100755 --- a/pihole +++ b/pihole @@ -12,8 +12,7 @@ readonly PI_HOLE_SCRIPT_DIR="/opt/pihole" readonly wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf" readonly colfile="${PI_HOLE_SCRIPT_DIR}/COL_TABLE" - -source ${colfile} +source "${colfile}" # Must be root to use this tool if [[ ! $EUID -eq 0 ]];then @@ -27,7 +26,7 @@ if [[ ! 
$EUID -eq 0 ]];then fi webpageFunc() { - source /opt/pihole/webpage.sh + source "${PI_HOLE_SCRIPT_DIR}/webpage.sh" main "$@" exit 0 } @@ -84,21 +83,27 @@ updateGravityFunc() { exit 0 } +# Scan an array of files for matching strings scanList(){ - domain="${1}" - list="${2}" - method="${3}" + local domain="${1}" lists="${2}" type="${3:-}" - # Switch folder, preventing grep from printing file path - cd "/etc/pihole" || return 1 + # Prevent grep from printing file path + cd "/etc/pihole" || exit 1 - if [[ -n "${method}" ]]; then - grep -i -E -l "(^|\s|\/)${domain}($|\s|\/)" ${list} /dev/null 2> /dev/null - else - grep -i "${domain}" ${list} /dev/null 2> /dev/null - fi + # Prevent grep -i matching slowly: http://bit.ly/2xFXtUX + export LC_CTYPE=C + + # /dev/null forces filename to be printed when only one list has been generated + # shellcheck disable=SC2086 + case "${type}" in + "exact" ) grep -i -E -l "(^|\\s)${domain}($|\\s|#)" ${lists} /dev/null;; + "wc" ) grep -i -o -m 1 "/${domain}/" ${lists};; + * ) grep -i "${domain}" ${lists} /dev/null;; + esac } +# Print each subdomain +# e.g: foo.bar.baz.com = "foo.bar.baz.com bar.baz.com baz.com com" processWildcards() { IFS="." read -r -a array <<< "${1}" for (( i=${#array[@]}-1; i>=0; i-- )); do @@ -115,8 +120,8 @@ processWildcards() { } queryFunc() { - options="$*" - options="${options/-q /}" + shift + local options="$*" adlist="" all="" exact="" blockpage="" matchType="match" if [[ "${options}" == "-h" ]] || [[ "${options}" == "--help" ]]; then echo "Usage: pihole -q [option] @@ -131,201 +136,176 @@ Options: exit 0 fi - if [[ "${options}" == *"-exact"* ]]; then - method="exact" - exact=true - fi - - if [[ "${options}" == *"-adlist"* ]]; then - adlist=true - fi - - if [[ "${options}" == *"-bp"* ]]; then - method="exact" - blockpage=true - fi - - if [[ "${options}" == *"-all"* ]]; then - all=true - fi - - # Strip valid options, leaving only the domain and invalid options - options=$(sed 's/ \?-\(exact\|adlist\(s\)\?\|bp\|all\) \?//g' <<< "$options") - - # Handle errors - if [[ "${options}" == *" "* ]]; then - error=true - str="Unknown option specified" - elif [[ "${options}" == "-q" ]]; then - error=true - str="No domain specified" - fi - - if [[ -n "${error}" ]]; then - echo -e " ${COL_LIGHT_RED}${str}${COL_NC} - Try 'pihole -q --help' for more information." + if [[ ! 
-e "/etc/pihole/adlists.list" ]]; then + echo -e "${COL_LIGHT_RED}The file '/etc/pihole/adlists.list' was not found${COL_NC}" exit 1 fi - # If domain contains non ASCII characters, convert domain to punycode if python is available - # Cr: https://serverfault.com/a/335079 - if [[ "$options" = *[![:ascii:]]* ]]; then - if command -v python &> /dev/null; then - query=$(python -c 'import sys;print sys.argv[1].decode("utf-8").encode("idna")' "${options}") - fi + # Handle valid options + if [[ "${options}" == *"-bp"* ]]; then + exact="exact"; blockpage=true else - query="${options}" + [[ "${options}" == *"-adlist"* ]] && adlist=true + [[ "${options}" == *"-all"* ]] && all=true + if [[ "${options}" == *"-exact"* ]]; then + exact="exact"; matchType="exact ${matchType}" + fi + fi + + # Strip valid options, leaving only the domain and invalid options + # This allows users to place the options before or after the domain + options=$(sed -E 's/ ?-(bp|adlists?|all|exact)//g' <<< "${options}") + + # Handle remaining options + # If $options contain non ASCII characters, convert to punycode + case "${options}" in + "" ) str="No domain specified";; + *" "* ) str="Unknown query option specified";; + *[![:ascii:]]* ) domainQuery=$(idn2 "${options}");; + * ) domainQuery="${options}";; + esac + + if [[ -n "${str:-}" ]]; then + echo -e "${str}${COL_NC}\\nTry 'pihole -q --help' for more information." + exit 1 fi # Scan Whitelist and Blacklist lists="whitelist.txt blacklist.txt" - results=($(scanList "${query}" "${lists}" "${method}")) + mapfile -t results <<< "$(scanList "${domainQuery}" "${lists}" "${exact}")" if [[ -n "${results[*]}" ]]; then - blResult=true - # Loop through each scanList line to print appropriate title + wbMatch=true + + # Loop through each result in order to print unique file title once for result in "${results[@]}"; do - filename="${result/:*/}" - if [[ -n "$exact" ]]; then - printf " Exact result in %s\n" "${filename}" - elif [[ -n "$blockpage" ]]; then - printf "π %s\n" "${filename}" + fileName="${result%%.*}" + + if [[ -n "${blockpage}" ]]; then + echo "π ${result}" + exit 0 + elif [[ -n "${exact}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" else - domain="${result/*:/}" - if [[ ! "${filename}" == "${filename_prev:-}" ]]; then - printf " Result from %s\n" "${filename}" + # Only print filename title once per file + if [[ ! 
"${fileName}" == "${fileName_prev:-}" ]]; then + echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}" + fileName_prev="${fileName}" fi - printf " %s\n" "${domain}" - filename_prev="${filename}" + echo " ${result#*:}" fi done fi # Scan Wildcards if [[ -e "${wildcardlist}" ]]; then - wildcards=($(processWildcards "${query}")) + # Determine all subdomains, domain and TLDs + mapfile -t wildcards <<< "$(processWildcards "${domainQuery}")" for match in "${wildcards[@]}"; do - results=($(scanList "\/${match}\/" ${wildcardlist})) + # Search wildcard list for matches + mapfile -t results <<< "$(scanList "${match}" "${wildcardlist}" "wc")" if [[ -n "${results[*]}" ]]; then - # Remove empty lines before couting number of results - count=$(sed '/^\s*$/d' <<< "${results[@]}" | wc -l) - if [[ "${count}" -ge 0 ]]; then - blResult=true - if [[ -z "${blockpage}" ]]; then - printf " Wildcard result in %s\n" "${wildcardlist/*dnsmasq.d\/}" - fi - - if [[ -n "${blockpage}" ]]; then - echo "π ${wildcardlist/*\/}" - else - echo " *.${match}" - fi + if [[ -z "${wcMatch:-}" ]] && [[ -z "${blockpage}" ]]; then + wcMatch=true + echo " ${matchType^} found in ${COL_BOLD}Wildcards${COL_NC}:" fi + + case "${blockpage}" in + true ) echo "π ${wildcardlist##*/}"; exit 0;; + * ) echo " *.${match}";; + esac fi done - - [[ -n "${blResult}" ]] && [[ -n "${blockpage}" ]] && exit 0 fi - # Glob *.domains file names, remove file paths and sort by list number - lists_raw=(/etc/pihole/*.domains) - IFS_OLD=$IFS - IFS=$'\n' - lists=$(sort -t . -k 2 -g <<< "${lists_raw[*]//\/etc\/pihole\//}") + # Get version sorted *.domains filenames (without dir path) + lists=("$(cd "/etc/pihole" || exit 0; printf "%s\\n" -- *.domains | sort -V)") - # Scan Domains files - results=($(scanList "${query}" "${lists}" "${method}")) + # Query blocklists for occurences of domain + mapfile -t results <<< "$(scanList "${domainQuery}" "${lists[*]}" "${exact}")" # Handle notices - if [[ -z "${blResult}" ]] && [[ -z "${results[*]}" ]]; then - notice=true - str="No ${method/t/t }results found for ${query} found within block lists" - elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 16000 ]]; then - # 16000 chars is 15 chars X 1000 lines worth of results - notice=true - str="Hundreds of ${method/t/t }results found for ${query} - This can be overriden using the -all option" + if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then + echo -e " ${INFO} No ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC} found within block lists" + exit 0 + elif [[ -z "${results[*]}" ]]; then + # Result found in WL/BL/Wildcards + exit 0 + elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then + echo -e " ${INFO} Over 100 ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC} + This can be overridden using the -all option" + exit 0 fi - if [[ -n "${notice}" ]]; then - echo -e " ${INFO} ${str}" - exit + # Remove unwanted content from non-exact $results + if [[ -z "${exact}" ]]; then + # Delete lines starting with # + # Remove comments after domain + # Remove hosts format IP address + mapfile -t results <<< "$(IFS=$'\n'; sed \ + -e "/:#/d" \ + -e "s/[ \\t]#.*//g" \ + -e "s/:.*[ \\t]/:/g" \ + <<< "${results[*]}")" + + # Exit if result was in a comment + [[ -z "${results[*]}" ]] && exit 0 fi - # Remove unwanted content from results - if [[ -z "${method}" ]]; then - results=($(sed "/:#/d" <<< "${results[*]}")) # Lines starting with comments - results=($(sed "s/[ \t]#.*//g" <<< "${results[*]}")) # Comments after 
domain - results=($(sed "s/:.*[ \t]/:/g" <<< "${results[*]}")) # IP address - fi - IFS=$IFS_OLD - - # Get adlist content as array + # Get adlist file content as array if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then - if [[ -f "/etc/pihole/adlists.list" ]]; then - for url in $(< /etc/pihole/adlists.list); do - if [[ "${url:0:4}" == "http" ]] || [[ "${url:0:3}" == "www" ]]; then - adlists+=("$url") - fi - done - else - echo -e " ${COL_LIGHT_RED}The file '/etc/pihole/adlists.list' was not found${COL_NC}" - exit 1 - fi - fi - - if [[ -n "${results[*]}" ]]; then - if [[ -n "${exact}" ]]; then - echo " Exact result(s) for ${query} found in:" - fi - - for result in "${results[@]}"; do - filename="${result/:*/}" - - # Convert file name to URL name for -adlist or -bp options - if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then - filenum=("${filename/list./}") - filenum=("${filenum/.*/}") - filename="${adlists[$filenum]}" - - # If gravity has generated associated .domains files - # but adlists.list has been modified since - if [[ -z "${filename}" ]]; then - filename="${COL_LIGHT_RED}Error: no associated adlists URL found${COL_NC}" - fi - fi - - if [[ -n "${exact}" ]]; then - printf " %s\n" "${filename}" - elif [[ -n "${blockpage}" ]]; then - printf "%s %s\n" "${filenum}" "${filename}" - else # Standard query output - - # Print filename heading once per file, not for every match - if [[ ! "${filename}" == "${filename_prev:-}" ]]; then - unset count - printf " Result from %s\n" "${filename}" - else - let count++ - fi - - # Print matching domain if $max_count has not been reached - [[ -z "${all}" ]] && max_count="20" - if [[ -z "${all}" ]] && [[ "${count}" -eq "${max_count}" ]]; then - echo " Over $count results found, skipping rest of file" - elif [[ -z "${all}" ]] && [[ "${count}" -gt "${max_count}" ]]; then - continue - else - domain="${result/*:/}" - printf " %s\n" "${domain}" - fi - filename_prev="${filename}" + for adlistUrl in $(< "/etc/pihole/adlists.list"); do + if [[ "${adlistUrl:0:4}" =~ (http|www.) ]]; then + adlists+=("${adlistUrl}") fi done fi + # Print "Exact matches for" title + if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then + plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es" + echo " ${matchType^}${plural} for ${COL_BOLD}${domainQuery}${COL_NC} found in:" + fi + + for result in "${results[@]}"; do + fileName="${result/:*/}" + + # Determine *.domains URL using filename's number + if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then + fileNum="${fileName/list./}"; fileNum="${fileNum%%.*}" + fileName="${adlists[$fileNum]}" + + # Discrepency occurs when adlists has been modified, but Gravity has not been run + if [[ -z "${fileName}" ]]; then + fileName="${COL_LIGHT_RED}(no associated adlists URL found)${COL_NC}" + fi + fi + + if [[ -n "${blockpage}" ]]; then + echo "${fileNum} ${fileName}" + elif [[ -n "${exact}" ]]; then + echo " ${fileName}" + else + if [[ ! 
"${fileName}" == "${fileName_prev:-}" ]]; then + count="" + echo " ${matchType^} found in ${COL_BOLD}${fileName}${COL_NC}:" + fileName_prev="${fileName}" + fi + : $((count++)) + + # Print matching domain if $max_count has not been reached + [[ -z "${all}" ]] && max_count="50" + if [[ -z "${all}" ]] && [[ "${count}" -ge "${max_count}" ]]; then + [[ "${count}" -gt "${max_count}" ]] && continue + echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}" + else + echo " ${result#*:}" + fi + fi + done + exit 0 } @@ -348,33 +328,35 @@ versionFunc() { } restartDNS() { - dnsmasqPid=$(pidof dnsmasq) - local str="Restarting DNS service" - echo -ne " ${INFO} ${str}" - if [[ "${dnsmasqPid}" ]]; then - # Service already running - reload config - if [[ -x "$(command -v systemctl)" ]]; then - output=$( { systemctl restart dnsmasq; } 2>&1 ) + local svcOption svc str output status + svcOption="${1:-}" + + # Determine if we should reload or restart dnsmasq + if [[ "${svcOption}" =~ "reload" ]]; then + # Using SIGHUP will NOT re-read any *.conf files + svc="killall -s SIGHUP dnsmasq" + elif [[ -z "${svcOption}" ]]; then + # Get PID of dnsmasq to determine if it needs to start or restart + if pidof dnsmasq &> /dev/null; then + svcOption="restart" else - output=$( { service dnsmasq restart; } 2>&1 ) - fi - if [[ -z "${output}" ]]; then - echo -e "${OVER} ${TICK} ${str}" - else - echo -e "${OVER} ${CROSS} ${output}" + svcOption="start" fi + svc="service dnsmasq ${svcOption}" + fi + + # Print output to Terminal, but not to Web Admin + str="${svcOption^}ing DNS service" + [[ -t 1 ]] && echo -ne " ${INFO} ${str}..." + + output=$( { ${svc}; } 2>&1 ) + status="$?" + + if [[ "${status}" -eq 0 ]]; then + [[ -t 1 ]] && echo -e "${OVER} ${TICK} ${str}" else - # Service not running, start it up - if [[ -x "$(command -v systemctl)" ]]; then - output=$( { systemctl start dnsmasq; } 2>&1 ) - else - output=$( { service dnsmasq start; } 2>&1 ) - fi - if [[ -z "${output}" ]]; then - echo -e "${OVER} ${TICK} ${str}" - else - echo -e "${OVER} ${CROSS} ${output}" - fi + [[ ! 
-t 1 ]] && local OVER="" + echo -e "${OVER} ${CROSS} ${output}" fi } @@ -526,13 +508,20 @@ statusFunc() { } tailFunc() { - date=$(date +'%b %d ') echo -e " ${INFO} Press Ctrl-C to exit" - tail -f /var/log/pihole.log | sed \ - -e "s,\(${date}\| dnsmasq\[.*[0-9]]\),,g" \ - -e "s,\(.*\(gravity.list\|black.list\| config \).* is \(${IPV4_ADDRESS%/*}\|${IPV6_ADDRESS:-NULL}\).*\),${COL_LIGHT_RED}&${COL_NC}," \ - -e "s,.*\(query\[A\|DHCP\).*,${COL_NC}&${COL_NC}," \ - -e "s,.*,${COL_DARK_GRAY}&${COL_NC}," + + # Retrieve IPv4/6 addresses + source /etc/pihole/setupVars.conf + + # Strip date from each line + # Colour blocklist/blacklist/wildcard entries as red + # Colour A/AAAA/DHCP strings as white + # Colour everything else as gray + tail -f /var/log/pihole.log | sed -E \ + -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ + -e "s,(.*(gravity.list|black.list| config ).* is (${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ + -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ + -e "s,.*,${COL_GRAY}&${COL_NC}," exit 0 } @@ -655,7 +644,7 @@ case "${1}" in "enable" ) piholeEnable 1;; "disable" ) piholeEnable 0 "$2";; "status" ) statusFunc "$2";; - "restartdns" ) restartDNS;; + "restartdns" ) restartDNS "$2";; "-a" | "admin" ) webpageFunc "$@";; "-t" | "tail" ) tailFunc;; "checkout" ) piholeCheckoutFunc "$@";; diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 3f0bd969..0e961c7f 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -186,7 +186,6 @@ def test_installPiholeWeb_fresh_install_no_errors(Pihole): assert tick_box + ' Installing sudoer file' in installWeb.stdout web_directory = Pihole.run('ls -r /var/www/html/pihole').stdout assert 'index.php' in web_directory - assert 'index.js' in web_directory assert 'blockingpage.css' in web_directory def test_update_package_cache_success_no_errors(Pihole):
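
The reworked scanList() "exact" mode is easiest to see against a throwaway hosts-format list. The sketch below is illustrative only: the /tmp paths and sample domains are assumptions, not files this patch creates; the grep call mirrors the new whole-token pattern and the LC_CTYPE=C export that keeps case-insensitive matching fast.

    #!/usr/bin/env bash
    # Illustrative sketch of the new exact scan; sample files and paths are assumed.
    export LC_CTYPE=C    # avoid slow multibyte case folding in grep -i

    printf '192.168.1.2 ads.example.com\n'            > /tmp/list.0.sample.domains
    printf '192.168.1.2 ads.example.com.evil.test\n'  > /tmp/list.1.sample.domains

    domain="ads.example.com"
    # -l prints only matching file names; the whole-token pattern no longer matches
    # the superstring in list.1. The trailing /dev/null mirrors the script's call;
    # it only matters in the default (non -l) mode, where it forces "file:match" output.
    grep -i -E -l "(^|\\s)${domain}($|\\s|#)" /tmp/list.*.sample.domains /dev/null
    # -> /tmp/list.0.sample.domains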
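
processWildcards() feeds the new "wc" branch of scanList() by expanding a query into each of its parent domains (foo.bar.baz.com, bar.baz.com, baz.com, com) and grepping the dnsmasq wildcard file for /domain/ entries. A minimal stand-alone sketch, assuming a throwaway wildcard file under /tmp and a simplified expansion helper (expandParents is not a function from the patch):

    #!/usr/bin/env bash
    # Minimal sketch; the wildcard file path and expandParents() helper are assumptions.
    wildcardlist="/tmp/03-pihole-wildcard.conf"
    printf 'address=/baz.com/192.168.1.2\n' > "${wildcardlist}"

    # Simplified stand-in for processWildcards(): print the query and each parent domain
    expandParents() {
      local d="${1}"
      while [[ -n "${d}" ]]; do
        echo "${d}"
        [[ "${d}" == *.* ]] || break
        d="${d#*.}"
      done
    }

    for match in $(expandParents "foo.bar.baz.com"); do
      # Same grep as scanList()'s "wc" case: first /domain/ occurrence only
      if grep -i -o -m 1 "/${match}/" "${wildcardlist}" > /dev/null; then
        echo "  *.${match} is wildcard-blocked"
      fi
    done
    # -> "  *.baz.com is wildcard-blocked"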
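
The restartdns change is what lets gravity avoid a full dnsmasq restart: any option containing "reload" (including the new force-reload default) maps to a SIGHUP, which re-reads the generated host lists but not /etc/dnsmasq.d/*.conf, so wildcard updates still request a real restart. A toy helper mirroring that branch of restartDNS() (pickDnsAction is an assumed name, not part of the patch):

    #!/usr/bin/env bash
    # Toy helper mirroring restartDNS()'s reload-vs-restart branch; name is an assumption.
    pickDnsAction() {
      local svcOption="${1:-}"
      if [[ "${svcOption}" =~ "reload" ]]; then
        # SIGHUP re-reads gravity.list/black.list but NOT /etc/dnsmasq.d/*.conf
        echo "killall -s SIGHUP dnsmasq"
      else
        # The real function also checks pidof dnsmasq to choose start vs restart
        echo "service dnsmasq ${svcOption:-restart}"
      fi
    }

    pickDnsAction "force-reload"   # white/blacklist edits: killall -s SIGHUP dnsmasq
    pickDnsAction "restart"        # wildcard edits:        service dnsmasq restart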