From 8f22203d248ba1a80ec860dbb3003b1413139bd2 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Tue, 14 Jan 2020 19:57:45 +0100 Subject: [PATCH 01/60] Wait 30 seconds for obtaining a database lock instead of immediately failing if the database is busy. Signed-off-by: DL6ER --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index 659263b5..105febb7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -85,7 +85,7 @@ generate_gravity_database() { update_gravity_timestamp() { # Update timestamp when the gravity table was last updated successfully - output=$( { sqlite3 "${gravityDBfile}" <<< "INSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%s', 'now') as int));"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -99,7 +99,7 @@ database_truncate_table() { local table table="${1}" - output=$( { sqlite3 "${gravityDBfile}" <<< "DELETE FROM ${table};"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nDELETE FROM %s;" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then @@ -164,7 +164,7 @@ database_table_from_file() { # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 10000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" 
if [[ "${status}" -ne 0 ]]; then From 276b19184500b65eb34c9d480fe641373dba28f7 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 19 Jan 2020 21:39:49 +0100 Subject: [PATCH 02/60] Remove dead code causing failure from the blocking page. Signed-off-by: DL6ER --- advanced/index.php | 6 ------ 1 file changed, 6 deletions(-) diff --git a/advanced/index.php b/advanced/index.php index 62e45091..b0c4a7c3 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -96,12 +96,6 @@ if ($serverName === "pi.hole") { // Define admin email address text based off $svEmail presence $bpAskAdmin = !empty($svEmail) ? '' : ""; -// Determine if at least one block list has been generated -$blocklistglob = glob("/etc/pihole/list.0.*.domains"); -if ($blocklistglob === array()) { - die("[ERROR] There are no domain lists generated lists within /etc/pihole/! Please update gravity by running pihole -g, or repair Pi-hole using pihole -r."); -} - // Get possible non-standard location of FTL's database $FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf"); if (isset($FTLsettings["GRAVITYDB"])) { From 633e56e8a99f54970d8ed7f3bf3a25c53430003d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 20 Jan 2020 17:59:24 +0100 Subject: [PATCH 03/60] Add gravity database 9->10 update script. 
Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 7 +++++ .../database_migration/gravity/9_to_10.sql | 29 +++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 advanced/Scripts/database_migration/gravity/9_to_10.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 184b3a4a..6a51e353 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -87,4 +87,11 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/8_to_9.sql" version=9 fi + if [[ "$version" == "9" ]]; then + # This migration drops unused tables and creates triggers to remove + # obsolete groups assignments when the linked items are deleted + echo -e " ${INFO} Upgrading gravity database from version 9 to 10" + sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" + version=10 + fi } diff --git a/advanced/Scripts/database_migration/gravity/9_to_10.sql b/advanced/Scripts/database_migration/gravity/9_to_10.sql new file mode 100644 index 00000000..a5636a23 --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/9_to_10.sql @@ -0,0 +1,29 @@ +.timeout 30000 + +PRAGMA FOREIGN_KEYS=OFF; + +BEGIN TRANSACTION; + +DROP TABLE IF EXISTS whitelist; +DROP TABLE IF EXISTS blacklist; +DROP TABLE IF EXISTS regex_whitelist; +DROP TABLE IF EXISTS regex_blacklist; + +CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist + BEGIN + DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id; + END; + +CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist + BEGIN + DELETE FROM adlist_by_group WHERE adlist_id = OLD.id; + END; + +CREATE TRIGGER tr_client_delete AFTER DELETE ON client + BEGIN + DELETE FROM client_by_group WHERE client_id = OLD.id; + END; + +UPDATE info SET value = 10 WHERE property = 'version'; + +COMMIT; From 3f9e79f152b404e9d92d786977e796d510a5e63a Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 20 Jan 2020 20:07:25 
+0100 Subject: [PATCH 04/60] Print human-readable timestamps in the debugger's gravity output Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 84e34416..76a409f9 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1105,7 +1105,7 @@ show_db_entries() { } show_groups() { - show_db_entries "Groups" "SELECT * FROM \"group\"" "4 4 30 50" + show_db_entries "Groups" "SELECT id,name,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,description FROM \"group\"" "4 50 7 19 19 50" } show_adlists() { From a8096243569d3bb91e1c68204d65f569e9833ddb Mon Sep 17 00:00:00 2001 From: DL6ER Date: Thu, 23 Jan 2020 19:18:22 +0100 Subject: [PATCH 05/60] Update blocked strings for pihole -t. Signed-off-by: DL6ER --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index cc7e1b7c..e1758645 100755 --- a/pihole +++ b/pihole @@ -307,7 +307,7 @@ tailFunc() { # Colour everything else as gray tail -f /var/log/pihole.log | sed -E \ -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ - -e "s,(.*(gravity |black |regex | config ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ + -e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ -e "s,.*,${COL_GRAY}&${COL_NC}," exit 0 From 10c2dad48ad2e600d016004166ab5d88bec16424 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 24 Jan 2020 18:39:13 +0100 Subject: [PATCH 06/60] Improve gravity performance (#3100) * Gravity performance improvements. Signed-off-by: DL6ER * Do not move downloaded lists into migration_backup directory. Signed-off-by: DL6ER * Do not (strictly) sort domains. 
Random-leaf access is faster than always-last-leaf access (on average). Signed-off-by: DL6ER * Append instead of overwrite gravity_new collection list. Signed-off-by: DL6ER * Rename table gravity_new to gravity_temp to clarify that this is only an intermediate table. Signed-off-by: DL6ER * Add timers for each of the calls to compute intense parts. They are to be removed before this finally hits the release/v5.0 branch. Signed-off-by: DL6ER * Fix legacy list files import. It currently doesn't work when the gravity database has already been updated to using the single domainlist table. Signed-off-by: DL6ER * Simplify database_table_from_file(), remove all to this function for gravity lost downloads. Signed-off-by: DL6ER * Update gravity.db.sql to version 10 to have newle created databases already reflect the most recent state. Signed-off-by: DL6ER * Create second gravity database and swap them on success. This has a number of advantages such as instantaneous gravity updates (as seen from FTL) and always available gravity blocking. Furthermore, this saves disk space as the old database is removed on completion. * Add timing output for the database swapping SQLite3 call. Signed-off-by: DL6ER * Explicitly generate index as a separate process. Signed-off-by: DL6ER * Remove time measurements. 
Signed-off-by: DL6ER --- advanced/Templates/gravity.db.sql | 212 ++++++++++++++++----------- advanced/Templates/gravity_copy.sql | 21 +++ gravity.sh | 219 +++++++++++++++++----------- 3 files changed, 283 insertions(+), 169 deletions(-) create mode 100644 advanced/Templates/gravity_copy.sql diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index d0c744f4..a7dc12df 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -1,16 +1,21 @@ -PRAGMA FOREIGN_KEYS=ON; +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; CREATE TABLE "group" ( id INTEGER PRIMARY KEY AUTOINCREMENT, enabled BOOLEAN NOT NULL DEFAULT 1, - name TEXT NOT NULL, + name TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), description TEXT ); +INSERT INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); -CREATE TABLE whitelist +CREATE TABLE domainlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, + type INTEGER NOT NULL DEFAULT 0, domain TEXT UNIQUE NOT NULL, enabled BOOLEAN NOT NULL DEFAULT 1, date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), @@ -18,47 +23,6 @@ CREATE TABLE whitelist comment TEXT ); -CREATE TABLE whitelist_by_group -( - whitelist_id INTEGER NOT NULL REFERENCES whitelist (id), - group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (whitelist_id, group_id) -); - -CREATE TABLE blacklist -( - id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT -); - -CREATE TABLE blacklist_by_group -( - blacklist_id INTEGER NOT NULL REFERENCES blacklist (id), - group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (blacklist_id, group_id) -); - -CREATE TABLE 
regex -( - id INTEGER PRIMARY KEY AUTOINCREMENT, - domain TEXT UNIQUE NOT NULL, - enabled BOOLEAN NOT NULL DEFAULT 1, - date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), - comment TEXT -); - -CREATE TABLE regex_by_group -( - regex_id INTEGER NOT NULL REFERENCES regex (id), - group_id INTEGER NOT NULL REFERENCES "group" (id), - PRIMARY KEY (regex_id, group_id) -); - CREATE TABLE adlist ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -78,7 +42,8 @@ CREATE TABLE adlist_by_group CREATE TABLE gravity ( - domain TEXT PRIMARY KEY + domain TEXT NOT NULL, + adlist_id INTEGER NOT NULL REFERENCES adlist (id) ); CREATE TABLE info @@ -87,56 +52,129 @@ CREATE TABLE info value TEXT NOT NULL ); -INSERT INTO info VALUES("version","1"); +INSERT INTO "info" VALUES('version','10'); -CREATE VIEW vw_whitelist AS SELECT DISTINCT domain - FROM whitelist - LEFT JOIN whitelist_by_group ON whitelist_by_group.whitelist_id = whitelist.id - LEFT JOIN "group" ON "group".id = whitelist_by_group.group_id - WHERE whitelist.enabled = 1 AND (whitelist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY whitelist.id; +CREATE TABLE domain_audit +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + domain TEXT UNIQUE NOT NULL, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)) +); -CREATE TRIGGER tr_whitelist_update AFTER UPDATE ON whitelist - BEGIN - UPDATE whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; +CREATE TABLE domainlist_by_group +( + domainlist_id INTEGER NOT NULL REFERENCES domainlist (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (domainlist_id, group_id) +); -CREATE VIEW vw_blacklist AS SELECT DISTINCT domain - FROM blacklist - LEFT JOIN blacklist_by_group ON blacklist_by_group.blacklist_id = blacklist.id - LEFT JOIN "group" ON "group".id = blacklist_by_group.group_id - WHERE blacklist.enabled 
= 1 AND (blacklist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY blacklist.id; +CREATE TABLE client +( + id INTEGER PRIMARY KEY AUTOINCREMENT, + ip TEXT NOL NULL UNIQUE +); -CREATE TRIGGER tr_blacklist_update AFTER UPDATE ON blacklist - BEGIN - UPDATE blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; - -CREATE VIEW vw_regex AS SELECT DISTINCT domain - FROM regex - LEFT JOIN regex_by_group ON regex_by_group.regex_id = regex.id - LEFT JOIN "group" ON "group".id = regex_by_group.group_id - WHERE regex.enabled = 1 AND (regex_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY regex.id; - -CREATE TRIGGER tr_regex_update AFTER UPDATE ON regex - BEGIN - UPDATE regex SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; - END; - -CREATE VIEW vw_adlist AS SELECT DISTINCT address - FROM adlist - LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id - LEFT JOIN "group" ON "group".id = adlist_by_group.group_id - WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) - ORDER BY adlist.id; +CREATE TABLE client_by_group +( + client_id INTEGER NOT NULL REFERENCES client (id), + group_id INTEGER NOT NULL REFERENCES "group" (id), + PRIMARY KEY (client_id, group_id) +); CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist BEGIN UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; END; -CREATE VIEW vw_gravity AS SELECT domain +CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist + BEGIN + UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; + END; + +CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE 
domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 0 + ORDER BY domainlist.id; + +CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 1 + ORDER BY domainlist.id; + +CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 2 + ORDER BY domainlist.id; + +CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id + FROM domainlist + LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id + LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id + WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1) + AND domainlist.type = 3 + ORDER BY domainlist.id; + +CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id FROM gravity - WHERE domain NOT IN (SELECT domain from vw_whitelist); + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id + LEFT JOIN adlist ON adlist.id = gravity.adlist_id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1); + +CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id + FROM adlist + LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id 
= adlist.id + LEFT JOIN "group" ON "group".id = adlist_by_group.group_id + WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1) + ORDER BY adlist.id; + +CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist + BEGIN + INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_client_add AFTER INSERT ON client + BEGIN + INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist + BEGIN + INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_group_update AFTER UPDATE ON "group" + BEGIN + UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; + END; + +CREATE TRIGGER tr_group_zero AFTER DELETE ON "group" + BEGIN + INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated'); + END; + +CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist + BEGIN + DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id; + END; + +CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist + BEGIN + DELETE FROM adlist_by_group WHERE adlist_id = OLD.id; + END; + +CREATE TRIGGER tr_client_delete AFTER DELETE ON client + BEGIN + DELETE FROM client_by_group WHERE client_id = OLD.id; + END; + +COMMIT; diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql new file mode 100644 index 00000000..e14d9d8c --- /dev/null +++ b/advanced/Templates/gravity_copy.sql @@ -0,0 +1,21 @@ +.timeout 30000 + +ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD; + +BEGIN TRANSACTION; + +INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; +INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; + +INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist; +INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group; + +INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist; +INSERT 
OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group; + +INSERT OR REPLACE INTO info SELECT * FROM OLD.info; + +INSERT OR REPLACE INTO client SELECT * FROM OLD.client; +INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; + +COMMIT; diff --git a/gravity.sh b/gravity.sh index 105febb7..26bedae7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -36,7 +36,9 @@ VPNList="/etc/openvpn/ipp.txt" piholeGitDir="/etc/.pihole" gravityDBfile="${piholeDir}/gravity.db" +gravityTEMPfile="${piholeDir}/gravity_temp.db" gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql" +gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql" optimize_database=false domainsExtension="domains" @@ -80,31 +82,49 @@ fi # Generate new sqlite3 file from schema template generate_gravity_database() { - sqlite3 "${gravityDBfile}" < "${gravityDBschema}" + sqlite3 "${1}" < "${gravityDBschema}" } -update_gravity_timestamp() { - # Update timestamp when the gravity table was last updated successfully - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) +# Copy data from old to new database file and swap them +gravity_swap_databases() { + local str + str="Building tree" + echo -ne " ${INFO} ${str}..." + + # The index is intentionally not UNIQUE as prro quality adlists may contain domains more than once + output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n ${output}" return 1 fi - return 0 -} + echo -e "${OVER} ${TICK} ${str}" -database_truncate_table() { - local table - table="${1}" + str="Swapping databases" + echo -ne " ${INFO} ${str}..." 
- output=$( { printf ".timeout 30000\\nDELETE FROM %s;" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to truncate ${table} database ${gravityDBfile}\\n ${output}" - gravity_Cleanup "error" + echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}" + return 1 + fi + echo -e "${OVER} ${TICK} ${str}" + + # Swap databases and remove old database + rm "${gravityDBfile}" + mv "${gravityTEMPfile}" "${gravityDBfile}" +} + +# Update timestamp when the gravity table was last updated successfully +update_gravity_timestamp() { + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n ${output}" return 1 fi return 0 @@ -113,73 +133,80 @@ database_truncate_table() { # Import domains from file and store them in the specified database table database_table_from_file() { # Define locals - local table source backup_path backup_file arg + local table source backup_path backup_file tmpFile type table="${1}" source="${2}" - arg="${3}" backup_path="${piholeDir}/migration_backup" backup_file="${backup_path}/$(basename "${2}")" - - # Truncate table only if not gravity (we add multiple times to this table) - if [[ "${table}" != "gravity" ]]; then - database_truncate_table "${table}" - fi - - local tmpFile tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")" + local timestamp timestamp="$(date --utc +'%s')" - local inputfile - # Apply format for white-, blacklist, regex, and adlist tables - # Read file line by line + local rowid declare -i rowid rowid=1 - if [[ "${table}" == "gravity" ]]; then - #Append ,${arg} to every line and then 
remove blank lines before import - sed -e "s/$/,${arg}/" "${source}" > "${tmpFile}" - sed -i '/^$/d' "${tmpFile}" - else - grep -v '^ *#' < "${source}" | while IFS= read -r domain - do - # Only add non-empty lines - if [[ -n "${domain}" ]]; then - if [[ "${table}" == "domain_audit" ]]; then - # domain_audit table format (no enable or modified fields) - echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" - else - # White-, black-, and regexlist format - echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" - fi - rowid+=1 - fi - done + # Special handling for domains to be imported into the common domainlist table + if [[ "${table}" == "whitelist" ]]; then + type="0" + table="domainlist" + elif [[ "${table}" == "blacklist" ]]; then + type="1" + table="domainlist" + elif [[ "${table}" == "regex" ]]; then + type="3" + table="domainlist" fi - inputfile="${tmpFile}" - # Remove possible duplicates found in lower-quality adlists - sort -u -o "${inputfile}" "${inputfile}" + # Get MAX(id) from domainlist when INSERTing into this table + if [[ "${table}" == "domainlist" ]]; then + rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")" + if [[ -z "$rowid" ]]; then + rowid=0 + fi + rowid+=1 + fi + + # Loop over all domains in ${source} file + # Read file line by line + grep -v '^ *#' < "${source}" | while IFS= read -r domain + do + # Only add non-empty lines + if [[ -n "${domain}" ]]; then + if [[ "${table}" == "domain_audit" ]]; then + # domain_audit table format (no enable or modified fields) + echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}" + elif [[ "${table}" == "adlist" ]]; then + # Adlist table format + echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + else + # White-, black-, and regexlist table format + echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}" + fi + rowid+=1 + fi + 
done # Store domains in database table specified by ${table} # Use printf as .mode and .import need to be on separate lines # see https://unix.stackexchange.com/a/445615/83260 - output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${inputfile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to fill table ${table} in database ${gravityDBfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n ${output}" gravity_Cleanup "error" fi - # Delete tmpfile - rm "${tmpFile}" > /dev/null 2>&1 || \ - echo -e " ${CROSS} Unable to remove ${tmpFile}" - # Move source file to backup directory, create directory if not existing mkdir -p "${backup_path}" mv "${source}" "${backup_file}" 2> /dev/null || \ echo -e " ${CROSS} Unable to backup ${source} to ${backup_path}" + + # Delete tmpFile + rm "${tmpFile}" > /dev/null 2>&1 || \ + echo -e " ${CROSS} Unable to remove ${tmpFile}" } # Migrate pre-v5.0 list files to database-based Pi-hole versions @@ -188,7 +215,10 @@ migrate_to_database() { if [ ! 
-e "${gravityDBfile}" ]; then # Create new database file - note that this will be created in version 1 echo -e " ${INFO} Creating new gravity database" - generate_gravity_database + generate_gravity_database "${gravityDBfile}" + + # Check if gravity database needs to be updated + upgrade_gravityDB "${gravityDBfile}" "${piholeDir}" # Migrate list files to new database if [ -e "${adListFile}" ]; then @@ -306,16 +336,25 @@ gravity_DownloadBlocklists() { return 1 fi - local url domain agent cmd_ext str + local url domain agent cmd_ext str target echo "" - # Flush gravity table once before looping over sources - str="Flushing gravity table" + # Prepare new gravity database + str="Preparing new gravity database" echo -ne " ${INFO} ${str}..." - if database_truncate_table "gravity"; then + rm "${gravityTEMPfile}" > /dev/null 2>&1 + output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 ) + status="$?" + + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to create new database ${gravityTEMPfile}\\n ${output}" + gravity_Cleanup "error" + else echo -e "${OVER} ${TICK} ${str}" fi + target="$(mktemp -p "/tmp" --suffix=".gravity")" + # Loop through $sources and download each one for ((i = 0; i < "${#sources[@]}"; i++)); do url="${sources[$i]}" @@ -335,15 +374,32 @@ gravity_DownloadBlocklists() { esac echo -e " ${INFO} Target: ${url}" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" echo "" done + + str="Storing downloaded domains in new gravity database" + echo -ne " ${INFO} ${str}..." + output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + status="$?" 
+ + if [[ "${status}" -ne 0 ]]; then + echo -e "\\n ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n ${output}" + gravity_Cleanup "error" + else + echo -e "${OVER} ${TICK} ${str}" + fi + + rm "${target}" > /dev/null 2>&1 || \ + echo -e " ${CROSS} Unable to remove ${target}" + gravity_Blackbody=true } # Download specified URL and perform checks on HTTP status and file content gravity_DownloadBlocklistFromUrl() { - local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" heisenbergCompensator="" patternBuffer str httpCode success="" + local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" + local heisenbergCompensator="" patternBuffer str httpCode success="" # Create temp file to store content on disk instead of RAM patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb") @@ -424,20 +480,15 @@ gravity_DownloadBlocklistFromUrl() { # Determine if the blocklist was downloaded and saved correctly if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then - # Add domains to database table - str="Adding adlist with ID ${adlistID} to database table" - echo -ne " ${INFO} ${str}..." - database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + # Add domains to database table file + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" # Check if $patternbuffer is a non-zero length file elif [[ -s "${patternBuffer}" ]]; then # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" - # Add domains to database table - str="Adding adlist with ID ${adlistID} to database table" - echo -ne " ${INFO} ${str}..." 
- database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" else # Fall back to previously cached list if $patternBuffer is empty echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" @@ -446,11 +497,8 @@ gravity_DownloadBlocklistFromUrl() { # Determine if cached list has read permission if [[ -r "${saveLocation}" ]]; then echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" - # Add domains to database table - str="Adding to database table" - echo -ne " ${INFO} ${str}..." - database_table_from_file "gravity" "${saveLocation}" "${adlistID}" - echo -e "${OVER} ${TICK} ${str}" + #Append ,${arg} to every line and then remove blank lines before import + sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" else echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" fi @@ -686,10 +734,6 @@ fi # Move possibly existing legacy files to the gravity database migrate_to_database -# Ensure proper permissions are set for the newly created database -chown pihole:pihole "${gravityDBfile}" -chmod g+w "${piholeDir}" "${gravityDBfile}" - if [[ "${forceDelete:-}" == true ]]; then str="Deleting existing list cache" echo -ne "${INFO} ${str}..." 
@@ -704,15 +748,26 @@ gravity_DownloadBlocklists # Create local.list gravity_generateLocalList -gravity_ShowCount +# Update gravity timestamp update_gravity_timestamp -gravity_Cleanup -echo "" +# Migrate rest of the data from old to new database +gravity_swap_databases + +# Ensure proper permissions are set for the database +chown pihole:pihole "${gravityDBfile}" +chmod g+w "${piholeDir}" "${gravityDBfile}" # Determine if DNS has been restarted by this instance of gravity if [[ -z "${dnsWasOffline:-}" ]]; then "${PIHOLE_COMMAND}" restartdns reload fi + +# Compute numbers to be displayed +gravity_ShowCount + +gravity_Cleanup +echo "" + "${PIHOLE_COMMAND}" status From 6b04997fc3d182ef1463d1c2955deed5a505c90e Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 27 Jan 2020 10:12:05 +0000 Subject: [PATCH 07/60] DROP and reCREATE TRIGGERs during gravity swapping. Signed-off-by: DL6ER --- advanced/Templates/gravity_copy.sql | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/advanced/Templates/gravity_copy.sql b/advanced/Templates/gravity_copy.sql index e14d9d8c..4a2a9b22 100644 --- a/advanced/Templates/gravity_copy.sql +++ b/advanced/Templates/gravity_copy.sql @@ -4,6 +4,10 @@ ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD; BEGIN TRANSACTION; +DROP TRIGGER tr_domainlist_add; +DROP TRIGGER tr_client_add; +DROP TRIGGER tr_adlist_add; + INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group"; INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit; @@ -18,4 +22,21 @@ INSERT OR REPLACE INTO info SELECT * FROM OLD.info; INSERT OR REPLACE INTO client SELECT * FROM OLD.client; INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group; + +CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist + BEGIN + INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0); + END; + +CREATE TRIGGER tr_client_add AFTER INSERT ON client + BEGIN + INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0); + END; + 
+CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist + BEGIN + INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0); + END; + + COMMIT; From 92aa510bdabe457598e40df983c01863195a2aa1 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 27 Jan 2020 10:36:16 +0000 Subject: [PATCH 08/60] Add timestamps and comment fields to clients. This updates the gravity database to version 11. Signed-off-by: DL6ER --- .../Scripts/database_migration/gravity-db.sh | 10 ++++++++++ .../database_migration/gravity/10_to_11.sql | 16 ++++++++++++++++ advanced/Templates/gravity.db.sql | 12 ++++++++++-- 3 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 advanced/Scripts/database_migration/gravity/10_to_11.sql diff --git a/advanced/Scripts/database_migration/gravity-db.sh b/advanced/Scripts/database_migration/gravity-db.sh index 6a51e353..8a669429 100644 --- a/advanced/Scripts/database_migration/gravity-db.sh +++ b/advanced/Scripts/database_migration/gravity-db.sh @@ -94,4 +94,14 @@ upgrade_gravityDB(){ sqlite3 "${database}" < "${scriptPath}/9_to_10.sql" version=10 fi + if [[ "$version" == "10" ]]; then + # This adds timestamp and an optional comment field to the client table + # These fields are only temporary and will be replaces by the columns + # defined in gravity.db.sql during gravity swapping. We add them here + # to keep the copying process generic (needs the same columns in both the + # source and the destination databases). 
+ echo -e " ${INFO} Upgrading gravity database from version 10 to 11" + sqlite3 "${database}" < "${scriptPath}/10_to_11.sql" + version=11 + fi } diff --git a/advanced/Scripts/database_migration/gravity/10_to_11.sql b/advanced/Scripts/database_migration/gravity/10_to_11.sql new file mode 100644 index 00000000..b073f83b --- /dev/null +++ b/advanced/Scripts/database_migration/gravity/10_to_11.sql @@ -0,0 +1,16 @@ +.timeout 30000 + +BEGIN TRANSACTION; + +ALTER TABLE client ADD COLUMN date_added INTEGER; +ALTER TABLE client ADD COLUMN date_modified INTEGER; +ALTER TABLE client ADD COLUMN comment TEXT; + +CREATE TRIGGER tr_client_update AFTER UPDATE ON client + BEGIN + UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id; + END; + +UPDATE info SET value = 11 WHERE property = 'version'; + +COMMIT; diff --git a/advanced/Templates/gravity.db.sql b/advanced/Templates/gravity.db.sql index a7dc12df..e543bd19 100644 --- a/advanced/Templates/gravity.db.sql +++ b/advanced/Templates/gravity.db.sql @@ -52,7 +52,7 @@ CREATE TABLE info value TEXT NOT NULL ); -INSERT INTO "info" VALUES('version','10'); +INSERT INTO "info" VALUES('version','11'); CREATE TABLE domain_audit ( @@ -71,7 +71,10 @@ CREATE TABLE domainlist_by_group CREATE TABLE client ( id INTEGER PRIMARY KEY AUTOINCREMENT, - ip TEXT NOL NULL UNIQUE + ip TEXT NOL NULL UNIQUE, + date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)), + comment TEXT ); CREATE TABLE client_by_group @@ -86,6 +89,11 @@ CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address; END; +CREATE TRIGGER tr_client_update AFTER UPDATE ON client + BEGIN + UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE ip = NEW.ip; + END; + CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist BEGIN UPDATE 
domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain; From 2a5cf221fa54609472f29f5294d3d716db50e268 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 2 Feb 2020 23:46:33 +0100 Subject: [PATCH 09/60] Store number of distinct gravity domains in database after counting. Signed-off-by: DL6ER --- gravity.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/gravity.sh b/gravity.sh index 26bedae7..f4cbe78d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -583,6 +583,7 @@ gravity_Table_Count() { local unique unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")" echo -e " ${INFO} Number of ${str}: ${num} (${unique} unique domains)" + sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});" else echo -e " ${INFO} Number of ${str}: ${num}" fi From c91d9cc0b653adf82ef12e6356e6706d12e17f07 Mon Sep 17 00:00:00 2001 From: Willem Date: Sat, 8 Feb 2020 17:00:22 +0100 Subject: [PATCH 10/60] Update Cameleon blacklist url to use https Switches from http to https for the Cameleon (sysctl.org) blacklist. 
Signed-off-by: canihavesomecoffee --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f99d02ab..b83f8585 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1199,7 +1199,7 @@ appendToListsFile() { case $1 in StevenBlack ) echo "https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts" >> "${adlistFile}";; MalwareDom ) echo "https://mirror1.malwaredomains.com/files/justdomains" >> "${adlistFile}";; - Cameleon ) echo "http://sysctl.org/cameleon/hosts" >> "${adlistFile}";; + Cameleon ) echo "https://sysctl.org/cameleon/hosts" >> "${adlistFile}";; DisconTrack ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt" >> "${adlistFile}";; DisconAd ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt" >> "${adlistFile}";; HostsFile ) echo "https://hosts-file.net/ad_servers.txt" >> "${adlistFile}";; From c4005c4a313608e42dadc722f00459a97197bf0a Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Tue, 11 Feb 2020 09:56:28 -0800 Subject: [PATCH 11/60] Delete FUNDING.yml Organization-wide FUNDING now set up. --- .github/FUNDING.yml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index 3a75dc12..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,4 +0,0 @@ -# These are supported funding model platforms - -patreon: pihole -custom: https://pi-hole.net/donate From dc2fce8e1d48d9013d18eac578f8e5e52670d4c8 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:26:25 +0100 Subject: [PATCH 12/60] Store gravity update timestamp only after database swapping. 
Signed-off-by: DL6ER --- gravity.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gravity.sh b/gravity.sh index 26bedae7..e6e53405 100755 --- a/gravity.sh +++ b/gravity.sh @@ -120,11 +120,11 @@ gravity_swap_databases() { # Update timestamp when the gravity table was last updated successfully update_gravity_timestamp() { - output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityTEMPfile}"; } 2>&1 ) + output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 ) status="$?" if [[ "${status}" -ne 0 ]]; then - echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n ${output}" + echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}" return 1 fi return 0 @@ -749,12 +749,12 @@ gravity_DownloadBlocklists # Create local.list gravity_generateLocalList -# Update gravity timestamp -update_gravity_timestamp - # Migrate rest of the data from old to new database gravity_swap_databases +# Update gravity timestamp +update_gravity_timestamp + # Ensure proper permissions are set for the database chown pihole:pihole "${gravityDBfile}" chmod g+w "${piholeDir}" "${gravityDBfile}" From baf5340dc0aa9344216c1c8eba589ca15888a10d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:39:12 +0100 Subject: [PATCH 13/60] Show info table instead of counting domains to speed up the debugging process on low-end hardware drastically. 
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 76a409f9..0a256a97 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1134,16 +1134,14 @@ analyze_gravity_list() { gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}") log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" - local gravity_size - gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") - log_write " Size (excluding blacklist): ${COL_CYAN}${gravity_size}${COL_NC} entries" + show_db_entries "Info table" "SELECT property,value FROM info" "20 40" log_write "" OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") - log_write " ${COL_CYAN}----- First 10 Domains -----${COL_NC}" + log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do log_write " ${line}" From 50f6fffbdc31252ae25e494398134d02a79bb57d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:43:55 +0100 Subject: [PATCH 14/60] Migrate debugger to domainlist and add printing of client table. 
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 0a256a97..c778995d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1113,18 +1113,14 @@ show_adlists() { show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } -show_whitelist() { - show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM whitelist" "4 100 7 19 19 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" +show_domainlist() { + show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } -show_blacklist() { - show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM blacklist" "4 100 7 19 19 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM 
regex_blacklist" "4 100 7 19 19 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" +show_clients() { + show_db_entries "Clients" "SELECT id,ip,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM client" "4 100 19 19 50" + show_db_entries "Client groups" "SELECT * FROM client_by_group" "10 10" } analyze_gravity_list() { @@ -1299,9 +1295,9 @@ parse_setup_vars check_x_headers analyze_gravity_list show_groups +show_domainlist +show_clients show_adlists -show_whitelist -show_blacklist show_content_of_pihole_files parse_locale analyze_pihole_log From eadd82761c97c6e10ec327871d557dc76f67be59 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:51:40 +0100 Subject: [PATCH 15/60] Add human-readable output of time of the last gravity run. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c778995d..b0af3a40 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1131,6 +1131,9 @@ analyze_gravity_list() { log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" show_db_entries "Info table" "SELECT property,value FROM info" "20 40" + gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated="$(date -d @${gravity_updated_raw})" + log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" OLD_IFS="$IFS" From f10a15146914ce02344f4cb27cb7863f681d1932 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 21:05:02 +0100 Subject: [PATCH 16/60] Fix pihole -t sed instructions. 
Signed-off-by: DL6ER --- pihole | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pihole b/pihole index e1758645..6e72b4a3 100755 --- a/pihole +++ b/pihole @@ -306,7 +306,7 @@ tailFunc() { # Colour A/AAAA/DHCP strings as white # Colour everything else as gray tail -f /var/log/pihole.log | sed -E \ - -e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \ + -e "s,($(date +'%b %d ')| dnsmasq\[[0-9]*\]),,g" \ -e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ -e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \ -e "s,.*,${COL_GRAY}&${COL_NC}," From 1072078e26d2e71a984d85744d59ff23c8f33110 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 16 Feb 2020 11:47:42 +0000 Subject: [PATCH 17/60] Change to use API instead of the Location Header (some trailing whitespace removed) Signed-off-by: Adam Warner --- automated install/basic-install.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 7b43f2a3..66f1ddc9 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -427,11 +427,11 @@ make_repo() { # Clone the repo and return the return code from this command git clone -q --depth 20 "${remoteRepo}" "${directory}" &> /dev/null || return $? # Data in the repositories is public anyway so we can make it readable by everyone (+r to keep executable permission if already set by git) - chmod -R a+rX "${directory}" + chmod -R a+rX "${directory}" # Move into the directory that was passed as an argument pushd "${directory}" &> /dev/null || return 1 # Check current branch. If it is master, then reset to the latest availible tag. 
- # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. git reset --hard "$(git describe --abbrev=0 --tags)" || return $? @@ -457,7 +457,7 @@ update_repo() { # Again, it's useful to store these in variables in case we need to reuse or change the message; # we only need to make one change here local str="Update repo in ${1}" - # Move into the directory that was passed as an argument + # Move into the directory that was passed as an argument pushd "${directory}" &> /dev/null || return 1 # Let the user know what's happening printf " %b %s..." "${INFO}" "${str}" @@ -467,7 +467,7 @@ update_repo() { # Pull the latest commits git pull --quiet &> /dev/null || return $? # Check current branch. If it is master, then reset to the latest availible tag. - # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then git reset --hard "$(git describe --abbrev=0 --tags)" || return $? @@ -529,7 +529,7 @@ resetRepo() { printf "%b %b %s\\n" "${OVER}" "${TICK}" "${str}" # Return to where we came from popd &> /dev/null || return 1 - # Returning success anyway? + # Returning success anyway? return 0 } @@ -2229,7 +2229,7 @@ FTLinstall() { printf " %b %s..." 
"${INFO}" "${str}" # Find the latest version tag for FTL - latesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep "Location" | awk -F '/' '{print $NF}') + latesttag=$(curl --silent "https://api.github.com/repos/pi-hole/ftl/releases/latest" | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') # Tags should always start with v, check for that. if [[ ! "${latesttag}" == v* ]]; then printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" From 75633f09509f3b1bfe223069bde34696be068e65 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 16 Feb 2020 21:24:32 +0100 Subject: [PATCH 18/60] Install php-intl and trust the system to install the right extension. We've seen reports that just installing php5-intl or php7-intl isn't sufficient and that we need the meta package as well. Signed-off-by: DL6ER --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 66f1ddc9..369e49e0 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -247,7 +247,7 @@ if is_command apt-get ; then PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code - PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "${phpVer}-intl") + PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "php-intl") # The Web server user, LIGHTTPD_USER="www-data" # group, From cd3ad0bdc7b6758183252667d419e5881f7c8f51 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:39:12 +0100 Subject: [PATCH 19/60] Show info table instead of counting domains to speed up the debugging process on low-end hardware drastically. 
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 76a409f9..0a256a97 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1134,16 +1134,14 @@ analyze_gravity_list() { gravity_permissions=$(ls -ld "${PIHOLE_GRAVITY_DB_FILE}") log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" - local gravity_size - gravity_size=$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT COUNT(*) FROM vw_gravity") - log_write " Size (excluding blacklist): ${COL_CYAN}${gravity_size}${COL_NC} entries" + show_db_entries "Info table" "SELECT property,value FROM info" "20 40" log_write "" OLD_IFS="$IFS" IFS=$'\r\n' local gravity_sample=() mapfile -t gravity_sample < <(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT domain FROM vw_gravity LIMIT 10") - log_write " ${COL_CYAN}----- First 10 Domains -----${COL_NC}" + log_write " ${COL_CYAN}----- First 10 Gravity Domains -----${COL_NC}" for line in "${gravity_sample[@]}"; do log_write " ${line}" From 714a79ffced5867fdfd6b43e14266a7850b07b11 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:43:55 +0100 Subject: [PATCH 20/60] Migrate debugger to domainlist and add printing of client table. 
Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 0a256a97..c778995d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1113,18 +1113,14 @@ show_adlists() { show_db_entries "Adlist groups" "SELECT * FROM adlist_by_group" "4 4" } -show_whitelist() { - show_db_entries "Exact whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM whitelist" "4 100 7 19 19 50" - show_db_entries "Exact whitelist groups" "SELECT * FROM whitelist_by_group" "4 4" - show_db_entries "Regex whitelist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM regex_whitelist" "4 100 7 19 19 50" - show_db_entries "Regex whitelist groups" "SELECT * FROM regex_whitelist_by_group" "4 4" +show_domainlist() { + show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } -show_blacklist() { - show_db_entries "Exact blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM blacklist" "4 100 7 19 19 50" - show_db_entries "Exact blacklist groups" "SELECT * FROM blacklist_by_group" "4 4" - show_db_entries "Regex blacklist" "SELECT id,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM 
regex_blacklist" "4 100 7 19 19 50" - show_db_entries "Regex blacklist groups" "SELECT * FROM regex_blacklist_by_group" "4 4" +show_clients() { + show_db_entries "Clients" "SELECT id,ip,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM client" "4 100 19 19 50" + show_db_entries "Client groups" "SELECT * FROM client_by_group" "10 10" } analyze_gravity_list() { @@ -1299,9 +1295,9 @@ parse_setup_vars check_x_headers analyze_gravity_list show_groups +show_domainlist +show_clients show_adlists -show_whitelist -show_blacklist show_content_of_pihole_files parse_locale analyze_pihole_log From d0e29ab7b0d08e16b8d7e6f69d3e5b8a544a3c32 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 12 Feb 2020 19:51:40 +0100 Subject: [PATCH 21/60] Add human-readable output of time of the last gravity run. Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index c778995d..b0af3a40 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1131,6 +1131,9 @@ analyze_gravity_list() { log_write "${COL_GREEN}${gravity_permissions}${COL_NC}" show_db_entries "Info table" "SELECT property,value FROM info" "20 40" + gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" + gravity_updated="$(date -d @${gravity_updated_raw})" + log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" OLD_IFS="$IFS" From c5c414a7a24789a41fd64fdbb1114d037ecf19b0 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Sun, 16 Feb 2020 19:24:05 -0800 Subject: [PATCH 22/60] Stickler Lint - quote to prevent splitting Signed-off-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh 
index b0af3a40..7f2b60c4 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1132,7 +1132,7 @@ analyze_gravity_list() { show_db_entries "Info table" "SELECT property,value FROM info" "20 40" gravity_updated_raw="$(sqlite3 "${PIHOLE_GRAVITY_DB_FILE}" "SELECT value FROM info where property = 'updated'")" - gravity_updated="$(date -d @${gravity_updated_raw})" + gravity_updated="$(date -d @"${gravity_updated_raw}")" log_write " Last gravity run finished at: ${COL_CYAN}${gravity_updated}${COL_NC}" log_write "" From 52398052e911edfe73fb15b5a18f601cfd636ed5 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 17 Feb 2020 21:07:48 +0100 Subject: [PATCH 23/60] Compute number of domains (and store it in the database) BEFORE calling FTL to re-read said value. Signed-off-by: DL6ER --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index f4cbe78d..50ae3883 100755 --- a/gravity.sh +++ b/gravity.sh @@ -760,14 +760,14 @@ gravity_swap_databases chown pihole:pihole "${gravityDBfile}" chmod g+w "${piholeDir}" "${gravityDBfile}" +# Compute numbers to be displayed +gravity_ShowCount + # Determine if DNS has been restarted by this instance of gravity if [[ -z "${dnsWasOffline:-}" ]]; then "${PIHOLE_COMMAND}" restartdns reload fi -# Compute numbers to be displayed -gravity_ShowCount - gravity_Cleanup echo "" From 7be019ff522e78a26e65d2b6cce2d2fba1598c1a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 17 Feb 2020 21:29:25 +0000 Subject: [PATCH 24/60] No need to determine the latest tag, we can just go direct Co-authored-by: Dan Schaper Signed-off-by: Adam Warner --- automated install/basic-install.sh | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 369e49e0..e15ce0f5 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2228,15 +2228,6 @@ FTLinstall() 
{ local str="Downloading and Installing FTL" printf " %b %s..." "${INFO}" "${str}" - # Find the latest version tag for FTL - latesttag=$(curl --silent "https://api.github.com/repos/pi-hole/ftl/releases/latest" | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') - # Tags should always start with v, check for that. - if [[ ! "${latesttag}" == v* ]]; then - printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" - printf " %bError: Unable to get latest release location from GitHub%b\\n" "${COL_LIGHT_RED}" "${COL_NC}" - return 1 - fi - # Move into the temp ftl directory pushd "$(mktemp -d)" > /dev/null || { printf "Unable to make temporary directory for FTL binary download\\n"; return 1; } @@ -2257,7 +2248,7 @@ FTLinstall() { # Determine which version of FTL to download if [[ "${ftlBranch}" == "master" ]];then - url="https://github.com/pi-hole/FTL/releases/download/${latesttag%$'\r'}" + url="https://github.com/pi-hole/ftl/releases/latest/download" else url="https://ftl.pi-hole.net/${ftlBranch}" fi From af95e8c2500bc5ac0f6cd78e9ca9ee533f66dc76 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 19 Feb 2020 17:41:53 +0000 Subject: [PATCH 25/60] force `updatecheck.sh` run if any of the three components are updated Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index e45be5cf..1b98eafb 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -198,6 +198,14 @@ main() { ${PI_HOLE_FILES_DIR}/automated\ install/basic-install.sh --reconfigure --unattended || \ echo -e "${basicError}" && exit 1 fi + + if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]] + # Force an update of the updatechecker + /opt/pihole/updatecheck.sh + /opt/pihole/updatecheck.sh x remote + echo -e " ${INFO} Local version file information updated." 
+ fi + echo "" exit 0 } From 4a5f344b099cd1a281a9a38a795cb9bc39eb0857 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 19 Feb 2020 17:46:45 +0000 Subject: [PATCH 26/60] then Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index 1b98eafb..f833fc2f 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -199,7 +199,7 @@ main() { echo -e "${basicError}" && exit 1 fi - if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]] + if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]]; then # Force an update of the updatechecker /opt/pihole/updatecheck.sh /opt/pihole/updatecheck.sh x remote From 839fe32042982b1d7109b2c8e3b818da0f9b4118 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 19:56:48 +0100 Subject: [PATCH 27/60] Fix issue with missing newline at the end of adlists (#3144) * Also display non-fatal warnings during the database importing. Previously, we have only show warnings when there were also errors (errors are always fatal). Signed-off-by: DL6ER * Ensure there is always a newline on the last line. Signed-off-by: DL6ER * Stickler linting Signed-off-by: Dan Schaper * Move sed command into subroutine to avoid code duplication. Signed-off-by: DL6ER * Also unify comments. Signed-off-by: DL6ER * Also unify comments. Signed-off-by: DL6ER Co-authored-by: Dan Schaper --- gravity.sh | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/gravity.sh b/gravity.sh index bc2cace3..b9abd83d 100755 --- a/gravity.sh +++ b/gravity.sh @@ -390,12 +390,23 @@ gravity_DownloadBlocklists() { echo -e "${OVER} ${TICK} ${str}" fi + if [[ "${status}" -eq 0 && -n "${output}" ]]; then + echo -e " Encountered non-critical SQL warnings. 
Please check the suitability of the list you're using!\\nSQL warnings:\\n${output}\\n" + fi + rm "${target}" > /dev/null 2>&1 || \ echo -e " ${CROSS} Unable to remove ${target}" gravity_Blackbody=true } +parseList() { + local adlistID="${1}" src="${2}" target="${3}" + #Append ,${arg} to every line and then remove blank lines before import + # /.$/a\\ ensures there is a newline on the last line + sed -e "s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" +} + # Download specified URL and perform checks on HTTP status and file content gravity_DownloadBlocklistFromUrl() { local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}" @@ -481,14 +492,13 @@ gravity_DownloadBlocklistFromUrl() { if [[ "${success}" == true ]]; then if [[ "${httpCode}" == "304" ]]; then # Add domains to database table file - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + parseList "${adlistID}" "${saveLocation}" "${target}" # Check if $patternbuffer is a non-zero length file elif [[ -s "${patternBuffer}" ]]; then # Determine if blocklist is non-standard and parse as appropriate gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}" - #Append ,${arg} to every line and then remove blank lines before import - sed -e "s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + # Add domains to database table file + parseList "${adlistID}" "${saveLocation}" "${target}" else # Fall back to previously cached list if $patternBuffer is empty echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" @@ -497,8 +507,8 @@ gravity_DownloadBlocklistFromUrl() { # Determine if cached list has read permission if [[ -r "${saveLocation}" ]]; then echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}" - #Append ,${arg} to every line and then remove blank lines before import - sed -e 
"s/$/,${adlistID}/;/^$/d" "${saveLocation}" >> "${target}" + # Add domains to database table file + parseList "${adlistID}" "${saveLocation}" "${target}" else echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}" fi From 3c09cd4a3ae296a6ed2d061f30cb433aa044bf45 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 21:41:28 +0100 Subject: [PATCH 28/60] Experimental output of matching line from shown warnings. Signed-off-by: DL6ER --- gravity.sh | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index b9abd83d..eac0acfe 100755 --- a/gravity.sh +++ b/gravity.sh @@ -391,7 +391,29 @@ gravity_DownloadBlocklists() { fi if [[ "${status}" -eq 0 && -n "${output}" ]]; then - echo -e " Encountered non-critical SQL warnings. Please check the suitability of the list you're using!\\nSQL warnings:\\n${output}\\n" + echo -e " Encountered non-critical SQL warnings. Please check the suitability of the lists you're using!\\n\\n SQL warnings:" + local warning file line lineno + while IFS= read -r line; do + echo " - ${line}" + warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" + file="${warning%:*}" + lineno="${warning#*:}" + if [[ -n "${file}" && -n "${lineno}" ]]; then + echo -n " Line contains: " + awk "NR==${lineno}" < ${file} + fi + done <<< "${output}" + echo "" + local file line + while IFS= read -r line; do + warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" + file="${warning%:*}" + lineno="${warning#*:}" + if [[ -n "${file}" && -n "${lineno}" ]]; then + echo -n "Line contains: " + awk "NR==${lineno}" < ${file} + fi + done <<< "${output}" fi rm "${target}" > /dev/null 2>&1 || \ From 050e2963c754cab03262bcc53948b620ca92cd01 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 21 Feb 2020 22:28:53 +0100 Subject: [PATCH 29/60] Remove redundant code. 
Signed-off-by: DL6ER --- gravity.sh | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/gravity.sh b/gravity.sh index eac0acfe..4860339f 100755 --- a/gravity.sh +++ b/gravity.sh @@ -404,16 +404,6 @@ gravity_DownloadBlocklists() { fi done <<< "${output}" echo "" - local file line - while IFS= read -r line; do - warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")" - file="${warning%:*}" - lineno="${warning#*:}" - if [[ -n "${file}" && -n "${lineno}" ]]; then - echo -n "Line contains: " - awk "NR==${lineno}" < ${file} - fi - done <<< "${output}" fi rm "${target}" > /dev/null 2>&1 || \ From 81d4531e10b07e96b863b23920d32de189b7616d Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 22 Feb 2020 13:00:38 +0100 Subject: [PATCH 30/60] Implement performant list checking routine. Signed-off-by: DL6ER --- gravity.sh | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/gravity.sh b/gravity.sh index 4860339f..30e43135 100755 --- a/gravity.sh +++ b/gravity.sh @@ -412,11 +412,36 @@ gravity_DownloadBlocklists() { gravity_Blackbody=true } +total_num=0 parseList() { - local adlistID="${1}" src="${2}" target="${3}" + local adlistID="${1}" src="${2}" target="${3}" incorrect_lines #Append ,${arg} to every line and then remove blank lines before import # /.$/a\\ ensures there is a newline on the last line - sed -e "s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" + + local num_lines num_target_lines num_correct_lines percentage percentage_fraction + num_lines="$(grep -c "^" "${src}")" + #num_correct_lines="$(grep -c "^[a-zA-Z0-9.-]*$" "${src}")" + num_target_lines="$(grep -c "^" "${target}")" + num_correct_lines="$(( num_target_lines-total_num ))" + total_num="$num_target_lines" + percentage=100 + percentage_fraction=0 + if [[ "${num_lines}" -gt 0 ]]; then + percentage="$(( 
1000*num_correct_lines/num_lines ))" + percentage_fraction="$(( percentage%10 ))" + percentage="$(( percentage/10 ))" + fi + echo " ${INFO} List quality: ${num_correct_lines} of ${num_lines} lines importable (${percentage}.${percentage_fraction}%)" + + if [[ -n "${incorrect_lines}" ]]; then + echo " Example for invalid domains (showing only the first five):" + while IFS= read -r line; do + echo " - ${line}" + done <<< "${incorrect_lines}" + fi + } # Download specified URL and perform checks on HTTP status and file content From 8131b5961cdaf0df890408d1400f11e67b2d56e9 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 22 Feb 2020 15:17:24 +0100 Subject: [PATCH 31/60] Add comments to the code describing the changes. Signed-off-by: DL6ER --- gravity.sh | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/gravity.sh b/gravity.sh index 30e43135..b3a70f74 100755 --- a/gravity.sh +++ b/gravity.sh @@ -400,7 +400,7 @@ gravity_DownloadBlocklists() { lineno="${warning#*:}" if [[ -n "${file}" && -n "${lineno}" ]]; then echo -n " Line contains: " - awk "NR==${lineno}" < ${file} + awk "NR==${lineno}" < "${file}" fi done <<< "${output}" echo "" @@ -415,17 +415,22 @@ gravity_DownloadBlocklists() { total_num=0 parseList() { local adlistID="${1}" src="${2}" target="${3}" incorrect_lines - #Append ,${arg} to every line and then remove blank lines before import - # /.$/a\\ ensures there is a newline on the last line - sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/^$/d;/.$/a\\" "${src}" >> "${target}" + # This sed does the following things: + # 1. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_) + # 2. Append ,adlistID to every line + # 3. 
Ensures there is a newline on the last line + sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/.$/a\\" "${src}" >> "${target}" + # Find (up to) five domains containing invalid characters (see above) incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" local num_lines num_target_lines num_correct_lines percentage percentage_fraction + # Get number of lines in source file num_lines="$(grep -c "^" "${src}")" - #num_correct_lines="$(grep -c "^[a-zA-Z0-9.-]*$" "${src}")" + # Get number of lines in destination file num_target_lines="$(grep -c "^" "${target}")" num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" + # Compute percentage of valid lines percentage=100 percentage_fraction=0 if [[ "${num_lines}" -gt 0 ]]; then @@ -433,15 +438,15 @@ parseList() { percentage_fraction="$(( percentage%10 ))" percentage="$(( percentage/10 ))" fi - echo " ${INFO} List quality: ${num_correct_lines} of ${num_lines} lines importable (${percentage}.${percentage_fraction}%)" + echo " ${INFO} ${num_correct_lines} of ${num_lines} domains imported (${percentage}.${percentage_fraction}%)" + # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then - echo " Example for invalid domains (showing only the first five):" + echo " Sample of invalid domains (showing up to five):" while IFS= read -r line; do echo " - ${line}" done <<< "${incorrect_lines}" fi - } # Download specified URL and perform checks on HTTP status and file content From 545b6605bc03dacc31e3f96d20beea5065c50bfe Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sun, 23 Feb 2020 21:34:12 +0000 Subject: [PATCH 32/60] 4.3.3 (#3154) * Backport ee7090b8fcb11f090741b33eb68822bcc96135e6 to v4 to prevent failures in FTL download * update tests to reflect changes to FTL download URL * backport `tbd` fix Signed-off-by: Adam Warner --- advanced/Scripts/piholeCheckout.sh | 9 ++-- advanced/Scripts/update.sh | 9 +++- automated install/basic-install.sh | 60 
++++++++++++--------- test/test_automated_install.py | 87 +++++++++++------------------- 4 files changed, 81 insertions(+), 84 deletions(-) diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh index c4b07a98..dd57117f 100644 --- a/advanced/Scripts/piholeCheckout.sh +++ b/advanced/Scripts/piholeCheckout.sh @@ -46,6 +46,12 @@ checkout() { local corebranches local webbranches + # Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + # Avoid globbing set -f @@ -86,7 +92,6 @@ checkout() { fi #echo -e " ${TICK} Pi-hole Core" - get_binary_name local path path="development/${binary}" echo "development" > /etc/pihole/ftlbranch @@ -100,7 +105,6 @@ checkout() { fetch_checkout_pull_branch "${webInterfaceDir}" "master" || { echo " ${CROSS} Unable to pull Web master branch"; exit 1; } fi #echo -e " ${TICK} Web Interface" - get_binary_name local path path="master/${binary}" echo "master" > /etc/pihole/ftlbranch @@ -159,7 +163,6 @@ checkout() { fi checkout_pull_branch "${webInterfaceDir}" "${2}" elif [[ "${1}" == "ftl" ]] ; then - get_binary_name local path path="${2}/${binary}" diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index 4d352777..503d3042 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -31,7 +31,6 @@ source "/opt/pihole/COL_TABLE" # make_repo() sourced from basic-install.sh # update_repo() source from basic-install.sh # getGitFiles() sourced from basic-install.sh -# get_binary_name() sourced from basic-install.sh # FTLcheckUpdate() sourced from basic-install.sh GitCheckUpdateAvail() { @@ -194,6 +193,14 @@ main() { ${PI_HOLE_FILES_DIR}/automated\ install/basic-install.sh --reconfigure --unattended || \ echo 
-e "${basicError}" && exit 1 fi + + if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]]; then + # Force an update of the updatechecker + /opt/pihole/updatecheck.sh + /opt/pihole/updatecheck.sh x remote + echo -e " ${INFO} Local version file information updated." + fi + echo "" exit 0 } diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f99d02ab..9986df3c 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -139,9 +139,6 @@ else OVER="\\r\\033[K" fi -# Define global binary variable -binary="tbd" - # A simple function that just echoes out our logo in ASCII format # This lets users know that it is a Pi-hole, LLC product show_ascii_berry() { @@ -2154,21 +2151,15 @@ clone_or_update_repos() { } # Download FTL binary to random temp directory and install FTL binary +# Disable directive for SC2120 a value _can_ be passed to this function, but it is passed from an external script that sources this one +# shellcheck disable=SC2120 FTLinstall() { + # Local, named variables local latesttag local str="Downloading and Installing FTL" printf " %b %s..." "${INFO}" "${str}" - # Find the latest version tag for FTL - latesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep "Location" | awk -F '/' '{print $NF}') - # Tags should always start with v, check for that. - if [[ ! 
"${latesttag}" == v* ]]; then - printf "%b %b %s\\n" "${OVER}" "${CROSS}" "${str}" - printf " %bError: Unable to get latest release location from GitHub%b\\n" "${COL_LIGHT_RED}" "${COL_NC}" - return 1 - fi - # Move into the temp ftl directory pushd "$(mktemp -d)" > /dev/null || { printf "Unable to make temporary directory for FTL binary download\\n"; return 1; } @@ -2184,9 +2175,12 @@ FTLinstall() { ftlBranch="master" fi + local binary + binary="${1}" + # Determine which version of FTL to download if [[ "${ftlBranch}" == "master" ]];then - url="https://github.com/pi-hole/FTL/releases/download/${latesttag%$'\r'}" + url="https://github.com/pi-hole/ftl/releases/latest/download" else url="https://ftl.pi-hole.net/${ftlBranch}" fi @@ -2259,6 +2253,8 @@ get_binary_name() { local machine machine=$(uname -m) + local l_binary + local str="Detecting architecture" printf " %b %s..." "${INFO}" "${str}" # If the machine is arm or aarch @@ -2274,24 +2270,24 @@ get_binary_name() { if [[ "${lib}" == "/lib/ld-linux-aarch64.so.1" ]]; then printf "%b %b Detected ARM-aarch64 architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-aarch64-linux-gnu" + l_binary="pihole-FTL-aarch64-linux-gnu" # elif [[ "${lib}" == "/lib/ld-linux-armhf.so.3" ]]; then # if [[ "${rev}" -gt 6 ]]; then printf "%b %b Detected ARM-hf architecture (armv7+)\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabihf" + l_binary="pihole-FTL-arm-linux-gnueabihf" # Otherwise, else printf "%b %b Detected ARM-hf architecture (armv6 or lower) Using ARM binary\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabi" + l_binary="pihole-FTL-arm-linux-gnueabi" fi else printf "%b %b Detected ARM architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-arm-linux-gnueabi" + l_binary="pihole-FTL-arm-linux-gnueabi" fi elif [[ "${machine}" == "x86_64" ]]; then # This gives the architecture of packages 
dpkg installs (for example, "i386") @@ -2304,12 +2300,12 @@ get_binary_name() { # in the past (see https://github.com/pi-hole/pi-hole/pull/2004) if [[ "${dpkgarch}" == "i386" ]]; then printf "%b %b Detected 32bit (i686) architecture\\n" "${OVER}" "${TICK}" - binary="pihole-FTL-linux-x86_32" + l_binary="pihole-FTL-linux-x86_32" else # 64bit printf "%b %b Detected x86_64 architecture\\n" "${OVER}" "${TICK}" # set the binary to be used - binary="pihole-FTL-linux-x86_64" + l_binary="pihole-FTL-linux-x86_64" fi else # Something else - we try to use 32bit executable and warn the user @@ -2320,13 +2316,13 @@ get_binary_name() { else printf "%b %b Detected 32bit (i686) architecture\\n" "${OVER}" "${TICK}" fi - binary="pihole-FTL-linux-x86_32" + l_binary="pihole-FTL-linux-x86_32" fi + + echo ${l_binary} } FTLcheckUpdate() { - get_binary_name - #In the next section we check to see if FTL is already installed (in case of pihole -r). #If the installed version matches the latest version, then check the installed sha1sum of the binary vs the remote sha1sum. If they do not match, then download printf " %b Checking for existing FTL binary...\\n" "${INFO}" @@ -2342,6 +2338,9 @@ FTLcheckUpdate() { ftlBranch="master" fi + local binary + binary="${1}" + local remoteSha1 local localSha1 @@ -2420,8 +2419,10 @@ FTLcheckUpdate() { FTLdetect() { printf "\\n %b FTL Checks...\\n\\n" "${INFO}" - if FTLcheckUpdate ; then - FTLinstall || return 1 + printf " %b" "${2}" + + if FTLcheckUpdate "${1}"; then + FTLinstall "${1}" || return 1 fi } @@ -2581,8 +2582,17 @@ main() { else LIGHTTPD_ENABLED=false fi + # Create the pihole user + create_pihole_user + # Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole - if ! 
FTLdetect; then + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + local theRest + theRest="${funcOutput%pihole-FTL*}" # Print the rest of get_binary_name's output to display (cut out from first instance of "pihole-FTL") + if ! FTLdetect "${binary}" "${theRest}"; then printf " %b FTL Engine not installed\\n" "${CROSS}" exit 1 fi diff --git a/test/test_automated_install.py b/test/test_automated_install.py index be8fdcfa..0c304c40 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -398,7 +398,11 @@ def test_FTL_detect_aarch64_no_errors(Pihole): ) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -418,7 +422,11 @@ def test_FTL_detect_armv6l_no_errors(Pihole): mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' 
assert expected_stdout in detectPlatform.stdout @@ -439,7 +447,11 @@ def test_FTL_detect_armv7l_no_errors(Pihole): mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -455,7 +467,11 @@ def test_FTL_detect_x86_64_no_errors(Pihole): ''' detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = info_box + ' FTL Checks...' assert expected_stdout in detectPlatform.stdout @@ -471,7 +487,11 @@ def test_FTL_detect_unknown_no_errors(Pihole): mock_command('uname', {'-m': ('mips', '0')}, Pihole) detectPlatform = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" ''') expected_stdout = 'Not able to detect architecture (unknown: mips)' assert expected_stdout in detectPlatform.stdout @@ -490,68 +510,25 @@ def test_FTL_download_aarch64_no_errors(Pihole): ''') download_binary = Pihole.run(''' source /opt/pihole/basic-install.sh - binary="pihole-FTL-aarch64-linux-gnu" - FTLinstall + create_pihole_user + FTLinstall "pihole-FTL-aarch64-linux-gnu" ''') expected_stdout = tick_box + ' Downloading and Installing FTL' assert expected_stdout in download_binary.stdout assert 'error' not in download_binary.stdout.lower() -def test_FTL_download_unknown_fails_no_errors(Pihole): - ''' - confirms unknown binary is not 
downloaded for FTL engine - ''' - # mock whiptail answers and ensure installer dependencies - mock_command('whiptail', {'*': ('', '0')}, Pihole) - Pihole.run(''' - source /opt/pihole/basic-install.sh - distro_check - install_dependent_packages ${INSTALLER_DEPS[@]} - ''') - download_binary = Pihole.run(''' - source /opt/pihole/basic-install.sh - binary="pihole-FTL-mips" - FTLinstall - ''') - expected_stdout = cross_box + ' Downloading and Installing FTL' - assert expected_stdout in download_binary.stdout - error1 = 'Error: URL https://github.com/pi-hole/FTL/releases/download/' - assert error1 in download_binary.stdout - error2 = 'not found' - assert error2 in download_binary.stdout - - -def test_FTL_download_binary_unset_no_errors(Pihole): - ''' - confirms unset binary variable does not download FTL engine - ''' - # mock whiptail answers and ensure installer dependencies - mock_command('whiptail', {'*': ('', '0')}, Pihole) - Pihole.run(''' - source /opt/pihole/basic-install.sh - distro_check - install_dependent_packages ${INSTALLER_DEPS[@]} - ''') - download_binary = Pihole.run(''' - source /opt/pihole/basic-install.sh - FTLinstall - ''') - expected_stdout = cross_box + ' Downloading and Installing FTL' - assert expected_stdout in download_binary.stdout - error1 = 'Error: URL https://github.com/pi-hole/FTL/releases/download/' - assert error1 in download_binary.stdout - error2 = 'not found' - assert error2 in download_binary.stdout - - def test_FTL_binary_installed_and_responsive_no_errors(Pihole): ''' confirms FTL binary is copied and functional in installed location ''' installed_binary = Pihole.run(''' source /opt/pihole/basic-install.sh - FTLdetect + create_pihole_user + funcOutput=$(get_binary_name) + binary="pihole-FTL${funcOutput##*pihole-FTL}" + theRest="${funcOutput%pihole-FTL*}" + FTLdetect "${binary}" "${theRest}" pihole-FTL version ''') expected_stdout = 'v' From 1e8bfd33f5375bf4e2d2967be08bd544dad86d71 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sun, 23 
Feb 2020 22:50:06 +0100 Subject: [PATCH 33/60] Improve output Signed-off-by: DL6ER --- gravity.sh | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/gravity.sh b/gravity.sh index b3a70f74..d09211c8 100755 --- a/gravity.sh +++ b/gravity.sh @@ -423,26 +423,19 @@ parseList() { # Find (up to) five domains containing invalid characters (see above) incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)" - local num_lines num_target_lines num_correct_lines percentage percentage_fraction + local num_lines num_target_lines num_correct_lines num_invalid # Get number of lines in source file num_lines="$(grep -c "^" "${src}")" # Get number of lines in destination file num_target_lines="$(grep -c "^" "${target}")" num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" - # Compute percentage of valid lines - percentage=100 - percentage_fraction=0 - if [[ "${num_lines}" -gt 0 ]]; then - percentage="$(( 1000*num_correct_lines/num_lines ))" - percentage_fraction="$(( percentage%10 ))" - percentage="$(( percentage/10 ))" - fi - echo " ${INFO} ${num_correct_lines} of ${num_lines} domains imported (${percentage}.${percentage_fraction}%)" + num_invalid="$(( num_lines-num_correct_lines ))" + echo " ${INFO} Imported ${num_correct_lines} of ${num_lines} domains, ${num_invalid} domains invalid" # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then - echo " Sample of invalid domains (showing up to five):" + echo " Sample of invalid domains:" while IFS= read -r line; do echo " - ${line}" done <<< "${incorrect_lines}" From 3dd05606ca4714023124b579c648b991fc57af77 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 24 Feb 2020 07:06:15 +0100 Subject: [PATCH 34/60] Call it the received number of domains instead of the imported number as importing does only happen a bit later. Only show the number of invalid domains if there are invalid domains. 
Signed-off-by: DL6ER --- gravity.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/gravity.sh b/gravity.sh index d09211c8..c421e832 100755 --- a/gravity.sh +++ b/gravity.sh @@ -431,7 +431,11 @@ parseList() { num_correct_lines="$(( num_target_lines-total_num ))" total_num="$num_target_lines" num_invalid="$(( num_lines-num_correct_lines ))" - echo " ${INFO} Imported ${num_correct_lines} of ${num_lines} domains, ${num_invalid} domains invalid" + if [[ "${num_invalid}" -eq 0 ]]; then + echo " ${INFO} Received ${num_lines} domains" + else + echo " ${INFO} Received ${num_lines} domains, ${num_invalid} domains invalid!" + fi # Display sample of invalid lines if we found some if [[ -n "${incorrect_lines}" ]]; then From 707e21b92755e9e75a5b9b1f2b19c6cc9c1b1ca9 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 4 Dec 2019 20:09:34 +0000 Subject: [PATCH 35/60] :dominik: Detect binary name before calling FTLcheckUpdate in update.sh Signed-off-by: Adam Warner --- advanced/Scripts/update.sh | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/advanced/Scripts/update.sh b/advanced/Scripts/update.sh index 503d3042..f833fc2f 100755 --- a/advanced/Scripts/update.sh +++ b/advanced/Scripts/update.sh @@ -128,7 +128,12 @@ main() { fi fi - if FTLcheckUpdate > /dev/null; then + local funcOutput + funcOutput=$(get_binary_name) #Store output of get_binary_name here + local binary + binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL) + + if FTLcheckUpdate "${binary}" > /dev/null; then FTL_update=true echo -e " ${INFO} FTL:\\t\\t${COL_YELLOW}update available${COL_NC}" else From 0fbcc6d8b5f23065d33870f069d5b919fd0cd52a Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 24 Feb 2020 09:38:37 -0800 Subject: [PATCH 36/60] Compare daemons to expected results. 
(#3158) Signed-off-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 1010f26c..d46944d6 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -643,19 +643,21 @@ ping_internet() { } compare_port_to_service_assigned() { - local service_name="${1}" - # The programs we use may change at some point, so they are in a varible here - local resolver="pihole-FTL" - local web_server="lighttpd" - local ftl="pihole-FTL" + local service_name + local expected_service + local port + + service_name="${2}" + expected_service="${1}" + port="${3}" # If the service is a Pi-hole service, highlight it in green - if [[ "${service_name}" == "${resolver}" ]] || [[ "${service_name}" == "${web_server}" ]] || [[ "${service_name}" == "${ftl}" ]]; then - log_write "[${COL_GREEN}${port_number}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" + if [[ "${service_name}" == "${expected_service}" ]]; then + log_write "[${COL_GREEN}${port}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" # Otherwise, else # Show the service name in red since it's non-standard - log_write "[${COL_RED}${port_number}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})" + log_write "[${COL_RED}${port}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})" fi } @@ -689,11 +691,11 @@ check_required_ports() { fi # Use a case statement to determine if the right services are using the right ports case "$(echo "$port_number" | rev | cut -d: -f1 | rev)" in - 53) compare_port_to_service_assigned "${resolver}" + 53) compare_port_to_service_assigned "${resolver}" "${service_name}" 53 ;; - 80) compare_port_to_service_assigned "${web_server}" + 80) compare_port_to_service_assigned "${web_server}" "${service_name}" 80 ;; - 4711) 
compare_port_to_service_assigned "${ftl}" + 4711) compare_port_to_service_assigned "${ftl}" "${service_name}" 4711 ;; # If it's not a default port that Pi-hole needs, just print it out for the user to see *) log_write "${port_number} ${service_name} (${protocol_type})"; From 8ecaaba2479492a9df5a9e84377dcdeec56d158f Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 24 Feb 2020 18:00:19 +0000 Subject: [PATCH 37/60] Compare daemons to expected results. (#3158) (#3159) Signed-off-by: Dan Schaper Co-authored-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 7f2b60c4..28d34ab6 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -662,19 +662,21 @@ ping_internet() { } compare_port_to_service_assigned() { - local service_name="${1}" - # The programs we use may change at some point, so they are in a varible here - local resolver="pihole-FTL" - local web_server="lighttpd" - local ftl="pihole-FTL" + local service_name + local expected_service + local port + + service_name="${2}" + expected_service="${1}" + port="${3}" # If the service is a Pi-hole service, highlight it in green - if [[ "${service_name}" == "${resolver}" ]] || [[ "${service_name}" == "${web_server}" ]] || [[ "${service_name}" == "${ftl}" ]]; then - log_write "[${COL_GREEN}${port_number}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" + if [[ "${service_name}" == "${expected_service}" ]]; then + log_write "[${COL_GREEN}${port}${COL_NC}] is in use by ${COL_GREEN}${service_name}${COL_NC}" # Otherwise, else # Show the service name in red since it's non-standard - log_write "[${COL_RED}${port_number}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} (${FAQ_HARDWARE_REQUIREMENTS_PORTS})" + log_write "[${COL_RED}${port}${COL_NC}] is in use by ${COL_RED}${service_name}${COL_NC} 
(${FAQ_HARDWARE_REQUIREMENTS_PORTS})" fi } @@ -708,11 +710,11 @@ check_required_ports() { fi # Use a case statement to determine if the right services are using the right ports case "$(echo "$port_number" | rev | cut -d: -f1 | rev)" in - 53) compare_port_to_service_assigned "${resolver}" + 53) compare_port_to_service_assigned "${resolver}" "${service_name}" 53 ;; - 80) compare_port_to_service_assigned "${web_server}" + 80) compare_port_to_service_assigned "${web_server}" "${service_name}" 80 ;; - 4711) compare_port_to_service_assigned "${ftl}" + 4711) compare_port_to_service_assigned "${ftl}" "${service_name}" 4711 ;; # If it's not a default port that Pi-hole needs, just print it out for the user to see *) log_write "${port_number} ${service_name} (${protocol_type})"; From b4c2bf678f4715da45cd22dcf56ca68d1a877112 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 24 Feb 2020 20:02:48 +0000 Subject: [PATCH 38/60] Safeguard against colour output in grep commandadd -i to grep to make search for "Location" case-insensitive Signed-off-by: Adam Warner --- automated install/basic-install.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 9986df3c..ee4793e7 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2382,17 +2382,14 @@ FTLcheckUpdate() { if [[ ${ftlLoc} ]]; then local FTLversion FTLversion=$(/usr/bin/pihole-FTL tag) - local FTLreleaseData local FTLlatesttag - if ! FTLreleaseData=$(curl -sI https://github.com/pi-hole/FTL/releases/latest); then + if ! 
FTLlatesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep --color=never -i Location | awk -F / '{print $NF}' | tr -d '[:cntrl:]'); then # There was an issue while retrieving the latest version printf " %b Failed to retrieve latest FTL release metadata" "${CROSS}" return 3 fi - FTLlatesttag=$(grep 'Location' <<< "${FTLreleaseData}" | awk -F '/' '{print $NF}' | tr -d '\r\n') - if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then return 0 else From 6104d816226761ffc47279fa9b0cdcf54af472f7 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Mon, 24 Feb 2020 20:02:48 +0000 Subject: [PATCH 39/60] Safeguard against colour output in grep commandadd -i to grep to make search for "Location" case-insensitive Signed-off-by: Adam Warner --- automated install/basic-install.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index e15ce0f5..65c72b40 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -2459,17 +2459,14 @@ FTLcheckUpdate() { if [[ ${ftlLoc} ]]; then local FTLversion FTLversion=$(/usr/bin/pihole-FTL tag) - local FTLreleaseData local FTLlatesttag - if ! FTLreleaseData=$(curl -sI https://github.com/pi-hole/FTL/releases/latest); then + if ! 
FTLlatesttag=$(curl -sI https://github.com/pi-hole/FTL/releases/latest | grep --color=never -i Location | awk -F / '{print $NF}' | tr -d '[:cntrl:]'); then # There was an issue while retrieving the latest version printf " %b Failed to retrieve latest FTL release metadata" "${CROSS}" return 3 fi - FTLlatesttag=$(grep 'Location' <<< "${FTLreleaseData}" | awk -F '/' '{print $NF}' | tr -d '\r\n') - if [[ "${FTLversion}" != "${FTLlatesttag}" ]]; then return 0 else From 1c74b41869887afb87cc818e1be54d5a108db866 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Sat, 7 Sep 2019 23:11:20 +0200 Subject: [PATCH 40/60] Add use-application-dns.net = NXDOMAIN in ProcessDNSSettings rather than in the template so we can ensure that it will survive config-renewals. Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 600a45a5..3b17b6b5 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -211,6 +211,11 @@ trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC68345710423 add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_DOMAIN}/${CONDITIONAL_FORWARDING_IP}" add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_REVERSE}/${CONDITIONAL_FORWARDING_IP}" fi + + # Prevent Firefox from automatically switching over to DNS-over-HTTPS + # This follows https://support.mozilla.org/en-US/kb/configuring-networks-disable-dns-over-https + # (sourced 7th September 2019) + add_dnsmasq_setting "server=/use-application-dns.net/" } SetDNSServers() { From d16b47259213d389075e3aa0be9f780519c6d1df Mon Sep 17 00:00:00 2001 From: msamendinger Date: Sun, 1 Mar 2020 21:47:56 +0100 Subject: [PATCH 41/60] Remove wget from PIHOLE_DEPS All external calls have been moved to curl, wget no longer needed as dependency Signed-off-by: msamendinger --- automated install/basic-install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/automated 
install/basic-install.sh b/automated install/basic-install.sh index 65c72b40..5aaa4a75 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -244,7 +244,7 @@ if is_command apt-get ; then # These programs are stored in an array so they can be looped through later INSTALLER_DEPS=(dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed - PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) + PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "php-intl") @@ -286,7 +286,7 @@ elif is_command rpm ; then PKG_INSTALL=("${PKG_MANAGER}" install -y) PKG_COUNT="${PKG_MANAGER} check-update | egrep '(.i686|.x86|.noarch|.arm|.src)' | wc -l" INSTALLER_DEPS=(git iproute newt procps-ng which chkconfig) - PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip wget libidn2 psmisc sqlite libcap) + PIHOLE_DEPS=(bind-utils cronie curl findutils nmap-ncat sudo unzip libidn2 psmisc sqlite libcap) PIHOLE_WEB_DEPS=(lighttpd lighttpd-fastcgi php-common php-cli php-pdo php-xml php-json php-intl) LIGHTTPD_USER="lighttpd" LIGHTTPD_GROUP="lighttpd" From 4f390ce801c739ea87cfcae131855ecbd9161818 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 05:39:21 -0800 Subject: [PATCH 42/60] Use bash regex instead of awk. 
Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a96129e0..73650400 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -33,15 +33,13 @@ scanList(){ export LC_CTYPE=C # /dev/null forces filename to be printed when only one list has been generated - # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; # Create array of regexps # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ - <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; + "regex" ) if [[ "${domain}" =~ ${lists} ]]; then printf "%b\n" "${lists}"; fi;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } From 360d0e4e6bfb5c71e078be41d21132422fb20323 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 08:07:10 -0800 Subject: [PATCH 43/60] Loop through array of lists. 
Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 73650400..4dc9429d 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -35,11 +35,15 @@ scanList(){ # /dev/null forces filename to be printed when only one list has been generated case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - # Create array of regexps # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) if [[ "${domain}" =~ ${lists} ]]; then printf "%b\n" "${lists}"; fi;; + "regex" ) + for list in `echo "${lists}"`; do + if [[ "${domain}" =~ ${list} ]]; then + printf "%b\n" "${list}"; + fi + done;; * ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;; esac } From bf4fada3b7188630ecb4656b123a94110998cacd Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Mon, 2 Mar 2020 09:52:06 -0800 Subject: [PATCH 44/60] Don't quote inside backticks, use unquoted variable. 
Signed-off-by: Dan Schaper --- advanced/Scripts/query.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index 4dc9429d..7518e6c4 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -39,7 +39,7 @@ scanList(){ # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery "regex" ) - for list in `echo "${lists}"`; do + for list in ${lists}; do if [[ "${domain}" =~ ${list} ]]; then printf "%b\n" "${list}"; fi From 4a711340ef203221e18e31eb05c16aa89512eb60 Mon Sep 17 00:00:00 2001 From: jnozsc Date: Mon, 2 Mar 2020 23:30:44 -0800 Subject: [PATCH 45/60] use py3 instead py2 (#3153) * use py3 instead py2 Signed-off-by: jnozsc * use python 3.6 Signed-off-by: jnozsc --- .travis.yml | 2 +- test/conftest.py | 14 ++++++-------- test/test_000_build_containers.py | 4 ++-- test/test_automated_install.py | 10 +++++----- test/test_centos_fedora_support.py | 3 +-- test/test_shellcheck.py | 2 +- tox.ini | 2 +- 7 files changed, 17 insertions(+), 20 deletions(-) diff --git a/.travis.yml b/.travis.yml index fa525e01..274c28cb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ services: - docker language: python python: - - "2.7" + - "3.6" install: - pip install -r requirements.txt diff --git a/test/conftest.py b/test/conftest.py index 58530d38..5b8be41e 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -14,9 +14,9 @@ SETUPVARS = { 'PIHOLE_DNS_2': '4.2.2.2' } -tick_box = "[\x1b[1;32m\xe2\x9c\x93\x1b[0m]".decode("utf-8") -cross_box = "[\x1b[1;31m\xe2\x9c\x97\x1b[0m]".decode("utf-8") -info_box = "[i]".decode("utf-8") +tick_box = "[\x1b[1;32m\u2713\x1b[0m]" +cross_box = "[\x1b[1;31m\u2717\x1b[0m]" +info_box = "[i]" @pytest.fixture @@ -38,9 +38,7 @@ def Pihole(Docker): return out funcType = type(Docker.run) - Docker.run = funcType(run_bash, - Docker, - testinfra.backend.docker.DockerBackend) + Docker.run = funcType(run_bash, Docker) return 
Docker @@ -106,7 +104,7 @@ def mock_command(script, args, container): #!/bin/bash -e echo "\$0 \$@" >> /var/log/{script} case "\$1" in'''.format(script=script)) - for k, v in args.iteritems(): + for k, v in args.items(): case = dedent(''' {arg}) echo {res} @@ -133,7 +131,7 @@ def mock_command_2(script, args, container): #!/bin/bash -e echo "\$0 \$@" >> /var/log/{script} case "\$1 \$2" in'''.format(script=script)) - for k, v in args.iteritems(): + for k, v in args.items(): case = dedent(''' \"{arg}\") echo \"{res}\" diff --git a/test/test_000_build_containers.py b/test/test_000_build_containers.py index e9e9e7db..bca67989 100644 --- a/test/test_000_build_containers.py +++ b/test/test_000_build_containers.py @@ -18,6 +18,6 @@ run_local = testinfra.get_backend( def test_build_pihole_image(image, tag): build_cmd = run_local('docker build -f {} -t {} .'.format(image, tag)) if build_cmd.rc != 0: - print build_cmd.stdout - print build_cmd.stderr + print(build_cmd.stdout) + print(build_cmd.stderr) assert build_cmd.rc == 0 diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 567ea241..4e9a7eef 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -1,6 +1,6 @@ from textwrap import dedent import re -from conftest import ( +from .conftest import ( SETUPVARS, tick_box, info_box, @@ -34,7 +34,7 @@ def test_setupVars_are_sourced_to_global_scope(Pihole): This confirms the sourced variables are in scope between functions ''' setup_var_file = 'cat < /etc/pihole/setupVars.conf\n' - for k, v in SETUPVARS.iteritems(): + for k, v in SETUPVARS.items(): setup_var_file += "{}={}\n".format(k, v) setup_var_file += "EOF\n" Pihole.run(setup_var_file) @@ -59,7 +59,7 @@ def test_setupVars_are_sourced_to_global_scope(Pihole): output = run_script(Pihole, script).stdout - for k, v in SETUPVARS.iteritems(): + for k, v in SETUPVARS.items(): assert "{}={}".format(k, v) in output @@ -69,7 +69,7 @@ def test_setupVars_saved_to_file(Pihole): ''' 
# dedent works better with this and padding matching script below set_setup_vars = '\n' - for k, v in SETUPVARS.iteritems(): + for k, v in SETUPVARS.items(): set_setup_vars += " {}={}\n".format(k, v) Pihole.run(set_setup_vars).stdout @@ -88,7 +88,7 @@ def test_setupVars_saved_to_file(Pihole): output = run_script(Pihole, script).stdout - for k, v in SETUPVARS.iteritems(): + for k, v in SETUPVARS.items(): assert "{}={}".format(k, v) in output diff --git a/test/test_centos_fedora_support.py b/test/test_centos_fedora_support.py index aee16212..4b405920 100644 --- a/test/test_centos_fedora_support.py +++ b/test/test_centos_fedora_support.py @@ -1,10 +1,9 @@ import pytest -from conftest import ( +from .conftest import ( tick_box, info_box, cross_box, mock_command, - mock_command_2, ) diff --git a/test/test_shellcheck.py b/test/test_shellcheck.py index 43e8ad6f..919ad2b9 100644 --- a/test/test_shellcheck.py +++ b/test/test_shellcheck.py @@ -14,5 +14,5 @@ def test_scripts_pass_shellcheck(): "shellcheck -x \"$file\" -e SC1090,SC1091; " "done;") results = run_local(shellcheck) - print results.stdout + print(results.stdout) assert '' == results.stdout diff --git a/tox.ini b/tox.ini index e7916e04..249575b6 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27 +envlist = py36 [testenv] whitelist_externals = docker From 22ce5c0d70e48f8e806dd79758359ccb623efb20 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Mar 2020 00:32:37 +0100 Subject: [PATCH 46/60] Fix incorrect type description. 
(#3201) Signed-off-by: DL6ER --- advanced/Scripts/piholeDebug.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 28d34ab6..304dc666 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1116,7 +1116,7 @@ show_adlists() { } show_domainlist() { - show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist (0/1 = exact white-/blacklist, 2/3 = regex white-/blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } From 497bfd80a5419e9057305dbc4f0ed281a917756c Mon Sep 17 00:00:00 2001 From: DL6ER Date: Mon, 9 Mar 2020 00:38:53 +0100 Subject: [PATCH 47/60] Update development from release/v5.0 (#3200) * Use bash regex instead of awk. Signed-off-by: Dan Schaper * Fix incorrect type description. 
(#3201) Signed-off-by: DL6ER Co-authored-by: Dan Schaper --- advanced/Scripts/piholeDebug.sh | 2 +- advanced/Scripts/query.sh | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 28d34ab6..304dc666 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -1116,7 +1116,7 @@ show_adlists() { } show_domainlist() { - show_db_entries "Domainlist (0/1 = exact/regex whitelist, 2/3 = exact/regex blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" + show_db_entries "Domainlist (0/1 = exact white-/blacklist, 2/3 = regex white-/blacklist)" "SELECT id,type,domain,enabled,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist" "4 4 100 7 19 19 50" show_db_entries "Domainlist groups" "SELECT * FROM domainlist_by_group" "10 10" } diff --git a/advanced/Scripts/query.sh b/advanced/Scripts/query.sh index a96129e0..7518e6c4 100755 --- a/advanced/Scripts/query.sh +++ b/advanced/Scripts/query.sh @@ -33,15 +33,17 @@ scanList(){ export LC_CTYPE=C # /dev/null forces filename to be printed when only one list has been generated - # shellcheck disable=SC2086 case "${type}" in "exact" ) grep -i -E -l "(^|(?/dev/null;; - # Create array of regexps # Iterate through each regexp and check whether it matches the domainQuery # If it does, print the matching regexp and continue looping # Input 1 - regexps | Input 2 - domainQuery - "regex" ) awk 'NR==FNR{regexps[$0];next}{for (r in regexps)if($0 ~ r)print r}' \ - <(echo "${lists}") <(echo "${domain}") 2>/dev/null;; + "regex" ) + for list in ${lists}; do + if [[ "${domain}" =~ ${list} ]]; then + printf "%b\n" "${list}"; + fi + done;; * ) grep -i "${esc_domain}" ${lists} /dev/null 
2>/dev/null;; esac } From 7b8611ced064bb5f697a633f01e58828eb6e2e46 Mon Sep 17 00:00:00 2001 From: XhmikosR Date: Mon, 9 Mar 2020 01:53:14 +0200 Subject: [PATCH 48/60] Assorted typo fixes. (#3126) * Assorted typo fixes. Signed-off-by: XhmikosR Co-authored-by: Dan Schaper --- .github/ISSUE_TEMPLATE.md | 4 ++-- .gitignore | 2 +- advanced/Scripts/COL_TABLE | 4 ++-- advanced/Scripts/chronometer.sh | 10 +++++----- advanced/Scripts/piholeCheckout.sh | 4 ++-- advanced/Scripts/piholeDebug.sh | 20 ++++++++++---------- advanced/Scripts/setupLCD.sh | 2 +- advanced/blockingpage.css | 4 ++-- advanced/dnsmasq.conf.original | 12 ++++++------ advanced/index.php | 6 +++--- automated install/basic-install.sh | 12 ++++++------ automated install/uninstall.sh | 6 +++--- gravity.sh | 6 +++--- pihole | 6 +++--- test/README.md | 4 ++-- test/test_automated_install.py | 8 ++++---- 16 files changed, 55 insertions(+), 55 deletions(-) diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 4a9c585a..bef9f73c 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -9,11 +9,11 @@ `{Replace this with a number from 1 to 10. 
1 being not familiar, and 10 being very familiar}` --- -**Expected behaviour:** +**Expected behavior:** `{A detailed description of what you expect to see}` -**Actual behaviour:** +**Actual behavior:** `{A detailed description and/or screenshots of what you do see}` diff --git a/.gitignore b/.gitignore index 1e80dfb8..b7ad1e41 100644 --- a/.gitignore +++ b/.gitignore @@ -15,7 +15,7 @@ __pycache__ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 -# All idea files, with execptions +# All idea files, with exceptions .idea !.idea/codeStyles/* !.idea/codeStyleSettings.xml diff --git a/advanced/Scripts/COL_TABLE b/advanced/Scripts/COL_TABLE index 57aab4dd..d76be68c 100644 --- a/advanced/Scripts/COL_TABLE +++ b/advanced/Scripts/COL_TABLE @@ -1,7 +1,7 @@ -# Determine if terminal is capable of showing colours +# Determine if terminal is capable of showing colors if [[ -t 1 ]] && [[ $(tput colors) -ge 8 ]]; then # Bold and underline may not show up on all clients - # If something MUST be emphasised, use both + # If something MUST be emphasized, use both COL_BOLD='' COL_ULINE='' diff --git a/advanced/Scripts/chronometer.sh b/advanced/Scripts/chronometer.sh index 1a4ce993..757df9be 100755 --- a/advanced/Scripts/chronometer.sh +++ b/advanced/Scripts/chronometer.sh @@ -72,7 +72,7 @@ printFunc() { # Remove excess characters from main text if [[ "$text_main_len" -gt "$text_main_max_len" ]]; then - # Trim text without colours + # Trim text without colors text_main_trim="${text_main_nocol:0:$text_main_max_len}" # Replace with trimmed text text_main="${text_main/$text_main_nocol/$text_main_trim}" @@ -88,7 +88,7 @@ printFunc() { [[ "$spc_num" -le 0 ]] && spc_num="0" spc=$(printf "%${spc_num}s") - #spc="${spc// /.}" # Debug: Visualise spaces + #spc="${spc// /.}" # Debug: Visualize spaces printf "%s%s$spc" "$title" "$text_main" @@ -131,7 +131,7 @@ 
get_init_stats() { printf "%s%02d:%02d:%02d\\n" "$days" "$hrs" "$mins" "$secs" } - # Set Colour Codes + # Set Color Codes coltable="/opt/pihole/COL_TABLE" if [[ -f "${coltable}" ]]; then source ${coltable} @@ -269,7 +269,7 @@ get_sys_stats() { scr_lines="${scr_size[0]}" scr_cols="${scr_size[1]}" - # Determine Chronometer size behaviour + # Determine Chronometer size behavior if [[ "$scr_cols" -ge 58 ]]; then chrono_width="large" elif [[ "$scr_cols" -gt 40 ]]; then @@ -308,7 +308,7 @@ get_sys_stats() { [[ "${cpu_freq}" == *".0"* ]] && cpu_freq="${cpu_freq/.0/}" fi - # Determine colour for temperature + # Determine color for temperature if [[ -n "$temp_file" ]]; then if [[ "$temp_unit" == "C" ]]; then cpu_temp=$(printf "%.0fc\\n" "$(calcFunc "$(< $temp_file) / 1000")") diff --git a/advanced/Scripts/piholeCheckout.sh b/advanced/Scripts/piholeCheckout.sh index 31009dd9..042a3c02 100644 --- a/advanced/Scripts/piholeCheckout.sh +++ b/advanced/Scripts/piholeCheckout.sh @@ -36,7 +36,7 @@ warning1() { return 0 ;; *) - echo -e "\\n ${INFO} Branch change has been cancelled" + echo -e "\\n ${INFO} Branch change has been canceled" return 1 ;; esac @@ -84,7 +84,7 @@ checkout() { echo -e " ${INFO} Shortcut \"dev\" detected - checking out development / devel branches..." 
echo "" echo -e " ${INFO} Pi-hole Core" - fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "development" || { echo " ${CROSS} Unable to pull Core developement branch"; exit 1; } + fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "development" || { echo " ${CROSS} Unable to pull Core development branch"; exit 1; } if [[ "${INSTALL_WEB_INTERFACE}" == "true" ]]; then echo "" echo -e " ${INFO} Web interface" diff --git a/advanced/Scripts/piholeDebug.sh b/advanced/Scripts/piholeDebug.sh index 304dc666..4e137f8d 100755 --- a/advanced/Scripts/piholeDebug.sh +++ b/advanced/Scripts/piholeDebug.sh @@ -138,7 +138,7 @@ PIHOLE_FTL_LOG="$(get_ftl_conf_value "LOGFILE" "${LOG_DIRECTORY}/pihole-FTL.log" PIHOLE_WEB_SERVER_ACCESS_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/access.log" PIHOLE_WEB_SERVER_ERROR_LOG_FILE="${WEB_SERVER_LOG_DIRECTORY}/error.log" -# An array of operating system "pretty names" that we officialy support +# An array of operating system "pretty names" that we officially support # We can loop through the array at any time to see if it matches a value #SUPPORTED_OS=("Raspbian" "Ubuntu" "Fedora" "Debian" "CentOS") @@ -300,7 +300,7 @@ compare_local_version_to_git_version() { if [[ "${remote_branch}" == "master" ]]; then # so the color of the text is green log_write "${INFO} Branch: ${COL_GREEN}${remote_branch}${COL_NC}" - # If it is any other branch, they are in a developement branch + # If it is any other branch, they are in a development branch else # So show that in yellow, signifying it's something to take a look at, but not a critical error log_write "${INFO} Branch: ${COL_YELLOW}${remote_branch:-Detached}${COL_NC} (${FAQ_CHECKOUT_COMMAND})" @@ -357,7 +357,7 @@ check_component_versions() { get_program_version() { local program_name="${1}" - # Create a loval variable so this function can be safely reused + # Create a local variable so this function can be safely reused local program_version echo_current_diagnostic "${program_name} version" # Evalutate the program 
we are checking, if it is any of the ones below, show the version @@ -747,7 +747,7 @@ check_x_headers() { # Do it for the dashboard as well, as the header is different than above local dashboard dashboard=$(curl -Is localhost/admin/ | awk '/X-Pi-hole/' | tr -d '\r') - # Store what the X-Header shoud be in variables for comparision later + # Store what the X-Header shoud be in variables for comparison later local block_page_working block_page_working="X-Pi-hole: A black hole for Internet advertisements." local dashboard_working @@ -818,7 +818,7 @@ dig_at() { # First, do a dig on localhost to see if Pi-hole can use itself to block a domain if local_dig=$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @${local_address} +short "${record_type}"); then - # If it can, show sucess + # If it can, show success log_write "${TICK} ${random_url} ${COL_GREEN}is ${local_dig}${COL_NC} via ${COL_CYAN}localhost$COL_NC (${local_address})" else # Otherwise, show a failure @@ -969,7 +969,7 @@ check_name_resolution() { # This function can check a directory exists # Pi-hole has files in several places, so we will reuse this function dir_check() { - # Set the first argument passed to tihs function as a named variable for better readability + # Set the first argument passed to this function as a named variable for better readability local directory="${1}" # Display the current test that is running echo_current_diagnostic "contents of ${COL_CYAN}${directory}${COL_NC}" @@ -987,14 +987,14 @@ dir_check() { } list_files_in_dir() { - # Set the first argument passed to tihs function as a named variable for better readability + # Set the first argument passed to this function as a named variable for better readability local dir_to_parse="${1}" # Store the files found in an array mapfile -t files_found < <(ls "${dir_to_parse}") # For each file in the array, for each_file in "${files_found[@]}"; do if [[ -d "${dir_to_parse}/${each_file}" ]]; then - # If it's a directoy, do nothing + # If it's a 
directory, do nothing : elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \ [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \ @@ -1190,7 +1190,7 @@ analyze_pihole_log() { # So first check if there are domains in the log that should be obfuscated if [[ -n ${line_to_obfuscate} ]]; then # If there are, we need to use awk to replace only the domain name (the 6th field in the log) - # so we substitue the domain for the placeholder value + # so we substitute the domain for the placeholder value obfuscated_line=$(echo "${line_to_obfuscate}" | awk -v placeholder="${OBFUSCATED_PLACEHOLDER}" '{sub($6,placeholder); print $0}') log_write " ${obfuscated_line}" else @@ -1238,7 +1238,7 @@ upload_to_tricorder() { log_write " * The debug log can be uploaded to tricorder.pi-hole.net for sharing with developers only." log_write " * For more information, see: ${TRICORDER_CONTEST}" log_write " * If available, we'll use openssl to upload the log, otherwise it will fall back to netcat." - # If pihole -d is running automatically (usually throught the dashboard) + # If pihole -d is running automatically (usually through the dashboard) if [[ "${AUTOMATED}" ]]; then # let the user know log_write "${INFO} Debug script running in automated mode" diff --git a/advanced/Scripts/setupLCD.sh b/advanced/Scripts/setupLCD.sh index 00eb963f..e8f14f06 100755 --- a/advanced/Scripts/setupLCD.sh +++ b/advanced/Scripts/setupLCD.sh @@ -20,7 +20,7 @@ getInitSys() { elif [ -f /etc/init.d/cron ] && [ ! 
-h /etc/init.d/cron ]; then SYSTEMD=0 else - echo "Unrecognised init system" + echo "Unrecognized init system" return 1 fi } diff --git a/advanced/blockingpage.css b/advanced/blockingpage.css index e74844d1..5fd858fb 100644 --- a/advanced/blockingpage.css +++ b/advanced/blockingpage.css @@ -14,7 +14,7 @@ #bpOutput.add:before { content: "Info"; } #bpOutput.add:after { content: "The domain is being whitelisted..."; } #bpOutput.error:before, .unhandled:before { content: "Error"; } -#bpOutput.unhandled:after { content: "An unhandled exception occured. This may happen when your browser is unable to load jQuery, or when the webserver is denying access to the Pi-hole API."; } +#bpOutput.unhandled:after { content: "An unhandled exception occurred. This may happen when your browser is unable to load jQuery, or when the webserver is denying access to the Pi-hole API."; } #bpOutput.success:before { content: "Success"; } #bpOutput.success:after { content: "Website has been whitelisted! You may need to flush your DNS cache"; } @@ -325,7 +325,7 @@ main { box-shadow: inset 0 3px 5px rgba(0,0,0,0.125); } -/* Input border colour */ +/* Input border color */ .buttons *:not([disabled]):hover, .buttons input:focus { border-color: rgba(0,0,0,0.25); } diff --git a/advanced/dnsmasq.conf.original b/advanced/dnsmasq.conf.original index 9e4cc92e..6758f0b8 100644 --- a/advanced/dnsmasq.conf.original +++ b/advanced/dnsmasq.conf.original @@ -46,7 +46,7 @@ #resolv-file= # By default, dnsmasq will send queries to any of the upstream -# servers it knows about and tries to favour servers to are known +# servers it knows about and tries to favor servers to are known # to be up. Uncommenting this forces dnsmasq to try each query # with each server strictly in the order they appear in # /etc/resolv.conf @@ -189,7 +189,7 @@ # add names to the DNS for the IPv6 address of SLAAC-configured dual-stack # hosts. 
Use the DHCPv4 lease to derive the name, network segment and # MAC address and assume that the host will also have an -# IPv6 address calculated using the SLAAC alogrithm. +# IPv6 address calculated using the SLAAC algorithm. #dhcp-range=1234::, ra-names # Do Router Advertisements, BUT NOT DHCP for this subnet. @@ -210,7 +210,7 @@ #dhcp-range=1234::, ra-stateless, ra-names # Do router advertisements for all subnets where we're doing DHCPv6 -# Unless overriden by ra-stateless, ra-names, et al, the router +# Unless overridden by ra-stateless, ra-names, et al, the router # advertisements will have the M and O bits set, so that the clients # get addresses and configuration from DHCPv6, and the A bit reset, so the # clients don't use SLAAC addresses. @@ -281,7 +281,7 @@ # Give a fixed IPv6 address and name to client with # DUID 00:01:00:01:16:d2:83:fc:92:d4:19:e2:d8:b2 # Note the MAC addresses CANNOT be used to identify DHCPv6 clients. -# Note also the they [] around the IPv6 address are obilgatory. +# Note also the they [] around the IPv6 address are obligatory. #dhcp-host=id:00:01:00:01:16:d2:83:fc:92:d4:19:e2:d8:b2, fred, [1234::5] # Ignore any clients which are not specified in dhcp-host lines @@ -404,14 +404,14 @@ #dhcp-option=vendor:MSFT,2,1i # Send the Encapsulated-vendor-class ID needed by some configurations of -# Etherboot to allow is to recognise the DHCP server. +# Etherboot to allow is to recognize the DHCP server. #dhcp-option=vendor:Etherboot,60,"Etherboot" # Send options to PXELinux. Note that we need to send the options even # though they don't appear in the parameter request list, so we need # to use dhcp-option-force here. # See http://syslinux.zytor.com/pxe.php#special for details. 
-# Magic number - needed before anything else is recognised +# Magic number - needed before anything else is recognized #dhcp-option-force=208,f1:00:74:7e # Configuration file name #dhcp-option-force=209,configs/common diff --git a/advanced/index.php b/advanced/index.php index b0c4a7c3..3b1de8d8 100644 --- a/advanced/index.php +++ b/advanced/index.php @@ -6,7 +6,7 @@ * This file is copyright under the latest version of the EUPL. * Please see LICENSE file for your rights under this license. */ -// Sanitise HTTP_HOST output +// Sanitize HTTP_HOST output $serverName = htmlspecialchars($_SERVER["HTTP_HOST"]); // Remove external ipv6 brackets if any $serverName = preg_replace('/^\[(.*)\]$/', '${1}', $serverName); @@ -68,7 +68,7 @@ if ($serverName === "pi.hole") { // Unset variables so as to not be included in $landPage unset($serverName, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort); - // Render splash/landing page when directly browsing via IP or authorised hostname + // Render splash/landing page when directly browsing via IP or authorized hostname exit($renderPage); } elseif ($currentUrlExt === "js") { // Serve Pi-hole Javascript for blocked domains requesting JS @@ -209,7 +209,7 @@ $phVersion = exec("cd /etc/.pihole/ && git describe --long --tags"); if (explode("-", $phVersion)[1] != "0") $execTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]; -// Please Note: Text is added via CSS to allow an admin to provide a localised +// Please Note: Text is added via CSS to allow an admin to provide a localized // language without the need to edit this file setHeader(); diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 5aaa4a75..14c68250 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -430,8 +430,8 @@ make_repo() { chmod -R a+rX "${directory}" # Move into the directory that was passed as an argument pushd "${directory}" &> /dev/null || return 1 - # Check 
current branch. If it is master, then reset to the latest availible tag. - # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # Check current branch. If it is master, then reset to the latest available tag. + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then #If we're calling make_repo() then it should always be master, we may not need to check. git reset --hard "$(git describe --abbrev=0 --tags)" || return $? @@ -466,8 +466,8 @@ update_repo() { git clean --quiet --force -d || true # Okay for already clean directory # Pull the latest commits git pull --quiet &> /dev/null || return $? - # Check current branch. If it is master, then reset to the latest availible tag. - # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) + # Check current branch. If it is master, then reset to the latest available tag. + # In case extra commits have been added after tagging/release (i.e in case of metadata updates/README.MD tweaks) curBranch=$(git rev-parse --abbrev-ref HEAD) if [[ "${curBranch}" == "master" ]]; then git reset --hard "$(git describe --abbrev=0 --tags)" || return $? 
@@ -819,13 +819,13 @@ It is also possible to use a DHCP reservation, but if you are going to do that, # Ask for the IPv4 address IPV4_ADDRESS=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 address" --inputbox "Enter your desired IPv4 address" "${r}" "${c}" "${IPV4_ADDRESS}" 3>&1 1>&2 2>&3) || \ - # Cancelling IPv4 settings window + # Canceling IPv4 settings window { ipSettingsCorrect=False; echo -e " ${COL_LIGHT_RED}Cancel was selected, exiting installer${COL_NC}"; exit 1; } printf " %b Your static IPv4 address: %s\\n" "${INFO}" "${IPV4_ADDRESS}" # Ask for the gateway IPv4gw=$(whiptail --backtitle "Calibrating network interface" --title "IPv4 gateway (router)" --inputbox "Enter your desired IPv4 default gateway" "${r}" "${c}" "${IPv4gw}" 3>&1 1>&2 2>&3) || \ - # Cancelling gateway settings window + # Canceling gateway settings window { ipSettingsCorrect=False; echo -e " ${COL_LIGHT_RED}Cancel was selected, exiting installer${COL_NC}"; exit 1; } printf " %b Your static IPv4 gateway: %s\\n" "${INFO}" "${IPv4gw}" diff --git a/automated install/uninstall.sh b/automated install/uninstall.sh index 2d6837b4..01ce9c39 100755 --- a/automated install/uninstall.sh +++ b/automated install/uninstall.sh @@ -14,8 +14,8 @@ while true; do read -rp " ${QST} Are you sure you would like to remove ${COL_WHITE}Pi-hole${COL_NC}? 
[y/N] " yn case ${yn} in [Yy]* ) break;; - [Nn]* ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;; - * ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;; + [Nn]* ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been canceled${COL_NC}"; exit 0;; + * ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been canceled${COL_NC}"; exit 0;; esac done @@ -52,7 +52,7 @@ if [[ "${INSTALL_WEB_SERVER}" == true ]]; then DEPS+=("${PIHOLE_WEB_DEPS[@]}") fi -# Compatability +# Compatibility if [ -x "$(command -v apt-get)" ]; then # Debian Family PKG_REMOVE=("${PKG_MANAGER}" -y remove --purge) diff --git a/gravity.sh b/gravity.sh index c421e832..78b5ef98 100755 --- a/gravity.sh +++ b/gravity.sh @@ -271,7 +271,7 @@ gravity_CheckDNSResolutionAvailable() { fi # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN. - # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventualy fails + # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then if [[ -n "${secs:-}" ]]; then @@ -561,7 +561,7 @@ gravity_ParseFileIntoDomains() { # Determine if we are parsing a consolidated list #if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then # Remove comments and print only the domain name - # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious + # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contiguous # This helps with that and makes it easier to read # It also helps with debugging so each stage of the script can be 
researched more in depth # 1) Remove carriage returns @@ -742,7 +742,7 @@ gravity_Cleanup() { dnsWasOffline=true fi - # Print Pi-hole status if an error occured + # Print Pi-hole status if an error occurred if [[ -n "${error}" ]]; then "${PIHOLE_COMMAND}" status exit 1 diff --git a/pihole b/pihole index 6e72b4a3..9624105a 100755 --- a/pihole +++ b/pihole @@ -302,9 +302,9 @@ tailFunc() { source /etc/pihole/setupVars.conf # Strip date from each line - # Colour blocklist/blacklist/wildcard entries as red - # Colour A/AAAA/DHCP strings as white - # Colour everything else as gray + # Color blocklist/blacklist/wildcard entries as red + # Color A/AAAA/DHCP strings as white + # Color everything else as gray tail -f /var/log/pihole.log | sed -E \ -e "s,($(date +'%b %d ')| dnsmasq\[[0-9]*\]),,g" \ -e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \ diff --git a/test/README.md b/test/README.md index f5a9b5e8..b4dd1122 100644 --- a/test/README.md +++ b/test/README.md @@ -7,11 +7,11 @@ From command line all you need to do is: - `pip install tox` - `tox` -Tox handles setting up a virtual environment for python dependancies, installing dependancies, building the docker images used by tests, and finally running tests. It's an easy way to have travis-ci like build behavior locally. +Tox handles setting up a virtual environment for python dependencies, installing dependencies, building the docker images used by tests, and finally running tests. It's an easy way to have travis-ci like build behavior locally. ## Alternative py.test method of running tests -You're responsible for setting up your virtual env and dependancies in this situation. +You're responsible for setting up your virtual env and dependencies in this situation. 
``` py.test -vv -n auto -m "build_stage" diff --git a/test/test_automated_install.py b/test/test_automated_install.py index 4e9a7eef..c0bd1ebe 100644 --- a/test/test_automated_install.py +++ b/test/test_automated_install.py @@ -195,12 +195,12 @@ def test_configureFirewall_IPTables_enabled_rules_exist_no_errors(Pihole): expected_stdout = 'Installing new IPTables firewall rulesets' assert expected_stdout in configureFirewall.stdout firewall_calls = Pihole.run('cat /var/log/iptables').stdout - # General call type occurances + # General call type occurrences assert len(re.findall(r'iptables -S', firewall_calls)) == 1 assert len(re.findall(r'iptables -C', firewall_calls)) == 4 assert len(re.findall(r'iptables -I', firewall_calls)) == 0 - # Specific port call occurances + # Specific port call occurrences assert len(re.findall(r'tcp --dport 80', firewall_calls)) == 1 assert len(re.findall(r'tcp --dport 53', firewall_calls)) == 1 assert len(re.findall(r'udp --dport 53', firewall_calls)) == 1 @@ -242,12 +242,12 @@ def test_configureFirewall_IPTables_enabled_not_exist_no_errors(Pihole): expected_stdout = 'Installing new IPTables firewall rulesets' assert expected_stdout in configureFirewall.stdout firewall_calls = Pihole.run('cat /var/log/iptables').stdout - # General call type occurances + # General call type occurrences assert len(re.findall(r'iptables -S', firewall_calls)) == 1 assert len(re.findall(r'iptables -C', firewall_calls)) == 4 assert len(re.findall(r'iptables -I', firewall_calls)) == 4 - # Specific port call occurances + # Specific port call occurrences assert len(re.findall(r'tcp --dport 80', firewall_calls)) == 2 assert len(re.findall(r'tcp --dport 53', firewall_calls)) == 2 assert len(re.findall(r'udp --dport 53', firewall_calls)) == 2 From bb936f4fdb3e5631193a8f236b71f1b6d75f5743 Mon Sep 17 00:00:00 2001 From: MichaIng Date: Wed, 11 Mar 2020 11:01:28 +0100 Subject: [PATCH 49/60] Reduce apt-get install verbosity The new version of the installer moved from 
debconf-apt-progress to raw apt-get output on installs to solve issues with interactive config file choices. This led to a largely increased amount of output lines of the installer. To reduce the apt-get output to a minimum, while sustaining interactive input in case of config files, the "-qq" option can be used, which inherits "--yes": - https://manpages.debian.org/buster/apt/apt-get.8.en.html#OPTIONS - https://manpages.ubuntu.com/manpages/bionic/man8/apt-get.8.html#options Signed-off-by: MichaIng --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 14c68250..070fc3b7 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -184,7 +184,7 @@ if is_command apt-get ; then # A variable to store the command used to update the package cache UPDATE_PKG_CACHE="${PKG_MANAGER} update" # An array for something... - PKG_INSTALL=("${PKG_MANAGER}" --yes --no-install-recommends install) + PKG_INSTALL=("${PKG_MANAGER}" -qq --no-install-recommends install) # grep -c will return 1 retVal on 0 matches, block this throwing the set -e with an OR TRUE PKG_COUNT="${PKG_MANAGER} -s -o Debug::NoLocking=true upgrade | grep -c ^Inst || true" # Some distros vary slightly so these fixes for dependencies may apply From dbc54b3063e6bfff302fdd95269c67ae03085e41 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:47:59 +0000 Subject: [PATCH 50/60] remove resolvconf dep Signed-off-by: Adam Warner --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 65c72b40..0d05db1a 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -244,7 +244,7 @@ if is_command apt-get ; then # These programs are stored in an array so they can be looped through later 
INSTALLER_DEPS=(dhcpcd5 git "${iproute_pkg}" whiptail) # Pi-hole itself has several dependencies that also need to be installed - PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data resolvconf libcap2) + PIHOLE_DEPS=(cron curl dnsutils iputils-ping lsof netcat psmisc sudo unzip wget idn2 sqlite3 libcap2-bin dns-root-data libcap2) # The Web dashboard has some that also need to be installed # It's useful to separate the two since our repos are also setup as "Core" code and "Web" code PIHOLE_WEB_DEPS=(lighttpd "${phpVer}-common" "${phpVer}-cgi" "${phpVer}-${phpSqlite}" "${phpVer}-xml" "php-intl") From 1481cc583fe6425a9be74720f1c45a8bfc389ab5 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:48:40 +0000 Subject: [PATCH 51/60] Don't set nameserver in dhcpcd.conf Signed-off-by: Adam Warner --- automated install/basic-install.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 0d05db1a..b896eb35 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -854,8 +854,7 @@ setDHCPCD() { # we can append these lines to dhcpcd.conf to enable a static IP echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} - static routers=${IPv4gw} - static domain_name_servers=127.0.0.1" | tee -a /etc/dhcpcd.conf >/dev/null + static routers=${IPv4gw}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 175d32c5f660a03368be40bb931a3752bb24643c Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Wed, 11 Mar 2020 18:55:43 +0000 Subject: [PATCH 52/60] Set nameservers to be that which have been chosen by the user in the whiptail Signed-off-by: Adam Warner --- automated install/basic-install.sh | 3 
++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index b896eb35..35d4df9f 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -854,7 +854,8 @@ setDHCPCD() { # we can append these lines to dhcpcd.conf to enable a static IP echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} - static routers=${IPv4gw}" | tee -a /etc/dhcpcd.conf >/dev/null + static routers=${IPv4gw} + static domain_name_servers=${PIHOLE_DNS_1},${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 4994da5170300cceaba8f1eca143daabe89df357 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Thu, 12 Mar 2020 18:48:40 +0000 Subject: [PATCH 53/60] Update automated install/basic-install.sh --- automated install/basic-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 35d4df9f..f5043ded 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -855,7 +855,7 @@ setDHCPCD() { echo "interface ${PIHOLE_INTERFACE} static ip_address=${IPV4_ADDRESS} static routers=${IPv4gw} - static domain_name_servers=${PIHOLE_DNS_1},${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null + static domain_name_servers=${PIHOLE_DNS_1} ${PIHOLE_DNS_2}" | tee -a /etc/dhcpcd.conf >/dev/null # Then use the ip command to immediately set the new address ip addr replace dev "${PIHOLE_INTERFACE}" "${IPV4_ADDRESS}" # Also give a warning that the user may need to reboot their system From 277179f150692e6c0968912a02341959069a9242 Mon Sep 17 00:00:00 2001 From: DL6ER Date: Fri, 27 Mar 2020 19:34:41 +0100 Subject: [PATCH 54/60] Remove 19036 trust anchor, now expired: 
https://www.icann.org/resources/pages/ksk-rollover Signed-off-by: DL6ER --- advanced/Scripts/webpage.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 829ba57b..aab90c35 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -179,7 +179,6 @@ ProcessDNSSettings() { if [[ "${DNSSEC}" == true ]]; then echo "dnssec -trust-anchor=.,19036,8,2,49AAC11D7B6F6446702E54A1607371607A1A41855200FD2CE1CDDE32F24E8FB5 trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC683457104237C7F8EC8D " >> "${dnsmasqconfig}" fi From dc35709a1b3a60cf48bcd78d1a7ffae00c81cb69 Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Tue, 31 Mar 2020 17:39:21 +0100 Subject: [PATCH 55/60] Remove hosts-file.net from default lists --- automated install/basic-install.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index f5043ded..a8ac91f3 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -1212,8 +1212,7 @@ chooseBlocklists() { MalwareDom "MalwareDomains" on Cameleon "Cameleon" on DisconTrack "Disconnect.me Tracking" on - DisconAd "Disconnect.me Ads" on - HostsFile "Hosts-file.net Ads" on) + DisconAd "Disconnect.me Ads" on) # In a variable, show the choices available; exit if Cancel is selected choices=$("${cmd[@]}" "${options[@]}" 2>&1 >/dev/tty) || { printf " %bCancel was selected, exiting installer%b\\n" "${COL_LIGHT_RED}" "${COL_NC}"; rm "${adlistFile}" ;exit 1; } @@ -1235,7 +1234,6 @@ appendToListsFile() { Cameleon ) echo "https://sysctl.org/cameleon/hosts" >> "${adlistFile}";; DisconTrack ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt" >> "${adlistFile}";; DisconAd ) echo "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt" >> "${adlistFile}";; - HostsFile ) echo "https://hosts-file.net/ad_servers.txt" >> "${adlistFile}";; esac } 
From 7d19ee1b2575f90b7a42ee390b5561fe6908250a Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Tue, 31 Mar 2020 21:48:10 +0100 Subject: [PATCH 56/60] validate blocklist URL before adding to the database (#3237) Signed-off-by: Adam Warner Co-authored-by: DL6ER --- advanced/Scripts/webpage.sh | 34 +++++++++++++++++++++++++--------- gravity.sh | 9 ++++++++- 2 files changed, 33 insertions(+), 10 deletions(-) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index aab90c35..2b70249e 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -401,22 +401,38 @@ SetWebUILayout() { change_setting "WEBUIBOXEDLAYOUT" "${args[2]}" } +CheckUrl(){ + local regex + # Check for characters NOT allowed in URLs + regex="[^a-zA-Z0-9:/?&%=~._-]" + if [[ "${1}" =~ ${regex} ]]; then + return 1 + else + return 0 + fi +} + CustomizeAdLists() { local address address="${args[3]}" local comment comment="${args[4]}" - if [[ "${args[2]}" == "enable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" - elif [[ "${args[2]}" == "disable" ]]; then - sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" - elif [[ "${args[2]}" == "add" ]]; then - sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" - elif [[ "${args[2]}" == "del" ]]; then - sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + if CheckUrl "${address}"; then + if [[ "${args[2]}" == "enable" ]]; then + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'" + elif [[ "${args[2]}" == "disable" ]]; then + sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'" + elif [[ "${args[2]}" == "add" ]]; then + sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')" + elif [[ "${args[2]}" == "del" ]]; then + sqlite3 
"${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'" + else + echo "Not permitted" + return 1 + fi else - echo "Not permitted" + echo "Invalid Url" return 1 fi } diff --git a/gravity.sh b/gravity.sh index c421e832..cf3f9299 100755 --- a/gravity.sh +++ b/gravity.sh @@ -374,7 +374,7 @@ gravity_DownloadBlocklists() { esac echo -e " ${INFO} Target: ${url}" - gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" + local regex + # Check for characters NOT allowed in URLs + regex="[^a-zA-Z0-9:/?&%=~._-]" + if [[ "${url}" =~ ${regex} ]]; then + echo -e " ${CROSS} Invalid Target" + else + gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" + fi echo "" done From d1caad76d832eca713352826392917fa3f4a23dc Mon Sep 17 00:00:00 2001 From: DL6ER Date: Wed, 1 Apr 2020 17:19:32 +0000 Subject: [PATCH 57/60] Do not flush neigh cache as this is known to create a number of issues. The better approach to this is to manually flush the ARP cache by either restarting or calling "ip neigh flush all". Signed-off-by: DL6ER --- advanced/Scripts/piholeARPTable.sh | 7 ------- 1 file changed, 7 deletions(-) diff --git a/advanced/Scripts/piholeARPTable.sh b/advanced/Scripts/piholeARPTable.sh index aa45f9ad..b6b552c9 100755 --- a/advanced/Scripts/piholeARPTable.sh +++ b/advanced/Scripts/piholeARPTable.sh @@ -36,13 +36,6 @@ flushARP(){ echo -ne " ${INFO} Flushing network table ..." fi - # Flush ARP cache to avoid re-adding of dead entries - if ! 
output=$(ip neigh flush all 2>&1); then - echo -e "${OVER} ${CROSS} Failed to clear ARP cache" - echo " Output: ${output}" - return 1 - fi - # Truncate network_addresses table in pihole-FTL.db # This needs to be done before we can truncate the network table due to # foreign key contraints From 2de5362adc2c1c780eac1ab39e466875143091d5 Mon Sep 17 00:00:00 2001 From: M4x Date: Sun, 5 Apr 2020 17:20:35 +0800 Subject: [PATCH 58/60] Sanitize email address in case of security issues (#3254) * Sanitize email address in case of security issues Signed-off-by: bash-c --- advanced/Scripts/webpage.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/advanced/Scripts/webpage.sh b/advanced/Scripts/webpage.sh index 2b70249e..f0f8bc31 100755 --- a/advanced/Scripts/webpage.sh +++ b/advanced/Scripts/webpage.sh @@ -517,6 +517,13 @@ Options: fi if [[ -n "${args[2]}" ]]; then + + # Sanitize email address in case of security issues + if [[ ! "${args[2]}" =~ ^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$ ]]; then + echo -e " ${CROSS} Invalid email address" + exit 0 + fi + change_setting "ADMIN_EMAIL" "${args[2]}" echo -e " ${TICK} Setting admin contact to ${args[2]}" else From 7e1a8c1cebf45895a65c1f2d76b830d509419c60 Mon Sep 17 00:00:00 2001 From: Mohammed Swillam <4535397+Mohammed-Swillam@users.noreply.github.com> Date: Sat, 18 Apr 2020 12:49:01 +0200 Subject: [PATCH 59/60] - Added 2 new DNS entries for the new Cloudflare DNS for families, which comes in 2 flavors (#3276) 1- 1.1.1.2 (No Malware) 2- 1.1.1.3 (No Malware or Adult Content) This would allow parents to have more control over the safety of their family's network. 
Signed-off-by: Mohammed-Swillam --- automated install/basic-install.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 6ef30862..0157e0ac 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -39,6 +39,8 @@ Quad9 (filtered, DNSSEC);9.9.9.9;149.112.112.112;2620:fe::fe;2620:fe::9 Quad9 (unfiltered, no DNSSEC);9.9.9.10;149.112.112.10;2620:fe::10;2620:fe::fe:10 Quad9 (filtered + ECS);9.9.9.11;149.112.112.11;2620:fe::11; Cloudflare;1.1.1.1;1.0.0.1;2606:4700:4700::1111;2606:4700:4700::1001 +Cloudflare (No Malware);1.1.1.2;1.0.0.2;2606:4700:4700::1112;2606:4700:4700::1002 +Cloudflare (No Malware or Adult Content);1.1.1.3;1.0.0.3;2606:4700:4700::1113;2606:4700:4700::1003 EOM ) From e6dcccc7bd0c7c9ce6ad1d1ccaf3184d7edc147c Mon Sep 17 00:00:00 2001 From: Adam Warner Date: Sat, 18 Apr 2020 20:05:54 +0100 Subject: [PATCH 60/60] Revert "- Added 2 new DNS entries for the new Cloudflare DNS for families, which comes in 2 flavors (#3276)" This reverts commit 7e1a8c1cebf45895a65c1f2d76b830d509419c60. --- automated install/basic-install.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/automated install/basic-install.sh b/automated install/basic-install.sh index 0157e0ac..6ef30862 100755 --- a/automated install/basic-install.sh +++ b/automated install/basic-install.sh @@ -39,8 +39,6 @@ Quad9 (filtered, DNSSEC);9.9.9.9;149.112.112.112;2620:fe::fe;2620:fe::9 Quad9 (unfiltered, no DNSSEC);9.9.9.10;149.112.112.10;2620:fe::10;2620:fe::fe:10 Quad9 (filtered + ECS);9.9.9.11;149.112.112.11;2620:fe::11; Cloudflare;1.1.1.1;1.0.0.1;2606:4700:4700::1111;2606:4700:4700::1001 -Cloudflare (No Malware);1.1.1.2;1.0.0.2;2606:4700:4700::1112;2606:4700:4700::1002 -Cloudflare (No Malware or Adult Content);1.1.1.3;1.0.0.3;2606:4700:4700::1113;2606:4700:4700::1003 EOM )