Mirror of https://github.com/pi-hole/pi-hole.git
Merge pull request #4851 from pi-hole/keywords
Don't use bash keywords/programs as variable names
Commit 3cdaad060b
2 changed files with 29 additions and 29 deletions
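
In bash, variables and commands occupy separate namespaces, so assigning to names like type or source does not break the builtins, but it makes every later use ambiguous to the reader. A minimal sketch of the confusion this rename avoids (hypothetical snippet, not from the diff):

    #!/usr/bin/env bash
    # "source" is a builtin that reads a file into the current shell;
    # reusing it as a variable name forces readers to disambiguate on every line.
    source="./mylist.txt"   # variable named after the builtin
    source ./helpers.sh     # the builtin itself, one line later (file is hypothetical)

    # "type" is also a builtin; assigning to it is legal but easy to misread.
    type="exact"
    type type               # prints: type is a shell builtin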
@@ -34,7 +34,7 @@ source "${colfile}"
 # Scan an array of files for matching strings
 scanList(){
     # Escape full stops
-    local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" type="${3:-}"
+    local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" list_type="${3:-}"

     # Prevent grep from printing file path
     cd "$piholeDir" || exit 1
@@ -43,7 +43,7 @@ scanList(){
     export LC_CTYPE=C

     # /dev/null forces filename to be printed when only one list has been generated
-    case "${type}" in
+    case "${list_type}" in
         "exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
         # Iterate through each regexp and check whether it matches the domainQuery
         # If it does, print the matching regexp and continue looping
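
The /dev/null trick relies on grep only prefixing its output with "filename:" when given more than one file to search; appending /dev/null adds a second, always-empty file, so the prefix is guaranteed. A quick illustration with a hypothetical list file:

    grep "example.com" list1.txt             # -> example.com
    grep "example.com" list1.txt /dev/null   # -> list1.txt:example.com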
@@ -99,10 +99,10 @@ if [[ -n "${str:-}" ]]; then
 fi

 scanDatabaseTable() {
-    local domain table type querystr result extra
+    local domain table list_type querystr result extra
     domain="$(printf "%q" "${1}")"
     table="${2}"
-    type="${3:-}"
+    list_type="${3:-}"

     # As underscores are legitimate parts of domains, we escape them when using the LIKE operator.
     # Underscores are SQLite wildcards matching exactly one character. We obviously want to suppress this
@@ -115,8 +115,8 @@ scanDatabaseTable() {
         esac
     else
         case "${exact}" in
-            "exact" ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain = '${domain}'";;
-            *       ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
+            "exact" ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${list_type}' AND domain = '${domain}'";;
+            *       ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${list_type}' AND domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
         esac
     fi
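
The ${domain//_/\\_} expansion exists because, in SQLite's LIKE operator, "_" matches any single character (and "%" any run of characters), yet underscores are legal in hostnames. Escaping them and declaring the escape character via ESCAPE keeps the match literal; a standalone demonstration with illustrative domains:

    # Unescaped "_" acts as a one-character wildcard: this reports a match (1)
    sqlite3 :memory: "SELECT 'adxserver.net' LIKE '%ad_server.net%';"
    # Escaped "_" with ESCAPE declared stays literal: no match (0)
    sqlite3 :memory: "SELECT 'adxserver.net' LIKE '%ad\_server.net%' ESCAPE '\';"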
@@ -158,13 +158,13 @@ scanDatabaseTable() {
 }

 scanRegexDatabaseTable() {
-    local domain list
+    local domain list list_type
     domain="${1}"
     list="${2}"
-    type="${3:-}"
+    list_type="${3:-}"

     # Query all regex from the corresponding database tables
-    mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null)
+    mapfile -t regexList < <(pihole-FTL sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${list_type}" 2> /dev/null)

     # If we have regexps to process
     if [[ "${#regexList[@]}" -ne 0 ]]; then
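
The mapfile builtin above fills an array directly from process substitution, avoiding a while-read loop running in a subshell where assignments would be lost; -t strips the trailing newline from each element. A self-contained example with illustrative values:

    mapfile -t regexList < <(printf '%s\n' '(^|\.)ads\.' 'telemetry')
    echo "${#regexList[@]}"    # -> 2
    echo "${regexList[1]}"     # -> telemetry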
gravity.sh (40 changed lines)
@@ -139,9 +139,9 @@ update_gravity_timestamp() {
 # Import domains from file and store them in the specified database table
 database_table_from_file() {
     # Define locals
-    local table source backup_path backup_file tmpFile type
+    local table src backup_path backup_file tmpFile list_type
     table="${1}"
-    source="${2}"
+    src="${2}"
     backup_path="${piholeDir}/migration_backup"
     backup_file="${backup_path}/$(basename "${2}")"
     tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"
@@ -155,13 +155,13 @@ database_table_from_file() {

     # Special handling for domains to be imported into the common domainlist table
     if [[ "${table}" == "whitelist" ]]; then
-        type="0"
+        list_type="0"
         table="domainlist"
     elif [[ "${table}" == "blacklist" ]]; then
-        type="1"
+        list_type="1"
         table="domainlist"
     elif [[ "${table}" == "regex" ]]; then
-        type="3"
+        list_type="3"
         table="domainlist"
     fi
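
The numeric values written to list_type correspond to the type column of Pi-hole's domainlist table (0 = exact whitelist, 1 = exact blacklist, 2 = regex whitelist, 3 = regex blacklist). A quick way to inspect the distribution, assuming the default gravity database path:

    pihole-FTL sqlite3 /etc/pihole/gravity.db \
        "SELECT type, COUNT(*) FROM domainlist GROUP BY type;"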
@@ -174,9 +174,9 @@ database_table_from_file() {
         rowid+=1
     fi

-    # Loop over all domains in ${source} file
+    # Loop over all domains in ${src} file
     # Read file line by line
-    grep -v '^ *#' < "${source}" | while IFS= read -r domain
+    grep -v '^ *#' < "${src}" | while IFS= read -r domain
     do
         # Only add non-empty lines
         if [[ -n "${domain}" ]]; then
@@ -185,10 +185,10 @@ database_table_from_file() {
                 echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
             elif [[ "${table}" == "adlist" ]]; then
                 # Adlist table format
-                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\",,0,0,0" >> "${tmpFile}"
+                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0" >> "${tmpFile}"
             else
                 # White-, black-, and regexlist table format
-                echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
+                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >> "${tmpFile}"
             fi
             rowid+=1
         fi
@@ -201,14 +201,14 @@ database_table_from_file() {
     status="$?"

     if [[ "${status}" -ne 0 ]]; then
-        echo -e "\\n  ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n  ${output}"
+        echo -e "\\n  ${CROSS} Unable to fill table ${table}${list_type} in database ${gravityDBfile}\\n  ${output}"
         gravity_Cleanup "error"
     fi

     # Move source file to backup directory, create directory if not existing
     mkdir -p "${backup_path}"
-    mv "${source}" "${backup_file}" 2> /dev/null || \
-        echo -e "  ${CROSS} Unable to backup ${source} to ${backup_path}"
+    mv "${src}" "${backup_file}" 2> /dev/null || \
+        echo -e "  ${CROSS} Unable to backup ${src} to ${backup_path}"

     # Delete tmpFile
     rm "${tmpFile}" > /dev/null 2>&1 || \
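
The ${output} and ${status} checked above come from the import step, which falls outside this hunk. With the sqlite3 CLI, a CSV temp file like ${tmpFile} can be loaded with dot-commands; a hedged sketch of such an import (the exact invocation in gravity.sh may differ):

    output=$( { printf ".timeout 30000\n.mode csv\n.import \"%s\" %s\n" \
        "${tmpFile}" "${table}" | pihole-FTL sqlite3 "${gravityDBfile}"; } 2>&1 )
    status="$?"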
@@ -719,10 +719,10 @@ gravity_DownloadBlocklistFromUrl() {

 # Parse source files into domains format
 gravity_ParseFileIntoDomains() {
-    local source="${1}" destination="${2}" firstLine
+    local src="${1}" destination="${2}" firstLine

     # Determine if we are parsing a consolidated list
-    #if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
+    #if [[ "${src}" == "${piholeDir}/${matterAndLight}" ]]; then
     # Remove comments and print only the domain name
     # Most of the lists downloaded are already in hosts file format but the spacing/formatting is not contiguous
     # This helps with that and makes it easier to read
@@ -733,7 +733,7 @@ gravity_ParseFileIntoDomains() {
     # 4) Remove lines containing "/"
     # 5) Remove leading tabs, spaces, etc.
     # 6) Delete lines not matching domain names
-    < "${source}" tr -d '\r' | \
+    < "${src}" tr -d '\r' | \
         tr '[:upper:]' '[:lower:]' | \
         sed 's/\s*#.*//g' | \
         sed -r '/(\/).*$/d' | \
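
Walking one messy hosts-format line through the pipeline shows the numbered steps in action; the trailing steps fall outside this hunk, so the final sed below is an illustrative stand-in for them:

    echo $'0.0.0.0\tAds.EXAMPLE.com  # tracker' | \
        tr -d '\r' | \
        tr '[:upper:]' '[:lower:]' | \
        sed 's/\s*#.*//g' | \
        sed -r '/(\/).*$/d' | \
        sed -r 's/^.*\s+//g'
    # -> ads.example.com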
@@ -745,16 +745,16 @@ gravity_ParseFileIntoDomains() {

     # Individual file parsing: Keep comments, while parsing domains from each line
     # We keep comments to respect the list maintainer's licensing
-    read -r firstLine < "${source}"
+    read -r firstLine < "${src}"

     # Determine how to parse individual source file formats
     if [[ "${firstLine,,}" =~ (adblock|ublock|^!) ]]; then
         # Compare $firstLine against lower case words found in Adblock lists
         echo -e "  ${CROSS} Format: Adblock (list type not supported)"
-    elif grep -q "^address=/" "${source}" &> /dev/null; then
+    elif grep -q "^address=/" "${src}" &> /dev/null; then
         # Parse Dnsmasq format lists
         echo -e "  ${CROSS} Format: Dnsmasq (list type not supported)"
-    elif grep -q -E "^https?://" "${source}" &> /dev/null; then
+    elif grep -q -E "^https?://" "${src}" &> /dev/null; then
         # Parse URL list if source file contains "http://" or "https://"
         # Scanning for "^IPv4$" is too slow with large (1M) lists on low-end hardware
         echo -ne "  ${INFO} Format: URL"
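
The ${firstLine,,} expansion is bash's lowercase conversion (bash 4+), which lets a single regex cover "Adblock", "adblock", and "ADBLOCK" headers alike. A tiny check with a made-up header line:

    firstLine='! Title: Some Adblock List'
    [[ "${firstLine,,}" =~ (adblock|ublock|^!) ]] && echo "adblock-style"
    # -> adblock-style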
@@ -770,13 +770,13 @@ gravity_ParseFileIntoDomains() {
             /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/ { next }
             # Print if nonempty
             length { print }
-        ' "${source}" 2> /dev/null > "${destination}"
+        ' "${src}" 2> /dev/null > "${destination}"
         chmod 644 "${destination}"

         echo -e "${OVER}  ${TICK} Format: URL"
     else
         # Default: Keep hosts/domains file in same format as it was downloaded
-        output=$( { mv "${source}" "${destination}"; } 2>&1 )
+        output=$( { mv "${src}" "${destination}"; } 2>&1 )
         chmod 644 "${destination}"

         if [[ ! -e "${destination}" ]]; then