Mirror of https://github.com/pi-hole/pi-hole.git (synced 2024-11-15 10:43:55 +00:00)
Merge pull request #94 from dschaper/bug/CleanupFiles
Bug Fix - Cleanup remnant files
Commit 9fe531d1aa
1 changed file with 164 additions and 101 deletions
gravity.sh (205)
@@ -1,4 +1,7 @@
 #!/usr/bin/env bash
+# Pi-hole: A black hole for Internet advertisements
+# (c) 2015 by Jacob Salmela GPL 2.0
+# Network-wide ad blocking via your Raspberry Pi
 # http://pi-hole.net
 # Compiles a list of ad-serving domains by downloading them from multiple sources
 piholeIPfile=/tmp/piholeIP
@@ -25,34 +28,83 @@ sources=('https://adaway.org/hosts.txt'
     'http://winhelp2002.mvps.org/hosts.txt')

 # Variables for various stages of downloading and formatting the list
-adList=/etc/pihole/gravity.list
-origin=/etc/pihole
-piholeDir=/etc/pihole
-justDomainsExtension=domains
-matter=pihole.0.matter.txt
-andLight=pihole.1.andLight.txt
-supernova=pihole.2.supernova.txt
-eventHorizon=pihole.3.eventHorizon.txt
-accretionDisc=pihole.4.accretionDisc.txt
-eyeOfTheNeedle=pihole.5.wormhole.txt
+basename=pihole
+piholeDir=/etc/$basename
+adList=$piholeDir/gravity.list
 blacklist=$piholeDir/blacklist.txt
 whitelist=$piholeDir/whitelist.txt
-latentWhitelist=$origin/latentWhitelist.txt
+latentWhitelist=$piholeDir/latentWhitelist.txt
+justDomainsExtension=domains
+matter=$basename.0.matter.txt
+andLight=$basename.1.andLight.txt
+supernova=$basename.2.supernova.txt
+eventHorizon=$basename.3.eventHorizon.txt
+accretionDisc=$basename.4.accretionDisc.txt
+eyeOfTheNeedle=$basename.5.wormhole.txt

 # After setting defaults, check if there's local overrides
 if [[ -r $piholeDir/pihole.conf ]];then
     echo "** Local calibration requested..."
     . $piholeDir/pihole.conf
 fi
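Because `. $piholeDir/pihole.conf` sources the file into the running script, any variable assigned there quietly replaces the default set just above. A minimal sketch of what such an override file could contain; the values are hypothetical, though piholeIP and adList are variables the script already uses:

    # /etc/pihole/pihole.conf (hypothetical override)
    piholeIP=192.168.1.2              # pin the address instead of auto-detecting it
    adList=/etc/pihole/gravity.list   # keep the default output location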
+
+###########################
+# collapse - begin formation of pihole
+function gravity_collapse() {
     echo "** Neutrino emissions detected..."

     # Create the pihole resource directory if it doesn't exist. Future files will be stored here
     if [[ -d $piholeDir ]];then
-        :
+        # Temporary hack to allow non-root access to pihole directory
+        # Will update later, needed for existing installs, new installs should
+        # create this directory as non-root
+        sudo chmod 777 $piholeDir
+        find "$piholeDir" -type f -exec sudo chmod 666 {} \;
     else
         echo "** Creating pihole directory..."
-        sudo mkdir $piholeDir
+        mkdir $piholeDir
     fi
+}
+
+# patternCheck - check to see if curl downloaded any new files, and then process those
+# files so they are in host format.
+function gravity_patternCheck() {
+    patternBuffer=$1
+    # check if the patternbuffer is a non-zero length file
+    if [[ -s "$patternBuffer" ]];then
+        # Some of the blocklists are copyright, they need to be downloaded
+        # and stored as is. They can be processed for content after they
+        # have been saved.
+        cp $patternBuffer $saveLocation
+        echo "Done."
+    else
+        # curl didn't download any host files, probably because of the date check
+        echo "Transporter logic detected no changes, pattern skipped..."
+    fi
+}
+
+# transport - curl the specified url with any needed command extentions, then patternCheck
+function gravity_transport() {
+    url=$1
+    cmd_ext=$2
+    agent=$3
+    # tmp file, so we don't have to store the (long!) lists in RAM
+    patternBuffer=$(mktemp)
+    heisenbergCompensator=""
+    if [[ -r $saveLocation ]]; then
+        # if domain has been saved, add file for date check to only download newer
+        heisenbergCompensator="-z $saveLocation"
+    fi
+    # Silently curl url
+    curl -s $cmd_ext $heisenbergCompensator -A "$agent" $url > $patternBuffer
+
+    gravity_patternCheck $patternBuffer
+
+    # Cleanup
+    rm -f $patternBuffer
+
+}
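The heisenbergCompensator above is simply curl's -z/--time-cond option: given an existing file, curl sends an If-Modified-Since request and writes nothing when the server's copy is not newer, which is why gravity_patternCheck can treat an empty buffer as "no changes". A standalone sketch of the same idea, using placeholder file and URL names rather than anything the script defines:

    saved="list.example.domains"
    buffer=$(mktemp)
    timeCond=""
    [[ -r "$saved" ]] && timeCond="-z $saved"       # only fetch if the remote copy is newer
    curl -s $timeCond "https://example.com/hosts.txt" > "$buffer"
    [[ -s "$buffer" ]] && cp "$buffer" "$saved"     # a non-empty buffer means new content arrived
    rm -f "$buffer"

This only saves bandwidth when the server honors If-Modified-Since; otherwise every run downloads the full list.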
+# spinup - main gravity function
+function gravity_spinup() {
+
 # Loop through domain list. Download each one and remove commented lines (lines beginning with '# 'or '/') and blank lines
 for ((i = 0; i < "${#sources[@]}"; i++))
@@ -62,7 +114,8 @@ do
     domain=$(echo "$url" | cut -d'/' -f3)

     # Save the file as list.#.domain
-    saveLocation=$origin/list.$i.$domain.$justDomainsExtension
+    saveLocation=$piholeDir/list.$i.$domain.$justDomainsExtension
+    activeDomains[$i]=$saveLocation

     agent="Mozilla/10.0"
@@ -73,78 +126,42 @@ do
     case "$domain" in
         "adblock.mahakala.is")
             agent='Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0'
-            cmd="curl -e http://forum.xda-developers.com/"
+            cmd_ext="-e http://forum.xda-developers.com/"
             ;;

         "pgl.yoyo.org")
-            cmd="curl -d mimetype=plaintext -d hostformat=hosts"
+            cmd_ext="-d mimetype=plaintext -d hostformat=hosts"
             ;;

-        # Default is a simple curl request
-        *) cmd="curl"
+        # Default is a simple request
+        *) cmd_ext=""
     esac
+    gravity_transport $url $cmd_ext $agent
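Each cmd_ext value above carries only the source-specific arguments; gravity_transport supplies the rest. For the pgl.yoyo.org branch the assembled download comes out roughly as sketched below, where $url, $saveLocation, $agent and $patternBuffer stand in for the values the loop holds at that point, and the -z part only appears once the list has been downloaded before:

    curl -s -d mimetype=plaintext -d hostformat=hosts \
         -z "$saveLocation" -A "$agent" "$url" > "$patternBuffer"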
-    # tmp file, so we don't have to store the (long!) lists in RAM
-    patternBuffer=$(mktemp)
-    heisenbergCompensator=""
-    if [[ -r $saveLocation ]]; then
-        heisenbergCompensator="-z $saveLocation"
-    fi
-    CMD="$cmd -s $heisenbergCompensator -A '$agent' $url > $patternBuffer"
-    $cmd -s $heisenbergCompensator -A "$agent" $url > $patternBuffer
-
-
-    if [[ -s "$patternBuffer" ]];then
-        # Remove comments and print only the domain name
-        # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious
-        # This helps with that and makes it easier to read
-        # It also helps with debugging so each stage of the script can be researched more in depth
-        awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' $patternBuffer | \
-        sed -nr -e 's/\.{2,}/./g' -e '/\./p' > $saveLocation
-        echo "Done."
-    else
-        echo "Skipping pattern because transporter logic detected no changes..."
-    fi
-
-    # Cleanup
-    rm -f $patternBuffer
 done
-# Find all files with the .domains extension and compile them into one file and remove CRs
+}
+
+# Schwarzchild - aggregate domains to one list and add blacklisted domains
+function gravity_Schwarzchild() {
+
+    # Find all active domains and compile them into one file and remove CRs
     echo "** Aggregating list of domains..."
-    find $origin/ -type f -name "*.$justDomainsExtension" -exec cat {} \; | tr -d '\r' > $origin/$matter
+    truncate -s 0 $piholeDir/$matter
+    for i in "${activeDomains[@]}"
+    do
+        cat $i |tr -d '\r' >> $piholeDir/$matter
+    done
+
     # Append blacklist entries if they exist
     if [[ -r $blacklist ]];then
         numberOf=$(cat $blacklist | sed '/^\s*$/d' | wc -l)
         echo "** Blacklisting $numberOf domain(s)..."
-        cat $blacklist >> $origin/$matter
+        cat $blacklist >> $piholeDir/$matter
     fi
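This aggregation change is what makes the remnant cleanup workable: instead of cat-ing every *.domains file found on disk, the loop only appends the paths recorded in the activeDomains array during gravity_spinup, so a list removed from sources no longer leaks back into $matter. A reduced sketch of the pattern, with placeholder file names:

    activeDomains=(/etc/pihole/list.0.adaway.org.domains /etc/pihole/list.1.example.com.domains)
    : > /tmp/matter.txt                           # same effect as truncate -s 0
    for list in "${activeDomains[@]}"; do
        tr -d '\r' < "$list" >> /tmp/matter.txt   # strip carriage returns while appending
    done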

-###########################
-function gravity_advanced() {
-
-    numberOf=$(wc -l < $origin/$andLight)
-    echo "** $numberOf domains being pulled in by gravity..."
-
-    # Remove carriage returns and preceding whitespace
-    # not really needed anymore?
-    cp $origin/$andLight $origin/$supernova
-
-    # Sort and remove duplicates
-    sort -u $origin/$supernova > $origin/$eventHorizon
-    numberOf=$(wc -l < $origin/$eventHorizon)
-    echo "** $numberOf unique domains trapped in the event horizon."
-
-    # Format domain list as "192.168.x.x domain.com"
-    echo "** Formatting domains into a HOSTS file..."
-    cat $origin/$eventHorizon | awk '{sub(/\r$/,""); print "'"$piholeIP"' " $0}' > $origin/$accretionDisc
-    # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it
-    sudo cp $origin/$accretionDisc $adList
-    kill -HUP $(pidof dnsmasq)
 }

-# Whitelist (if applicable) then remove duplicates and format for dnsmasq
+function gravity_pulsar() {
+
+    # Whitelist (if applicable) domains
     if [[ -r $whitelist ]];then
         # Remove whitelist entries
         numberOf=$(cat $whitelist | sed '/^\s*$/d' | wc -l)
@@ -167,7 +184,53 @@ do
         echo "$url" | awk -F '/' '{print "^"$3"$"}' | sed 's/\./\\./g' >> $latentWhitelist
     done

-    # Remove whitelist entries from deduped list
-    grep -vxf $latentWhitelist $origin/$matter > $origin/$andLight
+    # Remove whitelist entries from list
+    grep -vxf $latentWhitelist $piholeDir/$matter > $piholeDir/$andLight
+}
+
+function gravity_unique() {
+    # Sort and remove duplicates
+    sort -u $piholeDir/$supernova > $piholeDir/$eventHorizon
+    numberOf=$(wc -l < $piholeDir/$eventHorizon)
+    echo "** $numberOf unique domains trapped in the event horizon."
+}
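To make the whitelist line above concrete: each URL is reduced to its host part, anchored with ^ and $, and has its dots escaped, so the later grep -vxf drops only exact host matches. A worked example with an illustrative input value:

    $ echo "http://example.com/hosts.txt" | awk -F '/' '{print "^"$3"$"}' | sed 's/\./\\./g'
    ^example\.com$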
+function gravity_hostFormat() {
+    # Format domain list as "192.168.x.x domain.com"
+    echo "** Formatting domains into a HOSTS file..."
+    cat $piholeDir/$eventHorizon | awk '{sub(/\r$/,""); print "'"$piholeIP"' " $0}' > $piholeDir/$accretionDisc
+    # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it
+    cp $piholeDir/$accretionDisc $adList
+}
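The awk call in gravity_hostFormat strips any trailing carriage return and prefixes each domain with the Pi-hole's own address, producing standard HOSTS-file lines. A quick illustration; the address and domains are placeholder values:

    $ printf 'ads.example.com\r\ntracker.example.net\n' | \
        awk '{sub(/\r$/,""); print "192.168.1.101 " $0}'
    192.168.1.101 ads.example.com
    192.168.1.101 tracker.example.net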
+function gravity_blackbody() {
+    for file in $piholeDir/*.$justDomainsExtension
+    do
+        if [[ " ${activeDomains[@]} " =~ " ${file} " ]]; then
+            :
+        else
+            rm -f $file
+        fi
+    done
+}
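gravity_blackbody is the remnant-file cleanup this PR is named for: it walks every *.domains file in $piholeDir and deletes any that is not recorded in activeDomains. The membership test relies on bash matching the quoted right-hand side of =~ as a literal substring of the space-joined array, which works here because the paths contain no spaces. A minimal standalone sketch of that test, with hypothetical file names:

    activeDomains=(/etc/pihole/list.0.adaway.org.domains)
    file=/etc/pihole/list.3.old-source.net.domains
    if [[ " ${activeDomains[@]} " =~ " ${file} " ]]; then
        echo "keep $file"
    else
        echo "would remove $file"    # the script runs: rm -f $file
    fi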
+function gravity_advanced() {
+
+    # Remove comments and print only the domain name
+    # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious
+    # This helps with that and makes it easier to read
+    # It also helps with debugging so each stage of the script can be researched more in depth
+    awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' $piholeDir/$andLight | \
+    sed -nr -e 's/\.{2,}/./g' -e '/\./p' > $piholeDir/$supernova
+
+    numberOf=$(wc -l < $piholeDir/$supernova)
+    echo "** $numberOf domains being pulled in by gravity..."
+    gravity_unique
+
+    sudo kill -HUP $(pidof dnsmasq)
+}
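The awk/sed pair in the new gravity_advanced is the same parser that previously ran per download: awk skips comment lines and keeps the second column when a line looks like a HOSTS entry (otherwise the first), then sed collapses runs of dots and prints only lines that still contain a dot. A worked example on sample input chosen for illustration:

    $ printf '# comment\n127.0.0.1 ads.example.com\nplain.example.net\nlocalhost\n' | \
        awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \
        sed -nr -e 's/\.{2,}/./g' -e '/\./p'
    ads.example.com
    plain.example.net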
+
+gravity_collapse
+gravity_spinup
+gravity_Schwarzchild
+gravity_pulsar
+gravity_hostFormat
 gravity_advanced
+gravity_blackbody