From c45dc277b6117aae69e1e60cbe2d7e77ab2e4525 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Thu, 26 Nov 2015 15:29:13 -0800 Subject: [PATCH 1/3] Tighten thematics --- gravity.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gravity.sh b/gravity.sh index d70f0dce..018989c7 100755 --- a/gravity.sh +++ b/gravity.sh @@ -73,10 +73,10 @@ function gravity_patternCheck() { # and stored as is. They can be processed for content after they # have been saved. cp $patternBuffer $saveLocation - echo "Done." + echo "List updated, transport successful..." else # curl didn't download any host files, probably because of the date check - echo "Transporter logic detected no changes, pattern skipped..." + echo "No changes detected, transport skipped..." fi } @@ -117,7 +117,7 @@ do agent="Mozilla/10.0" - echo -n "Getting $domain list... " + echo -n " Getting $domain list: " # Use a case statement to download lists that need special cURL commands # to complete properly and reset the user agent when required From d1e475da89154fe2ad83e03391137ca1fae0e99e Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Thu, 26 Nov 2015 15:48:52 -0800 Subject: [PATCH 2/3] Move black/white list check to pulsar --- gravity.sh | 144 ++++++++++++++++++++++++++++------------------------- 1 file changed, 77 insertions(+), 67 deletions(-) diff --git a/gravity.sh b/gravity.sh index 018989c7..27f5d823 100755 --- a/gravity.sh +++ b/gravity.sh @@ -4,6 +4,7 @@ # Network-wide ad blocking via your Raspberry Pi # http://pi-hole.net # Compiles a list of ad-serving domains by downloading them from multiple sources + piholeIPfile=/tmp/piholeIP if [[ -f $piholeIPfile ]];then # If the file exists, it means it was exported from the installation script and we should use that value instead of detecting it in this script @@ -15,7 +16,8 @@ else fi # Ad-list sources--one per line in single quotes -# The mahakala source is commented out due to many users having issues with it blocking legitimate domains. 
Uncomment at your own risk +# The mahakala source is commented out due to many users having issues with it blocking legitimate domains. +# Uncomment at your own risk sources=('https://adaway.org/hosts.txt' 'http://adblock.gjtech.net/?format=unix-hosts' #'http://adblock.mahakala.is/' @@ -45,62 +47,65 @@ if [[ -r $piholeDir/pihole.conf ]];then echo "** Local calibration requested..." . $piholeDir/pihole.conf fi + ########################### # collapse - begin formation of pihole function gravity_collapse() { echo "** Neutrino emissions detected..." -# Create the pihole resource directory if it doesn't exist. Future files will be stored here -if [[ -d $piholeDir ]];then + # Create the pihole resource directory if it doesn't exist. Future files will be stored here + if [[ -d $piholeDir ]];then # Temporary hack to allow non-root access to pihole directory # Will update later, needed for existing installs, new installs should # create this directory as non-root sudo chmod 777 $piholeDir find "$piholeDir" -type f -exec sudo chmod 666 {} \; -else + else echo "** Creating pihole directory..." mkdir $piholeDir -fi + fi } # patternCheck - check to see if curl downloaded any new files, and then process those # files so they are in host format. function gravity_patternCheck() { - patternBuffer=$1 - # check if the patternbuffer is a non-zero length file - if [[ -s "$patternBuffer" ]];then - # Some of the blocklists are copyright, they need to be downloaded - # and stored as is. They can be processed for content after they - # have been saved. - cp $patternBuffer $saveLocation - echo "List updated, transport successful..." - else - # curl didn't download any host files, probably because of the date check - echo "No changes detected, transport skipped..." - fi + patternBuffer=$1 + # check if the patternbuffer is a non-zero length file + if [[ -s "$patternBuffer" ]];then + # Some of the blocklists are copyright, they need to be downloaded + # and stored as is. 
They can be processed for content after they + # have been saved. + cp $patternBuffer $saveLocation + echo "List updated, transport successful..." + else + # curl didn't download any host files, probably because of the date check + echo "No changes detected, transport skipped..." + fi } # transport - curl the specified url with any needed command extentions, then patternCheck function gravity_transport() { - url=$1 - cmd_ext=$2 - agent=$3 - # tmp file, so we don't have to store the (long!) lists in RAM - patternBuffer=$(mktemp) - heisenbergCompensator="" - if [[ -r $saveLocation ]]; then - # if domain has been saved, add file for date check to only download newer - heisenbergCompensator="-z $saveLocation" - fi - # Silently curl url - curl -s $cmd_ext $heisenbergCompensator -A "$agent" $url > $patternBuffer + url=$1 + cmd_ext=$2 + agent=$3 + + # tmp file, so we don't have to store the (long!) lists in RAM + patternBuffer=$(mktemp) + heisenbergCompensator="" + if [[ -r $saveLocation ]]; then + # if domain has been saved, add file for date check to only download newer + heisenbergCompensator="-z $saveLocation" + fi - gravity_patternCheck $patternBuffer - - # Cleanup - rm -f $patternBuffer + # Silently curl url + curl -s $cmd_ext $heisenbergCompensator -A "$agent" $url > $patternBuffer + # Check for list updates + gravity_patternCheck $patternBuffer + # Cleanup + rm -f $patternBuffer } + # spinup - main gravity function function gravity_spinup() { @@ -141,26 +146,26 @@ done # Schwarzchild - aggregate domains to one list and add blacklisted domains function gravity_Schwarzchild() { -# Find all active domains and compile them into one file and remove CRs -echo "** Aggregating list of domains..." 
-truncate -s 0 $piholeDir/$matter -for i in "${activeDomains[@]}" -do - cat $i |tr -d '\r' >> $piholeDir/$matter -done - -# Append blacklist entries if they exist -if [[ -r $blacklist ]];then - numberOf=$(cat $blacklist | sed '/^\s*$/d' | wc -l) - echo "** Blacklisting $numberOf domain(s)..." - cat $blacklist >> $piholeDir/$matter -fi + # Find all active domains and compile them into one file and remove CRs + echo "** Aggregating list of domains..." + truncate -s 0 $piholeDir/$matter + for i in "${activeDomains[@]}" + do + cat $i |tr -d '\r' >> $piholeDir/$matter + done } function gravity_pulsar() { + + # Append blacklist entries if they exist + if [[ -r $blacklist ]];then + numberOf=$(cat $blacklist | sed '/^\s*$/d' | wc -l) + echo "** Blacklisting $numberOf domain(s)..." + cat $blacklist >> $piholeDir/$matter + fi -# Whitelist (if applicable) domains -if [[ -r $whitelist ]];then + # Whitelist (if applicable) domains + if [[ -r $whitelist ]];then # Remove whitelist entries numberOf=$(cat $whitelist | sed '/^\s*$/d' | wc -l) plural=; [[ "$numberOf" != "1" ]] && plural=s @@ -170,20 +175,20 @@ if [[ -r $whitelist ]];then # replace "." with "\." of each line to turn each entry into a # regexp so it can be parsed out with grep -x awk -F '[# \t]' 'NF>0&&$1!="" {print "^"$1"$"}' $whitelist | sed 's/\./\\./g' > $latentWhitelist -else + else rm $latentWhitelist -fi + fi -# Prevent our sources from being pulled into the hole -plural=; [[ "${#sources[@]}" != "1" ]] && plural=s -echo "** Whitelisting ${#sources[@]} ad list source${plural}..." -for url in ${sources[@]} -do + # Prevent our sources from being pulled into the hole + plural=; [[ "${#sources[@]}" != "1" ]] && plural=s + echo "** Whitelisting ${#sources[@]} ad list source${plural}..." 
+ for url in ${sources[@]} + do echo "$url" | awk -F '/' '{print "^"$3"$"}' | sed 's/\./\\./g' >> $latentWhitelist -done + done -# Remove whitelist entries from list -grep -vxf $latentWhitelist $piholeDir/$matter > $piholeDir/$andLight + # Remove whitelist entries from list + grep -vxf $latentWhitelist $piholeDir/$matter > $piholeDir/$andLight } function gravity_unique() { @@ -192,6 +197,7 @@ function gravity_unique() { numberOf=$(wc -l < $piholeDir/$eventHorizon) echo "** $numberOf unique domains trapped in the event horizon." } + function gravity_hostFormat() { # Format domain list as "192.168.x.x domain.com" echo "** Formatting domains into a HOSTS file..." @@ -199,16 +205,20 @@ function gravity_hostFormat() { # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it cp $piholeDir/$accretionDisc $adList } + function gravity_blackbody() { - for file in $piholeDir/*.$justDomainsExtension - do - if [[ " ${activeDomains[@]} " =~ " ${file} " ]]; then - : - else - rm -f $file - fi - done + # Loop through list files + for file in $piholeDir/*.$justDomainsExtension + do + # If list is active then leave it (noop) else rm the list + if [[ " ${activeDomains[@]} " =~ " ${file} " ]]; then + : + else + rm -f $file + fi + done } + function gravity_advanced() { # Remove comments and print only the domain name From 9892d06c0db96ad090061778049e765d05283007 Mon Sep 17 00:00:00 2001 From: Dan Schaper Date: Thu, 26 Nov 2015 15:56:37 -0800 Subject: [PATCH 3/3] Code format cleanups --- gravity.sh | 61 +++++++++++++++++++++++++++--------------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/gravity.sh b/gravity.sh index 27f5d823..f34ce80b 100755 --- a/gravity.sh +++ b/gravity.sh @@ -51,7 +51,7 @@ fi ########################### # collapse - begin formation of pihole function gravity_collapse() { -echo "** Neutrino emissions detected..." + echo "** Neutrino emissions detected..." # Create the pihole resource directory if it doesn't exist. 
Future files will be stored here if [[ -d $piholeDir ]];then @@ -66,8 +66,7 @@ echo "** Neutrino emissions detected..." fi } -# patternCheck - check to see if curl downloaded any new files, and then process those -# files so they are in host format. +# patternCheck - check to see if curl downloaded any new files. function gravity_patternCheck() { patternBuffer=$1 # check if the patternbuffer is a non-zero length file @@ -83,7 +82,7 @@ function gravity_patternCheck() { fi } -# transport - curl the specified url with any needed command extentions, then patternCheck +# transport - curl the specified url with any needed command extentions function gravity_transport() { url=$1 cmd_ext=$2 @@ -109,9 +108,9 @@ function gravity_transport() { # spinup - main gravity function function gravity_spinup() { -# Loop through domain list. Download each one and remove commented lines (lines beginning with '# 'or '/') and blank lines -for ((i = 0; i < "${#sources[@]}"; i++)) -do + # Loop through domain list. Download each one and remove commented lines (lines beginning with '# 'or '/') and # blank lines + for ((i = 0; i < "${#sources[@]}"; i++)) + do url=${sources[$i]} # Get just the domain from the URL domain=$(echo "$url" | cut -d'/' -f3) @@ -139,8 +138,8 @@ do # Default is a simple request *) cmd_ext="" esac - gravity_transport $url $cmd_ext $agent -done + gravity_transport $url $cmd_ext $agent + done } # Schwarzchild - aggregate domains to one list and add blacklisted domains @@ -155,6 +154,7 @@ function gravity_Schwarzchild() { done } +# Pulsar - White/blacklist application function gravity_pulsar() { # Append blacklist entries if they exist @@ -192,25 +192,26 @@ function gravity_pulsar() { } function gravity_unique() { - # Sort and remove duplicates - sort -u $piholeDir/$supernova > $piholeDir/$eventHorizon - numberOf=$(wc -l < $piholeDir/$eventHorizon) - echo "** $numberOf unique domains trapped in the event horizon." 
+ # Sort and remove duplicates + sort -u $piholeDir/$supernova > $piholeDir/$eventHorizon + numberOf=$(wc -l < $piholeDir/$eventHorizon) + echo "** $numberOf unique domains trapped in the event horizon." } function gravity_hostFormat() { - # Format domain list as "192.168.x.x domain.com" - echo "** Formatting domains into a HOSTS file..." - cat $piholeDir/$eventHorizon | awk '{sub(/\r$/,""); print "'"$piholeIP"' " $0}' > $piholeDir/$accretionDisc - # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it - cp $piholeDir/$accretionDisc $adList + # Format domain list as "192.168.x.x domain.com" + echo "** Formatting domains into a HOSTS file..." + cat $piholeDir/$eventHorizon | awk '{sub(/\r$/,""); print "'"$piholeIP"' " $0}' > $piholeDir/$accretionDisc + # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it + cp $piholeDir/$accretionDisc $adList } +# blackbody - remove any remnant files from script processes function gravity_blackbody() { # Loop through list files for file in $piholeDir/*.$justDomainsExtension do - # If list is active then leave it (noop) else rm the list + # If list is in active array then leave it (noop) else rm the list if [[ " ${activeDomains[@]} " =~ " ${file} " ]]; then : else @@ -220,19 +221,19 @@ function gravity_blackbody() { } function gravity_advanced() { + # Remove comments and print only the domain name + # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious + # This helps with that and makes it easier to read + # It also helps with debugging so each stage of the script can be researched more in depth + awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' $piholeDir/$andLight | \ + sed -nr -e 's/\.{2,}/./g' -e '/\./p' > $piholeDir/$supernova - # Remove comments and print only the domain name - # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious - # This helps with that and makes it easier to 
read - # It also helps with debugging so each stage of the script can be researched more in depth - awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' $piholeDir/$andLight | \ - sed -nr -e 's/\.{2,}/./g' -e '/\./p' > $piholeDir/$supernova + numberOf=$(wc -l < $piholeDir/$supernova) + echo "** $numberOf domains being pulled in by gravity..." - numberOf=$(wc -l < $piholeDir/$supernova) - echo "** $numberOf domains being pulled in by gravity..." - gravity_unique - - sudo kill -HUP $(pidof dnsmasq) + gravity_unique + + sudo kill -HUP $(pidof dnsmasq) } gravity_collapse