diff --git a/advanced/Scripts/piholeLogFlush.sh b/advanced/Scripts/piholeLogFlush.sh
new file mode 100755
index 0000000..9647fe2
--- /dev/null
+++ b/advanced/Scripts/piholeLogFlush.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+# Flushes /var/log/pihole.log
+truncate -s 0 /var/log/pihole.log
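The new flush script empties the log in place: truncate -s 0 resets the file to zero bytes without unlinking it, so the inode, owner, and permissions survive and a process holding the file open in append mode keeps logging without a restart. A minimal sketch of the same idea; the redirection form is an equivalent fallback where truncate(1) is unavailable:

    # Reset the log to zero bytes; the file itself (inode, owner, mode) is
    # kept, so an append-mode writer like dnsmasq continues undisturbed.
    truncate -s 0 /var/log/pihole.log

    # Equivalent using only shell redirection:
    : > /var/log/pihole.log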
diff --git a/gravity.sh b/gravity.sh
index f5bf6e8..4c191c3 100755
--- a/gravity.sh
+++ b/gravity.sh
@@ -9,7 +9,7 @@ piholeIP=$(hostname -I)
 sources=('https://adaway.org/hosts.txt'
 'http://adblock.gjtech.net/?format=unix-hosts'
 #'http://adblock.mahakala.is/'
-'http://hosts-file.net/.%5Cad_servers.txt'
+'http://hosts-file.net/ad_servers.txt'
 'http://www.malwaredomainlist.com/hostslist/hosts.txt'
 'http://pgl.yoyo.org/adservers/serverlist.php?'
 'http://someonewhocares.org/hosts/hosts'
@@ -35,8 +35,6 @@ if [[ -r $piholeDir/pihole.conf ]];then
     echo "** Local calibration requested..."
     . $piholeDir/pihole.conf
 fi
-
-
 echo "** Neutrino emissions detected..."

 # Create the pihole resource directory if it doesn't exist. Future files will be stored here
@@ -47,35 +45,6 @@ else
     sudo mkdir $piholeDir
 fi

-# Add additional swap to prevent the "Error fork: unable to allocate memory" message: https://github.com/jacobsalmela/pi-hole/issues/37
-function createSwapFile()
-#########################
-    {
-    echo "** Creating more swap space to accomodate large solar masses..."
-    sudo dphys-swapfile swapoff
-    sudo curl -s -o /etc/dphys-swapfile https://raw.githubusercontent.com/jacobsalmela/pi-hole/master/advanced/dphys-swapfile
-    sudo dphys-swapfile setup
-    sudo dphys-swapfile swapon
-    }
-
-
-if [[ -n "$noSwap" ]]; then
-    # if $noSwap is set, don't do anything
-    :
-elif [[ -f /etc/dphys-swapfile ]];then
-    swapSize=$(cat /etc/dphys-swapfile | grep -m1 CONF_SWAPSIZE | cut -d'=' -f2)
-    if [[ $swapSize != 500 ]];then
-        mv /etc/dphys-swapfile /etc/dphys-swapfile.orig
-        echo "** Current swap size is $swapSize"
-        createSwapFile
-    else
-        :
-    fi
-else
-    echo "** No swap file found. Creating one..."
-    createSwapFile
-fi
-
 # Loop through domain list. Download each one and remove commented lines (lines beginning with '# 'or '/') and blank lines
 for ((i = 0; i < "${#sources[@]}"; i++))
 do
@@ -86,27 +55,50 @@ do
     # Save the file as list.#.domain
     saveLocation=$origin/list.$i.$domain.$justDomainsExtension

-    echo -n "Getting $domain list... "
-    # Use a case statement to download lists that need special cURL commands to complete properly
+    agent="Mozilla/10.0"
+
+    echo -n "Getting $domain list... "
+
+    # Use a case statement to download lists that need special cURL commands
+    # to complete properly and reset the user agent when required
     case "$domain" in
-        "adblock.mahakala.is") data=$(curl -s -A 'Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0' -e http://forum.xda-developers.com/ -z $saveLocation $url);;
+        "adblock.mahakala.is")
+            agent='Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0'
+            cmd="curl -e http://forum.xda-developers.com/"
+            ;;

-        "pgl.yoyo.org") data=$(curl -s -d mimetype=plaintext -d hostformat=hosts -z $saveLocation $url);;
+        "pgl.yoyo.org")
+            cmd="curl -d mimetype=plaintext -d hostformat=hosts"
+            ;;

-        *) data=$(curl -s -z $saveLocation -A "Mozilla/10.0" $url);;
+        # Default is a simple curl request
+        *) cmd="curl"
     esac

-    if [[ -n "$data" ]];then
+    # tmp file, so we don't have to store the (long!) lists in RAM
+    patternBuffer=$(mktemp)
+    heisenbergCompensator=""
+    if [[ -r $saveLocation ]]; then
+        heisenbergCompensator="-z $saveLocation"
+    fi
+    CMD="$cmd -s $heisenbergCompensator -A '$agent' $url > $patternBuffer"
+    $cmd -s $heisenbergCompensator -A "$agent" $url > $patternBuffer
+
+
+    if [[ -s "$patternBuffer" ]];then
         # Remove comments and print only the domain name
         # Most of the lists downloaded are already in hosts file format but the spacing/formating is not contigious
         # This helps with that and makes it easier to read
         # It also helps with debugging so each stage of the script can be researched more in depth
-        echo "$data" | awk 'NF {if ($1 !~ "#") { if (NF>1) {print $2} else {print $1}}}' | \
-            sed -e 's/^[. \t]*//' -e 's/\.\.\+/./g' -e 's/[. \t]*$//' | grep "\." > $saveLocation
+        awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' $patternBuffer | \
+            sed -nr -e 's/\.{2,}/./g' -e '/\./p' > $saveLocation
         echo "Done."
     else
-        echo "Skipping list because it does not have any new entries."
+        echo "Skipping pattern because transporter logic detected no changes..."
     fi
+
+    # Cleanup
+    rm -f $patternBuffer
 done

 # Find all files with the .domains extension and compile them into one file and remove CRs
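Taken on its own, the new download logic is: buffer each fetch in a mktemp file instead of a shell variable, make the fetch conditional with curl's -z flag when an earlier copy of the list exists (curl needs a readable file to take the If-Modified-Since timestamp from), and treat an empty buffer as "nothing changed upstream". A standalone sketch of that pattern, using one of the sources from the list above; the saveLocation path is illustrative:

    url='https://adaway.org/hosts.txt'
    saveLocation=/tmp/list.0.adaway.org.domains   # illustrative location
    agent="Mozilla/10.0"

    # Buffer the download on disk instead of in a shell variable
    patternBuffer=$(mktemp)

    # Only request "newer than the saved copy" when a saved copy exists
    heisenbergCompensator=""
    [[ -r $saveLocation ]] && heisenbergCompensator="-z $saveLocation"

    curl -s $heisenbergCompensator -A "$agent" "$url" > "$patternBuffer"

    # An empty buffer means the list was unchanged, so keep the old file
    if [[ -s "$patternBuffer" ]]; then
        # Take the domain column, skip comments, squeeze runs of dots,
        # and keep only lines that still contain a dot
        awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1} }' "$patternBuffer" \
            | sed -nr -e 's/\.{2,}/./g' -e '/\./p' > "$saveLocation"
    fi
    rm -f "$patternBuffer"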
@@ -114,39 +106,43 @@ echo "** Aggregating list of domains..."
 find $origin/ -type f -name "*.$justDomainsExtension" -exec cat {} \; | tr -d '\r' > $origin/$matter

 # Append blacklist entries if they exist
-if [[ -f $blacklist ]];then
+if [[ -r $blacklist ]];then
     numberOf=$(cat $blacklist | sed '/^\s*$/d' | wc -l)
     echo "** Blacklisting $numberOf domain(s)..."
     cat $blacklist >> $origin/$matter
-else
-    :
 fi

-function gravity_advanced()
-###########################
-    {
-    numberOf=$(cat $origin/$andLight | sed '/^\s*$/d' | wc -l)
+function gravity_advanced() {
+
+    numberOf=$(wc -l < $origin/$andLight)
     echo "** $numberOf domains being pulled in by gravity..."
+
     # Remove carriage returns and preceding whitespace
-    cat $origin/$andLight | sed $'s/\r$//' | sed '/^\s*$/d' > $origin/$supernova
+    # not really needed anymore?
+    cp $origin/$andLight $origin/$supernova
+
     # Sort and remove duplicates
-    cat $origin/$supernova | sort | uniq > $origin/$eventHorizon
-    numberOf=$(cat $origin/$eventHorizon | sed '/^\s*$/d' | wc -l)
+    sort -u $origin/$supernova > $origin/$eventHorizon
+    numberOf=$(wc -l < $origin/$eventHorizon)
     echo "** $numberOf unique domains trapped in the event horizon."
+
     # Format domain list as "192.168.x.x domain.com"
     echo "** Formatting domains into a HOSTS file..."
-    cat $origin/$eventHorizon | awk '{sub(/\r$/,""); print "'"$piholeIP"' " $0}' > $origin/$accretionDisc
+    awk '{print "'"$piholeIP"'" $1}' $origin/$eventHorizon > $origin/$accretionDisc
+
     # Copy the file over as /etc/pihole/gravity.list so dnsmasq can use it
     sudo cp $origin/$accretionDisc $adList
     kill -HUP $(pidof dnsmasq)
-    }
+}

 # Whitelist (if applicable) then remove duplicates and format for dnsmasq
-if [[ -f $whitelist ]];then
+if [[ -r $whitelist ]];then
     # Remove whitelist entries
     numberOf=$(cat $whitelist | sed '/^\s*$/d' | wc -l)
     plural=; [[ "$numberOf" != "1" ]] && plural=s
     echo "** Whitelisting $numberOf domain${plural}..."
+
+    # Append a "$" to the end, prepend a "^" to the beginning, and
+    # replace "." with "\." of each line to turn each entry into a
+    # regexp so it can be parsed out with grep -x
@@ -163,6 +159,7 @@ do
     echo "$url" | awk -F '/' '{print "^"$3"$"}' | sed 's/\./\\./g' >> $latentWhitelist
 done

+# Remove whitelist entries from deduped list
 grep -vxf $latentWhitelist $origin/$matter > $origin/$andLight
 gravity_advanced
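The whitelist pass hinges on grep's -x flag: each whitelisted entry is rewritten as an anchored, dot-escaped regular expression, and grep -vxf then removes only exact whole-line matches from the aggregated list. A toy run of the technique, with made-up paths and domains:

    # A two-entry aggregated list standing in for $origin/$matter:
    printf 'ads.example.com\nwww.example.net\n' > /tmp/matter

    # The same transformation the loop applies to each whitelisted URL:
    # keep the host part, anchor both ends, then escape the dots
    echo 'http://www.example.net/list.txt' \
        | awk -F '/' '{print "^"$3"$"}' | sed 's/\./\\./g' > /tmp/latentWhitelist
    # /tmp/latentWhitelist now holds: ^www\.example\.net$

    # -v inverts the match, -x matches whole lines, -f reads patterns from a file
    grep -vxf /tmp/latentWhitelist /tmp/matter   # prints only: ads.example.com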