
Merge pull request #62 from jacobsalmela/lighttpd

New lighttpd.conf.  Closes #31 and #50
Jacob Salmela 2015-10-27 17:26:45 -05:00
commit 2f9e650739
2 changed files with 33 additions and 24 deletions


@@ -2,37 +2,46 @@ server.modules = (
"mod_expire",
"mod_compress",
"mod_redirect",
"mod_setenv",
"mod_rewrite"
)
server.document-root = "/var/www"
server.document-root = "/var/www/html"
server.error-handler-404 = "pihole/index.html"
server.upload-dirs = ( "/var/cache/lighttpd/uploads" )
server.errorlog = "/var/log/lighttpd/error.log"
server.pid-file = "/var/run/lighttpd.pid"
server.username = "www-data"
server.groupname = "www-data"
server.port = 80
index-file.names = ( "index.php", "index.html", "index.lighttpd.html" )
url.access-deny = ( "~", ".inc" )
static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
compress.cache-dir = "/var/cache/lighttpd/compress/"
compress.filetype = ( "application/javascript", "text/css", "text/html", "text/plain" )
# default listening port for IPv6 falls back to the IPv4 port
include_shell "/usr/share/lighttpd/use-ipv6.pl " + server.port
include_shell "/usr/share/lighttpd/create-mime.assign.pl"
include_shell "/usr/share/lighttpd/include-conf-enabled.pl"
# Set access to 1 day for better query performance when the list gets so large
# http://jacobsalmela.com/raspberry-pi-block-ads-adtrap/#comment-2013820434
$HTTP["url"] =~ "^/pihole/" {
expire.url = ("" => "access plus 1 days")
# If the URL starts with /admin, it is the Web interface
$HTTP["url"] =~ "^/admin/" {
# Create a response header for debugging using curl -I
setenv.add-response-header = ( "X-Pi-hole" => "The Pi-hole Web interface is working!" )
}
# If the URL does not start with /admin, then it is a query for an ad domain
$HTTP["url"] =~ "^(?!/admin)/.*" {
# Create a response header for debugging using curl -I
setenv.add-response-header = ( "X-Pi-hole" => "A black hole for Internet advertisements." )
# Set the cache to 1 day for better performance
expire.url = ("" => "access plus 1 days")
# Send the query into the black hole
url.rewrite = (".*" => "pihole/index.html" )
}
# Rewrites all URLs to the /var/www/pihole/index.html
$HTTP["host"] =~ ".*" {
url.rewrite = (".*" => "pihole/index.html")
}
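As the comments above note, both setenv.add-response-header lines can be checked with curl -I. A minimal check, assuming the Pi-hole answers at 192.168.1.2 (the address is only an example and is not part of this diff):

# Web interface: expect "X-Pi-hole: The Pi-hole Web interface is working!"
curl -I http://192.168.1.2/admin/
# Any other path: expect "X-Pi-hole: A black hole for Internet advertisements."
# plus an Expires header roughly one day out, from the expire.url rule above
curl -I http://192.168.1.2/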


@@ -1,6 +1,6 @@
#!/bin/bash
# http://pi-hole.net
# Compiles a list of ad-serving domains by downloading them from multiple sources
# This script should only be run after you have a static IP address set on the Pi
piholeIP=$(hostname -I)
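One note on the line above: hostname -I can print more than one address (for example when IPv6 or a second interface is configured). A hedged variant that keeps only the first address, shown purely as a sketch and not something this commit changes:

piholeIP=$(hostname -I | awk '{print $1}')   # keep only the first reported address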
@@ -58,7 +58,7 @@ function createSwapFile()
sudo dphys-swapfile setup
sudo dphys-swapfile swapon
}
if [[ -n "$noSwap" ]]; then
# if $noSwap is set, don't do anything
@@ -83,20 +83,20 @@ do
url=${sources[$i]}
# Get just the domain from the URL
domain=$(echo "$url" | cut -d'/' -f3)
# Save the file as list.#.domain
saveLocation=$origin/list.$i.$domain.$justDomainsExtension
echo -n "Getting $domain list... "
# Use a case statement to download lists that need special cURL commands to complete properly
case "$domain" in
"adblock.mahakala.is") data=$(curl -s -A 'Mozilla/5.0 (X11; Linux x86_64; rv:30.0) Gecko/20100101 Firefox/30.0' -e http://forum.xda-developers.com/ -z $saveLocation $url);;
"pgl.yoyo.org") data=$(curl -s -d mimetype=plaintext -d hostformat=hosts -z $saveLocation $url);;
*) data=$(curl -s -z $saveLocation -A "Mozilla/10.0" $url);;
esac
if [[ -n "$data" ]];then
# Remove comments and print only the domain name
# Most of the lists downloaded are already in hosts file format, but the spacing/formatting is not consistent
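The comment above describes turning hosts-format entries into bare domain names. A minimal sketch of that kind of clean-up (the exact pipeline gravity.sh uses sits below this hunk; the sed/awk here are assumptions, and the output file is illustrative):

# drop comments and blank lines, then keep only the hostname field
# ("0.0.0.0 ads.example.com" -> "ads.example.com"; bare domains pass through)
echo "$data" | sed 's/#.*$//;/^\s*$/d' | awk '{if (NF > 1) {print $2} else {print $1}}' > "$saveLocation"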
@@ -127,7 +127,7 @@ function gravity_advanced()
###########################
{
numberOf=$(cat $origin/$andLight | sed '/^\s*$/d' | wc -l)
echo "** $numberOf domains being pulled in by gravity..."
# Remove carriage returns and preceding whitespace
cat $origin/$andLight | sed $'s/\r$//' | sed '/^\s*$/d' > $origin/$supernova
# Sort and remove duplicates
@@ -141,7 +141,7 @@ function gravity_advanced()
sudo cp $origin/$accretionDisc $adList
kill -HUP $(pidof dnsmasq)
}
# Whitelist (if applicable) then remove duplicates and format for dnsmasq
if [[ -f $whitelist ]];then
# Remove whitelist entries
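The lines that actually subtract the whitelist fall below the cut of this hunk. A minimal sketch of that step, assuming one domain per line in $whitelist and exact-line matching (the flags and file roles are assumptions, not taken from this diff):

# -v invert match, -x whole line, -F fixed strings, -f read patterns from $whitelist
grep -vxFf "$whitelist" "$origin/$supernova" > "$origin/$accretionDisc"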