#!/usr/bin/env bash
# shellcheck disable=SC1090
# Pi-hole: A black hole for Internet advertisements
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Usage: "pihole -g"
# Compiles a list of ad-serving domains by downloading them from multiple sources
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

# Force byte-wise, locale-independent behavior for sort/grep/etc.
export LC_ALL=C

PI_HOLE_SCRIPT_DIR="/opt/pihole"
# Source utils.sh for getFTLConfigValue
utilsfile="${PI_HOLE_SCRIPT_DIR}/utils.sh"
# shellcheck disable=SC1090
. "${utilsfile}"

# Source the color table used for TICK/CROSS/INFO output markers
coltable="${PI_HOLE_SCRIPT_DIR}/COL_TABLE"
# shellcheck disable=SC1090
. "${coltable}"

# Source the gravity database migration helpers (provides upgrade_gravityDB)
# shellcheck disable=SC1091
. "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"
basename="pihole"
PIHOLE_COMMAND="/usr/local/bin/${basename}"

piholeDir="/etc/${basename}"

# Legacy (pre v5.0) list file locations
whitelistFile="${piholeDir}/whitelist.txt"
blacklistFile="${piholeDir}/blacklist.txt"
regexFile="${piholeDir}/regex.list"
adListFile="${piholeDir}/adlists.list"

piholeGitDir="/etc/.pihole"
# Gravity database locations are read from the FTL configuration
GRAVITYDB=$(getFTLConfigValue files.gravity)
GRAVITY_TMPDIR=$(getFTLConfigValue files.gravity_tmp)
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"

# File extension used for downloaded list files
domainsExtension="domains"
curl_connect_timeout=10
# Check gravity temp directory; fall back to /tmp when the configured
# directory is missing or not writable
if [ ! -d "${GRAVITY_TMPDIR}" ] || [ ! -w "${GRAVITY_TMPDIR}" ]; then
    echo -e "  ${COL_LIGHT_RED}Gravity temporary directory does not exist or is not a writeable directory, falling back to /tmp. ${COL_NC}"
    GRAVITY_TMPDIR="/tmp"
fi

# Set this only after sourcing pihole-FTL.conf as the gravity database path may
# have changed
gravityDBfile="${GRAVITYDB}"
gravityDBfile_default="/etc/pihole/gravity.db"
# Gravity is built in a temporary database and swapped in when complete
gravityTEMPfile="${GRAVITYDB}_temp"
gravityDIR="$(dirname -- "${gravityDBfile}")"
gravityOLDfile="${gravityDIR}/gravity_old.db"
# Generate new SQLite3 file from schema template.
# Globals: gravityDBfile, gravityDBschema, piholeDir
# Returns: 0 on success, 1 when the database could not be created
generate_gravity_database() {
    if ! pihole-FTL sqlite3 -ni "${gravityDBfile}" <"${gravityDBschema}"; then
        echo -e "   ${CROSS} Unable to create ${gravityDBfile}"
        return 1
    fi
    # Ensure the pihole user/group can write to the new database
    chown pihole:pihole "${gravityDBfile}"
    chmod g+w "${piholeDir}" "${gravityDBfile}"
}
# Build gravity tree (index over the gravity table) in the temporary database.
# Globals: gravityTEMPfile
# Returns: 0 on success, 1 when the index could not be created
gravity_build_tree() {
    local str
    str="Building tree"
    echo -ne "  ${INFO} ${str}..."

    # The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"
}
# Copy data from old to new database file and swap them.
# The old database is kept as gravity_old.db only when enough disk space is
# available; otherwise it is deleted before the swap.
# Globals: gravityDIR, gravityDBfile, gravityTEMPfile, gravityOLDfile
gravity_swap_databases() {
    str="Swapping databases"
    echo -ne "  ${INFO} ${str}..."

    # Swap databases and remove or conditionally rename old database
    # Number of available blocks on disk
    availableBlocks=$(stat -f --format "%a" "${gravityDIR}")
    # Number of blocks, used by gravity.db
    gravityBlocks=$(stat --format "%b" "${gravityDBfile}")
    # Only keep the old database if available disk space is at least twice the size of the existing gravity.db.
    # Better be safe than sorry...
    oldAvail=false
    if [ "${availableBlocks}" -gt "$((gravityBlocks * 2))" ] && [ -f "${gravityDBfile}" ]; then
        oldAvail=true
        mv "${gravityDBfile}" "${gravityOLDfile}"
    else
        rm "${gravityDBfile}"
    fi
    mv "${gravityTEMPfile}" "${gravityDBfile}"
    echo -e "${OVER}  ${TICK} ${str}"

    if $oldAvail; then
        echo -e "  ${TICK} The old database remains available"
    fi
}
# Update timestamp when the gravity table was last updated successfully.
# Globals: gravityTEMPfile
# Returns: 0 on success, 1 when the timestamp could not be stored
update_gravity_timestamp() {
    output=$({ printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update gravity timestamp in database ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    return 0
}
# Import domains from file and store them in the specified database table.
# Arguments: $1 - target table name (whitelist/blacklist/regex/adlist/domain_audit)
#            $2 - path of the legacy list file to import
# Side effects: moves the source file into ${piholeDir}/migration_backup
database_table_from_file() {
    # Define locals
    local table src backup_path backup_file tmpFile list_type
    table="${1}"
    src="${2}"
    backup_path="${piholeDir}/migration_backup"
    backup_file="${backup_path}/$(basename "${2}")"

    # Create a temporary file. We don't use '--suffix' here because not all
    # implementations of mktemp support it, e.g. on Alpine
    tmpFile="$(mktemp -p "${GRAVITY_TMPDIR}")"
    mv "${tmpFile}" "${tmpFile%.*}.gravity"
    tmpFile="${tmpFile%.*}.gravity"

    local timestamp
    timestamp="$(date --utc +'%s')"

    local rowid
    declare -i rowid
    rowid=1

    # Special handling for domains to be imported into the common domainlist table
    if [[ "${table}" == "whitelist" ]]; then
        list_type="0"
        table="domainlist"
    elif [[ "${table}" == "blacklist" ]]; then
        list_type="1"
        table="domainlist"
    elif [[ "${table}" == "regex" ]]; then
        list_type="3"
        table="domainlist"
    fi

    # Get MAX(id) from domainlist when INSERTing into this table
    if [[ "${table}" == "domainlist" ]]; then
        rowid="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
        if [[ -z "$rowid" ]]; then
            rowid=0
        fi
        rowid+=1
    fi

    # Loop over all domains in ${src} file
    # Read file line by line, skipping comment lines
    grep -v '^ *#' <"${src}" | while IFS= read -r domain; do
        # Only add non-empty lines
        if [[ -n "${domain}" ]]; then
            if [[ "${table}" == "domain_audit" ]]; then
                # domain_audit table format (no enable or modified fields)
                echo "${rowid},\"${domain}\",${timestamp}" >>"${tmpFile}"
            elif [[ "${table}" == "adlist" ]]; then
                # Adlist table format
                echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\",,0,0,0,0,0" >>"${tmpFile}"
            else
                # White-, black-, and regexlist table format
                echo "${rowid},${list_type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${src}\"" >>"${tmpFile}"
            fi
            rowid+=1
        fi
    done

    # Store domains in database table specified by ${table}
    # Use printf as .mode and .import need to be on separate lines
    # see https://unix.stackexchange.com/a/445615/83260
    output=$({ printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | pihole-FTL sqlite3 -ni "${gravityDBfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to fill table ${table}${list_type} in database ${gravityDBfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi

    # Move source file to backup directory, create directory if not existing
    mkdir -p "${backup_path}"
    mv "${src}" "${backup_file}" 2>/dev/null ||
        echo -e "  ${CROSS} Unable to backup ${src} to ${backup_path}"

    # Delete tmpFile
    rm "${tmpFile}" >/dev/null 2>&1 ||
        echo -e "  ${CROSS} Unable to remove ${tmpFile}"
}
# Check if a column with name ${2} exists in gravity table with name ${1}.
# Returns: 0 when the column exists, 1 otherwise
gravity_column_exists() {
    output=$({ printf ".timeout 30000\\nSELECT EXISTS(SELECT * FROM pragma_table_info('%s') WHERE name='%s');\\n" "${1}" "${2}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    if [[ "${output}" == "1" ]]; then
        return 0 # Bash 0 is success
    fi
    return 1 # Bash non-0 is failure
}
# Update number of domains on this list. We store this in the "old" database as all values in the new database will later be overwritten.
# Arguments: $1 - adlist ID, $2 - number of domains, $3 - number of invalid domains
database_adlist_number() {
    # Only try to set number of domains when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "number"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET number = %i, invalid_domains = %i WHERE id = %i;\\n" "${2}" "${3}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update number of domains in adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Update status of this list. We store this in the "old" database as all values in the new database will later be overwritten.
# Arguments: $1 - adlist ID, $2 - status value
database_adlist_status() {
    # Only try to set the status when this field exists in the gravity database
    if ! gravity_column_exists "adlist" "status"; then
        return
    fi

    output=$({ printf ".timeout 30000\\nUPDATE adlist SET status = %i WHERE id = %i;\\n" "${2}" "${1}" | pihole-FTL sqlite3 -ni "${gravityTEMPfile}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to update status of adlist with ID ${1} in database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    fi
}
# Migrate pre-v5.0 list files to database-based Pi-hole versions.
# Creates the gravity database if missing, imports legacy list files,
# and runs any pending database schema upgrades.
migrate_to_database() {
    # Create database file only if not present
    if [ ! -e "${gravityDBfile}" ]; then
        # Create new database file - note that this will be created in version 1
        echo -e "  ${INFO} Creating new gravity database"
        if ! generate_gravity_database; then
            echo -e "   ${CROSS} Error creating new gravity database. Please contact support."
            return 1
        fi

        # Check if gravity database needs to be updated
        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"

        # Migrate list files to new database
        if [ -e "${adListFile}" ]; then
            # Store adlist domains in database
            echo -e "  ${INFO} Migrating content of ${adListFile} into new database"
            database_table_from_file "adlist" "${adListFile}"
        fi
        if [ -e "${blacklistFile}" ]; then
            # Store blacklisted domains in database
            echo -e "  ${INFO} Migrating content of ${blacklistFile} into new database"
            database_table_from_file "blacklist" "${blacklistFile}"
        fi
        if [ -e "${whitelistFile}" ]; then
            # Store whitelisted domains in database
            echo -e "  ${INFO} Migrating content of ${whitelistFile} into new database"
            database_table_from_file "whitelist" "${whitelistFile}"
        fi
        if [ -e "${regexFile}" ]; then
            # Store regex domains in database
            # Important note: We need to add the domains to the "regex" table
            # as it will only later be renamed to "regex_blacklist"!
            echo -e "  ${INFO} Migrating content of ${regexFile} into new database"
            database_table_from_file "regex" "${regexFile}"
        fi
    fi

    # Check if gravity database needs to be updated
    upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
}
# Determine if DNS resolution is available before proceeding.
# Tries getent first, then dig as a fallback (for setups where resolv.conf
# points away from the local dnsmasq). If neither works, restarts FTL when it
# is not running, waits up to 120 seconds, and retries recursively.
# Exits the script when resolution stays unavailable after a retry.
gravity_CheckDNSResolutionAvailable() {
    local lookupDomain="raw.githubusercontent.com"

    # Determine if $lookupDomain is resolvable
    if timeout 4 getent hosts "${lookupDomain}" &>/dev/null; then
        # Print confirmation of resolvability if it had previously failed
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
    # This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
    # So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
    if timeout 4 dig +short "${lookupDomain}" &>/dev/null; then
        if [[ -n "${secs:-}" ]]; then
            echo -e "${OVER}  ${TICK} DNS resolution is now available\\n"
        fi
        return 0
    elif [[ -n "${secs:-}" ]]; then
        echo -e "${OVER}  ${CROSS} DNS resolution is not available"
        exit 1
    fi

    # Determine error output message
    if pgrep pihole-FTL &>/dev/null; then
        echo -e "  ${CROSS} DNS resolution is currently unavailable"
    else
        echo -e "  ${CROSS} DNS service is not running"
        "${PIHOLE_COMMAND}" restartdns
    fi

    # Ensure DNS server is given time to be resolvable
    secs="120"
    echo -ne "  ${INFO} Time until retry: ${secs}"
    until timeout 1 getent hosts "${lookupDomain}" &>/dev/null; do
        [[ "${secs:-}" -eq 0 ]] && break
        echo -ne "${OVER}  ${INFO} Time until retry: ${secs}"
        : $((secs--))
        sleep 1
    done

    # Try again
    gravity_CheckDNSResolutionAvailable
}
# Retrieve blocklist URLs and parse domains from adlist.list.
# Reads enabled adlists from the gravity database, prepares a fresh temporary
# gravity database, and downloads each source via
# gravity_DownloadBlocklistFromUrl.
gravity_DownloadBlocklists() {
    echo -e "  ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."

    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        echo -e "  ${INFO} Storing gravity database in ${COL_BOLD}${gravityDBfile}${COL_NC}"
    fi

    # Retrieve source URLs from gravity database
    # We source only enabled adlists, SQLite3 stores boolean values as 0 (false) or 1 (true)
    mapfile -t sources <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceIDs <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2>/dev/null)"
    mapfile -t sourceTypes <<<"$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "SELECT type FROM vw_adlist;" 2>/dev/null)"

    # Parse source domains from $sources
    mapfile -t sourceDomains <<<"$(
        # Logic: Split by folder/port
        awk -F '[/:]' '{
            # Remove URL protocol & optional username:password@
            gsub(/(.*:\/\/|.*:.*@)/, "", $0)
            if (length($1) > 0) { print $1 }
            else { print "local" }
        }' <<<"$(printf '%s\n' "${sources[@]}")" 2>/dev/null
    )"

    local str="Pulling blocklist source list into range"
    echo -e "${OVER}  ${TICK} ${str}"

    if [[ -z "${sources[*]}" ]] || [[ -z "${sourceDomains[*]}" ]]; then
        echo -e "  ${INFO} No source list found, or it is empty"
        echo ""
        unset sources
    fi

    local url domain str target compression adlist_type
    echo ""

    # Prepare new gravity database
    str="Preparing new gravity database"
    echo -ne "  ${INFO} ${str}..."
    rm "${gravityTEMPfile}" >/dev/null 2>&1
    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <"${gravityDBschema}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to create new database ${gravityTEMPfile}\\n  ${output}"
        gravity_Cleanup "error"
    else
        echo -e "${OVER}  ${TICK} ${str}"
    fi

    str="Creating new gravity databases"
    echo -ne "  ${INFO} ${str}..."

    # Gravity copying SQL script
    copyGravity="$(cat "${gravityDBcopy}")"
    if [[ "${gravityDBfile}" != "${gravityDBfile_default}" ]]; then
        # Replace default gravity script location by custom location
        copyGravity="${copyGravity//"${gravityDBfile_default}"/"${gravityDBfile}"}"
    fi

    output=$({ pihole-FTL sqlite3 -ni "${gravityTEMPfile}" <<<"${copyGravity}"; } 2>&1)
    status="$?"

    if [[ "${status}" -ne 0 ]]; then
        echo -e "\\n  ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n  ${output}"
        return 1
    fi
    echo -e "${OVER}  ${TICK} ${str}"

    # Use compression to reduce the amount of data that is transferred
    # between the Pi-hole and the ad list provider. Use this feature
    # only if it is supported by the locally available version of curl
    if curl -V | grep -q "Features:.* libz"; then
        compression="--compressed"
        echo -e "  ${INFO} Using libz compression\n"
    else
        compression=""
        echo -e "  ${INFO} Libz compression not available\n"
    fi

    # Loop through $sources and download each one
    for ((i = 0; i < "${#sources[@]}"; i++)); do
        url="${sources[$i]}"
        domain="${sourceDomains[$i]}"
        id="${sourceIDs[$i]}"
        if [[ "${sourceTypes[$i]}" -eq "0" ]]; then
            # Gravity list
            str="blocklist"
            adlist_type="gravity"
        else
            # AntiGravity list
            str="allowlist"
            adlist_type="antigravity"
        fi

        # Save the file as list.#.domain
        saveLocation="${piholeDir}/list.${id}.${domain}.${domainsExtension}"
        activeDomains[$i]="${saveLocation}"

        echo -e "  ${INFO} Target: ${url}"
        local regex check_url
        # Check for characters NOT allowed in URLs
        regex="[^a-zA-Z0-9:/?&%=~._()-;]"

        # this will remove first @ that is after schema and before domain
        # \1 is optional schema, \2 is userinfo
        check_url="$(sed -re 's#([^:/]*://)?([^/]+)@#\1\2#' <<<"$url")"

        if [[ "${check_url}" =~ ${regex} ]]; then
            echo -e "  ${CROSS} Invalid Target"
        else
            gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}" "${adlist_type}" "${domain}"
        fi
        echo ""
    done

    gravity_Blackbody=true
}
# Compare a downloaded list against its stored checksum and record whether it
# changed upstream (status 1) or stayed unchanged (status 2).
# Arguments: $1 - adlist ID, $2 - path of the downloaded list
compareLists() {
    local adlistID="${1}" target="${2}"

    # Verify checksum when an older checksum exists
    if [[ -s "${target}.sha1" ]]; then
        if ! sha1sum --check --status --strict "${target}.sha1"; then
            # The list changed upstream, we need to update the checksum
            sha1sum "${target}" >"${target}.sha1"
            echo "  ${INFO} List has been updated"
            database_adlist_status "${adlistID}" "1"
        else
            echo "  ${INFO} List stayed unchanged"
            database_adlist_status "${adlistID}" "2"
        fi
    else
        # No checksum available, create one for comparing on the next run
        sha1sum "${target}" >"${target}.sha1"
        # We assume here it was changed upstream
        database_adlist_status "${adlistID}" "1"
    fi
}
2020-02-21 19:56:48 +01:00
2017-09-14 12:23:49 +02:00
# Download specified URL and perform checks on HTTP status and file content
2018-01-14 21:38:39 +01:00
gravity_DownloadBlocklistFromUrl( ) {
2023-11-22 20:56:23 +01:00
local url = " ${ 1 } " adlistID = " ${ 2 } " saveLocation = " ${ 3 } " target = " ${ 4 } " compression = " ${ 5 } " gravity_type = " ${ 6 } " domain = " ${ 7 } "
2023-04-07 12:36:50 +02:00
local heisenbergCompensator = "" listCurlBuffer str httpCode success = "" ip cmd_ext
2024-03-28 15:41:45 +01:00
local file_path permissions ip_addr port blocked = false download = true
2017-07-27 04:34:35 +02:00
2017-09-15 14:39:17 +02:00
# Create temp file to store content on disk instead of RAM
2023-05-10 06:52:51 +02:00
# We don't use '--suffix' here because not all implementations of mktemp support it, e.g. on Alpine
listCurlBuffer = " $( mktemp -p " ${ GRAVITY_TMPDIR } " ) "
mv " ${ listCurlBuffer } " " ${ listCurlBuffer %.* } .phgpb "
2023-10-23 21:36:18 +02:00
listCurlBuffer = " ${ listCurlBuffer %.* } .phgpb "
2017-08-28 03:36:02 +02:00
2017-09-15 14:39:17 +02:00
# Determine if $saveLocation has read permission
2017-11-21 18:35:58 +01:00
if [ [ -r " ${ saveLocation } " && $url != "file" * ] ] ; then
2017-09-15 14:39:17 +02:00
# Have curl determine if a remote file has been modified since last retrieval
# Uses "Last-Modified" header, which certain web servers do not provide (e.g: raw github urls)
2017-11-21 18:35:58 +01:00
# Note: Don't do this for local files, always download them
2017-07-27 04:34:35 +02:00
heisenbergCompensator = " -z ${ saveLocation } "
fi
str = "Status:"
echo -ne " ${ INFO } ${ str } Pending... "
2018-08-12 01:15:42 +02:00
blocked = false
2023-02-11 13:34:12 +01:00
case $( getFTLConfigValue dns.blocking.mode) in
2024-02-09 20:22:53 +01:00
"IP-NODATA-AAAA" | "IP" )
# Get IP address of this domain
ip = " $( dig " ${ domain } " +short) "
# Check if this IP matches any IP of the system
if [ [ -n " ${ ip } " && $( grep -Ec " inet(|6) ${ ip } " <<< " $( ip a) " ) -gt 0 ] ] ; then
blocked = true
fi
; ;
"NXDOMAIN" )
if [ [ $( dig " ${ domain } " | grep "NXDOMAIN" -c) -ge 1 ] ] ; then
blocked = true
fi
; ;
"NODATA" )
if [ [ $( dig " ${ domain } " | grep "NOERROR" -c) -ge 1 ] ] && [ [ -z $( dig +short " ${ domain } " ) ] ] ; then
blocked = true
fi
; ;
"NULL" | *)
if [ [ $( dig " ${ domain } " +short | grep "0.0.0.0" -c) -ge 1 ] ] ; then
blocked = true
fi
; ;
2021-11-25 07:41:40 +01:00
esac
2023-11-22 21:04:46 +01:00
2023-11-22 21:10:22 +01:00
# Check if this domain is blocked by Pi-hole but only if the domain is not a
# local file or empty
if [ [ $url != "file" * ] ] && [ [ -n " ${ domain } " ] ] ; then
case $( getFTLConfigValue dns.blocking.mode) in
2024-05-08 22:25:26 +02:00
"IP-NODATA-AAAA" | "IP" )
2021-11-25 07:41:40 +01:00
# Get IP address of this domain
ip = " $( dig " ${ domain } " +short) "
# Check if this IP matches any IP of the system
2024-05-08 22:25:26 +02:00
if [ [ -n " ${ ip } " && $( grep -Ec " inet(|6) ${ ip } " <<< " $( ip a) " ) -gt 0 ] ] ; then
2021-11-25 07:41:40 +01:00
blocked = true
2024-05-08 22:25:26 +02:00
fi
; ;
2018-08-12 01:15:42 +02:00
"NXDOMAIN" )
2021-11-25 07:41:40 +01:00
if [ [ $( dig " ${ domain } " | grep "NXDOMAIN" -c) -ge 1 ] ] ; then
blocked = true
2024-05-08 22:25:26 +02:00
fi
; ;
2021-12-03 09:17:19 +01:00
"NODATA" )
2021-12-21 22:01:34 +01:00
if [ [ $( dig " ${ domain } " | grep "NOERROR" -c) -ge 1 ] ] && [ [ -z $( dig +short " ${ domain } " ) ] ] ; then
2024-05-08 22:25:26 +02:00
blocked = true
fi
; ;
"NULL" | *)
2021-11-25 07:41:40 +01:00
if [ [ $( dig " ${ domain } " +short | grep "0.0.0.0" -c) -ge 1 ] ] ; then
blocked = true
2024-05-08 22:25:26 +02:00
fi
; ;
2023-11-22 21:10:22 +01:00
esac
if [ [ " ${ blocked } " = = true ] ] ; then
2024-05-08 22:25:26 +02:00
# Get first defined upstream server
local upstream
upstream = " $( getFTLConfigValue dns.upstreams) "
# Isolate first upstream server from a string like
# [ 1.2.3.4#1234, 5.6.7.8#5678, ... ]
upstream = " ${ upstream %%,* } "
upstream = " ${ upstream ##*[ } "
upstream = " ${ upstream %%]* } "
# Trim leading and trailing spaces and tabs
upstream = " ${ upstream # " ${ upstream %%[![ : space : ]]* } " } "
upstream = " ${ upstream % " ${ upstream ##*[![ : space : ]] } " } "
# Get IP address and port of this upstream server
local ip_addr port
printf -v ip_addr "%s" " ${ upstream %#* } "
if [ [ ${ upstream } != *"#" * ] ] ; then
port = 53
else
printf -v port "%s" " ${ upstream #*# } "
fi
ip = $( dig " @ ${ ip_addr } " -p " ${ port } " +short " ${ domain } " | tail -1)
if [ [ $( echo " ${ url } " | awk -F '://' '{print $1}' ) = "https" ] ] ; then
port = 443
else
port = 80
fi
echo -e " ${ OVER } ${ CROSS } ${ str } ${ domain } is blocked by one of your lists. Using DNS server ${ upstream } instead "
echo -ne " ${ INFO } ${ str } Pending... "
cmd_ext = " --resolve $domain : $port : $ip "
2018-08-11 14:33:33 +02:00
fi
fi
2019-08-10 13:33:30 +02:00
2024-03-28 15:41:45 +01:00
# If we are going to "download" a local file, we first check if the target
# file has a+r permission. We explicitly check for all+read because we want
# to make sure that the file is readable by everyone and not just the user
# running the script.
if [ [ $url = = "file://" * ] ] ; then
# Get the file path
2024-03-04 19:38:13 +01:00
file_path = $( echo " $url " | cut -d'/' -f3-)
2024-03-27 22:10:12 +01:00
# Check if the file exists and is a regular file (i.e. not a socket, fifo, tty, block). Might still be a symlink.
if [ [ ! -f $file_path ] ] ; then
2024-03-28 15:41:45 +01:00
# Output that the file does not exist
echo -e " ${ OVER } ${ CROSS } ${ file_path } does not exist "
download = false
else
2024-03-27 22:10:12 +01:00
# Check if the file or a file referenced by the symlink has a+r permissions
permissions = $( stat -L -c "%a" " $file_path " )
2024-03-28 15:41:45 +01:00
if [ [ $permissions = = *4 || $permissions = = *5 || $permissions = = *6 || $permissions = = *7 ] ] ; then
# Output that we are using the local file
echo -e " ${ OVER } ${ INFO } Using local file ${ file_path } "
else
# Output that the file does not have the correct permissions
2024-03-04 19:38:13 +01:00
echo -e " ${ OVER } ${ CROSS } Cannot read file (file needs to have a+r permission) "
2024-03-28 15:41:45 +01:00
download = false
fi
fi
fi
2024-04-30 15:47:57 +02:00
# Check for allowed protocols
if [ [ $url != "http" * && $url != "https" * && $url != "file" * && $url != "ftp" * && $url != "ftps" * && $url != "sftp" * ] ] ; then
echo -e " ${ OVER } ${ CROSS } ${ str } Invalid protocol specified, ignoring list "
download = false
fi
2024-03-28 15:41:45 +01:00
if [ [ " ${ download } " = = true ] ] ; then
# shellcheck disable=SC2086
2024-05-08 22:25:26 +02:00
httpCode = $( curl --connect-timeout ${ curl_connect_timeout } -s -L ${ compression } ${ cmd_ext } ${ heisenbergCompensator } -w "%{http_code}" " ${ url } " -o " ${ listCurlBuffer } " 2>/dev/null)
2024-03-28 15:41:45 +01:00
fi
2017-07-27 04:34:35 +02:00
2017-11-21 18:30:40 +01:00
case $url in
2024-02-09 20:22:53 +01:00
# Did we "download" a local file?
"file" *)
if [ [ -s " ${ listCurlBuffer } " ] ] ; then
echo -e " ${ OVER } ${ TICK } ${ str } Retrieval successful "
success = true
else
2024-03-28 15:41:45 +01:00
echo -e " ${ OVER } ${ CROSS } ${ str } Retrieval failed / empty list "
2024-02-09 20:22:53 +01:00
fi
; ;
# Did we "download" a remote file?
*)
# Determine "Status:" output based on HTTP response
case " ${ httpCode } " in
"200" )
echo -e " ${ OVER } ${ TICK } ${ str } Retrieval successful "
success = true
; ;
"304" )
echo -e " ${ OVER } ${ TICK } ${ str } No changes detected "
success = true
; ;
"000" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Refused " ; ;
"403" ) echo -e " ${ OVER } ${ CROSS } ${ str } Forbidden " ; ;
"404" ) echo -e " ${ OVER } ${ CROSS } ${ str } Not found " ; ;
"408" ) echo -e " ${ OVER } ${ CROSS } ${ str } Time-out " ; ;
"451" ) echo -e " ${ OVER } ${ CROSS } ${ str } Unavailable For Legal Reasons " ; ;
"500" ) echo -e " ${ OVER } ${ CROSS } ${ str } Internal Server Error " ; ;
"504" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Timed Out (Gateway) " ; ;
"521" ) echo -e " ${ OVER } ${ CROSS } ${ str } Web Server Is Down (Cloudflare) " ; ;
"522" ) echo -e " ${ OVER } ${ CROSS } ${ str } Connection Timed Out (Cloudflare) " ; ;
*) echo -e " ${ OVER } ${ CROSS } ${ str } ${ url } ( ${ httpCode } ) " ; ;
esac
; ;
2017-07-27 04:34:35 +02:00
esac
2020-12-27 19:14:52 +01:00
local done = "false"
2017-07-27 04:34:35 +02:00
# Determine if the blocklist was downloaded and saved correctly
2017-09-15 14:39:17 +02:00
if [ [ " ${ success } " = = true ] ] ; then
2017-07-27 04:34:35 +02:00
if [ [ " ${ httpCode } " = = "304" ] ] ; then
2020-01-24 18:39:13 +01:00
# Add domains to database table file
2023-11-22 21:06:09 +01:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-27 19:14:52 +01:00
database_adlist_status " ${ adlistID } " "2"
done = "true"
2023-03-25 00:15:49 +01:00
# Check if $listCurlBuffer is a non-zero length file
elif [ [ -s " ${ listCurlBuffer } " ] ] ; then
2017-07-27 04:34:35 +02:00
# Determine if blocklist is non-standard and parse as appropriate
2023-03-25 00:15:49 +01:00
gravity_ParseFileIntoDomains " ${ listCurlBuffer } " " ${ saveLocation } "
# Remove curl buffer file after its use
rm " ${ listCurlBuffer } "
2020-02-21 19:56:48 +01:00
# Add domains to database table file
2023-11-22 21:06:09 +01:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-29 09:54:25 +01:00
# Compare lists, are they identical?
compareLists " ${ adlistID } " " ${ saveLocation } "
2020-12-27 19:14:52 +01:00
done = "true"
2017-07-27 04:34:35 +02:00
else
2023-03-25 00:15:49 +01:00
# Fall back to previously cached list if $listCurlBuffer is empty
2020-12-27 19:14:52 +01:00
echo -e " ${ INFO } Received empty file "
2017-07-27 04:34:35 +02:00
fi
2020-12-27 19:14:52 +01:00
fi
# Do we need to fall back to a cached list (if available)?
if [ [ " ${ done } " != "true" ] ] ; then
2017-09-15 14:39:17 +02:00
# Determine if cached list has read permission
2017-07-27 04:34:35 +02:00
if [ [ -r " ${ saveLocation } " ] ] ; then
echo -e " ${ CROSS } List download failed: ${ COL_LIGHT_GREEN } using previously cached list ${ COL_NC } "
2020-02-21 19:56:48 +01:00
# Add domains to database table file
2023-11-22 21:06:09 +01:00
pihole-FTL " ${ gravity_type } " parseList " ${ saveLocation } " " ${ gravityTEMPfile } " " ${ adlistID } "
2020-12-27 19:14:52 +01:00
database_adlist_status " ${ adlistID } " "3"
2017-07-27 04:34:35 +02:00
else
echo -e " ${ CROSS } List download failed: ${ COL_LIGHT_RED } no cached list available ${ COL_NC } "
2020-12-29 20:35:48 +01:00
# Manually reset these two numbers because we do not call parseList here
2023-05-15 19:25:56 +02:00
database_adlist_number " ${ adlistID } " 0 0
2020-12-27 19:14:52 +01:00
database_adlist_status " ${ adlistID } " "4"
2017-07-27 04:34:35 +02:00
fi
fi
}
2017-09-15 14:39:17 +02:00
# Parse source files into domains format
# Arguments:
#   $1 - source file (raw downloaded list)
#   $2 - destination file (parsed domain list, world-readable)
gravity_ParseFileIntoDomains() {
    local src="${1}" destination="${2}"
    # Remove comments and print only the domain name
    # Most of the lists downloaded are already in hosts file format but the spacing/formatting is not contiguous
    # This helps with that and makes it easier to read
    # It also helps with debugging so each stage of the script can be researched more in depth

    # 1) Convert all characters to lowercase
    tr '[:upper:]' '[:lower:]' <"${src}" >"${destination}"

    # 2) Remove carriage returns
    # 3) Remove lines starting with ! (ABP Comments)
    # 4) Remove lines starting with [ (ABP Header)
    # 5) Remove lines containing ABP extended CSS selectors ("##", "#!#", "#@#", "#?#") preceded by a letter
    # 6) Remove comments (text starting with "#", include possible spaces before the hash sign)
    # 7) Remove leading tabs, spaces, etc. (Also removes leading IP addresses)
    # 8) Remove empty lines
    sed -i -r \
        -e 's/\r$//' \
        -e 's/\s*!.*//g' \
        -e 's/\s*\[.*//g' \
        -e '/[a-z]\#[$?@]{0,1}\#/d' \
        -e 's/\s*#.*//g' \
        -e 's/^.*\s+//g' \
        -e '/^$/d' "${destination}"

    # Lists need to be readable by FTL (and everyone else)
    chmod 644 "${destination}"
}
2015-11-23 08:49:38 +01:00
2019-04-25 11:18:54 +02:00
# Report number of entries in a database table
# Arguments:
#   $1 - table name to count rows in
#   $2 - human-readable description used in the status output
# Globals read: gravityTEMPfile
gravity_Table_Count() {
    local table="${1}"
    local str="${2}"
    local num
    num="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM ${table};")"
    if [[ "${table}" == "gravity" ]]; then
        # The gravity table may contain the same domain multiple times
        # (once per adlist), so also report the number of unique domains
        local unique
        unique="$(pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "SELECT COUNT(*) FROM (SELECT DISTINCT domain FROM ${table});")"
        echo -e "  ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
        # Persist the unique count so FTL can report it without re-counting
        pihole-FTL sqlite3 -ni "${gravityTEMPfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
    else
        echo -e "  ${INFO} Number of ${str}: ${num}"
    fi
}
2017-06-21 13:49:05 +02:00
2019-04-25 11:18:54 +02:00
# Output count of blocked domains and regex filters
gravity_ShowCount() {
    # Here we use the table "gravity" instead of the view "vw_gravity" for speed.
    # It's safe to replace it here, because right after a gravity run both will show the exactly same number of domains.
    gravity_Table_Count "gravity" "gravity domains" ""
    gravity_Table_Count "vw_blacklist" "exact denied domains"
    gravity_Table_Count "vw_regex_blacklist" "regex denied filters"
    gravity_Table_Count "vw_whitelist" "exact allowed domains"
    gravity_Table_Count "vw_regex_whitelist" "regex allowed filters"
}
2015-11-27 00:48:52 +01:00
2017-07-24 13:24:34 +02:00
# Trap Ctrl-C: on SIGINT, report the abort and clean up temp files
# before exiting (gravity_Cleanup "error" also prints status and exits 1)
gravity_Trap() {
    trap '{ echo -e "\\n\\n  ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
}
2015-12-05 04:41:37 +01:00
2017-09-15 14:39:17 +02:00
# Clean up after Gravity upon exit or cancellation
# Arguments:
#   $1 - (optional) non-empty string signals an error: print Pi-hole status and exit 1
# Globals read: piholeDir, GRAVITY_TMPDIR, gravity_Blackbody, activeDomains,
#               domainsExtension, PIHOLE_COMMAND
gravity_Cleanup() {
    local error="${1:-}"

    str="Cleaning up stray matter"
    echo -ne "  ${INFO} ${str}..."

    # Delete tmp content generated by Gravity
    # (globs stay outside the quotes so they still expand)
    rm "${piholeDir}"/pihole.*.txt 2>/dev/null
    rm "${piholeDir}"/*.tmp 2>/dev/null
    # listCurlBuffer location
    rm "${GRAVITY_TMPDIR}"/*.phgpb 2>/dev/null
    # invalid_domains location
    rm "${GRAVITY_TMPDIR}"/*.ph-non-domains 2>/dev/null

    # Ensure this function only runs when gravity_SetDownloadOptions() has completed
    if [[ "${gravity_Blackbody:-}" == true ]]; then
        # Remove any unused .domains files
        for file in "${piholeDir}"/*."${domainsExtension}"; do
            # If list is not in active array, then remove it
            if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
                rm -f "${file}" 2>/dev/null ||
                    echo -e "  ${CROSS} Failed to remove ${file##*/}"
            fi
        done
    fi

    echo -e "${OVER}  ${TICK} ${str}"

    # # Only restart DNS service if offline
    # if ! pgrep pihole-FTL &> /dev/null; then
    #     "${PIHOLE_COMMAND}" restartdns
    #     dnsWasOffline=true
    # fi

    # Print Pi-hole status if an error occurred
    if [[ -n "${error}" ]]; then
        "${PIHOLE_COMMAND}" status
        exit 1
    fi
}
2015-08-23 06:44:41 +02:00
2021-08-20 20:48:57 +02:00
# Check the gravity database for corruption and, if damaged (or forced),
# attempt to rebuild it via sqlite's .recover facility.
# Arguments:
#   $1 - option; "force" runs recovery even when no damage is detected
# Globals read: gravityDBfile
database_recovery() {
    local result
    local str="Checking integrity of existing gravity database (this can take a while)"
    local option="${1}"
    echo -ne "  ${INFO} ${str}..."

    result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA integrity_check" 2>&1)"

    if [[ ${result} == "ok" ]]; then
        echo -e "${OVER}  ${TICK} ${str} - no errors found"

        str="Checking foreign keys of existing gravity database (this can take a while)"
        echo -ne "  ${INFO} ${str}..."
        unset result
        # An empty result means no foreign key violations were found
        result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" "PRAGMA foreign_key_check" 2>&1)"
        if [[ -z ${result} ]]; then
            echo -e "${OVER}  ${TICK} ${str} - no errors found"
            # Database is healthy: only continue into recovery when forced
            if [[ "${option}" != "force" ]]; then
                return
            fi
        else
            echo -e "${OVER}  ${CROSS} ${str} - errors found:"
            while IFS= read -r line; do echo "  - $line"; done <<<"$result"
        fi
    else
        echo -e "${OVER}  ${CROSS} ${str} - errors found:"
        while IFS= read -r line; do echo "  - $line"; done <<<"$result"
    fi

    str="Trying to recover existing gravity database"
    echo -ne "  ${INFO} ${str}..."
    # We have to remove any possibly existing recovery database or this will fail
    rm -f "${gravityDBfile}.recovered" >/dev/null 2>&1
    if result="$(pihole-FTL sqlite3 -ni "${gravityDBfile}" ".recover" | pihole-FTL sqlite3 -ni "${gravityDBfile}.recovered" 2>&1)"; then
        echo -e "${OVER}  ${TICK} ${str} - success"
        mv "${gravityDBfile}" "${gravityDBfile}.old"
        mv "${gravityDBfile}.recovered" "${gravityDBfile}"
        echo -ne " ${INFO} ${gravityDBfile} has been recovered"
        echo -ne " ${INFO} The old ${gravityDBfile} has been moved to ${gravityDBfile}.old"
    else
        echo -e "${OVER}  ${CROSS} ${str} - the following errors happened:"
        while IFS= read -r line; do echo "  - $line"; done <<<"$result"
        echo -e "  ${CROSS} Recovery failed. Try \"pihole -r recreate\" instead."
        exit 1
    fi
    echo ""
}
2017-07-27 04:34:35 +02:00
# Print usage information for "pihole -g" and exit successfully
helpFunc() {
    echo "Usage: pihole -g
Update domains from blocklists specified in adlists.list

Options:
  -f, --force          Force the download of all specified blocklists
  -h, --help           Show this help dialog"
    exit 0
}
2021-08-20 20:48:57 +02:00
# Select the gravity database repair mode from "pihole -g -r <mode>"
# Arguments:
#   $1 - "recover" or "recreate"; anything else prints usage and exits 0
# Globals written: recover_database, recreate_database
repairSelector() {
    case "$1" in
    "recover") recover_database=true ;;
    "recreate") recreate_database=true ;;
    *)
        echo "Usage: pihole -g -r {recover,recreate}
Attempt to repair gravity database

Available options:
  pihole -g -r recover        Try to recover a damaged gravity database file.
                              Pi-hole tries to restore as much as possible
                              from a corrupted gravity database.

  pihole -g -r recover force  Pi-hole will run the recovery process even when
                              no damage is detected. This option is meant to be
                              a last resort. Recovery is a fragile task
                              consuming a lot of resources and shouldn't be
                              performed unnecessarily.

  pihole -g -r recreate       Create a new gravity database file from scratch.
                              This will remove your existing gravity database
                              and create a new file from scratch. If you still
                              have the migration backup created when migrating
                              to Pi-hole v5.0, Pi-hole will import these files."
        exit 0
        ;;
    esac
}
2016-10-22 08:02:45 +02:00
# --- Main script flow: parse arguments, then run a full gravity update ---
for var in "$@"; do
    case "${var}" in
    "-f" | "--force") forceDelete=true ;;
    "-r" | "--repair") repairSelector "$3" ;;
    "-u" | "--upgrade")
        upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
        exit 0
        ;;
    "-h" | "--help") helpFunc ;;
    esac
done

# Remove OLD (backup) gravity file, if it exists
if [[ -f "${gravityOLDfile}" ]]; then
    rm "${gravityOLDfile}"
fi

# Trap Ctrl-C
gravity_Trap

if [[ "${recreate_database:-}" == true ]]; then
    str="Recreating gravity database from migration backup"
    echo -ne "${INFO} ${str}..."
    rm "${gravityDBfile}"
    pushd "${piholeDir}" >/dev/null || exit
    cp migration_backup/* .
    popd >/dev/null || exit
    echo -e "${OVER}  ${TICK} ${str}"
fi

if [[ "${recover_database:-}" == true ]]; then
    database_recovery "$4"
fi

# Move possibly existing legacy files to the gravity database
if ! migrate_to_database; then
    echo -e "   ${CROSS} Unable to migrate to database. Please contact support."
    exit 1
fi

if [[ "${forceDelete:-}" == true ]]; then
    str="Deleting existing list cache"
    echo -ne "${INFO} ${str}..."
    rm /etc/pihole/list.* 2>/dev/null || true
    echo -e "${OVER}  ${TICK} ${str}"
fi

# Gravity downloads blocklists next; DNS resolution must work first
if ! gravity_CheckDNSResolutionAvailable; then
    echo -e "   ${CROSS} Can not complete gravity update, no DNS is available. Please contact support."
    exit 1
fi

if ! gravity_DownloadBlocklists; then
    echo -e "   ${CROSS} Unable to create gravity database. Please try again later. If the problem persists, please contact support."
    exit 1
fi

# Update gravity timestamp
update_gravity_timestamp

# Ensure proper permissions are set for the database
chown pihole:pihole "${gravityTEMPfile}"
chmod g+w "${piholeDir}" "${gravityTEMPfile}"

# Build the tree
gravity_build_tree

# Compute numbers to be displayed (do this after building the tree to get the
# numbers quickly from the tree instead of having to scan the whole database)
gravity_ShowCount

# Migrate rest of the data from old to new database
# IMPORTANT: Swapping the databases must be the last step before the cleanup
if ! gravity_swap_databases; then
    echo -e "   ${CROSS} Unable to create database. Please contact support."
    exit 1
fi

gravity_Cleanup
echo ""

echo "  ${TICK} Done."

# "${PIHOLE_COMMAND}" status