Mirror of https://github.com/pi-hole/pi-hole.git, synced 2024-11-15 02:42:58 +00:00
Remove fake user agent when downloading adlists (#5366)
Commit 4955c52af7

1 changed file with 4 additions and 7 deletions:

 gravity.sh | 11 ++++-------
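In practical terms, gravity.sh now lets curl identify itself instead of spoofing a Chrome browser: with no -A/--user-agent option, curl sends its default curl/<version> User-Agent header. The comment removed below tied the spoofed agent to Cloudflare's Browser Integrity Check, so lists served behind that check may treat the default agent differently. A minimal sketch (not part of the commit; example.com stands in for an adlist URL) of how to inspect the header curl sends by default:

    # Show the request headers curl sends when no -A flag is given.
    # example.com is a placeholder; any HTTPS URL behaves the same way.
    curl -s -v -o /dev/null "https://example.com/" 2>&1 | grep -i '^> user-agent'
    # Typical output (version varies): > user-agent: curl/8.5.0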
--- a/gravity.sh
+++ b/gravity.sh
@@ -401,7 +401,7 @@ gravity_DownloadBlocklists() {
         unset sources
     fi
 
-    local url domain agent str target compression
+    local url domain str target compression
     echo ""
 
     # Prepare new gravity database
@@ -457,9 +457,6 @@ gravity_DownloadBlocklists() {
         saveLocation="${piholeDir}/list.${id}.${domain}.${domainsExtension}"
         activeDomains[$i]="${saveLocation}"
 
-        # Default user-agent (for Cloudflare's Browser Integrity Check: https://support.cloudflare.com/hc/en-us/articles/200170086-What-does-the-Browser-Integrity-Check-do-)
-        agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36"
-
         echo -e "  ${INFO} Target: ${url}"
         local regex check_url
         # Check for characters NOT allowed in URLs
@@ -472,7 +469,7 @@ gravity_DownloadBlocklists() {
         if [[ "${check_url}" =~ ${regex} ]]; then
             echo -e "  ${CROSS} Invalid Target"
         else
-            gravity_DownloadBlocklistFromUrl "${url}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}"
+            gravity_DownloadBlocklistFromUrl "${url}" "${sourceIDs[$i]}" "${saveLocation}" "${target}" "${compression}"
         fi
         echo ""
     done
@@ -504,7 +501,7 @@ compareLists() {
 
 # Download specified URL and perform checks on HTTP status and file content
 gravity_DownloadBlocklistFromUrl() {
-    local url="${1}" agent="${2}" adlistID="${3}" saveLocation="${4}" target="${5}" compression="${6}"
+    local url="${1}" adlistID="${2}" saveLocation="${3}" target="${4}" compression="${5}"
     local heisenbergCompensator="" listCurlBuffer str httpCode success="" ip cmd_ext
 
     # Create temp file to store content on disk instead of RAM
@@ -564,7 +561,7 @@ gravity_DownloadBlocklistFromUrl() {
     fi
 
     # shellcheck disable=SC2086
-    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${listCurlBuffer}" 2> /dev/null)
+    httpCode=$(curl --connect-timeout ${curl_connect_timeout} -s -L ${compression} ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" "${url}" -o "${listCurlBuffer}" 2> /dev/null)
 
     case $url in
         # Did we "download" a local file?
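For scripts or forks that call gravity_DownloadBlocklistFromUrl directly, the agent argument is gone and every later positional parameter shifts up by one, as the updated call in gravity_DownloadBlocklists() shows. A minimal sketch of the new calling convention, with placeholder values where gravity.sh would normally pass its own variables:

    # Hypothetical call after this commit; the URL, ID and paths are illustrative.
    # New argument order: url, adlistID, saveLocation, target, compression
    # (the old second argument, agent, no longer exists).
    gravity_DownloadBlocklistFromUrl "https://example.com/hosts.txt" "1" \
        "/etc/pihole/list.1.example.com.domains" "/tmp/gravity_target.tmp" ""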