Mirror of https://github.com/pi-hole/pi-hole.git, synced 2025-12-12 20:35:52 +01:00

Pi-hole Core v6.3 (#6480)
.github/workflows/codeql-analysis.yml (vendored): 8 changed lines

@@ -25,16 +25,16 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v4.31.0
+        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b #v4.31.4
         with:
           languages: 'python'

       - name: Autobuild
-        uses: github/codeql-action/autobuild@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v4.31.0
+        uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b #v4.31.4

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v4.31.0
+        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b #v4.31.4
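All of the workflow bumps in this commit keep the repository's convention of pinning each action to a full commit SHA with the tag recorded in a trailing comment. As a hedged aside (not part of the diff itself), the commit behind a tag can be looked up with git ls-remote; the repository and tag below are simply the ones from the hunk above:

    # List the tag ref; for annotated tags the peeled "^{}" line carries the commit SHA to pin
    git ls-remote --tags https://github.com/actions/checkout | grep 'v6.0.0'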
.github/workflows/stale.yml (vendored): 2 changed lines

@@ -40,7 +40,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
       - name: Remove 'stale' label
         run: gh issue edit ${{ github.event.issue.number }} --remove-label ${{ env.stale_label }}
         env:
.github/workflows/sync-back-to-dev.yml (vendored): 2 changed lines

@@ -33,7 +33,7 @@ jobs:
     name: Syncing branches
     steps:
      - name: Checkout
-       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+       uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
      - name: Opening pull request
        run: gh pr create -B development -H master --title 'Sync master back into development' --body 'Created by Github action' --label 'internal'
        env:
.github/workflows/test.yml (vendored): 9 changed lines

@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
        with:
          fetch-depth: 0 # Differential ShellCheck requires full git history

@@ -38,7 +38,7 @@ jobs:

      - name: Spell-Checking
-        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 #v2.1
+        uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 #v2.2
        with:
          ignore_words_file: .codespellignore

@@ -49,7 +49,7 @@ jobs:
        run: editorconfig-checker

      - name: Check python code formatting with black
-        uses: psf/black@af0ba72a73598c76189d6dd1b21d8532255d5942 #25.9.0
+        uses: psf/black@05f0a8ce1f71fbb36e1e032d3b518c7b945089a2 #25.11.0
        with:
          src: "./test"
          options: "--check --diff --color"

@@ -74,6 +74,7 @@ jobs:
          fedora_40,
          fedora_41,
          fedora_42,
+          fedora_43,
          alpine_3_21,
          alpine_3_22,
        ]

@@ -81,7 +82,7 @@ jobs:
      DISTRO: ${{matrix.distro}}
    steps:
      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0

      - name: Set up Python
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c #v6.0.0
@@ -593,18 +593,21 @@ check_required_ports() {
     # Add port 53
     ports_configured+=("53")

+    local protocol_type port_number service_name
     # Now that we have the values stored,
     for i in "${!ports_in_use[@]}"; do
         # loop through them and assign some local variables
-        local service_name
-        service_name=$(echo "${ports_in_use[$i]}" | awk '{gsub(/users:\(\("/,"",$7);gsub(/".*/,"",$7);print $7}')
-        local protocol_type
-        protocol_type=$(echo "${ports_in_use[$i]}" | awk '{print $1}')
-        local port_number
-        port_number="$(echo "${ports_in_use[$i]}" | awk '{print $5}')" # | awk '{gsub(/^.*:/,"",$5);print $5}')
+        read -r protocol_type port_number service_name <<< "$(
+            awk '{
+                p=$1; n=$5; s=$7
+                gsub(/users:\(\("/,"",s)
+                gsub(/".*/,"",s)
+                print p, n, s
+            }' <<< "${ports_in_use[$i]}"
+        )"

         # Check if the right services are using the right ports
-        if [[ ${ports_configured[*]} =~ $(echo "${port_number}" | rev | cut -d: -f1 | rev) ]]; then
+        if [[ ${ports_configured[*]} =~ ${port_number##*:} ]]; then
            compare_port_to_service_assigned "${ftl}" "${service_name}" "${protocol_type}:${port_number}"
        else
            # If it's not a default port that Pi-hole needs, just print it out for the user to see
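The refactored loop above replaces three separate echo/awk pipelines with a single awk pass fed through a here-string, and swaps the rev | cut -d: -f1 | rev trick for the ${port_number##*:} parameter expansion. A standalone sketch of the same parsing, using a hypothetical ss-style sample line (not taken from the diff):

    #!/usr/bin/env bash
    # Hypothetical sample line in the format the loop above expects from ss.
    line='udp UNCONN 0 0 0.0.0.0:53 0.0.0.0:* users:(("pihole-FTL",pid=123,fd=4))'

    # One awk pass extracts protocol ($1), local address:port ($5) and process name ($7).
    read -r protocol_type port_number service_name <<< "$(
        awk '{
            p=$1; n=$5; s=$7
            gsub(/users:\(\("/,"",s)
            gsub(/".*/,"",s)
            print p, n, s
        }' <<< "${line}"
    )"

    # ${port_number##*:} strips everything up to the last colon, keeping only the port.
    echo "${protocol_type} ${service_name} listens on port ${port_number##*:}"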
@@ -722,7 +725,7 @@ dig_at() {
     fi

     # Check if Pi-hole can use itself to block a domain
-    if local_dig="$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @"${local_address}" "${record_type}")"; then
+    if local_dig="$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @"${local_address}" "${record_type}" -p "$(get_ftl_conf_value "dns.port")")"; then
         # If it can, show success
         if [[ "${local_dig}" == *"status: NOERROR"* ]]; then
             local_dig="NOERROR"
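The updated self-check queries whatever port FTL is actually configured to listen on instead of assuming 53. A minimal sketch of the same dig invocation with placeholder values; in the script the port comes from get_ftl_conf_value "dns.port", here it is hard-coded purely for illustration:

    local_address="127.0.0.1"
    random_url="example.com"
    record_type="A"
    protocol="4"        # -4 forces IPv4, matching the -"${protocol}" usage above
    dns_port="5335"     # example value only; the script reads it from FTL's config

    if local_dig="$(dig +tries=1 +time=2 -"${protocol}" "${random_url}" @"${local_address}" "${record_type}" -p "${dns_port}")"; then
        [[ "${local_dig}" == *"status: NOERROR"* ]] && echo "resolver answered on port ${dns_port}"
    fi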
@@ -816,42 +819,27 @@ ftl_full_status(){

 make_array_from_file() {
     local filename="${1}"

+    # If the file is a directory do nothing since it cannot be parsed
+    [[ -d "${filename}" ]] && return
+
     # The second argument can put a limit on how many line should be read from the file
     # Since some of the files are so large, this is helpful to limit the output
     local limit=${2}
     # A local iterator for testing if we are at the limit above
     local i=0
-    # If the file is a directory
-    if [[ -d "${filename}" ]]; then
-        # do nothing since it cannot be parsed
-        :
-    else
-        # Otherwise, read the file line by line
-        while IFS= read -r line;do
-            # Otherwise, strip out comments and blank lines
-            new_line=$(echo "${line}" | sed -e 's/^\s*#.*$//' -e '/^$/d')
-            # If the line still has content (a non-zero value)
-            if [[ -n "${new_line}" ]]; then
-
-                # If the string contains "### CHANGED", highlight this part in red
-                if [[ "${new_line}" == *"### CHANGED"* ]]; then
-                    new_line="${new_line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
-                fi
+    # Process the file, strip out comments and blank lines
+    local processed
+    processed=$(sed -e 's/^\s*#.*$//' -e '/^$/d' "${filename}")

-                # Finally, write this line to the log
-                log_write " ${new_line}"
-            fi
-            # Increment the iterator +1
-            i=$((i+1))
-            # but if the limit of lines we want to see is exceeded
-            if [[ -z ${limit} ]]; then
-                # do nothing
-                :
-            elif [[ $i -eq ${limit} ]]; then
-                break
-            fi
-        done < "${filename}"
-    fi
+    while IFS= read -r line; do
+        # If the string contains "### CHANGED", highlight this part in red
+        log_write " ${line//### CHANGED/${COL_RED}### CHANGED${COL_NC}}"
+        ((i++))
+        # if the limit of lines we want to see is exceeded do nothing
+        [[ -n ${limit} && $i -eq ${limit} ]] && break
+    done <<< "$processed"
 }

 parse_file() {
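Instead of filtering each line inside the read loop, the rewritten function strips comments and blank lines once with sed and then iterates over the result through a here-string. A simplified, self-contained sketch of that pattern; print_cleaned, the echo output and the file path are stand-ins rather than the script's real log_write helper:

    # Simplified stand-in: print at most $2 cleaned lines from file $1.
    print_cleaned() {
        local filename="${1}" limit="${2}" i=0 processed line
        [[ -d "${filename}" ]] && return           # directories cannot be parsed

        # Strip comment-only and empty lines once, up front.
        processed=$(sed -e 's/^\s*#.*$//' -e '/^$/d' "${filename}")

        while IFS= read -r line; do
            echo "   ${line}"
            ((i++))
            [[ -n ${limit} && $i -eq ${limit} ]] && break
        done <<< "$processed"
    }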
@@ -924,38 +912,38 @@ list_files_in_dir() {
     fi

     # Store the files found in an array
-    mapfile -t files_found < <(ls "${dir_to_parse}")
+    local files_found=("${dir_to_parse}"/*)
     # For each file in the array,
     for each_file in "${files_found[@]}"; do
-        if [[ -d "${dir_to_parse}/${each_file}" ]]; then
+        if [[ -d "${each_file}" ]]; then
             # If it's a directory, do nothing
             :
-        elif [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
-            [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \
-            [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
-            [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_LOG}" ]] || \
-            [[ "${dir_to_parse}/${each_file}" == "${PIHOLE_LOG_GZIPS}" ]]; then
+        elif [[ "${each_file}" == "${PIHOLE_DEBUG_LOG}" ]] || \
+            [[ "${each_file}" == "${PIHOLE_RAW_BLOCKLIST_FILES}" ]] || \
+            [[ "${each_file}" == "${PIHOLE_INSTALL_LOG_FILE}" ]] || \
+            [[ "${each_file}" == "${PIHOLE_LOG}" ]] || \
+            [[ "${each_file}" == "${PIHOLE_LOG_GZIPS}" ]]; then
             :
         elif [[ "${dir_to_parse}" == "${DNSMASQ_D_DIRECTORY}" ]]; then
             # in case of the dnsmasq directory include all files in the debug output
-            log_write "\\n${COL_GREEN}$(ls -lhd "${dir_to_parse}"/"${each_file}")${COL_NC}"
-            make_array_from_file "${dir_to_parse}/${each_file}"
+            log_write "\\n${COL_GREEN}$(ls -lhd "${each_file}")${COL_NC}"
+            make_array_from_file "${each_file}"
         else
             # Then, parse the file's content into an array so each line can be analyzed if need be
             for i in "${!REQUIRED_FILES[@]}"; do
-                if [[ "${dir_to_parse}/${each_file}" == "${REQUIRED_FILES[$i]}" ]]; then
+                if [[ "${each_file}" == "${REQUIRED_FILES[$i]}" ]]; then
                     # display the filename
-                    log_write "\\n${COL_GREEN}$(ls -lhd "${dir_to_parse}"/"${each_file}")${COL_NC}"
+                    log_write "\\n${COL_GREEN}$(ls -lhd "${each_file}")${COL_NC}"
                     # Check if the file we want to view has a limit (because sometimes we just need a little bit of info from the file, not the entire thing)
-                    case "${dir_to_parse}/${each_file}" in
+                    case "${each_file}" in
                         # If it's Web server log, give the first and last 25 lines
-                        "${PIHOLE_WEBSERVER_LOG}") head_tail_log "${dir_to_parse}/${each_file}" 25
+                        "${PIHOLE_WEBSERVER_LOG}") head_tail_log "${each_file}" 25
                             ;;
                         # Same for the FTL log
-                        "${PIHOLE_FTL_LOG}") head_tail_log "${dir_to_parse}/${each_file}" 35
+                        "${PIHOLE_FTL_LOG}") head_tail_log "${each_file}" 35
                             ;;
                         # parse the file into an array in case we ever need to analyze it line-by-line
-                        *) make_array_from_file "${dir_to_parse}/${each_file}";
+                        *) make_array_from_file "${each_file}";
                     esac
                 else
                     # Otherwise, do nothing since it's not a file needed for Pi-hole so we don't care about it
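Switching from mapfile -t < <(ls ...) to a glob means the array already holds full paths, which is why every "${dir_to_parse}/${each_file}" comparison above collapses to "${each_file}" and ls output no longer has to be parsed. A small sketch of the difference, with a hypothetical directory:

    dir_to_parse="/etc/dnsmasq.d"          # example directory only

    # Old approach: bare file names, fragile against unusual characters in names.
    mapfile -t files_found < <(ls "${dir_to_parse}")
    # -> ("01-pihole.conf" ...), each needing "${dir_to_parse}/${each_file}" later

    # New approach: the glob already expands to full paths.
    # (With nullglob unset, an empty directory leaves the literal pattern in the array.)
    files_found=("${dir_to_parse}"/*)
    for each_file in "${files_found[@]}"; do
        [[ -d "${each_file}" ]] && continue    # skip sub-directories
        echo "would parse: ${each_file}"
    done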
@@ -991,6 +979,7 @@ head_tail_log() {
     local filename="${1}"
     # The number of lines to use for head and tail
     local qty="${2}"
+    local filebasename="${filename##*/}"
     local head_line
     local tail_line
     # Put the current Internal Field Separator into another variable so it can be restored later

@@ -999,14 +988,14 @@ head_tail_log() {
     IFS=$'\r\n'
     local log_head=()
     mapfile -t log_head < <(head -n "${qty}" "${filename}")
-    log_write " ${COL_CYAN}-----head of $(basename "${filename}")------${COL_NC}"
+    log_write " ${COL_CYAN}-----head of ${filebasename}------${COL_NC}"
     for head_line in "${log_head[@]}"; do
         log_write " ${head_line}"
     done
     log_write ""
     local log_tail=()
     mapfile -t log_tail < <(tail -n "${qty}" "${filename}")
-    log_write " ${COL_CYAN}-----tail of $(basename "${filename}")------${COL_NC}"
+    log_write " ${COL_CYAN}-----tail of ${filebasename}------${COL_NC}"
     for tail_line in "${log_tail[@]}"; do
         log_write " ${tail_line}"
     done
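Computing the display name once with ${filename##*/} avoids the two $(basename ...) subshells further down. A brief sketch with an example path (the log path is only illustrative):

    filename="/var/log/pihole/FTL.log"   # example path only
    filebasename="${filename##*/}"       # strips the longest prefix ending in '/', leaving "FTL.log"
    [[ "${filebasename}" == "$(basename "${filename}")" ]] && echo "same result, no subshell needed"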
@@ -1080,15 +1069,15 @@ check_dhcp_servers() {
 }

 show_groups() {
-    show_db_entries "Groups" "SELECT id,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,name,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,description FROM \"group\"" "4 7 50 19 19 50"
+    show_db_entries "Groups" "SELECT id,CASE enabled WHEN '0' THEN ' no' WHEN '1' THEN ' yes' ELSE enabled END enabled,name,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,description FROM \"group\"" "4 7 50 19 19 50"
 }

 show_adlists() {
-    show_db_entries "Adlists" "SELECT id,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,GROUP_CONCAT(adlist_by_group.group_id) group_ids,address,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM adlist LEFT JOIN adlist_by_group ON adlist.id = adlist_by_group.adlist_id GROUP BY id;" "5 7 12 100 19 19 50"
+    show_db_entries "Adlists" "SELECT id,CASE enabled WHEN '0' THEN ' no' WHEN '1' THEN ' yes' ELSE enabled END enabled,GROUP_CONCAT(adlist_by_group.group_id) group_ids, CASE type WHEN '0' THEN 'Block' WHEN '1' THEN 'Allow' ELSE type END type, address,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM adlist LEFT JOIN adlist_by_group ON adlist.id = adlist_by_group.adlist_id GROUP BY id;" "5 7 12 5 100 19 19 50"
 }

 show_domainlist() {
-    show_db_entries "Domainlist (0/1 = exact allow-/denylist, 2/3 = regex allow-/denylist)" "SELECT id,CASE type WHEN '0' THEN '0 ' WHEN '1' THEN ' 1 ' WHEN '2' THEN ' 2 ' WHEN '3' THEN ' 3' ELSE type END type,CASE enabled WHEN '0' THEN ' 0' WHEN '1' THEN ' 1' ELSE enabled END enabled,GROUP_CONCAT(domainlist_by_group.group_id) group_ids,domain,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist LEFT JOIN domainlist_by_group ON domainlist.id = domainlist_by_group.domainlist_id GROUP BY id;" "5 4 7 12 100 19 19 50"
+    show_db_entries "Domainlist" "SELECT id,CASE type WHEN '0' THEN 'exact-allow' WHEN '1' THEN 'exact-deny' WHEN '2' THEN 'regex-allow' WHEN '3' THEN 'regex-deny' ELSE type END type,CASE enabled WHEN '0' THEN ' no' WHEN '1' THEN ' yes' ELSE enabled END enabled,GROUP_CONCAT(domainlist_by_group.group_id) group_ids,domain,datetime(date_added,'unixepoch','localtime') date_added,datetime(date_modified,'unixepoch','localtime') date_modified,comment FROM domainlist LEFT JOIN domainlist_by_group ON domainlist.id = domainlist_by_group.domainlist_id GROUP BY id;" "5 11 7 12 100 19 19 50"
 }

 show_clients() {
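The updated queries render the raw 0/1 columns as no/yes, Block/Allow and exact-/regex-allow/deny labels in the debug output. A hedged sketch of running a similar query directly against the gravity database (the default path /etc/pihole/gravity.db is assumed, not stated in the diff):

    # Show adlists with enabled rendered as no/yes and type as Block/Allow.
    sqlite3 -header -column /etc/pihole/gravity.db "
      SELECT id,
             CASE enabled WHEN 0 THEN 'no'    WHEN 1 THEN 'yes'   ELSE enabled END AS enabled,
             CASE type    WHEN 0 THEN 'Block' WHEN 1 THEN 'Allow' ELSE type    END AS type,
             address
      FROM adlist;"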
@@ -13,7 +13,7 @@ extra_started_commands="reload"

 respawn_max=5
 respawn_period=60
-capabilities="^CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN,CAP_SYS_NICE,CAP_IPC_LOCK,CAP_CHOWN,CAP_SYS_TIME"
+capabilities="^CAP_NET_BIND_SERVICE,^CAP_NET_RAW,^CAP_NET_ADMIN,^CAP_SYS_NICE,^CAP_IPC_LOCK,^CAP_CHOWN,^CAP_SYS_TIME"

 depend() {
     want net
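In the OpenRC capabilities list the ambient '^' flag applies per entry, so each capability needs its own prefix; the old value only raised CAP_NET_BIND_SERVICE to the ambient set. One hedged way to confirm what the running daemon actually ended up with (assumes pidof resolves pihole-FTL and libcap's capsh is installed):

    # Ambient capability mask of the running FTL process
    grep CapAmb "/proc/$(pidof pihole-FTL)/status"
    # Decode the hex mask into capability names
    capsh --decode="$(awk '/CapAmb/ {print $2}' "/proc/$(pidof pihole-FTL)/status")"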
@@ -17,15 +17,15 @@ StartLimitIntervalSec=60s

 [Service]
 User=pihole
-PermissionsStartOnly=true
 AmbientCapabilities=CAP_NET_BIND_SERVICE CAP_NET_RAW CAP_NET_ADMIN CAP_SYS_NICE CAP_IPC_LOCK CAP_CHOWN CAP_SYS_TIME

-ExecStartPre=/opt/pihole/pihole-FTL-prestart.sh
+# Run prestart with elevated permissions
+ExecStartPre=+/opt/pihole/pihole-FTL-prestart.sh
 ExecStart=/usr/bin/pihole-FTL -f
 Restart=on-failure
 RestartSec=5s
 ExecReload=/bin/kill -HUP $MAINPID
-ExecStopPost=/opt/pihole/pihole-FTL-poststop.sh
+ExecStopPost=+/opt/pihole/pihole-FTL-poststop.sh

 # Use graceful shutdown with a reasonable timeout
 TimeoutStopSec=60s
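With User=pihole in place, the '+' prefix on ExecStartPre and ExecStopPost tells systemd to run just those helper scripts with full privileges while pihole-FTL itself keeps running as the unprivileged user, which is what the deprecated PermissionsStartOnly=true setting used to provide. A quick hedged way to inspect the unit systemd actually loaded (the unit name pihole-FTL.service is assumed here):

    # Print the unit file as systemd sees it and pick out the privileged helper lines
    systemctl cat pihole-FTL.service | grep -E '^Exec(StartPre|StopPost)='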
test/_fedora_43.Dockerfile (new file): 17 lines

@@ -0,0 +1,17 @@
+FROM fedora:43
+RUN dnf install -y git initscripts
+
+ENV GITDIR=/etc/.pihole
+ENV SCRIPTDIR=/opt/pihole
+
+RUN mkdir -p $GITDIR $SCRIPTDIR /etc/pihole
+ADD . $GITDIR
+RUN cp $GITDIR/advanced/Scripts/*.sh $GITDIR/gravity.sh $GITDIR/pihole $GITDIR/automated\ install/*.sh $GITDIR/advanced/Scripts/COL_TABLE $SCRIPTDIR/
+ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$SCRIPTDIR
+
+RUN true && \
+    chmod +x $SCRIPTDIR/*
+
+ENV SKIP_INSTALL=true
+
+#sed '/# Start the installer/Q' /opt/pihole/basic-install.sh > /opt/pihole/stub_basic-install.sh && \
@@ -1,5 +1,5 @@
 pyyaml == 6.0.3
-pytest == 8.4.2
+pytest == 9.0.1
 pytest-xdist == 3.8.0
 pytest-testinfra == 10.2.2
 tox == 4.32.0
test/tox.fedora_43.ini (new file): 10 lines

@@ -0,0 +1,10 @@
+[tox]
+envlist = py3
+
+[testenv]
+allowlist_externals = docker
+deps = -rrequirements.txt
+setenv =
+    COLUMNS=120
+commands = docker buildx build --load --progress plain -f _fedora_43.Dockerfile -t pytest_pihole:test_container ../
+    pytest {posargs:-vv -n auto} ./test_any_automated_install.py ./test_any_utils.py ./test_centos_fedora_common_support.py
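The new tox environment mirrors the other distro configs: it builds the Fedora 43 test image and then runs the shared test modules inside it. A hedged sketch of invoking it locally from the test directory (assumes Docker and tox are installed):

    cd test
    tox -c tox.fedora_43.ini   # builds pytest_pihole:test_container from _fedora_43.Dockerfile, then runs pytest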