mirror of https://gitlab.isc.org/isc-projects/kea synced 2025-08-22 09:57:41 +00:00

[#3287] fix reported shellcheck warnings

Andrei Pavel 2024-06-17 17:07:22 +03:00
parent bb2ce014c8
commit 3efbe09a45
No known key found for this signature in database
GPG Key ID: D4E804481939CB21
12 changed files with 343 additions and 334 deletions

View File

@@ -16,16 +16,22 @@
 # shellcheck disable=SC2154
 # SC2154: ... is referenced but not assigned.
 # Reason: some variables are taken from keactrl.conf
+#
+# shellcheck disable=SC2317
+# SC2317: Command appears to be unreachable. Check usage (or ignore if invoked indirectly).
+# Reason: shellcheck is deceived by the 'if test "${HAVE_NETCONF}" = 'yes'' condition which it
+# evaluates to always false and thinks the rest of the script is never executed.
 # Exit with error if commands exit with non-zero and if undefined variables are
 # used.
 set -eu
-PACKAGE_VERSION="@PACKAGE_VERSION@"
+HAVE_NETCONF='@HAVE_NETCONF@'
 EXTENDED_VERSION="@EXTENDED_VERSION@"
+PACKAGE_VERSION="@PACKAGE_VERSION@"
 # Set the have_netconf flag to know if netconf is available.
-if test '@HAVE_NETCONF@' = 'yes'; then
+if test "${HAVE_NETCONF}" = 'yes'; then
     have_netconf=true
 else
     have_netconf=false
@@ -145,9 +151,9 @@ check_running() {
     # Get the PID from the PID file (if it exists)
     get_pid_from_file "${proc_name}"
-    if [ ${_pid} -gt 0 ]; then
+    if [ "${_pid}" -gt 0 ]; then
         # Use ps to check if PID is alive
-        if ps -p ${_pid} 1>/dev/null; then
+        if ps -p "${_pid}" 1>/dev/null; then
             # No error, so PID IS ALIVE
             _running=1
         fi
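SC2317 is a false positive in this script: in the unconfigured source the @HAVE_NETCONF@ placeholder is literal text, so shellcheck decides the comparison can never be true and flags the rest of the file as unreachable. A reduced illustration of the pattern the suppression covers (the substituted values are assumptions about what configure writes, not part of this diff):

    HAVE_NETCONF='@HAVE_NETCONF@'   # configure replaces the placeholder with 'yes' or 'no'
    if test "${HAVE_NETCONF}" = 'yes'; then
        have_netconf=true
    else
        have_netconf=false
    fi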

View File

@@ -149,10 +149,17 @@ list_commands_test() {
     fi
     # Main test phase: send command, check response.
+    # shellcheck disable=SC2086
+    # SC2086: Double quote to prevent globbing and word splitting.
+    # Reason: we specifically want ${arguments} to split because there may be multiple words in it.
     tmp="echo | ${shell_bin_path}/${shell_bin} --port 8443 \
         ${arguments} > ${tmpfile_path}/shell-stdout.txt"
     echo "Executing kea-shell ($tmp)"
+    # shellcheck disable=SC2086
+    # SC2086: Double quote to prevent globbing and word splitting.
+    # Reason: we specifically want ${arguments} to split because there may be multiple words in it.
     echo | ${shell_bin_path}/${shell_bin} --port 8443 \
         ${arguments} > ${tmpfile_path}/shell-stdout.txt
     EXIT_CODE=$?
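The SC2086 suppressions are intentional: ${arguments} must be word-split so that each option in it becomes a separate argument to kea-shell. A minimal sketch of the difference, with a hypothetical command name and argument string:

    args='--foo 1 --bar 2'
    some_command ${args}     # unquoted: the shell splits this into four arguments
    some_command "${args}"   # quoted: passed as one argument containing spaces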

View File

@ -1,4 +1,4 @@
// Copyright (C) 2021 Internet Systems Consortium, Inc. ("ISC") // Copyright (C) 2021-2024 Internet Systems Consortium, Inc. ("ISC")
// //
// This Source Code Form is subject to the terms of the Mozilla Public // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this // License, v. 2.0. If a copy of the MPL was not distributed with this

View File

@@ -425,7 +425,7 @@ set_logger() {
         clean_exit 1
     fi
     printf 'Kea log will be stored in %s.\n' "${LOG_FILE}"
-    export KEA_LOGGER_DESTINATION=${LOG_FILE}
+    export KEA_LOGGER_DESTINATION="${LOG_FILE}"
 }
 # Checks if specified process is running.

View File

@@ -37,7 +37,7 @@ fi
 # Add column only if it doesn't exist to work around the 1.9.4 leak of
 # cache_threshold and cache_max_age column alters in subnet and shared network
 # tables in schema version 9.5.
-if ! mysql "${@}" -e 'SELECT cache_threshold FROM dhcp4_subnet LIMIT 1' &> /dev/null; then
+if ! mysql "${@}" -e 'SELECT cache_threshold FROM dhcp4_subnet LIMIT 1' > /dev/null 2>&1; then
     mysql "${@}" <<EOF
 # Add new lease cache parameters.
 ALTER TABLE dhcp4_subnet
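The redirection change is a portability fix: &> is a bash extension, and this script runs under plain sh. A generic side-by-side of the two idioms (some_command is a placeholder, not from the script):

    some_command &> /dev/null        # bash only: send stdout and stderr to /dev/null
    some_command > /dev/null 2>&1    # POSIX sh: redirect stdout, then duplicate stderr onto it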

View File

@@ -25,6 +25,9 @@ Options:
         "$(basename "${0}")"
 }
+red='\033[91m'
+reset='\033[0m'
 # Parse parameters.
 while test ${#} -gt 0; do
     case "${1}" in
@@ -44,7 +47,7 @@ done
 # Get script path.
 script_path=$(cd "$(dirname "${0}")" && pwd)
-pushd "${script_path}/.."
+cd "${script_path}/.."
 # Get the last wednesday of the month.
 this_month=$(date +%Y-%m)
@@ -59,7 +62,7 @@ done
 # - rename it to the new revision
 # - change its name in Makefile.am
 # - change its name in yang_revisions.h
-ca=$(git merge-base origin/master $(git rev-parse --abbrev-ref HEAD))
+ca=$(git merge-base origin/master "$(git rev-parse --abbrev-ref HEAD)")
 for module in $(git diff "${ca}" --name-only . | grep -E '\.yang$'); do
     module=$(basename "${module}")
     new_module="$(printf '%s' "${module}" | sed "s/@.*\.yang/@${wednesday}.yang/g")"

View File

@@ -1,114 +1,123 @@
-#!/bin/bash
+#!/bin/sh
 # Usage:
 # check-for-json-errors-in-doc.sh [--all] [<file1>, <file2>, ...]
+set -eu
 # Change directory to the root of the repository.
 script_path=$(cd "$(dirname "${0}")" && pwd)
-cd "${script_path}/.."
+cd "${script_path}/.." || exit 1
 # Parse parameters.
 if test ${#} -gt 0; then
     if test "${1}" = '--all'; then
         files='doc src'
     else
         files="${*}"
     fi
 else
     # By default, check only modified files.
-    files=$(git diff --name-only $(git merge-base origin/master HEAD))
+    files=$(git diff --name-only "$(git merge-base origin/master HEAD)")
     # If there is nothing to check, exit early. Otherwise, it checks everything.
     if test -z "${files}"; then
         exit 0
     fi
 fi
 exit_code=0
+work_file=$(mktemp)
 # Get the files.
-files=$(find $(echo $files) -type f \( -name '*.rst' -or -name '*.json' \) -and -not -path '*/_build/*' -and -not -path '*/man/*' | sort -uV)
-work_file=$(mktemp)
-for file in $(echo $files); do
-    json=0
-    comment=0
-    line_num=0
-    echo "processing: $file"
-    IFS=
-    while read -r line; do
-        line_num=$((line_num+1))
-        if [ $comment -eq 0 -a $json -eq 0 -a $(echo "$line" | grep "^[A-Za-z]+\|^\s*\`" | wc -l) -eq 1 ]; then
-            # ignore line if it starts with 'A-Za-z' or spaces followed by '`'
-            continue
-        elif [ $comment -eq 0 -a $(echo "$line" | grep "/\*" | grep -v "\*/" | wc -l) -eq 1 ]; then
-            # if the line contains /* and it does not contain */ on the same line
-            comment=1
-            echo "" >> $work_file
-            continue
-        elif [ $comment -eq 1 -a $(echo "$line" | grep "\*/" | wc -l) -eq 1 ]; then
-            # if the line contains */
-            comment=0
-            echo "" >> $work_file
-            continue
-        elif [ $comment -eq 0 -a $json -eq 0 -a $(echo "$line" | grep "^\s*{\|^\s*\".*{\|^\s*\[\s*$" | grep -v "}" | wc -l) -eq 1 ]; then
-            # if this is not a comment and the line starts with spaces followed by '{' or by '"' followed by "{"
-            json=1
-            # ignore any map name before top level map
-            line=$(echo "$line" | sed 's/.*{/{/g')
-            echo "" > $work_file
-        elif [ $comment -eq 0 -a $json -eq 1 -a $(echo "$line" | grep "^\s*[A-Za-z]\|^\s*\`" | wc -l) -eq 1 ]; then
-            # if the line is not a comment and the line starts with spaces followed by 'A-Za-z' or followed by "`" and the parser is processing a json structure
-            json=0
-            cat $work_file | jq . > /dev/null
-            if [ $? -ne 0 ]; then
-                # if the jq tool returned error
-                echo "file $file contains invalid JSON near line $line_num"
-                echo "===start of JSON block==="
-                cat $work_file
-                echo "====end of JSON block===="
-                exit_code=1
-            fi
-        fi
-        if [ $comment -eq 0 -a $json -eq 1 ]; then
-            if [ $(echo "$line" | grep "^\s*\.\.\s" | wc -l) -eq 1 ]; then
-                echo "" >> $work_file
-            else
-                # if file is .json the following replace in line are done:
-                # 1. delete everything after '#'
-                # 2. delete everything after //
-                # 3. ignore <?include?>
-                # 4. replace all '[ <DATA> ]' with '[ "<DATA>" ]' where DATA contains: '-' and 'A-Za-z0-9' and ' '
-                # 5. replace all ' <DATA>:' with ' "<DATA>":'
-                # 6. replace all ': <DATA>' with ': "<DATA>"'
-                # 7. replace ' ...' with ' "placeholder": "value"
-                # 8. replace ', ... ' with ' '
-                # 9. replace ' <DATA>' with ' "placeholder": "value"'
-                # 10. replace ' <DATA>' with ' "placeholder"'
-                if [ $(echo "$file" | grep "\.json" | wc -l) -eq 0 ]; then
-                    echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' | sed 's/\[ <\([-A-Za-z0-9 ]*\)> \]/\[ \"<\1>\" \]/g' | sed 's/ <\(.*\)>:/ \"<\1>\":/g' | sed 's/: <\(.*\)>/: \"<\1>\"/g' | sed 's/ \.\.\./ \"placeholder\": \"value\"/g' | sed 's/, \.\.\. / /g' | sed 's/ <\(.*\)>/ \"placeholder\": \"value\"/g' | sed 's/ <\(.*\)>/ \"placeholder\"/g' >> $work_file
-                else
-                    # if file is .rst the following replace in line are done:
-                    # 1. delete everything after '#'
-                    # 2. delete everything after //
-                    # 3. ignore <?include?>
-                    echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' >> $work_file
-                fi
-            fi
-        fi
-    done <<< $(cat $file | tr '\n' '\r' | sed -r 's/,[[:blank:]]*\r[[:blank:]]*\.\.\.//g' | sed -r 's/\\[[:blank:]]*\r[[:blank:]]*//g' | tr '\r' '\n')
-    if [ $comment -eq 0 -a $json -eq 1 ]; then
-        # if the file ended but the parser is processing a json structure
-        cat $work_file | jq . > /dev/null
-        if [ $? -ne 0 ]; then
-            # if the jq tool returned error
-            echo "file $file contains invalid JSON near line $line_num"
-            echo "===start of JSON block==="
-            cat $work_file
-            echo "====end of JSON block===="
-            exit_code=1
-        fi
-    fi
+# shellcheck disable=SC2086
+# SC2086: Double quote to prevent globbing and word splitting.
+# Reason: There may be multiple files in ${files} so we explicitly want it expanded to not be treated as a single long file name.
+files=$(find ${files} -type f \( -name '*.rst' -or -name '*.json' \) -and -not -path '*/_build/*' -and -not -path '*/man/*' | sort -uV)
+for file in ${files}; do
+    json=0
+    comment=0
+    line_num=0
+    echo "processing: $file"
+    IFS=
+    content=$(tr '\n' '\r' < "${file}" | sed -r 's/,[[:blank:]]*\r[[:blank:]]*\.\.\.//g' | sed -r 's/\\[[:blank:]]*\r[[:blank:]]*//g' | tr '\r' '\n')
+    stop_at=$(echo "${content}" | wc -l)
+    while true; do
+        line_num=$((line_num + 1))
+        if test "${line_num}" -gt "${stop_at}"; then
+            break
+        fi
+        line=$(echo "${content}" | head -n "${line_num}" | tail -n 1)
+        if [ $comment -eq 0 ] && [ $json -eq 0 ] && [ "$(echo "$line" | grep -c "^[A-Za-z]+\|^\s*\`")" -eq 1 ]; then
+            # ignore line if it starts with 'A-Za-z' or spaces followed by '`'
+            continue
+        elif [ $comment -eq 0 ] && [ "$(echo "$line" | grep "/\*" | grep -cv "\*/")" -eq 1 ]; then
+            # if the line contains /* and it does not contain */ on the same line
+            comment=1
+            echo >> "${work_file}"
+            continue
+        elif [ $comment -eq 1 ] && [ "$(echo "$line" | grep -c "\*/")" -eq 1 ]; then
+            # if the line contains */
+            comment=0
+            echo >> "${work_file}"
+            continue
+        elif [ $comment -eq 0 ] && [ $json -eq 0 ] && [ "$(echo "$line" | grep "^\s*{\|^\s*\".*{\|^\s*\[\s*$" | grep -cv "}")" -eq 1 ]; then
+            # if this is not a comment and the line starts with spaces followed by '{' or by '"' followed by "{"
+            json=1
+            # ignore any map name before top level map
+            line=$(echo "$line" | sed 's/.*{/{/g')
+            echo > "${work_file}"
+        elif [ $comment -eq 0 ] && [ $json -eq 1 ] && [ "$(echo "$line" | grep -c "^\s*[A-Za-z]\|^\s*\`")" -eq 1 ]; then
+            # if the line is not a comment and the line starts with spaces followed by 'A-Za-z' or followed by "`" and the parser is processing a json structure
+            json=0
+            if ! jq . "${work_file}" > /dev/null; then
+                # if the jq tool returned error
+                echo "file $file contains invalid JSON near line $line_num"
+                echo "===start of JSON block==="
+                cat "${work_file}"
+                echo "====end of JSON block===="
+                exit_code=1
+            fi
+        fi
+        if [ $comment -eq 0 ] && [ $json -eq 1 ]; then
+            if [ "$(echo "$line" | grep -c "^\s*\.\.\s")" -eq 1 ]; then
+                echo >> "${work_file}"
+            else
+                # if file is .json the following replace in line are done:
+                # 1. delete everything after '#'
+                # 2. delete everything after //
+                # 3. ignore <?include?>
+                # 4. replace all '[ <DATA> ]' with '[ "<DATA>" ]' where DATA contains: '-' and 'A-Za-z0-9' and ' '
+                # 5. replace all ' <DATA>:' with ' "<DATA>":'
+                # 6. replace all ': <DATA>' with ': "<DATA>"'
+                # 7. replace ' ...' with ' "placeholder": "value"
+                # 8. replace ', ... ' with ' '
+                # 9. replace ' <DATA>' with ' "placeholder": "value"'
+                # 10. replace ' <DATA>' with ' "placeholder"'
+                if [ "$(echo "$file" | grep -c "\.json")" -eq 0 ]; then
+                    echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' | sed 's/\[ <\([-A-Za-z0-9 ]*\)> \]/\[ \"<\1>\" \]/g' | sed 's/ <\(.*\)>:/ \"<\1>\":/g' | sed 's/: <\(.*\)>/: \"<\1>\"/g' | sed 's/ \.\.\./ \"placeholder\": \"value\"/g' | sed 's/, \.\.\. / /g' | sed 's/ <\(.*\)>/ \"placeholder\": \"value\"/g' | sed 's/ <\(.*\)>/ \"placeholder\"/g' >> "${work_file}"
+                else
+                    # if file is .rst the following replace in line are done:
+                    # 1. delete everything after '#'
+                    # 2. delete everything after //
+                    # 3. ignore <?include?>
+                    echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' >> "${work_file}"
+                fi
+            fi
+        fi
+    done
+    if [ $comment -eq 0 ] && [ $json -eq 1 ]; then
+        # if the file ended but the parser is processing a json structure
+        if ! jq . "${work_file}" > /dev/null; then
+            # if the jq tool returned error
+            echo "file $file contains invalid JSON near line $line_num"
+            echo "===start of JSON block==="
+            cat "${work_file}"
+            echo "====end of JSON block===="
+            exit_code=1
+        fi
+    fi
 done
-rm $work_file
+rm "${work_file}"
 exit ${exit_code}
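The rewrite removes two bash-only constructs, the here-string (done <<< ...) that fed the loop and the $? checks after jq, and instead pre-processes each file into ${content} and pulls one line per iteration by index. A minimal sketch of that line-indexing idiom, using a made-up three-line value:

    content=$(printf 'first\nsecond\nthird')
    stop_at=$(echo "${content}" | wc -l)
    line_num=0
    while [ "${line_num}" -lt "${stop_at}" ]; do
        line_num=$((line_num + 1))
        line=$(echo "${content}" | head -n "${line_num}" | tail -n 1)
        echo "line ${line_num}: ${line}"
    done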

View File

@@ -1,48 +1,44 @@
-#!/bin/bash
-# extract folder name containing file
-#
-# param ${1} file name
-# return folder name
-extract_folder_name() {
-    # return name of the file until last '/'
-    echo "$(echo "${1}" | rev | cut -d '/' -f 2- | rev)"
-}
+#!/bin/sh
+set -eu
+script_path=$(cd "$(dirname "${0}")" && pwd)
+top_srcdir=$(realpath "${script_path}/..")
 # extract all includes found in source files found in the same folder as specified Makefile.am
 #
 # param ${1} path to a Makefile.am
 # return all dependencies libs in the order of compilation
 extract_includes() {
     # extract folder name from current library Makefile.am
-    CURRENT_FOLDER=$(extract_folder_name "${1}")"/"
+    CURRENT_FOLDER=$(dirname "${1}")"/"
     # select only files in current folder
     SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}")
     # select all lines containing '#include ' directive
     RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null)
     # filter only included dependencies found in other libraries by using the form 'other_lib_name/header_file.h'
     # to do this it is required to select the string between '<' and '>', searching for '/' character and returning the name until last '/'
-    RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "\/" | rev | cut -d "/" -f 2 | rev | sort | uniq)
+    RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "/" | rev | cut -d "/" -f 2 | rev | sort -u)
     # filter includes that are not compiled by the project's Makefiles
     INCLUDES_LIST=
     for i in ${LIBRARIES_LIST}; do
         for j in ${RAW_INCLUDES_LIST}; do
             if test "${j}" = "${i}"; then
                 INCLUDES_LIST="${i} ${INCLUDES_LIST}"
                 break
             fi
         done
     done
     # remove empty spaces
-    INCLUDES_LIST=$(echo ${INCLUDES_LIST} | tr -s " ")
+    INCLUDES_LIST=$(echo "${INCLUDES_LIST}" | tr -s " ")
     # order dependencies in the order of compilation
     FILTERED_INCLUDES_LIST=
     for i in ${LIBRARIES_LIST}; do
-        if test $(echo "${INCLUDES_LIST}" | grep "\b${i}\b" | wc -l) -ne 0; then
+        if test "$(echo "${INCLUDES_LIST}" | grep -c "\b${i}\b")" -ne 0; then
             FILTERED_INCLUDES_LIST="${i} ${FILTERED_INCLUDES_LIST}"
         fi
     done
     echo "${FILTERED_INCLUDES_LIST}"
 }
 # extract all header only files and headers and source files found in the external library required by specified library
@@ -50,50 +46,50 @@ extract_includes() {
 # param ${2} name of the external dependency library required by current library
 # return the list of header only files as 'HEADERS: header1.h header2.h' and header and source files as 'HEADERS_AND_SOURCES: source1.h source1.cc source2.h source2.cpp'
 extract_non_include_files() {
     # extract folder name for current library Makefile.am
-    CURRENT_FOLDER=$(extract_folder_name "src/lib/${1}/Makefile.am")"/"
+    CURRENT_FOLDER=$(dirname "src/lib/${1}/Makefile.am")"/"
     # extract folder name for external dependency library Makefile.am
-    EXTERNAL_FOLDER=$(extract_folder_name "src/lib/${2}/Makefile.am")"/"
+    EXTERNAL_FOLDER=$(dirname "src/lib/${2}/Makefile.am")"/"
     # select only files in current folder
     SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}")
     HEADERS_LIST=
     NON_HEADERS_LIST=
     # select all lines containing '#include ' directive
     RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null)
     # filter only included headers found in other libraries by using the form 'other_lib_name/header_file.h'
     # to do this it is required to select the string between '<' and '>', searching for '/' character, search for the extension marker '.' and returning the name after last '/'
-    RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "\/" | grep "\b${2}\b" | cut -d "/" -f 2 | grep "\." | sort | uniq)
+    RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "/" | grep "\b${2}\b" | cut -d "/" -f 2 | grep "\." | sort -u)
     # select only files in dependency library folder and strip full path
-    RELATIVE_SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${EXTERNAL_FOLDER}" | sed -e "s#${REPO_FOLDER}${EXTERNAL_FOLDER}##g")
+    RELATIVE_SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${EXTERNAL_FOLDER}" | sed -e "s#${EXTERNAL_FOLDER}##g")
     # search for the header file but also for source files
     for i in ${RAW_INCLUDES_LIST}; do
         # filter by name only (no extension)
         FILTER=$(echo "${i}" | cut -d "." -f 1)
         # filter non header files with exact name of the header file without the extension
         NON_HEADER=$(echo "${RELATIVE_SEARCH_FILES}" | grep "\b${FILTER}\." | grep -v "${i}")
-        if test $(echo "${NON_HEADER}" | wc -w) -ne 0; then
+        if test "$(echo "${NON_HEADER}" | wc -w)" -ne 0; then
             # append header and source file names
             NON_HEADERS_LIST="${i} ${NON_HEADER} ${NON_HEADERS_LIST}"
         else
             # append header only file name
             HEADERS_LIST="${i} ${HEADERS_LIST}"
         fi
     done
     # sort header only files
-    HEADERS_LIST=$(echo ${HEADERS_LIST} | tr -s " " | sort | uniq)
+    HEADERS_LIST=$(echo "${HEADERS_LIST}" | tr -s " " | sort -u)
     # sort header and source files
-    NON_HEADERS_LIST=$(echo ${NON_HEADERS_LIST} | tr -s " " | sort | uniq)
+    NON_HEADERS_LIST=$(echo "${NON_HEADERS_LIST}" | tr -s " " | sort -u)
     echo "HEADERS_AND_SOURCES:${NON_HEADERS_LIST}"
     echo "HEADERS:${HEADERS_LIST}"
 }
 # extract all valid dependencies of a specified library
 #
 # param ${1} list of all libraries in the reverse compilation order
 # param ${2} library name for which the dependency list is computed
 # return the list of dependencies for specified library in the reverse compilation order
 extract_dependencies() {
     echo "${1}" | grep -Eo "\b${2}\b.*$"
 }
 # extract computed dependency for specified library
@@ -102,29 +98,29 @@ extract_dependencies() {
 # param ${2} library path for which the dependency list is retrieved
 # return stored value of computed dependencies or 'NONE' if dependencies have not been computed yet
 extract_computed_dependencies() {
     PATH_TO_NAME=$(echo "${2}" | tr -s "/" "_")
     NAME="COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${1}"
-    if test -n "${!NAME+x}"; then
-        echo "${!NAME}"
+    if test -n "$(eval "echo \"\${$NAME+x}\"")"; then
+        eval "echo \"\${$NAME}\""
     else
         echo "NONE"
     fi
 }
 # extract library directive
 #
 # param ${1} artifact path
 extract_library_directive() {
     ARTIFACT_PATH="${1}"
-    echo `cat ${ARTIFACT_PATH}/Makefile.am | grep "LIBADD\|LDADD" | sort | tr -s ' ' | cut -d " " -f 1 | sort -u`
+    grep 'LIBADD\|LDADD' "${ARTIFACT_PATH}/Makefile.am" | sort | tr -s ' ' | cut -d " " -f 1 | sort -u | tr '\n' ' ' | sed 's/ *$//'
 }
 # extract library name
 #
 # param ${1} artifact path
 extract_library_name() {
     ARTIFACT_PATH="${1}"
-    echo `cat ${ARTIFACT_PATH}/Makefile.am | grep "LIBRARIES" | tr -s ' ' | cut -d " " -f 3`
+    grep 'LIBRARIES' "${ARTIFACT_PATH}/Makefile.am" | grep "LIBRARIES" | tr -s ' ' | cut -d " " -f 3
 }
 # compute artifact dependencies
@@ -132,115 +128,107 @@ extract_library_name() {
 # param ${1} artifact name
 # param ${2} artifact path
 compute_dependencies() {
     ARTIFACT="${1}"
     ARTIFACT_PATH="${2}"
     echo ""
     echo "########################################"
     echo "### ${ARTIFACT_PATH}/${ARTIFACT}"
     echo "########################################"
     echo ""
     # all valid dependencies that can be added by each dependency library
     echo "${ARTIFACT_PATH}/${ARTIFACT} valid dependencies:"
     echo "${VALID_LIST}"
     # detect dependencies errors by searching for dependencies that are compiled after the current library and can generate missing symbols
     NON_RECURSIVE_BASE_DEPENDENCIES=
     for j in ${BASE_DEPENDENCIES}; do
         # only add the dependency if it is in the valid dependencies list to prevent infinite recursion and log the error otherwise
-        if test $(echo "${VALID_LIST}" | grep "\b${j}\b" | wc -l) -eq 0; then
+        if test "$(echo "${VALID_LIST}" | grep -c "\b${j}\b")" -eq 0; then
             # search for external header and source files
-            INVALID_EXTERNAL_DEPENDENCIES=$(extract_non_include_files "${ARTIFACT}" "${j}")
+            INVALID_EXTERNAL_DEPENDENCIES=$(extract_non_include_files "${ARTIFACT}" "${j}") || true
             # filter header only external files
             EXTERNAL_HEADERS=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep "HEADERS:" | cut -d ":" -f 2)
             # filter header and source external files
             EXTERNAL_ALL=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep "HEADERS_AND_SOURCES:" | cut -d ":" -f 2)
             echo "### ERROR ### dependencies ERROR for ${ARTIFACT_PATH}/${ARTIFACT} on ${j} with:"
             # if there are any header only external files
-            if test $(echo "${EXTERNAL_ALL}" | wc -w) -ne 0; then
+            if test "$(echo "${EXTERNAL_ALL}" | wc -w)" -ne 0; then
                 echo "non header only files: ${EXTERNAL_ALL}"
             fi
             # if there are any header and source external files
-            if test $(echo "${EXTERNAL_HEADERS}" | wc -w) -ne 0; then
+            if test "$(echo "${EXTERNAL_HEADERS}" | wc -w)" -ne 0; then
                 echo "header only files: ${EXTERNAL_HEADERS}"
             fi
         else
             # don't add current library to its dependencies list
-            if test ${j} != ${ARTIFACT}; then
+            if test "${j}" != "${ARTIFACT}"; then
                 NON_RECURSIVE_BASE_DEPENDENCIES="${NON_RECURSIVE_BASE_DEPENDENCIES} ${j}"
             fi
         fi
     done
     # all found dependencies in the reverse compilation order
     BASE_DEPENDENCIES=$(echo "${BASE_DEPENDENCIES}" | xargs)
     # all found and valid dependencies in the reverse compilation order
     NON_RECURSIVE_BASE_DEPENDENCIES=$(echo "${NON_RECURSIVE_BASE_DEPENDENCIES}" | xargs)
     echo "${ARTIFACT_PATH}/${ARTIFACT} base dependencies:"
     echo "${BASE_DEPENDENCIES}"
     echo "${ARTIFACT_PATH}/${ARTIFACT} non recursive dependencies:"
     echo "${NON_RECURSIVE_BASE_DEPENDENCIES}"
     # minimum set of dependencies for current library
     DEPENDENCIES=
     for j in ${NON_RECURSIVE_BASE_DEPENDENCIES}; do
         NEW_DEPENDENCIES=$(extract_computed_dependencies "${j}" "src/lib")
         if test "${NEW_DEPENDENCIES}" == "NONE"; then
             echo "### ERROR ### computed dependency not found for ${j}"
         else
             DEPENDENCIES="${NEW_DEPENDENCIES} ${DEPENDENCIES}"
         fi
     done
-    DEPENDENCIES=$(echo "${DEPENDENCIES} ${NON_RECURSIVE_BASE_DEPENDENCIES}" | tr -s " " "\n" | sort | uniq | xargs)
+    DEPENDENCIES=$(echo "${DEPENDENCIES} ${NON_RECURSIVE_BASE_DEPENDENCIES}" | tr -s " " "\n" | sort -u | xargs)
     # order dependencies in the order of compilation
     SORTED_DEPENDENCIES=
     for j in ${LIBRARIES_LIST}; do
-        if test $(echo "${DEPENDENCIES}" | grep "\b${j}\b" | wc -l) -ne 0; then
+        if test "$(echo "${DEPENDENCIES}" | grep -c "\b${j}\b")" -ne 0; then
             SORTED_DEPENDENCIES="${j} ${SORTED_DEPENDENCIES}"
         fi
     done
-    echo "${ARTIFACT_PATH}/${ARTIFACT} minimum dependencies:"
-    echo "${SORTED_DEPENDENCIES}"
-    echo ""
-    echo "++++++++++++++++++++++++++++++++++++++++"
-    ARTIFACT_DIRECTIVE=$(extract_library_directive ${ARTIFACT_PATH}/${ARTIFACT})
-    for j in ${SORTED_DEPENDENCIES}; do
-        DEPENDENCY_LIBRARY_NAME=$(extract_library_name "src/lib/${j}")
-        echo "${ARTIFACT_DIRECTIVE} += \$(top_builddir)/src/lib/${j}/${DEPENDENCY_LIBRARY_NAME}"
-    done
-    echo "++++++++++++++++++++++++++++++++++++++++"
-    echo "########################################"
-    echo ""
+    SORTED_DEPENDENCIES=$(echo "${SORTED_DEPENDENCIES}" | sed 's/ *$//g')
+    echo "${ARTIFACT_PATH}/${ARTIFACT} minimum dependencies:"
+    echo "${SORTED_DEPENDENCIES}"
+    echo ""
+    echo "++++++++++++++++++++++++++++++++++++++++"
+    ARTIFACT_DIRECTIVE=$(extract_library_directive "${ARTIFACT_PATH}/${ARTIFACT}")
+    for j in ${SORTED_DEPENDENCIES}; do
+        DEPENDENCY_LIBRARY_NAME=$(extract_library_name "src/lib/${j}")
+        echo "${ARTIFACT_DIRECTIVE} += \$(top_builddir)/src/lib/${j}/${DEPENDENCY_LIBRARY_NAME}"
+    done
+    echo "++++++++++++++++++++++++++++++++++++++++"
+    echo "########################################"
+    echo ""
 }
-# if wrong number of parameters print usage
-if test ${#} -ne 1; then
-    echo "Usage: ${0} path/to/kea/repo"
-    exit
-fi
-# folder containing full repo
-REPO_FOLDER=${1}
-if test $(echo -n ${REPO_FOLDER} | tail -c 1) != "/"; then
-    REPO_FOLDER="${REPO_FOLDER}/"
-fi
+# Folder containing full repo. Default is "tools/.."
+REPO_FOLDER="${1-${top_srcdir}}"
+cd "${REPO_FOLDER}"
 # filter all Makefile.am files
-MAKEFILES_LIST=$(find ${REPO_FOLDER} | grep "Makefile\.am" | sed -e "s#${REPO_FOLDER}##g" | grep "src\/" | sort)
+MAKEFILES_LIST=$(find . -type f -wholename '*src/*Makefile.am' | sed 's#\./##g' | sort)
 # if no Makefile.am found exit
 if test -z "${MAKEFILES_LIST}"; then
     echo "invalid repo path: no Makefile.am file found"
     exit
 fi
 echo "list of Makefile.am:"
 echo "${MAKEFILES_LIST}"
 # base Makefile.am for all sources is in src/lib/Makefile.am
-BASE_MAKEFILE=$(echo "${MAKEFILES_LIST}" | grep "src\/lib\/Makefile.am")
+BASE_MAKEFILE=$(echo "${MAKEFILES_LIST}" | grep "src/lib/Makefile.am")
 # if no src/lib/Makefile.am found exit
-if test -z ${BASE_MAKEFILE}; then
+if test -z "${BASE_MAKEFILE}"; then
     echo "invalid repo path: no src/lib/Makefile.am file found"
     exit
 fi
 echo "base Makefile.am:"
@@ -248,19 +236,11 @@ echo "${BASE_MAKEFILE}"
 # generate the list of libraries in the compilation order
 LIBRARIES_LIST=
-RAW_LIBRARIES_LIST=$(cat "${REPO_FOLDER}${BASE_MAKEFILE}" | grep "SUBDIRS")
-for i in ${RAW_LIBRARIES_LIST}; do
-    LIBRARIES_LIST="${LIBRARIES_LIST} $(echo ${i} | grep -v "SUBDIRS" | grep -v '=')"
-done
-# remove empty spaces
-LIBRARIES_LIST=$(echo "${LIBRARIES_LIST}" | tr -s ' ' | xargs)
+RAW_LIBRARIES_LIST=$(grep 'SUBDIRS' "${BASE_MAKEFILE}")
+LIBRARIES_LIST=$(echo "${RAW_LIBRARIES_LIST}" | tr ' ' '\n' | grep -v SUBDIRS | grep -v '=' | tr '\n' ' ' | sed 's/ *$//')
 # generate the list of libraries in the reverse compilation order
-REVERSE_LIBRARIES_LIST=
-for i in ${LIBRARIES_LIST}; do
-    REVERSE_LIBRARIES_LIST="${i} ${REVERSE_LIBRARIES_LIST}"
-done
+REVERSE_LIBRARIES_LIST=$(echo "${LIBRARIES_LIST}" | tr ' ' '\n' | tac | tr '\n' ' ' | sed 's/ *$//')
 echo "list of libraries:"
 echo "${LIBRARIES_LIST}"
@@ -270,7 +250,7 @@ echo "${REVERSE_LIBRARIES_LIST}"
 # filter all files of interest ignoring irrelevant ones
 # ignore .git, .libs, .deps doc folders and .o .lo .Plo .Po .gcno .gcda .m4 .dox .json .mes files
-FILE_LIST=$(find "${REPO_FOLDER}" 2>/dev/null | grep -v "\.git" | grep -v "\/\.libs\/" | grep -v "\.o$" | grep -v "\/\.deps\/" | grep -v "\.lo$" | grep -v "\.Plo$" | grep -v "\.Po$" | grep -v "\.gcno$" | grep -v "gcda" | grep -v "\.m4$" | grep -v "\.dox$" | grep -v "\.json$" | grep -v "\/doc\/" | grep -v "\.mes$" | sort)
+FILE_LIST=$(find . 2>/dev/null | grep -v "\.git" | grep -v "/\.libs/" | grep -v "\.o$" | grep -v "/\.deps/" | grep -v "\.lo$" | grep -v "\.Plo$" | grep -v "\.Po$" | grep -v "\.gcno$" | grep -v "gcda" | grep -v "\.m4$" | grep -v "\.dox$" | grep -v "\.json$" | grep -v "/doc/" | grep -v "\.mes$" | sort)
 #echo "files:"
 #echo "${FILE_LIST}"
@@ -279,41 +259,36 @@ BASE_LIBRARIES_MAKEFILES=
 # generate the list of dependencies for all libraries in src/lib
 for i in ${LIBRARIES_LIST}; do
     # generate current library Makefile.am path
    BASE_LIBRARIES_MAKEFILES="${BASE_LIBRARIES_MAKEFILES} src/lib/${i}/Makefile.am"
     # extract dependencies found in the library folder
-    BASE_DEPENDENCIES=$(extract_includes "src/lib/${i}/Makefile.am")
+    BASE_DEPENDENCIES=$(extract_includes "src/lib/${i}/Makefile.am") || true
     # generate the list of valid dependencies for the current library (take compilation order into account)
     VALID_LIST=$(extract_dependencies "${REVERSE_LIBRARIES_LIST}" "${i}")
     compute_dependencies "${i}" "src/lib"
     PATH_TO_NAME=$(echo "src/lib" | tr -s "/" "_")
-    declare COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${i}="${SORTED_DEPENDENCIES}"
+    export "COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${i}=${SORTED_DEPENDENCIES}"
 done
 # remove empty spaces
-BASE_LIBRARIES_MAKEFILES=$(echo "${BASE_LIBRARIES_MAKEFILES}" | xargs | tr -s " " "\n")
-echo "base Makefiles.am files:"
+BASE_LIBRARIES_MAKEFILES=$(echo "${BASE_LIBRARIES_MAKEFILES}" | sed 's/ *$//' | tr ' ' '\n')
+echo "base Makefile.am files:"
 echo "${BASE_LIBRARIES_MAKEFILES}"
 OTHER_MAKEFILES=$(echo "${MAKEFILES_LIST}" | tr -s " " "\n" | grep -v "src/lib/" | grep -v "src/share/" | grep -v "src/Makefile.am")
-# remove empty spaces
-OTHER_MAKEFILES=$(echo "${OTHER_MAKEFILES}" | xargs | tr -s " " "\n")
 echo "remaining Makefile.am files:"
 echo "${OTHER_MAKEFILES}"
 for i in ${OTHER_MAKEFILES}; do
     # extract dependencies found in the artifact folder
-    BASE_DEPENDENCIES=$(extract_includes "${i}")
+    BASE_DEPENDENCIES=$(extract_includes "${i}") || true
     # generate the list of valid dependencies for the current artifact (take compilation order into account)
     VALID_LIST="${REVERSE_LIBRARIES_LIST}"
     ARTIFACT=$(echo "${i}" | rev | cut -d "/" -f 2 | rev)
     ARTIFACT_PATH=$(echo "${i}" | rev | cut -d "/" -f 3- | rev)
     compute_dependencies "${ARTIFACT}" "${ARTIFACT_PATH}"
     PATH_TO_NAME=$(echo "${ARTIFACT_PATH}" | tr -s "/" "_")
-    declare COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${ARTIFACT}="${SORTED_DEPENDENCIES}"
+    export "COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${ARTIFACT}=${SORTED_DEPENDENCIES}"
 done
-exit
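The ${!NAME} indirection and declare used before are bash features; the POSIX replacement routes both the read and the set/unset test through eval. A minimal sketch of the same pattern, with a hypothetical variable name and value:

    NAME='COMPUTED_DEPENDENCIES_src_lib_util'
    export "${NAME}=exceptions"
    # indirect read: prints the value of the variable whose name is stored in NAME
    eval "echo \"\${$NAME}\""
    # set/unset test: expands to 'x' only when that variable is set
    if test -n "$(eval "echo \"\${$NAME+x}\"")"; then
        echo "${NAME} is set"
    fi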

View File

@@ -22,6 +22,9 @@ extensions_regex='(\.cpp|\.cc|\.C|\.cxx|\.m|\.hpp|\.hh|\.h|\.H|\.hxx|\.tpp)$'
 # Print usage.
 print_usage() {
+    # shellcheck disable=SC2016
+    # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+    # Reason: $directory and $file should be displayed verbatim. This way, it is expressed that a parameter is expected there.
     printf \
 'Usage: %s {{options}}
 Options:
@@ -56,7 +59,7 @@ script_path=$(cd "$(dirname "${0}")" && pwd)
 list_of_files=
 if ${changed-false}; then
-    list_of_files=$(git diff $(git merge-base origin/master HEAD) --name-only | grep -E "${extensions_regex}")
+    list_of_files=$(git diff --name-only "$(git merge-base origin/master HEAD)" | grep -E "${extensions_regex}")
 elif test ${#} = 0; then
     # Use current directory when called without an argument.
     set -- .
@@ -98,7 +101,8 @@ while test ${#} -gt 0 || test -n "${list_of_files}"; do
     if test -f "${file}"; then
         # Format file.
         # shellcheck disable=SC2046
-        # We specifically want word splitting for the parameters.
+        # SC2046: Quote this to prevent word splitting.
+        # Reason: We specifically want word splitting for the parameters.
         clang-format --style=file -i $(printf '%s' "${parameters}") "${file}"
     elif test -d "${file}"; then
         # Keep CWD for later use.

View File

@@ -1,6 +1,6 @@
 #!/bin/sh
-# Copyright (C) 2019-2022 Internet Systems Consortium, Inc. ("ISC")
+# Copyright (C) 2019-2024 Internet Systems Consortium, Inc. ("ISC")
 #
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
@@ -40,7 +40,7 @@ if [ -f "${base}.yy" ]; then
     # if your system is set to Polish, rather than "Terminals") and this will
     # confuse our script.
     LANG=en_US LANGUAGE=en_US @YACC@ -v "${base}.yy" -o output
-    rm -f output output.h *.hh
+    rm -f output output.h ./*.hh
     mv output.output /tmp/output
     output=/tmp/output
 else
@@ -57,7 +57,7 @@ fi
 # - replace : by BNF ::=
 # - squeeze multiple blank lines
-cat $output |\
+@AWK@ '{ print }' $output |\
 @AWK@ '/^Terminal/ { exit }; // { print }' |\
 @AWK@ '// { gsub("^ +[0-9]+ ", ""); print }' |\
 @AWK@ '/^\$@[0-9]+:/ { next }; // { print }' |\
@@ -83,7 +83,10 @@ $header
    :linenos:
 EOF
-    cat $output.2 | @AWK@ '/^.+$/ { print " ",$0 }; /^$/ { print } ' >> $output.3
+    # shellcheck disable=SC2016
+    # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+    # Reason: we specifically do not want $0 to expand.
+    @AWK@ '/^.+$/ { print " ",$0 }; /^$/ { print } ' $output.2 >> $output.3
     cat $output.3
 else
     cat $output.2
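The SC2016 suppression exists because the single quotes around the awk program are deliberate: $0 there is awk's whole-line field, not a shell parameter, and must reach awk unexpanded (the same hunk also drops a needless cat by letting awk read the file directly). A generic illustration, not taken from this script:

    printf 'a b\nc d\n' | awk '{ print $0 }'   # intended: awk prints each whole input line
    printf 'a b\nc d\n' | awk "{ print $0 }"   # wrong here: the shell substitutes its own $0 before awk runs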

View File

@@ -56,11 +56,9 @@ root_path=$(cd "$(dirname "${0}")/.." && pwd)
 cd "${root_path}"
 # Disable shellcheck warnings:
-# SC1117: Backslash is literal in "\/". Prefer explicit escaping: "\\/".
 # SC2119: Use "$@" if function's $1 should mean script's $1.
-# SC2039: In POSIX sh, 'local' is undefined.
 # SC3043: In POSIX sh, 'local' is undefined.
-shellcheck_opts="--exclude=SC1117 --exclude=SC2119 --exclude=SC2039 --exclude=SC3043"
+shellcheck_opts="--exclude=SC2119 --exclude=SC3043"
 files="$(find . -type f -name '*.sh' -or -name '*.sh.in' | sort)"
@@ -75,7 +73,7 @@ for i in \
     fi
 done
-# shellcheck disable=SC2046
-# SC2046: Quote this to prevent word splitting.
+# shellcheck disable=SC2086
+# SC2086: Double quote to prevent globbing and word splitting.
 # Reason: We explicitly want the parameters split.
 shellcheck ${shellcheck_opts} ${files}
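The corrected directive matters because the two codes cover different constructs: SC2046 is about unquoted command substitutions, while SC2086 is about unquoted variable expansions, which is what this line actually does. A generic illustration (file names are hypothetical):

    files='a.sh b.sh'
    shellcheck $files             # SC2086: unquoted variable; the splitting is intended here
    shellcheck $(echo "$files")   # SC2046: unquoted command substitution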

View File

@@ -22,6 +22,9 @@ extensions_regex='(\.cpp|\.cc|\.C|\.cxx|\.m|\.hpp|\.hh|\.h|\.H|\.hxx|\.tpp)$'
 # Print usage.
 print_usage() {
+    # shellcheck disable=SC2016
+    # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+    # Reason: $directory and $file should be displayed verbatim. This way, it is expressed that a parameter is expected there.
     printf \
 'Usage: %s {{options}}
 Options:
@@ -56,7 +59,7 @@ script_path=$(cd "$(dirname "${0}")" && pwd)
 list_of_files=
 if ${changed-false}; then
-    list_of_files=$(git diff $(git merge-base origin/master HEAD) --name-only | grep -E "${extensions_regex}")
+    list_of_files=$(git diff --name-only "$(git merge-base origin/master HEAD)" | grep -E "${extensions_regex}")
 elif test ${#} = 0; then
     # Use current directory when called without an argument.
     set -- .
@@ -98,7 +101,8 @@ while test ${#} -gt 0 || test -n "${list_of_files}"; do
     if test -f "${file}"; then
         # Format file.
         # shellcheck disable=SC2046
-        # We specifically want word splitting for the parameters.
+        # SC2046: Quote this to prevent word splitting.
+        # Reason: We specifically want word splitting for the parameters.
         uncrustify -c "${script_path}/../.uncrustify.cfg" --replace $(printf '%s' "${parameters}") "${file}"
     elif test -d "${file}"; then
         # Keep CWD for later use.