- Print report_dir along with report_file
- Stop verbose logging of exec_cmds to prevent syslog overflow
- Increase timeouts of wifi diagnostics scripts
- Add report_dir to error output if the tar could not be generated
415 lines
7.9 KiB
Bash
Executable file
#!/bin/sh

. /usr/share/libubox/jshn.sh

# Defaults; most of these can be overridden via UCI in config_load().
JSON_OUT=0
SPEC_DIR="/usr/share/self-diagnostics/spec"
SPEC_EXT_DIR="/etc/self-diagnostics/spec"
REPORT_PATH="/var/log/"
REPORT_TEMP_DIR="$(mktemp -p "${REPORT_PATH}" -d)"
REPORT_NAME="self-test-diagnostics"
VERBOSE=0
COMPOPTS=""
TIMEOUT=5
JSON_OUT_BUFFER=""

# Remove the temporary working directory on exit.
cleanup()
{
	if [ -d "${REPORT_TEMP_DIR}" ]; then
		rm -rf "${REPORT_TEMP_DIR}"
	fi
}

trap cleanup EXIT

# Print usage information.
help()
{
	echo "Generate self diagnostics report"
	echo
	echo "Syntax: $0 [-l] [-j] [-h] [-m <module>]"
	echo
	echo "Options:"
	echo " -l    List available module(s)"
	echo " -m    Generate system report of specific module(s), may be repeated"
	echo " -j    Enable JSON output"
	echo " -h    Print this help"
	echo
}
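
# Example invocations (illustrative; assumes the script is installed as
# "self-diagnostics" and that a spec named "wifi" exists - run with -l to
# list the spec files actually present):
#   self-diagnostics -l            # list core and extension modules
#   self-diagnostics -m wifi -j    # run only the wifi spec, emit JSON result
#   self-diagnostics               # run every spec and package the full report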

# Log to the execution log inside the report; mirror to syslog only when
# verbose logging is enabled.
log()
{
	log_file="${REPORT_TEMP_DIR}/execution.log"
	if [ "$VERBOSE" -eq 1 ]; then
		logger -p debug -t "$0" "$*"
	fi
	echo "[$(date +%Y:%m:%d-%H:%M:%S)] $*" >> "${log_file}"
}

# Errors always go to syslog as well as the execution log.
err_log()
{
	log_file="${REPORT_TEMP_DIR}/execution.log"
	logger -p err -t "$0" "$*"
	echo "[$(date +%Y:%m:%d-%H:%M:%S) ERR] $*" >> "${log_file}"
}

# Wrapper around ubus: only issue a "call" if the target object is actually
# registered, so calls to missing objects do not stall the report.
ubus()
{
	if [ "${1}" = "call" ]; then
		if command ubus list "$2" >/dev/null 2>&1; then
			command ubus "$@"
		fi
	else
		command ubus "$@"
	fi
}
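
# config_load() reads its settings from UCI: package "self-diagnostics",
# section "globals". An illustrative /etc/config/self-diagnostics (the
# section type and all values below are examples, not shipped defaults;
# report_name may reference the MODEL/SERIAL variables set from the device
# database):
#
#   config globals 'globals'
#       option extended_spec_dir '/etc/self-diagnostics/spec'
#       option exec_timeout '10'
#       option report_name 'diag_${MODEL}_${SERIAL}'
#       option compression_level '6'
#       option verbose '1'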

# Load defaults from the device database and UCI configuration.
config_load()
{
	local temp
	local MODEL SERIAL

	# Default values for MODEL and SERIAL
	MODEL="XXX"
	SERIAL="FFFFFFFFFFFF"

	log "# Starting Self diagnostics tests #"
	if [ "${JSON_OUT}" -eq 1 ]; then
		json_init
		json_add_string "report_dir" "${REPORT_PATH}"
		JSON_OUT_BUFFER="$(json_dump)"
	else
		echo "report_dir: ${REPORT_PATH}"
	fi

	temp="$(db -q get device.deviceinfo.ModelName)"
	[ -n "${temp}" ] && \
		MODEL="${temp}"

	temp="$(db -q get device.deviceinfo.SerialNumber)"
	[ -n "${temp}" ] && \
		SERIAL="${temp}"

	temp="$(uci -q get self-diagnostics.globals.extended_spec_dir)"
	[ -d "${temp}" ] && \
		SPEC_EXT_DIR="${temp}"

	temp="$(uci -q get self-diagnostics.globals.exec_timeout)"
	[ -n "${temp}" ] && \
		TIMEOUT="${temp}"

	# The configured name may reference shell variables such as MODEL/SERIAL,
	# which are expanded here via eval.
	temp="$(uci -q get self-diagnostics.globals.report_name)"
	[ -n "${temp}" ] && \
		REPORT_NAME="$(eval echo ${temp})"

	# Replace spaces and slashes so the report name is a safe file name.
	REPORT_NAME="${REPORT_NAME//[ \/]/_}"

	temp="$(uci -q get self-diagnostics.globals.compression_level)"
	[ -n "${temp}" ] && \
		COMPOPTS="${temp}"

	temp="$(uci -q get self-diagnostics.globals.verbose)"
	[ -n "${temp}" ] && \
		VERBOSE="${temp}"
}
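
# exec_spec() runs one spec file. The JSON layout it parses looks like the
# sketch below (module description, commands and file paths are illustrative
# only):
#
#   {
#     "description": "Collect network state",
#     "dependency": [ { "type": "file", "file": "/etc/config/network" } ],
#     "exec": [
#       {
#         "description": "Interface addresses",
#         "cmd": "ip address show",
#         "timeout": 10,
#         "dependency": [ { "type": "file", "file": "/sbin/ip" } ]
#       }
#     ]
#   }
#
# "dependency" entries of type "file" must exist or the spec/command is
# skipped; a per-command "timeout" overrides the global TIMEOUT.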

# Execute a single spec file: check dependencies, run each command with a
# timeout and append its output to a per-spec log in the report directory.
exec_spec()
{
	local json_file exec_skip name timeout exec_timeout rc export_path

	json_file="$1"
	[ -z "$json_file" ] && {
		err_log "No/invalid spec json_file"
		return 1
	}

	log "Loading $json_file ..."

	json_init
	json_load_file "${json_file}" || {
		err_log "Failed to load ${json_file} spec file"
		return 1
	}

	name="$(basename ${json_file})"
	export_path="${REPORT_TEMP_DIR}/${name//.json/.log}"

	exec_skip=0
	if json_is_a dependency array; then
		json_select "dependency"
		json_get_keys ekeys

		for key in $ekeys; do
			if json_is_a $key object; then
				json_select $key
				json_get_var type type

				if [ "$type" = "file" ]; then
					json_get_var file file
					if [ ! -e "$file" ]; then
						err_log "${json_file} has unmet file dependency $file"
						exec_skip=1
						json_select ..
						continue
					fi
				fi
				json_select ..
			fi
		done
		json_select ..
	fi

	[ "${exec_skip}" -eq 1 ] && {
		err_log "Dependency not satisfied for ${json_file}"
		return 0
	}

	json_get_var description description
	log "Description: $description"

	if json_is_a exec array; then
		json_select "exec"
		json_get_keys keys

		for key in $keys; do
			if json_is_a $key object; then
				json_select $key
				local cmd_skip file

				cmd_skip=0
				file=""
				if json_is_a dependency array; then
					json_select "dependency"
					json_get_keys d_keys

					for d_key in $d_keys; do
						if json_is_a $d_key object; then
							json_select $d_key
							json_get_var type type
							if [ "$type" = "file" ]; then
								json_get_var file file
								if [ ! -e "$file" ]; then
									json_select ..
									cmd_skip=1
									continue
								fi
							fi
							json_select ..
						fi
					done
					json_select ..
				fi

				[ "$cmd_skip" -eq 1 ] && {
					json_select ..
					err_log "Dependency not satisfied for ${file}"
					continue
				}

				json_get_var description description
				json_get_var cmd cmd
				json_get_var timeout timeout

				# Per-command timeout overrides the global default.
				if [ -n "$timeout" ]; then
					exec_timeout=$timeout
				else
					exec_timeout=$TIMEOUT
				fi
				log "Executing $cmd with timeout $exec_timeout"
				echo "##########################################" >> "$export_path"
				echo "# $description #" >> "$export_path"
				echo "# Exec [$cmd], timeout [$exec_timeout] #" >> "$export_path"
				echo "##########################################" >> "$export_path"
				eval timeout ${exec_timeout} $cmd >> "$export_path" 2>&1
				rc=$?
				echo "######## Execution done return code $rc ######" >> "$export_path"

				if [ "$rc" -eq 0 ]; then
					log "Execution [$cmd] completed"
				else
					err_log "Execution [$cmd] failed/timed out with exit code $rc"
				fi

				echo >> "$export_path"
				json_select ..
			fi
		done
		json_select ..
	fi

	log "Handling of $json_file done"
	log ""
}

# Run only the given spec module(s), looking first in the core spec
# directory and then in the extension directory.
generate_module()
{
	local modules="${@}"
	local file module

	config_load

	log "Modules [$@]"
	for module in $modules; do
		module="${module/.json/}"
		file="$(find $SPEC_DIR -type f -name "${module}.json")"
		[ -z "$file" ] && {
			[ -d "${SPEC_EXT_DIR}" ] && \
				file="$(find $SPEC_EXT_DIR -type f -name "${module}.json")"
		}

		[ -f "$file" ] && \
			exec_spec "$file"
	done
}

# Run every spec found in the core and extension spec directories.
generate_all()
{
	local files

	config_load

	files="$(find ${SPEC_DIR} -type f -name '*.json')"
	[ -d "${SPEC_EXT_DIR}" ] && \
		files="${files} $(find $SPEC_EXT_DIR -type f -name '*.json')"

	[ -z "$files" ] && {
		return 0
	}

	for file in $files; do
		exec_spec "$file"
	done
}

list_modules()
{
	local files

	if [ "${JSON_OUT}" -eq 1 ]; then
		json_init
		json_add_array "CoreModules"
	else
		echo
		echo "Core Module(s):"
	fi

	cd "${SPEC_DIR}" && {
		for file in $(ls); do
			if [ "${JSON_OUT}" -eq 1 ]; then
				json_add_string "" "${file/.json/}"
			else
				echo " - ${file/.json/}"
			fi
		done
	}

	if [ "${JSON_OUT}" -eq 1 ]; then
		json_close_array
		json_add_array "ExtensionModules"
	else
		echo
		echo "Extension Module(s):"
	fi

	cd "${SPEC_EXT_DIR}" && {
		for file in $(ls); do
			if [ "${JSON_OUT}" -eq 1 ]; then
				json_add_string "" "${file/.json/}"
			else
				echo " - ${file/.json/}"
			fi
		done
	}

	if [ "${JSON_OUT}" -eq 1 ]; then
		json_close_array
		json_dump
	else
		echo
	fi
}
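
# Package everything collected in REPORT_TEMP_DIR into
# ${REPORT_PATH}${REPORT_NAME}.tar (gzipped when a compression level is
# configured) and print the resulting path, or an error, as the result.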
generate_report()
{
	local filename

	filename="${REPORT_PATH}/${REPORT_NAME}"

	[ -f "${filename}.tar" ] && rm "${filename}.tar"
	[ -f "${filename}.tar.gz" ] && rm "${filename}.tar.gz"

	log "# Report generation completed #"
	cd "${REPORT_TEMP_DIR}" && {
		filename="${filename}.tar"
		tar -cf "${filename}" *
	}

	if [ -n "$COMPOPTS" ]; then
		gzip -${COMPOPTS} -f "${filename}"
		filename="${filename}.gz"
	fi

	# Move logs if failed to generate tar
	if [ ! -f "${filename}" ]; then
		mv "${REPORT_TEMP_DIR}"/*.log "${REPORT_PATH}/"
	fi

	if [ "${JSON_OUT}" -eq 1 ]; then
		json_init
		json_load "${JSON_OUT_BUFFER}"
		if [ -f "${filename}" ]; then
			json_add_string result "${filename}"
		else
			log "error: Failed to generate report tar, check logs in ${REPORT_PATH}"
			json_add_string error "Failed to generate report tar, check logs in ${REPORT_PATH}"
		fi
		json_dump
	else
		if [ -f "${filename}" ]; then
			echo "result: ${filename}"
		else
			log "error: Failed to generate report tar, check logs in ${REPORT_PATH}"
			echo "error: Failed to generate report tar, check logs in ${REPORT_PATH}"
		fi
	fi
}

[ ! -d "${SPEC_DIR}" ] && {
	log "# ${SPEC_DIR} does not exist"
	exit 1
}

list=0
modules=""

while getopts "m:hlj" opts; do
	case $opts in
		h)
			help
			exit;;
		j)
			JSON_OUT=1
			;;
		l)
			list=1
			;;
		m)
			modules="$modules ${OPTARG}"
			;;
	esac
done

if [ "${list}" -eq 1 ]; then
	list_modules
	exit 0
fi

if [ -z "${modules}" ]; then
	generate_all
else
	generate_module ${modules}
fi

generate_report