Commit 107fac31 authored by Andrey Filippov

Add LWIR16 workflow helpers

parent 5db686ca
......@@ -84,6 +84,25 @@ Use known-good `.107` local.conf by default during stage-1 builds:
./scripts/compare_localconf_with_107.sh
```
LWIR16 profile workflow (single entrypoint):
```bash
# list configured profiles (from mmc_profiles.tsv)
./scripts/lwir16_ctl.sh profiles list
# build all lwir16_* profiles and bundle outputs into deploy/images/elphel393/lwir16/
./scripts/lwir16_ctl.sh build
# compare running camera /etc with each profile rootfs.tar.gz
./scripts/lwir16_ctl.sh diff
# save sparse /etc overrides
./scripts/lwir16_ctl.sh save
# after replacing SD cards and rebooting, restore overrides
./scripts/lwir16_ctl.sh restore
```
## References
- Main upstream repository: `git@git.elphel.com:Elphel/elphel393.git` (`warrior` branch)
......
......@@ -99,7 +99,7 @@ These are the two command lines currently used for quick IMU stream checks:
```bash
cltool -c /dev/ttyUSB0 -baud=921600 -stats -did DID_INS_1 DID_GPS1_POS DID_GPS2_POS DID_GPS1_UBX_POS DID_STROBE_IN_TIME DID_PIMU
cltool -c /dev/ttyUSB0 -baud=921600 -did DID_INS_1 DID_GPS1_POS DID_GPS2_POS DID_GPS1_UBX_POS DID_STROBE_IN_TIME DID_PIMU
cltool -c /dev/ttyUSB0 -baud=921600 -did DID_INS_1 DID_INS_2 DID_GPS1_POS DID_GPS2_POS DID_GPS1_UBX_POS DID_STROBE_IN_TIME DID_PIMU
```
Operational notes:
......
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
usage() {
# Print CLI help to stdout. The heredoc delimiter is quoted ('EOF'),
# so nothing inside the help text is expanded by the shell.
cat <<'EOF'
Usage:
autocampars_sync.sh save [options]
autocampars_sync.sh restore [options]
Description:
Save/restore camera autocampars XML files between camera hosts and local storage.
Default host set:
192.168.0.41 .. 192.168.0.46
Default storage:
<repo>/workspace/bootable-images/versions/<year>/autocampars/
(example: .../versions/2026/autocampars/192.168.0.45/etc/elphel393/)
Options:
--user USER SSH user (default: root)
--prefix A.B.C First 3 octets for generated hosts (default: 192.168.0)
--from N Starting host number (default: 41)
--count N Number of hosts (default: 6)
--hosts "H1 H2 ..." Space-separated host list (IPs or hostnames)
--base-dir DIR Base directory for saved trees
--connect-timeout S SSH/SCP connect timeout in seconds (default: 7)
--dry-run Print commands without executing
-h, --help Show this help
Examples:
autocampars_sync.sh save
autocampars_sync.sh restore
autocampars_sync.sh save --hosts "192.168.0.45"
autocampars_sync.sh restore --hosts "192.168.0.45" --dry-run
EOF
}
log() {
  # Emit a timestamped status line on stdout: "[YYYY-MM-DD HH:MM:SS] message".
  local stamp
  stamp="$(date '+%F %T')"
  printf '[%s] %s\n' "$stamp" "$*"
}
run_cmd() {
  # Execute "$@" verbatim; when dry_run=1, print the %q-quoted command
  # instead so the operator can inspect what would have run.
  if [[ "${dry_run}" -ne 1 ]]; then
    "$@"
  else
    printf 'DRY-RUN:'
    printf ' %q' "$@"
    printf '\n'
  fi
}
# ---- Mode selection: first positional argument must be 'save' or 'restore' ----
mode="${1:-}"
if [[ -z "${mode}" || "${mode}" == "-h" || "${mode}" == "--help" ]]; then
usage
exit 0
fi
if [[ "${mode}" != "save" && "${mode}" != "restore" ]]; then
echo "ERROR: first argument must be 'save' or 'restore'" >&2
usage
exit 1
fi
shift
# ---- Defaults (each overridable via the options parsed below) ----
ssh_user="root"
host_prefix="192.168.0"
start_octet=41
host_count=6
hosts_csv=""
# Saved trees live under versions/<current year>/autocampars by default.
base_dir="${REPO_ROOT}/workspace/bootable-images/versions/$(date +%Y)/autocampars"
connect_timeout=7
dry_run=0
# ---- Option parsing (value options consume two arguments) ----
while [[ $# -gt 0 ]]; do
case "$1" in
--user)
ssh_user="$2"
shift 2
;;
--prefix)
host_prefix="$2"
shift 2
;;
--from)
start_octet="$2"
shift 2
;;
--count)
host_count="$2"
shift 2
;;
--hosts)
hosts_csv="$2"
shift 2
;;
--base-dir)
base_dir="$2"
shift 2
;;
--connect-timeout)
connect_timeout="$2"
shift 2
;;
--dry-run)
dry_run=1
shift
;;
-h|--help)
usage
exit 0
;;
*)
echo "ERROR: unknown option: $1" >&2
usage
exit 1
;;
esac
done
# ---- Host list: explicit --hosts wins; otherwise generate prefix.N range ----
declare -a hosts=()
if [[ -n "${hosts_csv}" ]]; then
# Intentional word-splitting of the space-separated --hosts value.
# shellcheck disable=SC2206
hosts=(${hosts_csv})
else
for ((i = 0; i < host_count; i++)); do
hosts+=("${host_prefix}.$((start_octet + i))")
done
fi
if [[ "${#hosts[@]}" -eq 0 ]]; then
echo "ERROR: host list is empty" >&2
exit 1
fi
run_cmd mkdir -p "${base_dir}"
ok=0
fail=0
if [[ "${mode}" == "save" ]]; then
log "Saving autocampars XML files to ${base_dir}"
for host in "${hosts[@]}"; do
target="${ssh_user}@${host}"
dst="${base_dir}/${host}/etc/elphel393"
# ':?' aborts if base_dir is empty — guards against deleting '/<host>'.
run_cmd rm -rf "${base_dir:?}/${host}"
run_cmd mkdir -p "${dst}"
log "Host ${host}: listing /etc/elphel393/autocampars*.xml"
# NOTE(review): this ssh listing runs even under --dry-run (only the copy
# below is wrapped in run_cmd) — confirm that is intended.
mapfile -t remote_files < <(
ssh -o BatchMode=yes -o ConnectTimeout="${connect_timeout}" "${target}" \
"ls -1 /etc/elphel393/autocampars*.xml 2>/dev/null" || true
)
if [[ "${#remote_files[@]}" -eq 0 ]]; then
log "Host ${host}: no files found or host unreachable"
fail=$((fail + 1))
continue
fi
copied=0
for remote_file in "${remote_files[@]}"; do
# scp -O forces the legacy protocol (camera sshd may lack sftp).
if run_cmd scp -O -p -o BatchMode=yes -o ConnectTimeout="${connect_timeout}" \
"${target}:${remote_file}" "${dst}/"; then
copied=$((copied + 1))
fi
done
if [[ "${copied}" -gt 0 ]]; then
log "Host ${host}: copied ${copied} file(s)"
ok=$((ok + 1))
else
log "Host ${host}: copy failed"
fail=$((fail + 1))
fi
done
else
log "Restoring autocampars subtree from ${base_dir}"
for host in "${hosts[@]}"; do
target="${ssh_user}@${host}"
src="${base_dir}/${host}/etc/elphel393"
if [[ ! -d "${src}" ]]; then
log "Host ${host}: missing local source dir ${src}"
fail=$((fail + 1))
continue
fi
# Skip hosts whose saved tree exists but contains nothing to restore.
if ! find "${src}" -mindepth 1 -print -quit | grep -q .; then
log "Host ${host}: local source dir is empty: ${src}"
fail=$((fail + 1))
continue
fi
if ! run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${connect_timeout}" \
"${target}" "mkdir -p /etc/elphel393"; then
log "Host ${host}: unable to prepare remote destination"
fail=$((fail + 1))
continue
fi
# 'src/.' copies directory contents (not the directory itself).
if run_cmd scp -O -r -p -o BatchMode=yes -o ConnectTimeout="${connect_timeout}" \
"${src}/." "${target}:/etc/elphel393/"; then
log "Host ${host}: restore completed"
ok=$((ok + 1))
else
log "Host ${host}: restore failed"
fail=$((fail + 1))
fi
done
fi
log "Summary: mode=${mode} ok=${ok} fail=${fail}"
if [[ "${fail}" -gt 0 ]]; then
exit 1
fi
......@@ -5,22 +5,26 @@ set -euo pipefail
# Default targets:
# root@192.168.0.41 ... root@192.168.0.46
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
usage() {
cat <<'EOF'
Usage:
konsole_ssh_2x3.sh [user@host ...]
konsole_ssh_2x3.sh [--user USER] [--prefix A.B.C] [--from N] [--dmesg]
konsole_ssh_2x3.sh [--user USER] [--prefix A.B.C] [--from N] [--dmesg] [--new]
Options:
--user USER SSH user for generated host list (default: root)
--prefix A.B.C First 3 octets for generated host list (default: 192.168.0)
--from N Starting host number, creates 6 hosts N..N+5 (default: 41)
--dmesg Run 'dmesg' on remote after SSH login, then keep interactive shell
--new Reset known_hosts and run ssh-copy-id for selected targets first
-h, --help Show this help
Examples:
konsole_ssh_2x3.sh
konsole_ssh_2x3.sh --dmesg
konsole_ssh_2x3.sh --new
konsole_ssh_2x3.sh --user root --prefix 192.168.0 --from 41
konsole_ssh_2x3.sh root@192.168.0.41 root@192.168.0.42 root@192.168.0.43 \
root@192.168.0.44 root@192.168.0.45 root@192.168.0.46
......@@ -46,6 +50,7 @@ ssh_user="root"
host_prefix="192.168.0"
start_octet=41
run_dmesg=0
prepare_new=0
declare -a targets=()
while [[ $# -gt 0 ]]; do
......@@ -66,6 +71,10 @@ while [[ $# -gt 0 ]]; do
run_dmesg=1
shift
;;
--new)
prepare_new=1
shift
;;
-h|--help)
usage
exit 0
......@@ -101,6 +110,27 @@ if [[ "${#targets[@]}" -ne 6 ]]; then
exit 1
fi
# --new: before opening the terminal grid, reset known_hosts entries and push
# SSH keys for every selected target by delegating to reset_camera_ssh_keys.sh.
if [[ "$prepare_new" -eq 1 ]]; then
reset_script="${SCRIPT_DIR}/reset_camera_ssh_keys.sh"
if [[ ! -x "$reset_script" ]]; then
echo "ERROR: missing helper script: $reset_script" >&2
exit 1
fi
log "Preparing SSH trust for selected targets (--new)"
for target in "${targets[@]}"; do
# Targets may be 'user@host' or a bare host; bare hosts use the --user default.
if [[ "$target" == *@* ]]; then
prep_user="${target%@*}"
prep_host="${target#*@}"
else
prep_user="$ssh_user"
prep_host="$target"
fi
log " reset+copy: ${prep_user}@${prep_host}"
"$reset_script" --user "$prep_user" --hosts "$prep_host"
done
fi
# Temporary Konsole layout description; removed on any exit path via the trap.
layout_file="$(mktemp --tmpdir konsole_2x3_layout_XXXXXX.json)"
trap 'rm -f "$layout_file"' EXIT
......
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
WORKSPACE="${REPO_ROOT}/workspace"
BUILD_CONF_DIR="${WORKSPACE}/poky/build/conf"
DEPLOY_DIR="${WORKSPACE}/poky/build/tmp/deploy/images/elphel393"
CATALOG_DEFAULT="${BUILD_CONF_DIR}/mmc_profiles.tsv"
OUT_DIR_DEFAULT="${DEPLOY_DIR}/lwir16"
TARGET_DEFAULT="core-image-elphel393"
usage() {
# Print CLI help to stdout (quoted heredoc: no expansion occurs).
cat <<'EOF'
Usage:
lwir16_build_bundle.sh [options]
Description:
Build and bundle LWIR16 image sets by iterating over lwir16_* profiles from
mmc_profiles.tsv. For each profile:
1) copy profile local.conf -> build/conf/local.conf
2) run bitbake (unless --skip-build)
3) copy deploy/mmc -> deploy/lwir16/mmc_<host_octet>
4) copy deploy/nand -> deploy/lwir16/nand_<host_octet> (if exists)
Options:
--catalog PATH Profile catalog TSV (default: build/conf/mmc_profiles.tsv)
--out-dir PATH Output directory (default: deploy/images/elphel393/lwir16)
--target TARGET BitBake target (default: core-image-elphel393)
--profiles "N1 N2" Space-separated profile names to process
--skip-build Do not run bitbake, only bundle from existing deploy files
--no-nand Do not copy nand artifacts
--clean-out Remove --out-dir before processing
--dry-run Print commands without executing
-h, --help Show this help
Examples:
lwir16_build_bundle.sh
lwir16_build_bundle.sh --profiles "lwir16_boson640_41 lwir16_parallel_45"
lwir16_build_bundle.sh --skip-build --clean-out
EOF
}
log() {
  # Timestamped progress message on stdout.
  local now
  now=$(date '+%F %T')
  printf '[%s] %s\n' "${now}" "$*"
}
die() {
  # Fatal-error helper: report on stderr and terminate with status 1.
  printf 'ERROR: %s\n' "$*" >&2
  exit 1
}
run_cmd() {
  # Run the given command, or only print it (shell-quoted) when DRY_RUN=1.
  if [[ "${DRY_RUN}" -ne 1 ]]; then
    "$@"
  else
    printf 'DRY-RUN:'
    printf ' %q' "$@"
    printf '\n'
  fi
}
write_sums_if_possible() {
  # Write SHA256SUMS inside "$1" when a complete image set is present.
  # mmc layout pairs the four boot files with rootfs.tar.gz; nand layout
  # pairs them with rootfs.ubi + rootfs.ubifs. Otherwise do nothing.
  local dir="$1"
  [[ -d "$dir" ]] || return 0
  (
    cd "$dir"
    local -a core=(boot.bin u-boot-dtb.img devicetree.dtb uImage)
    local f
    for f in "${core[@]}"; do
      # Missing core file: leave the directory untouched (exit subshell only).
      [[ -f "$f" ]] || exit 0
    done
    if [[ -f rootfs.tar.gz ]]; then
      sha256sum "${core[@]}" rootfs.tar.gz >SHA256SUMS
    elif [[ -f rootfs.ubi && -f rootfs.ubifs ]]; then
      sha256sum "${core[@]}" rootfs.ubi rootfs.ubifs >SHA256SUMS
    fi
  )
}
extract_remote_ip() {
  # Print the double-quoted REMOTE_IP value from a local.conf (empty if absent).
  # Generalized to accept plain '=', default '?=' and weak-default '??='
  # assignment operators, since BitBake profile configs may use any of them;
  # previously only '?=' was recognized.
  local conf_file="$1"
  awk -F'"' '/^[[:space:]]*REMOTE_IP[[:space:]]*\??\??=/{print $2; exit}' "$conf_file"
}
extract_host_octet() {
  # Print the last octet of the profile's REMOTE_IP (used as a slot suffix).
  # Returns non-zero when REMOTE_IP is missing or not a dotted-quad address.
  local conf_file="$1"
  local ip
  ip="$(extract_remote_ip "$conf_file")"
  [[ "$ip" =~ ^([0-9]{1,3}\.){3}([0-9]{1,3})$ ]] || return 1
  echo "${BASH_REMATCH[2]}"
}
# ---- Effective configuration (defaults above, overridden by options) ----
CATALOG="${CATALOG_DEFAULT}"
OUT_DIR="${OUT_DIR_DEFAULT}"
TARGET="${TARGET_DEFAULT}"
PROFILES=""
SKIP_BUILD=0
NO_NAND=0
CLEAN_OUT=0
DRY_RUN=0
# ---- Option parsing (long options only; value options consume two args) ----
while [[ $# -gt 0 ]]; do
case "$1" in
--catalog)
CATALOG="$2"
shift 2
;;
--out-dir)
OUT_DIR="$2"
shift 2
;;
--target)
TARGET="$2"
shift 2
;;
--profiles)
PROFILES="$2"
shift 2
;;
--skip-build)
SKIP_BUILD=1
shift
;;
--no-nand)
NO_NAND=1
shift
;;
--clean-out)
CLEAN_OUT=1
shift
;;
--dry-run)
DRY_RUN=1
shift
;;
-h|--help)
usage
exit 0
;;
*)
die "unknown option: $1"
;;
esac
done
# Expand a leading '~' that survives when paths were passed quoted.
CATALOG="${CATALOG/#\~/${HOME}}"
OUT_DIR="${OUT_DIR/#\~/${HOME}}"
[[ -f "${CATALOG}" ]] || die "catalog not found: ${CATALOG}"
[[ -f "${BUILD_CONF_DIR}/local.conf" ]] || die "missing ${BUILD_CONF_DIR}/local.conf"
[[ -d "${DEPLOY_DIR}/mmc" ]] || die "missing deploy mmc directory: ${DEPLOY_DIR}/mmc"
# Back up the operator's current local.conf; cleanup() restores it on exit.
orig_local_conf="$(mktemp)"
if [[ "${DRY_RUN}" -eq 0 ]]; then
cp -f "${BUILD_CONF_DIR}/local.conf" "${orig_local_conf}"
fi
cleanup() {
# Best-effort restore of the original local.conf, then drop the backup.
if [[ "${DRY_RUN}" -eq 0 && -f "${orig_local_conf}" ]]; then
cp -f "${orig_local_conf}" "${BUILD_CONF_DIR}/local.conf" || true
fi
rm -f "${orig_local_conf}" || true
}
trap cleanup EXIT
if [[ "${CLEAN_OUT}" -eq 1 ]]; then
run_cmd rm -rf "${OUT_DIR}"
fi
run_cmd mkdir -p "${OUT_DIR}"
# index.tsv receives one row per processed profile; header written here.
index_file="${OUT_DIR}/index.tsv"
run_cmd bash -lc "printf 'slot\\tprofile\\tremote_ip\\tlocal_conf\\tbuilt_at\\tmmc_dir\\tnand_dir\\tnotes\\n' > '$index_file'"
# Profile selection: explicit --profiles list, else every lwir16_* catalog row.
declare -a selected_profiles=()
if [[ -n "${PROFILES}" ]]; then
# Intentional word-splitting of the space-separated --profiles value.
# shellcheck disable=SC2206
selected_profiles=(${PROFILES})
else
mapfile -t selected_profiles < <(awk -F'\t' 'NR>1 && $1 ~ /^lwir16_/ {print $1}' "${CATALOG}")
fi
[[ "${#selected_profiles[@]}" -gt 0 ]] || die "no profiles selected"
total_profiles="${#selected_profiles[@]}"
idx=0
# used_slots prevents two profiles from overwriting the same mmc_<octet> dir.
declare -A used_slots=()
ok=0
fail=0
for profile in "${selected_profiles[@]}"; do
idx=$((idx + 1))
# Resolve the catalog row for this profile name (tab-separated columns).
line="$(awk -F'\t' -v n="${profile}" 'NR>1 && $1==n {print; exit}' "${CATALOG}")"
if [[ -z "${line}" ]]; then
log "[${idx}/${total_profiles}] Profile not found in catalog: ${profile}"
fail=$((fail + 1))
continue
fi
IFS=$'\t' read -r p_name p_local_conf p_mmc_dir p_created p_notes <<<"${line}"
p_local_conf="${p_local_conf/#\~/${HOME}}"
[[ -f "${p_local_conf}" ]] || {
log "[${idx}/${total_profiles}] Missing local.conf for ${p_name}: ${p_local_conf}"
fail=$((fail + 1))
continue
}
# Derive the output slot from REMOTE_IP's last octet; fall back to the name.
remote_ip="$(extract_remote_ip "${p_local_conf}" || true)"
host_octet="$(extract_host_octet "${p_local_conf}" || true)"
if [[ -z "${host_octet}" ]]; then
host_octet="${p_name}"
fi
slot="mmc_${host_octet}"
if [[ -n "${used_slots[${slot}]:-}" ]]; then
slot="${slot}_${p_name}"
fi
used_slots["${slot}"]=1
log "[${idx}/${total_profiles}] Processing ${p_name} (REMOTE_IP=${remote_ip:-unknown}, slot=${slot})"
# Activate the profile's local.conf, then build inside the docker wrapper.
run_cmd cp -f "${p_local_conf}" "${BUILD_CONF_DIR}/local.conf"
if [[ "${SKIP_BUILD}" -eq 0 ]]; then
run_cmd "${SCRIPT_DIR}/run_docker.sh" bash -lc "
set -e
cd /work/elphel393/poky
set +u
. ./oe-init-build-env build
set -u
export DL_DIR=/cache/downloads
export SSTATE_DIR=/cache/sstate-cache
bitbake ${TARGET}
"
fi
# Bundle mmc artifacts into the slot directory (fresh copy each run).
mmc_slot_dir="${OUT_DIR}/${slot}"
run_cmd rm -rf "${mmc_slot_dir}"
run_cmd mkdir -p "${mmc_slot_dir}"
run_cmd rsync -a --delete "${DEPLOY_DIR}/mmc/." "${mmc_slot_dir}/"
run_cmd cp -f "${p_local_conf}" "${mmc_slot_dir}/local.conf"
run_cmd bash -lc "printf 'profile=%s\nremote_ip=%s\nbuilt_at=%s\ntarget=%s\nnotes=%s\n' \
'${p_name}' '${remote_ip:-}' '$(date -Is)' '${TARGET}' '${p_notes}' > '${mmc_slot_dir}/PROFILE.txt'"
if [[ "${DRY_RUN}" -eq 0 ]]; then
write_sums_if_possible "${mmc_slot_dir}"
fi
# Optionally bundle nand artifacts alongside (skipped with --no-nand).
nand_slot_dir=""
if [[ "${NO_NAND}" -eq 0 && -d "${DEPLOY_DIR}/nand" ]]; then
nand_slot_dir="${OUT_DIR}/nand_${host_octet}"
run_cmd rm -rf "${nand_slot_dir}"
run_cmd mkdir -p "${nand_slot_dir}"
run_cmd rsync -a --delete "${DEPLOY_DIR}/nand/." "${nand_slot_dir}/"
run_cmd cp -f "${p_local_conf}" "${nand_slot_dir}/local.conf"
run_cmd bash -lc "printf 'profile=%s\nremote_ip=%s\nbuilt_at=%s\ntarget=%s\nnotes=%s\n' \
'${p_name}' '${remote_ip:-}' '$(date -Is)' '${TARGET}' '${p_notes}' > '${nand_slot_dir}/PROFILE.txt'"
if [[ "${DRY_RUN}" -eq 0 ]]; then
write_sums_if_possible "${nand_slot_dir}"
fi
fi
# Append this profile's row to index.tsv.
run_cmd bash -lc "printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' \
'${slot}' '${p_name}' '${remote_ip:-}' '${p_local_conf}' '$(date -Is)' \
'${mmc_slot_dir}' '${nand_slot_dir}' '${p_notes}' >> '${index_file}'"
log "[${idx}/${total_profiles}] Completed ${p_name}"
ok=$((ok + 1))
done
log "Done: ok=${ok} fail=${fail}"
if [[ "${fail}" -gt 0 ]]; then
exit 1
fi
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
WORKSPACE="${REPO_ROOT}/workspace"
CATALOG_DEFAULT="${WORKSPACE}/poky/build/conf/mmc_profiles.tsv"
ETC_BASE_DEFAULT="${WORKSPACE}/bootable-images/lwir16/etc-overrides"
usage() {
# Print CLI help to stdout (quoted heredoc: no expansion occurs).
cat <<'EOF'
Usage:
lwir16_ctl.sh <command> [options]
Commands:
profiles [args...] Pass-through to lwir16_profile_catalog.sh
build [args...] Pass-through to lwir16_build_bundle.sh
autocampars [args...] Pass-through to autocampars_sync.sh
diff List /etc differences vs each profile rootfs
save Save sparse /etc overrides (only changed/new files)
restore Restore sparse /etc overrides to camera hosts
verify Verify saved overrides against camera /etc
Options for diff/save/restore/verify:
--catalog PATH Catalog TSV (default: workspace/poky/build/conf/mmc_profiles.tsv)
--profiles "P1 P2 ..." Profiles to process (default: all names starting with lwir16_)
--base-dir DIR Override storage root (default: workspace/bootable-images/lwir16/etc-overrides)
--user USER SSH user (default: root)
--connect-timeout S SSH/SCP timeout seconds (default: 7)
--no-sync-time Do not sync camera UTC time from host before operations
--sync-time Sync camera UTC time from host (default)
--no-sync-fpga-time Skip FPGA time sync attempt
--sync-fpga-time Enable FPGA time sync attempt (default)
--sync-fpga-auto Use ccam.php _time semantics (set once)
--sync-fpga-force Use ccam.php _stime semantics (force update)
--sync-fpga-mode MODE MODE=auto|force (default: force)
--show-mod-diff For "diff": print content diffs for MOD files
--mod-diff-max-lines N Max lines per MOD diff block (default: 120)
--mod-diff-max-bytes N Max text file size for content diff (default: 262144)
--apply-deletes On restore, remove files listed in deleted.list
--dry-run Print commands without executing
-h, --help Show this help
Examples:
lwir16_ctl.sh profiles list
lwir16_ctl.sh build --skip-build --clean-out
lwir16_ctl.sh diff
lwir16_ctl.sh diff --show-mod-diff
lwir16_ctl.sh restore --no-sync-time
lwir16_ctl.sh restore --no-sync-fpga-time
lwir16_ctl.sh restore --sync-fpga-force
lwir16_ctl.sh save --profiles "lwir16_boson640_41 lwir16_parallel_45"
lwir16_ctl.sh restore --profiles "lwir16_parallel_45"
lwir16_ctl.sh verify --profiles "lwir16_parallel_45"
EOF
}
die() {
  # Report a fatal error on stderr and terminate with status 1.
  printf '%s\n' "ERROR: $*" >&2
  exit 1
}
log() {
  # Prefix the message with a "[YYYY-MM-DD HH:MM:SS]" timestamp on stdout.
  local when
  when="$(date '+%F %T')"
  printf '[%s] %s\n' "$when" "$*"
}
run_cmd() {
  # Print the shell-quoted command under DRY_RUN=1; otherwise execute it
  # and propagate its exit status.
  if [[ "${DRY_RUN}" -ne 1 ]]; then
    "$@"
  else
    printf 'DRY-RUN:'
    printf ' %q' "$@"
    printf '\n'
  fi
}
ensure_catalog() {
  # Validate that the TSV catalog exists and carries the expected header row;
  # dies (via die) on any problem.
  local cat_file="$1"
  if [[ ! -f "$cat_file" ]]; then
    die "catalog not found: $cat_file"
  fi
  local header
  header="$(head -n1 "$cat_file" || true)"
  if [[ "$header" != $'name\tlocal_conf\tmmc_dir\tcreated_at\tnotes' ]]; then
    die "invalid catalog header in $cat_file"
  fi
}
profile_line_by_name() {
  # Print the first data row (header excluded) whose name column matches $2.
  local cat_file="$1"
  local wanted="$2"
  awk -F'\t' -v w="$wanted" 'NR>1 && $1==w {print; exit}' "$cat_file"
}
extract_remote_ip() {
  # Print the double-quoted REMOTE_IP value from a local.conf (empty if absent).
  # Generalized to accept plain '=', default '?=' and weak-default '??='
  # assignment operators, since BitBake profile configs may use any of them;
  # previously only '?=' was recognized.
  local conf_file="$1"
  awk -F'"' '/^[[:space:]]*REMOTE_IP[[:space:]]*\??\??=/{print $2; exit}' "$conf_file"
}
extract_base_etc() {
  # Unpack the /etc tree from a rootfs tarball into "$out_root"/etc.
  # Member naming differs between archives ("etc" vs "./etc"), so both are
  # tried before falling back to a full extraction; dies if no etc/ appears.
  local rootfs_tgz="$1"
  local out_root="$2"
  run_cmd mkdir -p "$out_root"
  if [[ "${DRY_RUN}" -eq 1 ]]; then
    printf 'DRY-RUN: extract /etc from %s -> %s\n' "$rootfs_tgz" "$out_root"
    return 0
  fi
  local member extracted=0
  for member in etc ./etc; do
    if tar -xzf "$rootfs_tgz" -C "$out_root" "$member" >/dev/null 2>&1; then
      extracted=1
      break
    fi
  done
  if [[ "$extracted" -eq 0 ]]; then
    tar -xzf "$rootfs_tgz" -C "$out_root" >/dev/null 2>&1
  fi
  [[ -d "$out_root/etc" ]] || die "failed to extract /etc from $rootfs_tgz"
}
extract_remote_etc() {
# Stream the camera's /etc over ssh as a gzipped tarball and unpack it
# locally under "$out_root". Returns non-zero if the transfer fails or no
# etc/ directory results; under DRY_RUN only prints the intended pipeline.
local target="$1"
local out_root="$2"
run_cmd mkdir -p "$out_root"
if [[ "${DRY_RUN}" -eq 1 ]]; then
printf 'DRY-RUN: ssh -o BatchMode=yes -o ConnectTimeout=%s %s tar -C / -czf - etc | tar -xzf - -C %q\n' \
"$CONNECT_TIMEOUT" "$target" "$out_root"
return 0
fi
if ! ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"tar -C / -czf - etc" | tar -xzf - -C "$out_root"; then
return 1
fi
[[ -d "$out_root/etc" ]] || return 1
}
build_diff_report() {
# Compare a baseline etc tree with a remote snapshot and write a sorted TSV
# report of rows "ADD\trel" (only on the camera), "DEL\trel" (only in the
# baseline) and "MOD\trel" (present in both, content differs). Any diff
# stderr output is appended as "ERR\t..." rows.
local base_etc="$1"
local remote_etc="$2"
local report_tsv="$3"
local raw_file="${report_tsv}.raw"
local err_file="${report_tsv}.err"
if [[ "${DRY_RUN}" -eq 1 ]]; then
printf 'DRY-RUN: diff -qr --no-dereference %q %q > %q\n' "$base_etc" "$remote_etc" "$raw_file"
return 0
fi
# diff exits non-zero when the trees differ; that is the expected case.
diff -qr --no-dereference "$base_etc" "$remote_etc" >"$raw_file" 2>"$err_file" || true
# Translate diff's prose ("Files A and B differ" / "Only in DIR: name")
# into the TSV rows described above.
# NOTE(review): sub("^" base, ...) uses the path as a regex — directories
# containing regex metacharacters could mis-strip; mktemp paths are plain,
# but confirm if base dirs ever come from user input.
awk -v base="${base_etc}/" -v rem="${remote_etc}/" '
/^(Files|Symbolic links) .* and .* differ$/ {
line=$0
sub(/^(Files|Symbolic links) /, "", line)
sub(/ and .*/, "", line)
rel=line
sub("^" base, "", rel)
print "MOD\t" rel
next
}
/^Only in / {
line=$0
sub(/^Only in /, "", line)
split(line, pair, ": ")
dir=pair[1]
relname=substr(line, length(dir) + 3)
if (index(dir, rem) == 1) {
rel=substr(dir, length(rem) + 1)
sub(/^\//, "", rel)
if (rel == "") print "ADD\t" relname
else print "ADD\t" rel "/" relname
} else if (index(dir, base) == 1) {
rel=substr(dir, length(base) + 1)
sub(/^\//, "", rel)
if (rel == "") print "DEL\t" relname
else print "DEL\t" rel "/" relname
}
next
}
' "$raw_file" | sort -u >"$report_tsv"
if [[ -s "$err_file" ]]; then
awk '{print "ERR\t" $0}' "$err_file" >>"$report_tsv"
fi
}
print_report_summary() {
  # Log the ADD/MOD/DEL/ERR row counts for one profile's diff report, then
  # echo every report row (or a placeholder when the report is empty).
  local profile="$1"
  local target="$2"
  local report_tsv="$3"
  if [[ "${DRY_RUN}" -eq 1 ]]; then
    log "DRY-RUN report for ${profile} (${target})"
    return 0
  fi
  # Single awk pass over the report computes all four counters.
  local counts add_count mod_count del_count err_count
  counts="$(awk -F'\t' '
    $1=="ADD"{a++} $1=="MOD"{m++} $1=="DEL"{d++} $1=="ERR"{e++}
    END{printf "%d %d %d %d", a+0, m+0, d+0, e+0}' "$report_tsv")"
  read -r add_count mod_count del_count err_count <<<"$counts"
  log "${profile} (${target}) ADD=${add_count} MOD=${mod_count} DEL=${del_count} ERR=${err_count}"
  if [[ -s "$report_tsv" ]]; then
    awk -F'\t' '{printf " %s %s\n",$1,$2}' "$report_tsv"
  else
    echo " (no differences)"
  fi
}
is_probably_text_file() {
  # Heuristic text check: non-files fail, empty files count as text,
  # otherwise rely on grep -I, which reports binary data as a non-match.
  local candidate="$1"
  [[ -f "$candidate" ]] || return 1
  [[ -s "$candidate" ]] || return 0
  LC_ALL=C grep -Iq . "$candidate"
}
print_mod_content_diffs() {
# When --show-mod-diff is active, print per-file detail for every MOD row:
# symlink targets for links, hashes for binary or oversized files, and a
# truncated unified diff for regular text files. No-op under DRY_RUN or
# when SHOW_MOD_DIFF is disabled.
local profile="$1"
local target="$2"
local base_etc="$3"
local remote_etc="$4"
local report_tsv="$5"
[[ "${SHOW_MOD_DIFF}" -eq 1 ]] || return 0
if [[ "${DRY_RUN}" -eq 1 ]]; then
log "DRY-RUN mod-content diff for ${profile} (${target})"
return 0
fi
mapfile -t mod_files < <(awk -F'\t' '$1=="MOD"{print $2}' "$report_tsv")
if [[ "${#mod_files[@]}" -eq 0 ]]; then
return 0
fi
log "${profile} (${target}) MOD content details:"
local rel bf rf bsz rsz
for rel in "${mod_files[@]}"; do
bf="${base_etc}/${rel}"
rf="${remote_etc}/${rel}"
echo " --- ${rel}"
# Symlinks: show both targets instead of content.
if [[ -L "$bf" || -L "$rf" ]]; then
echo " SYMLINK base: $(readlink "$bf" 2>/dev/null || echo '<missing>')"
echo " SYMLINK remote: $(readlink "$rf" 2>/dev/null || echo '<missing>')"
continue
fi
if [[ ! -e "$bf" || ! -e "$rf" ]]; then
echo " One side missing (base_exists=$([[ -e "$bf" ]] && echo yes || echo no), remote_exists=$([[ -e "$rf" ]] && echo yes || echo no))"
continue
fi
# Devices, fifos, etc.: report the file type only.
if [[ ! -f "$bf" || ! -f "$rf" ]]; then
echo " Non-regular file type (base=$(stat -c %F "$bf" 2>/dev/null || echo unknown), remote=$(stat -c %F "$rf" 2>/dev/null || echo unknown))"
continue
fi
bsz="$(stat -c '%s' "$bf" 2>/dev/null || echo 0)"
rsz="$(stat -c '%s' "$rf" 2>/dev/null || echo 0)"
# Oversized files: hashes only, to keep output bounded.
if (( bsz > MOD_DIFF_MAX_BYTES || rsz > MOD_DIFF_MAX_BYTES )); then
echo " Large file (base=${bsz}B remote=${rsz}B), showing hashes only:"
echo " base: $(sha256sum "$bf" | awk '{print $1}')"
echo " remote: $(sha256sum "$rf" | awk '{print $1}')"
continue
fi
if ! is_probably_text_file "$bf" || ! is_probably_text_file "$rf"; then
echo " Binary/non-text file, showing hashes only:"
echo " base: $(sha256sum "$bf" | awk '{print $1}')"
echo " remote: $(sha256sum "$rf" | awk '{print $1}')"
continue
fi
# Text files: unified diff, truncated to MOD_DIFF_MAX_LINES lines.
local dtmp lines
dtmp="$(mktemp)"
diff -u --label "base/${rel}" --label "remote/${rel}" "$bf" "$rf" >"$dtmp" || true
lines="$(wc -l <"$dtmp" | tr -d ' ')"
sed -n "1,${MOD_DIFF_MAX_LINES}p" "$dtmp"
if (( lines > MOD_DIFF_MAX_LINES )); then
echo " ... diff truncated at ${MOD_DIFF_MAX_LINES} lines (total ${lines})"
fi
rm -f "$dtmp"
done
}
copy_reported_overrides() {
# Copy every ADD/MOD path from the diff report out of the remote snapshot
# into the sparse override tree. The volatile 'timestamp' and 'version'
# files are excluded so re-saves do not churn. Logs copied/missing counts.
local report_tsv="$1"
local remote_etc="$2"
local out_etc="$3"
if [[ "${DRY_RUN}" -eq 1 ]]; then
printf 'DRY-RUN: copy ADD/MOD paths from %s to %s (excluding timestamp/version)\n' \
"$report_tsv" "$out_etc"
return 0
fi
local copied=0
local missing=0
local rel src dst
while IFS= read -r rel; do
[[ -n "$rel" ]] || continue
src="${remote_etc}/${rel}"
dst="${out_etc}/${rel}"
# '-L' keeps symlinks themselves eligible even when '-e' is false.
if [[ ! -e "$src" && ! -L "$src" ]]; then
log "Warning: changed path missing in remote snapshot: /etc/${rel}"
missing=$((missing + 1))
continue
fi
mkdir -p "$(dirname "$dst")"
# cp -a preserves mode, ownership, timestamps and symlinks.
cp -a "$src" "$dst"
copied=$((copied + 1))
done < <(
awk -F'\t' '
($1=="ADD" || $1=="MOD") && $2!="timestamp" && $2!="version" {print $2}
' "$report_tsv" | sort -u
)
log "Saved override paths copied: ${copied}"
if [[ "$missing" -gt 0 ]]; then
log "Saved override paths missing: ${missing}"
fi
}
verify_saved_overrides() {
# Compare each saved override file/symlink (under the given top-level dirs)
# against the live camera over ssh: symlinks by readlink target, regular
# files by sha256. Prints one line per discrepancy, logs an OK/MISSING/
# MISMATCH summary, and returns non-zero when anything disagrees.
local profile="$1"
local target="$2"
local saved_root="$3"
shift 3
local dirs=("$@")
if [[ "${DRY_RUN}" -eq 1 ]]; then
printf 'DRY-RUN: verify saved overrides for %s against %s\n' "$profile" "$target"
return 0
fi
local ok=0
local missing=0
local mismatch=0
local rel local_path remote_abs
local local_hash remote_hash local_link remote_link
while IFS= read -r rel; do
[[ -n "$rel" ]] || continue
local_path="${saved_root}/${rel}"
remote_abs="/${rel}"
# Symlinks: compare link targets, not content.
if [[ -L "$local_path" ]]; then
if ! ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"test -L $(sq "$remote_abs")" >/dev/null 2>&1; then
echo " MISSING-LINK ${rel}"
missing=$((missing + 1))
continue
fi
local_link="$(readlink "$local_path" 2>/dev/null || true)"
remote_link="$(ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"readlink $(sq "$remote_abs")" 2>/dev/null || true)"
if [[ "$local_link" == "$remote_link" ]]; then
ok=$((ok + 1))
else
echo " MISMATCH-LINK ${rel}: local='${local_link}' remote='${remote_link}'"
mismatch=$((mismatch + 1))
fi
continue
fi
# Anything that is neither a symlink nor a regular file is skipped.
if [[ ! -f "$local_path" ]]; then
continue
fi
if ! ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"test -f $(sq "$remote_abs")" >/dev/null 2>&1; then
echo " MISSING-FILE ${rel}"
missing=$((missing + 1))
continue
fi
local_hash="$(sha256sum "$local_path" | awk '{print $1}')"
# Camera may only have busybox's sha256sum; try both, 127 if neither.
remote_hash="$(ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"if command -v sha256sum >/dev/null 2>&1; then sha256sum $(sq "$remote_abs"); elif command -v busybox >/dev/null 2>&1; then busybox sha256sum $(sq "$remote_abs"); else exit 127; fi" \
2>/dev/null | awk '{print $1}' | head -n1)"
if [[ "$local_hash" == "$remote_hash" ]]; then
ok=$((ok + 1))
else
echo " MISMATCH-FILE ${rel}"
mismatch=$((mismatch + 1))
fi
done < <(
cd "$saved_root"
for d in "${dirs[@]}"; do
[[ -d "$d" ]] || continue
find "$d" \( -type f -o -type l \) -print
done | sed 's#^\./##' | sort -u
)
log "${profile} (${target}) VERIFY OK=${ok} MISSING=${missing} MISMATCH=${mismatch}"
if [[ $((missing + mismatch)) -gt 0 ]]; then
return 1
fi
return 0
}
profile_override_dirs() {
  # List the immediate subdirectory names of a profile's override root,
  # one per line, sorted (e.g. "etc", "usr"). Plain files are ignored.
  local root="$1"
  find "$root" -mindepth 1 -maxdepth 1 -type d -print | awk -F/ '{print $NF}' | sort
}
sync_remote_utc_time() {
local target="$1"
local stamp="$2"
local stamp_ms="$3"
local fpga_param
if [[ "${SYNC_FPGA_MODE}" == "force" ]]; then
fpga_param="_stime"
else
fpga_param="_time"
fi
local cmd
cmd="set -e;
fpga_sync='skip';
if [ '${SYNC_FPGA_TIME}' = '1' ]; then
if command -v wget >/dev/null 2>&1; then
wget -qO /dev/null 'http://127.0.0.1/ccam.php?${fpga_param}=${stamp_ms}' && fpga_sync='ok' || \
wget -qO /dev/null 'http://localhost/ccam.php?${fpga_param}=${stamp_ms}' && fpga_sync='ok' || true
elif command -v curl >/dev/null 2>&1; then
curl -fsS 'http://127.0.0.1/ccam.php?${fpga_param}=${stamp_ms}' >/dev/null && fpga_sync='ok' || \
curl -fsS 'http://localhost/ccam.php?${fpga_param}=${stamp_ms}' >/dev/null && fpga_sync='ok' || true
fi
if [ "\${fpga_sync}" != 'ok' ] && command -v php >/dev/null 2>&1; then
if [ '${SYNC_FPGA_MODE}' = 'auto' ]; then
php -r \"if(function_exists('elphel_get_fpga_time')&&function_exists('elphel_set_fpga_time')){if(elphel_get_fpga_time()<=100000000){elphel_set_fpga_time(${stamp_ms}/1000.0);} exit(0);} exit(1);\" \
>/dev/null 2>&1 && fpga_sync='ok' || true
else
php -r \"if(function_exists('elphel_set_fpga_time')){elphel_set_fpga_time(${stamp_ms}/1000.0); exit(0);} exit(1);\" \
>/dev/null 2>&1 && fpga_sync='ok' || true
fi
fi
fi
if date -u -s '${stamp}' >/dev/null 2>&1; then
:
elif command -v busybox >/dev/null 2>&1 && busybox date -u -s '${stamp}' >/dev/null 2>&1; then
:
else
exit 1
fi
if command -v hwclock >/dev/null 2>&1; then
hwclock --utc --systohc >/dev/null 2>&1 || hwclock -u -w >/dev/null 2>&1 || hwclock --systohc >/dev/null 2>&1 || true
elif [ -x /etc/init.d/hwclock.sh ]; then
/etc/init.d/hwclock.sh stop >/dev/null 2>&1 || true
fi
echo FPGA_SYNC=\${fpga_sync}"
if [[ "${DRY_RUN}" -eq 1 ]]; then
run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" "$cmd"
log "Synced UTC(system) on ${target} to ${stamp}; attempted RTC update (FPGA mode: ${SYNC_FPGA_MODE}, when enabled)"
return 0
fi
local out
if out="$(ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" "$cmd" 2>&1)"; then
if echo "$out" | grep -q 'FPGA_SYNC=ok'; then
log "Synced UTC(system) and FPGA time on ${target} to ${stamp}; attempted RTC update"
else
log "Synced UTC(system) on ${target} to ${stamp}; attempted RTC update (FPGA sync skipped/failed)"
fi
return 0
fi
log "Warning: failed to sync UTC on ${target}, continuing"
return 1
}
sync_remote_filesystem() {
# Flush the camera's filesystem buffers (triple sync) over ssh so restored
# files survive an abrupt power-off. Logs a warning and returns non-zero
# on failure; under DRY_RUN only prints the command.
local target="$1"
local cmd="sync; sync; sync"
if [[ "${DRY_RUN}" -eq 1 ]]; then
run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" "$cmd"
log "Requested filesystem sync on ${target}"
return 0
fi
if ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" "$cmd" >/dev/null 2>&1; then
log "Flushed filesystem buffers on ${target}"
return 0
fi
log "Warning: failed to flush filesystem buffers on ${target}"
return 1
}
sq() {
  # Single-quote a string for safe embedding in a remote shell command:
  # each embedded ' becomes '\'' and the whole value is wrapped in quotes.
  local value=$1
  value=${value//\'/\'\\\'\'}
  printf "'%s'" "$value"
}
process_profiles() {
local action="$1"
local profile
local ok=0
local fail=0
local sync_stamp=""
local sync_stamp_ms=""
declare -A synced_hosts=()
if [[ "${SYNC_TIME}" -eq 1 ]]; then
sync_stamp="$(date -u '+%Y-%m-%d %H:%M:%S')"
sync_stamp_ms="$(date -u '+%s%3N' 2>/dev/null || printf '%s000' "$(date -u '+%s')")"
log "UTC sync enabled, target time ${sync_stamp}"
fi
for profile in "${SELECTED_PROFILES[@]}"; do
local line p_name p_local_conf p_mmc_dir p_created p_notes
line="$(profile_line_by_name "$CATALOG" "$profile" || true)"
if [[ -z "$line" ]]; then
log "Profile not found in catalog: $profile"
fail=$((fail + 1))
continue
fi
IFS=$'\t' read -r p_name p_local_conf p_mmc_dir p_created p_notes <<<"$line"
p_local_conf="${p_local_conf/#\~/${HOME}}"
p_mmc_dir="${p_mmc_dir/#\~/${HOME}}"
[[ -f "$p_local_conf" ]] || { log "Missing local.conf for $p_name: $p_local_conf"; fail=$((fail + 1)); continue; }
local remote_ip target
remote_ip="$(extract_remote_ip "$p_local_conf" || true)"
[[ -n "$remote_ip" ]] || { log "REMOTE_IP not found in $p_local_conf"; fail=$((fail + 1)); continue; }
target="${SSH_USER}@${remote_ip}"
if [[ "${SYNC_TIME}" -eq 1 && -z "${synced_hosts[$target]:-}" ]]; then
sync_remote_utc_time "$target" "$sync_stamp" "$sync_stamp_ms" || true
synced_hosts["$target"]=1
fi
if [[ "$action" == "verify" ]]; then
local src_root
src_root="${BASE_DIR}/${p_name}"
if [[ ! -d "${src_root}" ]]; then
log "Missing saved overrides: ${src_root}"
fail=$((fail + 1))
continue
fi
local -a override_dirs=()
mapfile -t override_dirs < <(profile_override_dirs "$src_root")
if [[ "${#override_dirs[@]}" -eq 0 ]]; then
log "No override directories under ${src_root} (expected e.g. etc, usr)"
fail=$((fail + 1))
continue
fi
log "Processing ${p_name} (${target})"
if verify_saved_overrides "$p_name" "$target" "$src_root" "${override_dirs[@]}"; then
ok=$((ok + 1))
else
fail=$((fail + 1))
fi
continue
fi
if [[ "$action" == "restore" ]]; then
local src_root
src_root="${BASE_DIR}/${p_name}"
if [[ ! -d "${src_root}" ]]; then
log "Missing saved overrides: ${src_root}"
fail=$((fail + 1))
continue
fi
local -a override_dirs=()
mapfile -t override_dirs < <(profile_override_dirs "$src_root")
if [[ "${#override_dirs[@]}" -eq 0 ]]; then
log "No override directories under ${src_root} (expected e.g. etc, usr)"
fail=$((fail + 1))
continue
fi
log "Processing ${p_name} (${target})"
local d
for d in "${override_dirs[@]}"; do
if ! run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" \
"${target}" "mkdir -p $(sq "/$d")"; then
log "Failed to prepare ${target}:/${d}"
fail=$((fail + 1))
continue 2
fi
if ! run_cmd scp -O -r -p -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" \
"${src_root}/${d}" "${target}:/"; then
log "Restore copy failed for ${p_name} directory /${d}"
fail=$((fail + 1))
continue 2
fi
done
if [[ "${APPLY_DELETES}" -eq 1 && -s "${src_root}/deleted.list" ]]; then
while IFS= read -r rel; do
[[ -n "$rel" ]] || continue
run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${CONNECT_TIMEOUT}" "$target" \
"rm -rf -- $(sq "/etc/${rel}")"
done <"${src_root}/deleted.list"
fi
if ! sync_remote_filesystem "$target"; then
log "Post-restore sync failed for ${p_name} (${target})"
fail=$((fail + 1))
continue
fi
log "Restored overrides from ${src_root} to ${target}:/ (${override_dirs[*]})"
ok=$((ok + 1))
continue
fi
local rootfs_tgz="${p_mmc_dir}/rootfs.tar.gz"
[[ -f "$rootfs_tgz" ]] || { log "Missing rootfs.tar.gz for $p_name: $rootfs_tgz"; fail=$((fail + 1)); continue; }
local tmpdir base_root remote_root report_tsv
tmpdir="$(mktemp -d)"
base_root="${tmpdir}/base"
remote_root="${tmpdir}/remote"
report_tsv="${tmpdir}/diff.tsv"
trap 'rm -rf "$tmpdir" || true' RETURN
log "Processing ${p_name} (${target})"
extract_base_etc "$rootfs_tgz" "$base_root"
if ! extract_remote_etc "$target" "$remote_root"; then
log "Failed to read remote /etc from ${target}"
rm -rf "$tmpdir" || true
trap - RETURN
fail=$((fail + 1))
continue
fi
build_diff_report "${base_root}/etc" "${remote_root}/etc" "$report_tsv"
if [[ "$action" == "diff" ]]; then
print_report_summary "$p_name" "$target" "$report_tsv"
print_mod_content_diffs "$p_name" "$target" "${base_root}/etc" "${remote_root}/etc" "$report_tsv"
rm -rf "$tmpdir" || true
trap - RETURN
ok=$((ok + 1))
continue
fi
if [[ "$action" == "save" ]]; then
local out_dir
out_dir="${BASE_DIR}/${p_name}"
run_cmd rm -rf "$out_dir"
run_cmd mkdir -p "$out_dir/etc"
# Save only ADD/MOD paths from diff report.
# This keeps output aligned with reported differences and avoids mtime-only noise.
copy_reported_overrides "$report_tsv" "${remote_root}/etc" "${out_dir}/etc"
if [[ "${DRY_RUN}" -eq 0 ]]; then
cp -f "$p_local_conf" "${out_dir}/local.conf"
cp -f "$report_tsv" "${out_dir}/diff.tsv"
awk -F'\t' '$1=="ADD" && $2!="timestamp" && $2!="version"{print $2}' "$report_tsv" >"${out_dir}/added.list"
awk -F'\t' '$1=="MOD" && $2!="timestamp" && $2!="version"{print $2}' "$report_tsv" >"${out_dir}/modified.list"
awk -F'\t' '$1=="DEL" && $2!="timestamp" && $2!="version"{print $2}' "$report_tsv" >"${out_dir}/deleted.list"
awk -F'\t' '$1=="ERR"{print $2}' "$report_tsv" >"${out_dir}/errors.list"
awk -F'\t' '($1=="ADD" || $1=="MOD" || $1=="DEL") && ($2=="timestamp" || $2=="version"){print $1 "\t" $2}' \
"$report_tsv" >"${out_dir}/ignored.list"
cat >"${out_dir}/META.txt" <<EOF
profile=${p_name}
remote_ip=${remote_ip}
saved_at=$(date -Is)
local_conf=${p_local_conf}
rootfs=${rootfs_tgz}
EOF
fi
print_report_summary "$p_name" "$target" "$report_tsv"
if [[ "${DRY_RUN}" -eq 0 ]]; then
local ignored_file
ignored_file="$(mktemp)"
awk -F'\t' '($1=="ADD" || $1=="MOD" || $1=="DEL") && ($2=="timestamp" || $2=="version"){print}' \
"$report_tsv" >"$ignored_file"
if [[ -s "$ignored_file" ]]; then
log "Note: ignored during save (kept from flashed image):"
awk -F'\t' '{printf " IGNORED %s %s\n",$1,$2}' "$ignored_file"
fi
rm -f "$ignored_file"
fi
log "Saved sparse overrides to ${out_dir}"
rm -rf "$tmpdir" || true
trap - RETURN
ok=$((ok + 1))
continue
fi
die "internal error: unexpected action '${action}'"
done
log "Summary: action=${action} ok=${ok} fail=${fail}"
if [[ "$fail" -gt 0 ]]; then
return 1
fi
return 0
}
# ---- command dispatch --------------------------------------------------------
# First positional argument selects the sub-command; empty or -h/--help prints
# usage and exits successfully.
command_name="${1:-}"
if [[ -z "$command_name" || "$command_name" == "-h" || "$command_name" == "--help" ]]; then
  usage
  exit 0
fi
shift
case "$command_name" in
  profiles)
    # Delegate catalog management to the dedicated helper script.
    exec "${SCRIPT_DIR}/lwir16_profile_catalog.sh" "$@"
    ;;
  build)
    exec "${SCRIPT_DIR}/lwir16_build_bundle.sh" "$@"
    ;;
  autocampars)
    exec "${SCRIPT_DIR}/autocampars_sync.sh" "$@"
    ;;
  diff|save|restore|verify)
    # Handled in-process below.
    ;;
  *)
    die "unknown command: ${command_name}"
    ;;
esac
# ---- defaults for in-process commands (diff/save/restore/verify) --------------
CATALOG="$CATALOG_DEFAULT"     # profile catalog TSV (default set earlier in this script)
PROFILES_STR=""                # optional space-separated subset of profile names
BASE_DIR="$ETC_BASE_DEFAULT"   # where sparse /etc overrides are stored
SSH_USER="root"
CONNECT_TIMEOUT=7              # seconds, passed as ssh -o ConnectTimeout
APPLY_DELETES=0                # restore: also remove paths listed in deleted.list
SYNC_TIME=1                    # sync remote UTC clock once per host
SYNC_FPGA_TIME=1
SYNC_FPGA_MODE="force"         # "auto" or "force" (validated below)
SHOW_MOD_DIFF=0                # diff: also print content diffs for MOD entries
MOD_DIFF_MAX_LINES=120
MOD_DIFF_MAX_BYTES=262144
DRY_RUN=0
while [[ $# -gt 0 ]]; do
  case "$1" in
    --catalog)
      CATALOG="$2"
      shift 2
      ;;
    --profiles)
      PROFILES_STR="$2"
      shift 2
      ;;
    --base-dir)
      BASE_DIR="$2"
      shift 2
      ;;
    --user)
      SSH_USER="$2"
      shift 2
      ;;
    --connect-timeout)
      CONNECT_TIMEOUT="$2"
      shift 2
      ;;
    --sync-time)
      SYNC_TIME=1
      shift
      ;;
    --no-sync-time)
      SYNC_TIME=0
      shift
      ;;
    --sync-fpga-time)
      SYNC_FPGA_TIME=1
      shift
      ;;
    --no-sync-fpga-time)
      SYNC_FPGA_TIME=0
      shift
      ;;
    --sync-fpga-auto)
      SYNC_FPGA_MODE="auto"
      shift
      ;;
    --sync-fpga-force)
      SYNC_FPGA_MODE="force"
      shift
      ;;
    --sync-fpga-mode)
      SYNC_FPGA_MODE="$2"
      shift 2
      ;;
    --show-mod-diff)
      SHOW_MOD_DIFF=1
      shift
      ;;
    --mod-diff-max-lines)
      MOD_DIFF_MAX_LINES="$2"
      shift 2
      ;;
    --mod-diff-max-bytes)
      MOD_DIFF_MAX_BYTES="$2"
      shift 2
      ;;
    --apply-deletes)
      APPLY_DELETES=1
      shift
      ;;
    --dry-run)
      DRY_RUN=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      die "unknown option: $1"
      ;;
  esac
done
# Expand a leading '~' in user-supplied paths.
CATALOG="${CATALOG/#\~/${HOME}}"
BASE_DIR="${BASE_DIR/#\~/${HOME}}"
# Validate numeric / enum options before doing any work.
[[ "${MOD_DIFF_MAX_LINES}" =~ ^[0-9]+$ ]] || die "--mod-diff-max-lines must be integer"
[[ "${MOD_DIFF_MAX_BYTES}" =~ ^[0-9]+$ ]] || die "--mod-diff-max-bytes must be integer"
[[ "${SYNC_FPGA_MODE}" == "auto" || "${SYNC_FPGA_MODE}" == "force" ]] || die "--sync-fpga-mode must be auto or force"
ensure_catalog "$CATALOG"
declare -a SELECTED_PROFILES=()
if [[ -n "$PROFILES_STR" ]]; then
  # Intentional word-splitting of the user-supplied list.
  # shellcheck disable=SC2206
  SELECTED_PROFILES=(${PROFILES_STR})
else
  # Default: every lwir16_* profile from the catalog (skip the header row).
  mapfile -t SELECTED_PROFILES < <(awk -F'\t' 'NR>1 && $1 ~ /^lwir16_/ {print $1}' "$CATALOG")
fi
[[ "${#SELECTED_PROFILES[@]}" -gt 0 ]] || die "no profiles selected"
if [[ "$command_name" == "save" || "$command_name" == "restore" ]]; then
  # Only save/restore write under BASE_DIR; create it lazily.
  run_cmd mkdir -p "$BASE_DIR"
fi
process_profiles "$command_name"
#!/usr/bin/env python3
"""
Build an overview of *.disk pointers on LWIR16 cameras.
Reads /mnt/sda1/*.disk from each selected host over SSH, parses LBA pointers,
prints a summary table, and optionally saves machine-readable reports.
"""
import argparse
import csv
import json
import subprocess
import sys
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple
@dataclass
class DiskRecord:
    """One parsed /mnt/sda1/*.disk pointer file from a camera host.

    LBA values are 512-byte block addresses; the recording ring spans
    [start_lba, end_lba) and current_lba is this file's pointer.
    """

    host: str
    file_name: str
    size_bytes: int
    mtime_epoch: int
    device: str
    start_lba: int
    current_lba: int
    end_lba: int

    @property
    def used_blocks(self) -> int:
        # Blocks between ring start and this pointer; clamp negatives.
        delta = self.current_lba - self.start_lba
        return delta if delta > 0 else 0

    @property
    def span_blocks(self) -> int:
        # Total ring width in blocks; clamp degenerate geometry to 0.
        width = self.end_lba - self.start_lba
        return width if width > 0 else 0

    @property
    def used_gib(self) -> float:
        return self.used_blocks * 512.0 / 1024.0 ** 3

    @property
    def span_gib(self) -> float:
        return self.span_blocks * 512.0 / 1024.0 ** 3

    @property
    def fill_percent(self) -> float:
        span = self.span_blocks
        if span <= 0:
            return 0.0
        return 100.0 * self.used_blocks / float(span)

    @property
    def mtime_iso_utc(self) -> str:
        # Empty string when the mtime was unreadable (reported as 0).
        if self.mtime_epoch <= 0:
            return ""
        stamp = datetime.fromtimestamp(self.mtime_epoch, tz=timezone.utc)
        return stamp.strftime("%Y-%m-%d %H:%M:%S UTC")
@dataclass
class RecordView:
    """Flattened per-record row consumed by the report printers/writers."""

    host: str
    file_name: str
    mtime_epoch: int
    mtime_iso_utc: str
    pointer_lba: int
    pointer_gib: float  # pointer position relative to ring start, in GiB
    segment_blocks: Optional[int]  # blocks from this pointer to the next marker (None if unknown)
    segment_gib: Optional[float]
    next_marker: str  # file name of the following pointer; "" when not determined
    is_camogm: bool  # True for the live camogm.disk write pointer
@dataclass
class HostView:
    """Per-host ring-buffer summary derived in analyze_records()."""

    host: str
    start_lba: int
    end_lba: int
    ring_blocks: int  # end_lba - start_lba, clamped to >= 0
    ring_gib: float
    camogm_lba: Optional[int]  # live write pointer; None if camogm.disk absent
    camogm_gib: Optional[float]
    free_blocks: Optional[int]  # ring distance from write pointer to oldest marker
    free_gib: Optional[float]
    free_to_marker: str  # name of the marker the free span runs up to ("-" if n/a)
    accounting_delta_blocks: Optional[int]  # ring - (segments + free); 0 means consistent
def parse_args() -> argparse.Namespace:
    """Parse the CLI: host selection, SSH settings, and report output options."""
    parser = argparse.ArgumentParser(
        description="Print/save overview of camera /mnt/sda1/*.disk pointers"
    )
    parser.add_argument("--user", default="root", help="SSH user (default: root)")
    parser.add_argument(
        "--prefix",
        default="192.168.0",
        help="First 3 octets for generated hosts (default: 192.168.0)",
    )
    # 'from' is a keyword, hence dest="start_octet".
    parser.add_argument(
        "--from", dest="start_octet", type=int, default=41, help="Start host number"
    )
    parser.add_argument(
        "--count",
        type=int,
        default=5,
        help="Number of hosts (default: 5 => .41..45)",
    )
    parser.add_argument(
        "--hosts",
        default="",
        help='Space-separated host list, overrides --prefix/--from/--count',
    )
    parser.add_argument(
        "--connect-timeout", type=int, default=7, help="SSH connect timeout in seconds"
    )
    parser.add_argument(
        "--save-prefix",
        default="",
        help=(
            "Output file prefix; default is <repo>/logs/"
            "lwir16_disk_overview_<YYYYmmdd_HHMMSS>"
        ),
    )
    parser.add_argument(
        "--no-save", action="store_true", help="Do not save CSV/JSON files"
    )
    parser.add_argument(
        "--date",
        default="",
        help="Optional UTC date filter for records: YYYY-MM-DD",
    )
    parser.add_argument(
        "--strict",
        action="store_true",
        help="Exit non-zero if any host query fails",
    )
    return parser.parse_args()
def make_hosts(args: argparse.Namespace) -> List[str]:
    """Resolve the target host list from parsed CLI options.

    An explicit, non-blank --hosts list wins; otherwise hosts are generated
    as prefix.N for N in [start_octet, start_octet + count).
    """
    explicit = args.hosts.split()
    if explicit:
        return [h for h in explicit if h]
    generated: List[str] = []
    for offset in range(args.count):
        generated.append(f"{args.prefix}.{args.start_octet + offset}")
    return generated
def run_ssh(
    user: str, host: str, connect_timeout: int
) -> Tuple[int, str, str]:
    """Run the pointer-collection snippet on one host over ssh.

    Returns (returncode, stdout, stderr). BatchMode=yes prevents password
    prompts, so unreachable/unauthorized hosts fail with a non-zero rc
    instead of hanging.
    """
    # Remote side emits one "REC\t..." line per /mnt/sda1/*.disk file:
    # basename, size, mtime, then device/start/current/end taken from the
    # second line of the pointer file. Falls back to busybox stat.
    remote_script = r"""
set -eu
for f in /mnt/sda1/*.disk; do
[ -f "$f" ] || continue
bn="${f##*/}"
set -- $(stat -c "%s %Y" "$f" 2>/dev/null || busybox stat -c "%s %Y" "$f" 2>/dev/null || echo "0 0")
sz="${1:-0}"
mt="${2:-0}"
line=$(sed -n "2p" "$f" | tr '\t' ' ')
set -- $line
dev="${1:-}"
start="${2:-0}"
current="${3:-0}"
end="${4:-0}"
printf "REC\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" "$bn" "$sz" "$mt" "$dev" "$start" "$current" "$end"
done
"""
    cmd = [
        "ssh",
        "-o",
        "BatchMode=yes",
        "-o",
        f"ConnectTimeout={connect_timeout}",
        f"{user}@{host}",
        remote_script,
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    return proc.returncode, proc.stdout, proc.stderr
def parse_host_output(host: str, stdout: str) -> List[DiskRecord]:
    """Convert "REC\t..."-tagged lines produced by run_ssh into DiskRecords.

    Lines that are not REC-tagged, have the wrong field count, or contain
    non-integer numeric fields are silently skipped.
    """
    result: List[DiskRecord] = []
    for raw_line in stdout.splitlines():
        text = raw_line.strip()
        if not text.startswith("REC\t"):
            continue
        fields = text.split("\t")
        if len(fields) != 8:
            continue
        _tag, name, size_s, mtime_s, device, start_s, cur_s, end_s = fields
        try:
            record = DiskRecord(
                host=host,
                file_name=name,
                size_bytes=int(size_s),
                mtime_epoch=int(mtime_s),
                device=device,
                start_lba=int(start_s),
                current_lba=int(cur_s),
                end_lba=int(end_s),
            )
        except ValueError:
            continue
        result.append(record)
    return result
def blocks_to_gib(blocks: int) -> float:
    """Convert a count of 512-byte blocks to GiB."""
    return blocks * 512.0 / (1 << 30)
def ring_distance_blocks(a: int, b: int, start_lba: int, end_lba: int) -> int:
    """Forward distance (in blocks) from pointer a to pointer b on the ring.

    The ring spans [start_lba, end_lba); a degenerate or inverted span
    yields 0. When b lies behind a the distance wraps around end_lba.
    """
    if end_lba - start_lba <= 0:
        return 0
    if b >= a:
        return b - a
    # Wrap: tail from a to ring end, plus head from ring start to b.
    return (end_lba - a) + (b - start_lba)
def date_filter_ok(epoch: int, date_ymd: str) -> bool:
    """True when epoch falls on date_ymd (UTC). An empty filter accepts all.

    Records with an unknown mtime (epoch <= 0) never match a real filter.
    """
    if not date_ymd:
        return True
    if epoch <= 0:
        return False
    stamp = datetime.fromtimestamp(epoch, tz=timezone.utc)
    return stamp.strftime("%Y-%m-%d") == date_ymd
def short_ts(epoch: int) -> str:
    """Compact UTC timestamp (MM-DD HH:MM) for table cells; '-' if unknown."""
    if epoch > 0:
        return datetime.fromtimestamp(epoch, tz=timezone.utc).strftime("%m-%d %H:%M")
    return "-"
def analyze_records(
    records: List[DiskRecord],
) -> Tuple[List[RecordView], Dict[str, HostView]]:
    """Derive per-record views and per-host ring summaries.

    Each host's recording area is treated as a ring buffer spanning
    [start_lba, end_lba). camogm.disk (when present) is the live write
    pointer; the other *.disk files mark the start of named recordings.
    A segment size is the forward ring distance from one marker to the
    next marker (the newest named marker's segment ends at the camogm
    pointer).
    """
    by_host: Dict[str, List[DiskRecord]] = {}
    for rec in records:
        by_host.setdefault(rec.host, []).append(rec)
    all_views: List[RecordView] = []
    host_views: Dict[str, HostView] = {}
    for host, host_records in by_host.items():
        if not host_records:
            continue
        camogm = next((r for r in host_records if r.file_name == "camogm.disk"), None)
        # Ring geometry comes from camogm.disk when available, else any record.
        geom = camogm if camogm is not None else host_records[0]
        start_lba = geom.start_lba
        end_lba = geom.end_lba
        ring_blocks = max(0, end_lba - start_lba)
        named = [r for r in host_records if r.file_name != "camogm.disk"]
        named_sorted = sorted(named, key=lambda r: (r.current_lba, r.file_name))
        segment_blocks_by_name: Dict[str, int] = {}
        next_marker_by_name: Dict[str, str] = {}
        free_blocks: Optional[int] = None
        free_to_marker = "-"
        if camogm is not None and named_sorted:
            # Newest named marker = last one at or before the camogm write
            # pointer; falls back to the highest marker when the pointer
            # has wrapped below all of them.
            latest_idx = -1
            for idx, rec in enumerate(named_sorted):
                if rec.current_lba <= camogm.current_lba:
                    latest_idx = idx
            if latest_idx < 0:
                latest_idx = len(named_sorted) - 1
            for idx, rec in enumerate(named_sorted):
                if idx == latest_idx:
                    # The newest recording ends at the live write pointer.
                    seg = ring_distance_blocks(
                        rec.current_lba, camogm.current_lba, start_lba, end_lba
                    )
                    nxt = "camogm.disk"
                else:
                    nxt_rec = named_sorted[(idx + 1) % len(named_sorted)]
                    seg = ring_distance_blocks(
                        rec.current_lba, nxt_rec.current_lba, start_lba, end_lba
                    )
                    nxt = nxt_rec.file_name
                segment_blocks_by_name[rec.file_name] = seg
                next_marker_by_name[rec.file_name] = nxt
            # Free space: from the write pointer forward to the oldest marker.
            oldest_idx = (latest_idx + 1) % len(named_sorted)
            oldest = named_sorted[oldest_idx]
            free_blocks = ring_distance_blocks(
                camogm.current_lba, oldest.current_lba, start_lba, end_lba
            )
            free_to_marker = oldest.file_name
        elif camogm is not None and not named_sorted:
            # No named recordings: the whole ring counts as free.
            free_blocks = ring_blocks
            free_to_marker = "-"
        elif camogm is None and named_sorted:
            # No live pointer: chain the named markers to each other only;
            # free space cannot be determined.
            for idx, rec in enumerate(named_sorted):
                nxt_rec = named_sorted[(idx + 1) % len(named_sorted)]
                seg = ring_distance_blocks(
                    rec.current_lba, nxt_rec.current_lba, start_lba, end_lba
                )
                segment_blocks_by_name[rec.file_name] = seg
                next_marker_by_name[rec.file_name] = nxt_rec.file_name
        accounting_delta_blocks: Optional[int] = None
        if camogm is not None:
            # Sanity check: segments + free should exactly cover the ring.
            used_blocks_sum = sum(segment_blocks_by_name.values())
            free = free_blocks if free_blocks is not None else 0
            accounting_delta_blocks = ring_blocks - (used_blocks_sum + free)
        for rec in sorted(host_records, key=lambda r: (r.current_lba, r.file_name)):
            ptr_blocks = ring_distance_blocks(
                start_lba, rec.current_lba, start_lba, end_lba
            )
            seg_blocks = segment_blocks_by_name.get(rec.file_name)
            all_views.append(
                RecordView(
                    host=host,
                    file_name=rec.file_name,
                    mtime_epoch=rec.mtime_epoch,
                    mtime_iso_utc=rec.mtime_iso_utc,
                    pointer_lba=rec.current_lba,
                    pointer_gib=blocks_to_gib(ptr_blocks),
                    segment_blocks=seg_blocks,
                    segment_gib=(
                        None if seg_blocks is None else blocks_to_gib(seg_blocks)
                    ),
                    next_marker=next_marker_by_name.get(rec.file_name, ""),
                    is_camogm=(rec.file_name == "camogm.disk"),
                )
            )
        host_views[host] = HostView(
            host=host,
            start_lba=start_lba,
            end_lba=end_lba,
            ring_blocks=ring_blocks,
            ring_gib=blocks_to_gib(ring_blocks),
            camogm_lba=None if camogm is None else camogm.current_lba,
            camogm_gib=(
                None
                if camogm is None
                else blocks_to_gib(
                    ring_distance_blocks(
                        start_lba, camogm.current_lba, start_lba, end_lba
                    )
                )
            ),
            free_blocks=free_blocks,
            free_gib=(None if free_blocks is None else blocks_to_gib(free_blocks)),
            free_to_marker=free_to_marker,
            accounting_delta_blocks=accounting_delta_blocks,
        )
    return all_views, host_views
def print_host_summary(host_views: Dict[str, HostView], hosts: List[str]) -> None:
    """Print one table row per host: ring size, write pointer, free space."""
    print("\nHost ring summary")
    print(
        f"{'Host':<15} {'Ring GiB':>9} {'camogm GiB':>10} {'Free GiB':>10} "
        f"{'Free->':<24} {'Delta blocks':>12}"
    )
    print("-" * 92)
    for host in hosts:
        hv = host_views.get(host)
        if hv is None:
            # Host returned nothing: keep a placeholder row so columns align.
            print(f"{host:<15} {'-':>9} {'-':>10} {'-':>10} {'-':<24} {'-':>12}")
            continue
        cam_s = "-" if hv.camogm_gib is None else f"{hv.camogm_gib:.2f}"
        free_s = "-" if hv.free_gib is None else f"{hv.free_gib:.2f}"
        delta_s = (
            "-"
            if hv.accounting_delta_blocks is None
            else str(hv.accounting_delta_blocks)
        )
        print(
            f"{host:<15} {hv.ring_gib:9.2f} {cam_s:>10} {free_s:>10} "
            f"{hv.free_to_marker:<24} {delta_s:>12}"
        )
def print_per_host(views: List[RecordView], hosts: List[str], date_ymd: str) -> None:
    """Print the named recordings for each host (camogm pointer excluded).

    date_ymd (UTC YYYY-MM-DD, optional) restricts rows by file mtime.
    """
    print("\nPer-host recordings (.disk -> next pointer size)")
    print(
        f"{'Host':<15} {'Record':<26} {'UTC mtime':<20} "
        f"{'Ptr GiB':>8} {'Rec GiB':>8} {'Next':<24}"
    )
    print("-" * 116)
    for host in hosts:
        host_views = [
            v
            for v in views
            if v.host == host and (not v.is_camogm) and date_filter_ok(v.mtime_epoch, date_ymd)
        ]
        # Rows in ring order (pointer position), then by name for stability.
        host_views.sort(key=lambda v: (v.pointer_lba, v.file_name))
        if not host_views:
            print(f"{host:<15} {'(no matching records)':<26}")
            continue
        for v in host_views:
            rec_s = "-" if v.segment_gib is None else f"{v.segment_gib:.2f}"
            print(
                f"{v.host:<15} {v.file_name:<26} {v.mtime_iso_utc:<20} "
                f"{v.pointer_gib:8.2f} {rec_s:>8} {v.next_marker:<24}"
            )
def print_combined(views: List[RecordView], hosts: List[str], date_ymd: str) -> None:
    """Print a pivot table: one row per record name, one column per host."""
    by_name: Dict[str, Dict[str, RecordView]] = {}
    for v in views:
        if v.is_camogm:
            continue
        if not date_filter_ok(v.mtime_epoch, date_ymd):
            continue
        by_name.setdefault(v.file_name, {})[v.host] = v
    print("\nCombined by record name (size to next pointer)")
    header_cells = ["Record", "Hosts", "UTC range"]
    for host in hosts:
        # Column headers use the last octet of the host address.
        header_cells.append(host.rsplit(".", 1)[-1])
    widths = [26, 8, 25] + [16] * len(hosts)
    fmt = " ".join(f"{{:{w}}}" for w in widths)
    print(fmt.format(*header_cells))
    print("-" * (sum(widths) + len(widths) - 1))

    def row_key(item: Tuple[str, Dict[str, RecordView]]) -> Tuple[int, int, str]:
        # Sort order: newest mtime first, then most hosts, then name.
        name, host_map = item
        ts_values = [r.mtime_epoch for r in host_map.values() if r.mtime_epoch > 0]
        latest = max(ts_values) if ts_values else 0
        return (-latest, -len(host_map), name)

    for name, host_map in sorted(by_name.items(), key=row_key):
        ts_values = [r.mtime_epoch for r in host_map.values() if r.mtime_epoch > 0]
        if ts_values:
            rng = (
                datetime.fromtimestamp(min(ts_values), tz=timezone.utc).strftime(
                    "%Y-%m-%d %H:%M"
                )
                + " .. "
                + datetime.fromtimestamp(max(ts_values), tz=timezone.utc).strftime(
                    "%Y-%m-%d %H:%M"
                )
            )
        else:
            rng = "-"
        cells = [name, f"{len(host_map)}/{len(hosts)}", rng]
        for host in hosts:
            v = host_map.get(host)
            if v is None or v.segment_gib is None:
                cells.append("-")
            else:
                cells.append(f"{v.segment_gib:5.1f}G@{short_ts(v.mtime_epoch)}")
        print(fmt.format(*cells))
def print_expected_pattern(views: List[RecordView], date_ymd: str) -> None:
    """
    Expected: hosts .41..44 have similar sizes, .45 is smaller.
    """
    # Group views by record name -> host (camogm and size-less rows excluded).
    by_name: Dict[str, Dict[str, RecordView]] = {}
    for v in views:
        if v.is_camogm or v.segment_gib is None:
            continue
        if not date_filter_ok(v.mtime_epoch, date_ymd):
            continue
        by_name.setdefault(v.file_name, {})[v.host] = v
    print("\nPattern check (.41..44 similar, .45 smaller)")
    print(
        f"{'Record':<26} {'41-44 mean GiB':>14} {'41-44 spread%':>14} "
        f"{'45 GiB':>10} {'45/mean':>10}"
    )
    print("-" * 80)
    for name, host_map in sorted(by_name.items()):
        values_41_44: List[float] = []
        val_45: Optional[float] = None
        for host, view in host_map.items():
            # Classify hosts by the last octet of the address.
            tail = host.rsplit(".", 1)[-1]
            if tail in {"41", "42", "43", "44"}:
                values_41_44.append(view.segment_gib)
            elif tail == "45":
                val_45 = view.segment_gib
        if values_41_44:
            mean_41_44 = sum(values_41_44) / len(values_41_44)
            max_v = max(values_41_44)
            min_v = min(values_41_44)
            # Spread: (max - min) as a percentage of the mean.
            spread = 0.0 if mean_41_44 == 0 else 100.0 * (max_v - min_v) / mean_41_44
        else:
            mean_41_44 = 0.0
            spread = 0.0
        if val_45 is not None and mean_41_44 > 0:
            ratio_s = f"{(val_45 / mean_41_44):.3f}"
        else:
            ratio_s = "-"
        mean_s = f"{mean_41_44:.2f}" if values_41_44 else "-"
        spread_s = f"{spread:.2f}" if values_41_44 else "-"
        v45_s = f"{val_45:.2f}" if val_45 is not None else "-"
        print(f"{name:<26} {mean_s:>14} {spread_s:>14} {v45_s:>10} {ratio_s:>10}")
def default_save_prefix(script_path: Path) -> Path:
    """Default report prefix: <repo>/logs/lwir16_disk_overview_<timestamp>.

    The repo root is taken to be two levels above this script, and the
    logs directory is created if needed. NOTE(review): the timestamp uses
    local time while the rest of the script reports UTC — confirm intended.
    """
    logs_dir = script_path.parent.parent / "logs"
    logs_dir.mkdir(parents=True, exist_ok=True)
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    return logs_dir / ("lwir16_disk_overview_" + stamp)
def save_reports(
    prefix: Path,
    records: List[DiskRecord],
    views: List[RecordView],
    host_views: Dict[str, HostView],
    hosts: List[str],
    date_ymd: str,
) -> None:
    """Write machine-readable reports next to the given path prefix.

    Produces four files:
      <prefix>.csv           one row per record view
      <prefix>_combined.csv  one row per record name, pivoted across hosts
      <prefix>_hosts.csv     per-host ring summary
      <prefix>.json          full dump (records, views, host summaries)
    date_ymd (UTC YYYY-MM-DD, optional) filters the combined CSV only.
    """
    csv_path = prefix.with_suffix(".csv")
    json_path = prefix.with_suffix(".json")
    combined_csv_path = prefix.parent / f"{prefix.name}_combined.csv"
    host_summary_path = prefix.parent / f"{prefix.name}_hosts.csv"
    with csv_path.open("w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(
            [
                "host",
                "file_name",
                "mtime_epoch",
                "mtime_iso_utc",
                "pointer_lba",
                "pointer_gib",
                "segment_blocks",
                "segment_gib",
                "next_marker",
                "is_camogm",
            ]
        )
        for view in sorted(views, key=lambda v: (v.host, v.pointer_lba, v.file_name)):
            writer.writerow(
                [
                    view.host,
                    view.file_name,
                    view.mtime_epoch,
                    view.mtime_iso_utc,
                    view.pointer_lba,
                    f"{view.pointer_gib:.6f}",
                    "" if view.segment_blocks is None else view.segment_blocks,
                    "" if view.segment_gib is None else f"{view.segment_gib:.6f}",
                    view.next_marker,
                    int(view.is_camogm),
                ]
            )
    # Combined CSV: pivot record names across hosts (camogm excluded,
    # optional date filter applied).
    by_name: Dict[str, Dict[str, RecordView]] = {}
    for view in views:
        if view.is_camogm:
            continue
        if not date_filter_ok(view.mtime_epoch, date_ymd):
            continue
        by_name.setdefault(view.file_name, {})[view.host] = view
    with combined_csv_path.open("w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(
            ["record", "hosts_present", "hosts_total", "mtime_min_utc", "mtime_max_utc"]
            + [f"{h}_record_gib" for h in hosts]
        )
        for name in sorted(by_name):
            host_map = by_name[name]
            ts_values = [r.mtime_epoch for r in host_map.values() if r.mtime_epoch > 0]
            mmin = (
                datetime.fromtimestamp(min(ts_values), tz=timezone.utc).strftime(
                    "%Y-%m-%d %H:%M:%S"
                )
                if ts_values
                else ""
            )
            mmax = (
                datetime.fromtimestamp(max(ts_values), tz=timezone.utc).strftime(
                    "%Y-%m-%d %H:%M:%S"
                )
                if ts_values
                else ""
            )
            row = [name, len(host_map), len(hosts), mmin, mmax]
            for host in hosts:
                view = host_map.get(host)
                row.append(
                    ""
                    if (view is None or view.segment_gib is None)
                    else f"{view.segment_gib:.6f}"
                )
            writer.writerow(row)
    with host_summary_path.open("w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(
            [
                "host",
                "ring_gib",
                "camogm_gib",
                "free_gib",
                "free_to_marker",
                "accounting_delta_blocks",
            ]
        )
        for host in hosts:
            hv = host_views.get(host)
            if hv is None:
                # Host failed or returned nothing: keep the row, blank fields.
                writer.writerow([host, "", "", "", "", ""])
                continue
            writer.writerow(
                [
                    host,
                    f"{hv.ring_gib:.6f}",
                    "" if hv.camogm_gib is None else f"{hv.camogm_gib:.6f}",
                    "" if hv.free_gib is None else f"{hv.free_gib:.6f}",
                    hv.free_to_marker,
                    ""
                    if hv.accounting_delta_blocks is None
                    else hv.accounting_delta_blocks,
                ]
            )
    # JSON dump is unfiltered: full records, views, and host summaries.
    payload = {
        "generated_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC"),
        "hosts": hosts,
        "records": [asdict(rec) for rec in records],
        "views": [asdict(v) for v in views],
        "host_summary": {h: asdict(v) for h, v in host_views.items()},
    }
    with json_path.open("w") as f:
        json.dump(payload, f, indent=2, sort_keys=True)
    print("\nSaved:")
    print(f" {csv_path}")
    print(f" {combined_csv_path}")
    print(f" {host_summary_path}")
    print(f" {json_path}")
def main() -> int:
    """Query all hosts, print the overview tables, optionally save reports.

    Returns 0 on success, 1 when --strict is set and any host query
    failed, 2 on an empty host list.
    """
    args = parse_args()
    hosts = make_hosts(args)
    if not hosts:
        print("ERROR: empty host list", file=sys.stderr)
        return 2
    all_records: List[DiskRecord] = []
    failed_hosts: List[str] = []
    print("Querying hosts:")
    for host in hosts:
        print(f" {host}")
        rc, stdout, stderr = run_ssh(args.user, host, args.connect_timeout)
        if rc != 0:
            # Keep going on per-host failures; report them at the end.
            failed_hosts.append(host)
            err = stderr.strip().splitlines()
            if err:
                print(f" WARNING: {host}: {err[-1]}", file=sys.stderr)
            else:
                print(f" WARNING: {host}: ssh failed (rc={rc})", file=sys.stderr)
            continue
        all_records.extend(parse_host_output(host, stdout))
    # Analyze once and reuse for both printing and saving (the previous
    # version recomputed analyze_records() a second time before saving).
    views, host_views = analyze_records(all_records)
    if not all_records:
        print("No records found.")
    else:
        print_host_summary(host_views, hosts)
        print_per_host(views, hosts, args.date)
        print_combined(views, hosts, args.date)
        print_expected_pattern(views, args.date)
    if not args.no_save:
        prefix = Path(args.save_prefix) if args.save_prefix else default_save_prefix(Path(__file__))
        prefix = prefix.expanduser()
        # Path.parent is always a valid (truthy) path; just ensure it exists.
        prefix.parent.mkdir(parents=True, exist_ok=True)
        save_reports(prefix, all_records, views, host_views, hosts, args.date)
    if failed_hosts:
        print(
            f"\nHost query failures: {', '.join(failed_hosts)}",
            file=sys.stderr,
        )
        if args.strict:
            return 1
    return 0
if __name__ == "__main__":
    # Propagate main()'s status code to the shell.
    sys.exit(main())
#!/usr/bin/env bash
# lwir16_profile_catalog.sh — manage the TSV catalog that maps LWIR16 build
# profile names to a local.conf plus a prebuilt mmc artifact directory.
set -euo pipefail
# Resolve the repo layout relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
WORKSPACE="${REPO_ROOT}/workspace"
BUILD_CONF_DIR="${WORKSPACE}/poky/build/conf"
DEFAULT_CATALOG="${BUILD_CONF_DIR}/mmc_profiles.tsv"
# Print CLI help (quoted heredoc: no expansion inside).
usage() {
  cat <<'EOF'
Usage:
lwir16_profile_catalog.sh <command> [options]
Commands:
init Create catalog file if missing
list List catalog entries
show Show one profile
register Register existing local.conf + mmc directory
snapshot Copy current mmc directory to versions/, store local.conf, register
activate Copy selected profile local.conf to build/conf/local.conf
remove Remove profile entry from catalog
Common options:
--catalog PATH Catalog path (default: workspace/poky/build/conf/mmc_profiles.tsv)
-h, --help Show this help
Command options:
show/remove/activate:
--name NAME
register:
--name NAME
--local-conf PATH
--mmc-dir PATH
--notes TEXT
--replace
snapshot:
--name NAME
--local-conf PATH (default: workspace/poky/build/conf/local.conf)
--mmc-src PATH (default: workspace/bootable-images/mmc)
--dest-root PATH (default: workspace/bootable-images/versions/<year>)
--notes TEXT
--replace
Examples:
lwir16_profile_catalog.sh init
lwir16_profile_catalog.sh register --name lwir16_boson640_41 \
--local-conf /work/elphel393/poky/build/conf/versions_107/local_boson_41.conf \
--mmc-dir /home/elphel/git/imagej-elphel/attic/elphel393-docker/workspace/bootable-images/versions/2026/mmc_boson640_41-2026
lwir16_profile_catalog.sh list
lwir16_profile_catalog.sh activate --name lwir16_boson640_41
EOF
}
# Print an error message to stderr and abort the script with status 1.
die() {
  printf 'ERROR: %s\n' "$*" >&2
  exit 1
}
# Timestamped progress message on stdout: "[YYYY-MM-DD HH:MM:SS] msg".
log() {
  printf '[%s] ' "$(date '+%F %T')"
  printf '%s\n' "$*"
}
# Map legacy container paths (/work/elphel393/...) onto the local workspace
# and expand a leading '~' to $HOME. Prints the result on stdout.
normalize_path() {
  local path="${1:-}"
  path="${path/#\~/${HOME}}"
  case "$path" in
    /work/elphel393/*)
      printf '%s\n' "${WORKSPACE}/${path#/work/elphel393/}"
      ;;
    *)
      printf '%s\n' "$path"
      ;;
  esac
}
# Return an absolute, normalized form of a path:
#  - legacy-prefix and '~' handling via normalize_path
#  - relative paths are anchored at $PWD
#  - trailing slash removed, repeated slashes fully collapsed
abspath() {
  local p
  p="$(normalize_path "$1")"
  if [[ "$p" != /* ]]; then
    p="$(pwd)/$p"
  fi
  p="${p%/}"
  # Loop until no '//' remains: a single substitution pass leaves residue
  # for odd-length slash runs (e.g. 'a///b' -> 'a//b').
  while [[ "$p" == *//* ]]; do
    p="${p//\/\//\/}"
  done
  printf '%s\n' "$p"
}
# Create the catalog file (with its header row) when missing or empty;
# otherwise verify that the existing header matches the expected schema.
ensure_catalog() {
  local cat_file="$1"
  mkdir -p "$(dirname "$cat_file")"
  if [[ ! -s "$cat_file" ]]; then
    # -s is false for both a missing and a zero-length file.
    printf 'name\tlocal_conf\tmmc_dir\tcreated_at\tnotes\n' >"$cat_file"
    return
  fi
  local header
  header="$(head -n1 "$cat_file" || true)"
  if [[ "$header" != $'name\tlocal_conf\tmmc_dir\tcreated_at\tnotes' ]]; then
    die "catalog header mismatch in $cat_file"
  fi
}
# Print the first catalog row whose name column equals $2 (header skipped);
# prints nothing when the profile is not present.
profile_line_by_name() {
  local cat_file="$1" wanted="$2"
  awk -F'\t' -v n="$wanted" 'NR>1 && $1==n {print; exit}' "$cat_file"
}
# Flatten tabs and newlines in free-form notes to single spaces so that a
# note can never break the one-row-per-profile TSV format.
sanitize_notes() {
  local text="${1:-}"
  text="${text//$'\t'/ }"
  text="${text//$'\n'/ }"
  printf '%s\n' "$text"
}
# Insert or replace a catalog row keyed by profile name. The rewrite goes
# to a temp file first, then replaces the catalog in a single mv.
upsert_profile() {
  local cat_file="$1" name="$2" local_conf="$3" mmc_dir="$4"
  local created_at="$5" notes="$6"
  local scratch
  scratch="$(mktemp)"
  awk -F'\t' -v OFS='\t' \
    -v n="$name" -v lc="$local_conf" -v md="$mmc_dir" -v ct="$created_at" -v no="$notes" '
    NR==1 {print; next}
    $1==n {print n,lc,md,ct,no; found=1; next}
    {print}
    END {if (!found) print n,lc,md,ct,no}
  ' "$cat_file" >"$scratch"
  mv "$scratch" "$cat_file"
}
# Delete the catalog row for a profile name.
# The awk program exits with status 3 when no row matched; that status is
# translated into a friendly error while any other failure (e.g. I/O) is
# propagated unchanged. The temp file is cleaned up on every failure path.
remove_profile() {
  local cat_file="$1"
  local name="$2"
  local tmp
  tmp="$(mktemp)"
  awk -F'\t' -v OFS='\t' -v n="$name" '
    NR==1 {print; next}
    $1==n {removed=1; next}
    {print}
    END {if (!removed) exit 3}
  ' "$cat_file" >"$tmp" || {
    local rc=$?
    rm -f "$tmp"
    if [[ "$rc" -eq 3 ]]; then
      die "profile not found: $name"
    fi
    exit "$rc"
  }
  mv "$tmp" "$cat_file"
}
# Abort unless the given local.conf path exists as a regular file.
validate_local_conf() {
  local conf_path="$1"
  if [[ ! -f "$conf_path" ]]; then
    die "local.conf file not found: $conf_path"
  fi
}
# Abort unless the directory exists and contains every artifact required
# to flash a camera: boot chain, device tree, kernel, and rootfs.
validate_mmc_dir() {
  local dir_path="$1"
  [[ -d "$dir_path" ]] || die "mmc directory not found: $dir_path"
  local artifact
  for artifact in boot.bin u-boot-dtb.img devicetree.dtb uImage rootfs.tar.gz; do
    [[ -f "$dir_path/$artifact" ]] || die "missing $artifact in $dir_path"
  done
}
# Regenerate SHA256SUMS over the flashable artifacts inside an mmc dir.
# Runs inside a subshell so the caller's working directory is untouched.
write_sha256sums() {
  local dir_path="$1"
  (
    cd "$dir_path" || exit 1
    sha256sum boot.bin u-boot-dtb.img devicetree.dtb uImage rootfs.tar.gz >SHA256SUMS
  )
}
# ---- entry point ---------------------------------------------------------
# First positional argument is the sub-command; nothing or -h/--help prints
# usage and exits successfully.
command_name="${1:-}"
if [[ -z "$command_name" || "$command_name" == "-h" || "$command_name" == "--help" ]]; then
  usage
  exit 0
fi
shift
# Option defaults shared across sub-commands (each command uses a subset).
catalog="$DEFAULT_CATALOG"
name=""
local_conf=""
mmc_dir=""
mmc_src="${WORKSPACE}/bootable-images/mmc"
dest_root="${WORKSPACE}/bootable-images/versions/$(date +%Y)"
notes=""
replace=0
while [[ $# -gt 0 ]]; do
  case "$1" in
    --catalog)
      catalog="$2"
      shift 2
      ;;
    --name)
      name="$2"
      shift 2
      ;;
    --local-conf)
      local_conf="$2"
      shift 2
      ;;
    --mmc-dir)
      mmc_dir="$2"
      shift 2
      ;;
    --mmc-src)
      mmc_src="$2"
      shift 2
      ;;
    --dest-root)
      dest_root="$2"
      shift 2
      ;;
    --notes)
      notes="$2"
      shift 2
      ;;
    --replace)
      replace=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      die "unknown option: $1"
      ;;
  esac
done
catalog="$(abspath "$catalog")"
notes="$(sanitize_notes "$notes")"
case "$command_name" in
  init)
    ensure_catalog "$catalog"
    log "Catalog ready: $catalog"
    ;;
  list)
    ensure_catalog "$catalog"
    # Pretty-print the TSV when column(1) is available.
    if command -v column >/dev/null 2>&1; then
      column -ts $'\t' "$catalog"
    else
      cat "$catalog"
    fi
    ;;
  show)
    ensure_catalog "$catalog"
    [[ -n "$name" ]] || die "--name is required"
    line="$(profile_line_by_name "$catalog" "$name" || true)"
    [[ -n "${line:-}" ]] || die "profile not found: $name"
    IFS=$'\t' read -r p_name p_lc p_md p_ct p_no <<<"$line"
    printf 'name: %s\n' "$p_name"
    printf 'local_conf: %s\n' "$p_lc"
    printf 'mmc_dir: %s\n' "$p_md"
    printf 'created_at: %s\n' "$p_ct"
    printf 'notes: %s\n' "$p_no"
    ;;
  remove)
    ensure_catalog "$catalog"
    [[ -n "$name" ]] || die "--name is required"
    remove_profile "$catalog" "$name"
    log "Removed profile: $name"
    ;;
  register)
    # Register an existing local.conf + prebuilt mmc directory as a profile.
    ensure_catalog "$catalog"
    [[ -n "$name" ]] || die "--name is required"
    [[ -n "$local_conf" ]] || die "--local-conf is required"
    [[ -n "$mmc_dir" ]] || die "--mmc-dir is required"
    local_conf="$(abspath "$local_conf")"
    mmc_dir="$(abspath "$mmc_dir")"
    validate_local_conf "$local_conf"
    validate_mmc_dir "$mmc_dir"
    write_sha256sums "$mmc_dir"
    existing="$(profile_line_by_name "$catalog" "$name" || true)"
    if [[ -n "$existing" && "$replace" -ne 1 ]]; then
      die "profile '$name' already exists (use --replace to overwrite)"
    fi
    created_at="$(date -Is)"
    upsert_profile "$catalog" "$name" "$local_conf" "$mmc_dir" "$created_at" "$notes"
    log "Registered profile: $name"
    ;;
  snapshot)
    # Copy the current mmc build output into versions/<year>/<name> and
    # register the copy together with the (active) local.conf.
    ensure_catalog "$catalog"
    [[ -n "$name" ]] || die "--name is required"
    if [[ -z "$local_conf" ]]; then
      local_conf="${BUILD_CONF_DIR}/local.conf"
    fi
    local_conf="$(abspath "$local_conf")"
    mmc_src="$(abspath "$mmc_src")"
    dest_root="$(abspath "$dest_root")"
    validate_local_conf "$local_conf"
    validate_mmc_dir "$mmc_src"
    snapshot_dir="${dest_root}/${name}"
    if [[ -e "$snapshot_dir" ]]; then
      if [[ "$replace" -eq 1 ]]; then
        rm -rf "$snapshot_dir"
      else
        die "snapshot destination exists: $snapshot_dir (use --replace)"
      fi
    fi
    mkdir -p "$snapshot_dir"
    cp -a "${mmc_src}/." "$snapshot_dir/"
    cp -f "$local_conf" "$snapshot_dir/local.conf"
    write_sha256sums "$snapshot_dir"
    existing="$(profile_line_by_name "$catalog" "$name" || true)"
    if [[ -n "$existing" && "$replace" -ne 1 ]]; then
      die "profile '$name' already exists (use --replace)"
    fi
    created_at="$(date -Is)"
    upsert_profile "$catalog" "$name" "$snapshot_dir/local.conf" "$snapshot_dir" "$created_at" "$notes"
    log "Snapshot created: $snapshot_dir"
    log "Registered profile: $name"
    ;;
  activate)
    # Make the selected profile's local.conf the active build configuration.
    ensure_catalog "$catalog"
    [[ -n "$name" ]] || die "--name is required"
    line="$(profile_line_by_name "$catalog" "$name" || true)"
    [[ -n "${line:-}" ]] || die "profile not found: $name"
    IFS=$'\t' read -r p_name p_lc p_md p_ct p_no <<<"$line"
    validate_local_conf "$p_lc"
    validate_mmc_dir "$p_md"
    cp -f "$p_lc" "${BUILD_CONF_DIR}/local.conf"
    log "Activated profile: $p_name"
    log "local.conf <- $p_lc"
    log "mmc dir $p_md"
    ;;
  *)
    die "unknown command: $command_name"
    ;;
esac
#!/usr/bin/env bash
# Thin compatibility wrapper: forward all arguments to the catalog helper
# that lives next to this script.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
exec "${SCRIPT_DIR}/lwir16_profile_catalog.sh" "$@"
#!/usr/bin/env bash
# reset_camera_ssh_keys.sh — refresh known_hosts entries and push SSH keys
# to a set of camera hosts.
set -euo pipefail
# Print CLI help (quoted heredoc: no expansion inside).
usage() {
  cat <<'EOF'
Usage:
reset_camera_ssh_keys.sh [host ...]
reset_camera_ssh_keys.sh [--user USER] [--password PASS] [--prefix A.B.C] [--from N] [--count N]
reset_camera_ssh_keys.sh [--user USER] [--password PASS] [--hosts "H1 H2 ..."]
Description:
1) Removes known-host entries for selected hosts from ~/.ssh/known_hosts
2) Runs ssh-copy-id for each host (default credentials: root/pass)
Defaults:
user = root
password = pass
prefix = 192.168.0
from = 41
count = 6 (hosts .41 .. .46)
Options:
--user USER SSH user (default: root)
--password PASS Password used by sshpass (default: pass)
--prefix A.B.C First 3 octets for generated hosts (default: 192.168.0)
--from N Start host number (default: 41)
--count N Number of hosts (default: 6)
--hosts "H1 H2 ..." Space-separated host list (IPs or hostnames)
--known-hosts PATH known_hosts file path (default: ~/.ssh/known_hosts)
--connect-timeout S SSH connect timeout seconds (default: 7)
--no-copy Only remove known hosts, do not run ssh-copy-id
-h, --help Show this help
EOF
}
log() {
  # Emit a timestamped status line on stdout: "[YYYY-MM-DD HH:MM:SS] message".
  local stamp
  stamp="$(date '+%F %T')"
  printf '[%s] %s\n' "$stamp" "$*"
}
need_cmd() {
  # Abort the whole script with a clear message unless "$1" is found in PATH.
  if ! command -v "$1" >/dev/null 2>&1; then
    echo "ERROR: required command not found: $1" >&2
    exit 1
  fi
}
# ssh-keygen is always required (known_hosts cleanup). ssh-copy-id is only
# required when keys are actually installed; it is checked after option
# parsing (gated on do_copy) so that "--no-copy" runs work on systems
# without ssh-copy-id installed.
need_cmd ssh-keygen

# --- defaults ---------------------------------------------------------------
ssh_user="root"
ssh_pass="pass"
prefix="192.168.0"
start_octet=41
count=6
hosts_csv=""
known_hosts="${HOME}/.ssh/known_hosts"
connect_timeout=7
do_copy=1
declare -a explicit_hosts=()

# --- option parsing ---------------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --user)
      ssh_user="$2"
      shift 2
      ;;
    --password)
      ssh_pass="$2"
      shift 2
      ;;
    --prefix)
      prefix="$2"
      shift 2
      ;;
    --from)
      start_octet="$2"
      shift 2
      ;;
    --count)
      count="$2"
      shift 2
      ;;
    --hosts)
      hosts_csv="$2"
      shift 2
      ;;
    --known-hosts)
      known_hosts="$2"
      shift 2
      ;;
    --connect-timeout)
      connect_timeout="$2"
      shift 2
      ;;
    --no-copy)
      do_copy=0
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    --)
      # Everything after "--" is taken literally as a host list.
      shift
      while [[ $# -gt 0 ]]; do
        explicit_hosts+=("$1")
        shift
      done
      ;;
    -*)
      echo "ERROR: unknown option: $1" >&2
      usage
      exit 1
      ;;
    *)
      explicit_hosts+=("$1")
      shift
      ;;
  esac
done

# Fix: previously ssh-copy-id was demanded unconditionally before parsing,
# which made "--no-copy" unusable on minimal systems lacking ssh-copy-id.
if [[ "$do_copy" -eq 1 ]]; then
  need_cmd ssh-copy-id
fi

# --- host selection: --hosts list > positional hosts > generated range ------
declare -a hosts=()
if [[ -n "$hosts_csv" ]]; then
  # shellcheck disable=SC2206
  hosts=($hosts_csv)
elif [[ "${#explicit_hosts[@]}" -gt 0 ]]; then
  hosts=("${explicit_hosts[@]}")
else
  for ((i = 0; i < count; i++)); do
    hosts+=("${prefix}.$((start_octet + i))")
  done
fi
if [[ "${#hosts[@]}" -eq 0 ]]; then
  echo "ERROR: empty host list" >&2
  exit 1
fi

# Make sure the known_hosts file exists with safe permissions before editing.
mkdir -p "$(dirname "$known_hosts")"
touch "$known_hosts"
chmod 600 "$known_hosts"

# sshpass is optional: without it ssh-copy-id will prompt interactively.
have_sshpass=0
if command -v sshpass >/dev/null 2>&1; then
  have_sshpass=1
fi
if [[ "$do_copy" -eq 1 ]] && [[ "$have_sshpass" -eq 0 ]]; then
  log "sshpass is not installed, ssh-copy-id will prompt for password interactively."
fi

# --- per-host processing ----------------------------------------------------
declare -a failed=()
declare -a ok=()
for host in "${hosts[@]}"; do
  target="${ssh_user}@${host}"
  log "Reset known_hosts entry for ${host}"
  # Remove both the plain and the "[host]:22" entry forms; ignore misses.
  ssh-keygen -R "${host}" -f "${known_hosts}" >/dev/null 2>&1 || true
  ssh-keygen -R "[${host}]:22" -f "${known_hosts}" >/dev/null 2>&1 || true
  if [[ "$do_copy" -eq 0 ]]; then
    ok+=("${host} (known_hosts only)")
    continue
  fi
  log "Install SSH key to ${target}"
  if [[ "$have_sshpass" -eq 1 ]]; then
    if sshpass -p "${ssh_pass}" ssh-copy-id -f \
      -o StrictHostKeyChecking=no \
      -o UserKnownHostsFile="${known_hosts}" \
      -o ConnectTimeout="${connect_timeout}" \
      "${target}" >/dev/null 2>&1; then
      ok+=("${host}")
    else
      failed+=("${host}")
    fi
  else
    if ssh-copy-id -f \
      -o StrictHostKeyChecking=no \
      -o UserKnownHostsFile="${known_hosts}" \
      -o ConnectTimeout="${connect_timeout}" \
      "${target}"; then
      ok+=("${host}")
    else
      failed+=("${host}")
    fi
  fi
done

# --- summary ----------------------------------------------------------------
log "Summary: ok=${#ok[@]} failed=${#failed[@]}"
if [[ "${#ok[@]}" -gt 0 ]]; then
  printf '  OK: %s\n' "${ok[@]}"
fi
if [[ "${#failed[@]}" -gt 0 ]]; then
  printf '  FAIL: %s\n' "${failed[@]}" >&2
  exit 1
fi
#!/usr/bin/env bash
set -euo pipefail
usage() {
# Print CLI help to stdout. The heredoc delimiter is single-quoted ('EOF'),
# so the help text is emitted verbatim with no shell expansion.
cat <<'EOF'
Usage:
syncall.sh [host ...]
syncall.sh [--user USER] [--prefix A.B.C] [--from N] [--count N]
syncall.sh [--user USER] [--hosts "H1 H2 ..."]
Description:
Run "sync; sync; sync" on selected camera hosts over SSH.
Use this before power-cycling cameras after remote writes.
Defaults:
user = root
prefix = 192.168.0
from = 41
count = 6 (hosts .41 .. .46)
Options:
--user USER SSH user (default: root)
--prefix A.B.C First 3 octets for generated hosts (default: 192.168.0)
--from N Start host number (default: 41)
--count N Number of hosts (default: 6)
--hosts "H1 H2 ..." Space-separated host list (IPs or hostnames)
--connect-timeout S SSH connect timeout seconds (default: 7)
--dry-run Print commands without executing
-h, --help Show this help
EOF
}
log() {
  # Timestamped status line on stdout: "[YYYY-MM-DD HH:MM:SS] message".
  local now
  now="$(date '+%F %T')"
  printf '[%s] %s\n' "$now" "$*"
}
run_cmd() {
  # Execute "$@" normally, or, when DRY_RUN=1, print the shell-quoted
  # command line prefixed with "DRY-RUN:" instead of running it.
  if [[ "${DRY_RUN}" -ne 1 ]]; then
    "$@"
    return
  fi
  printf 'DRY-RUN:'
  printf ' %q' "$@"
  printf '\n'
}
need_cmd() {
  # Exit the script with an error message unless "$1" resolves in PATH.
  if command -v "$1" >/dev/null 2>&1; then
    return 0
  fi
  echo "ERROR: required command not found: $1" >&2
  exit 1
}
# ssh availability is verified after option parsing (gated on DRY_RUN) so
# that --dry-run and --help work on machines without an ssh client.

# --- defaults ---------------------------------------------------------------
ssh_user="root"
prefix="192.168.0"
start_octet=41
count=6
hosts_csv=""
connect_timeout=7
DRY_RUN=0
declare -a explicit_hosts=()

# --- option parsing ---------------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --user)
      ssh_user="$2"
      shift 2
      ;;
    --prefix)
      prefix="$2"
      shift 2
      ;;
    --from)
      start_octet="$2"
      shift 2
      ;;
    --count)
      count="$2"
      shift 2
      ;;
    --hosts)
      hosts_csv="$2"
      shift 2
      ;;
    --connect-timeout)
      connect_timeout="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    --)
      # Everything after "--" is taken literally as a host list.
      shift
      while [[ $# -gt 0 ]]; do
        explicit_hosts+=("$1")
        shift
      done
      ;;
    -*)
      echo "ERROR: unknown option: $1" >&2
      usage
      exit 1
      ;;
    *)
      explicit_hosts+=("$1")
      shift
      ;;
  esac
done

# Fix: previously ssh was demanded unconditionally before parsing, which
# made --dry-run fail on machines without an ssh client installed.
if [[ "${DRY_RUN}" -ne 1 ]]; then
  need_cmd ssh
fi

# --- host selection: --hosts list > positional hosts > generated range ------
declare -a hosts=()
if [[ -n "$hosts_csv" ]]; then
  # shellcheck disable=SC2206
  hosts=($hosts_csv)
elif [[ "${#explicit_hosts[@]}" -gt 0 ]]; then
  hosts=("${explicit_hosts[@]}")
else
  for ((i = 0; i < count; i++)); do
    hosts+=("${prefix}.$((start_octet + i))")
  done
fi
if [[ "${#hosts[@]}" -eq 0 ]]; then
  echo "ERROR: empty host list" >&2
  exit 1
fi

# --- flush filesystem buffers on each host ----------------------------------
declare -a ok=()
declare -a failed=()
for host in "${hosts[@]}"; do
  target="${ssh_user}@${host}"
  log "Syncing ${target}"
  # BatchMode avoids interactive password prompts hanging unattended runs.
  if run_cmd ssh -o BatchMode=yes -o ConnectTimeout="${connect_timeout}" \
    "${target}" "sync; sync; sync"; then
    ok+=("${host}")
  else
    failed+=("${host}")
  fi
done

# --- summary ----------------------------------------------------------------
log "Summary: ok=${#ok[@]} failed=${#failed[@]}"
if [[ "${#ok[@]}" -gt 0 ]]; then
  printf '  OK: %s\n' "${ok[@]}"
fi
if [[ "${#failed[@]}" -gt 0 ]]; then
  printf '  FAIL: %s\n' "${failed[@]}" >&2
  exit 1
fi
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment