#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail

INTERVAL=60

# Resolve paths relative to the script location
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
LOG_DIR="$ROOT_DIR/logs"
HOSTS_FILE="$ROOT_DIR/hosts.txt"
SEP=';'

# Current timestamp in ISO 8601 format
timestamp_iso() {
  date -Iseconds
}

# Create the log directory if it does not exist
ensure_dirs() {
  mkdir -p "$LOG_DIR"
}

# Path of today's CSV log file
current_logfile() {
  printf "%s/%s.csv" "$LOG_DIR" "$(date +%F)"
}

# Write the CSV header if the file is missing or empty
write_header_if_needed() {
  local file="$1"
  if [ ! -f "$file" ] || [ ! -s "$file" ]; then
    printf 'timestamp%slabel%shttp_code\n' "$SEP" "$SEP" >>"$file"
  fi
}

# Request a graceful stop on INT/TERM
stop_requested=0
_on_exit() {
  stop_requested=1
  printf "\nstop received. exit after current batch\n" >&2
}
trap _on_exit INT TERM

ensure_dirs
printf "loop start. interval: %ss. logs dir: %s\n" "$INTERVAL" "$LOG_DIR"

# Main loop: keep running until a stop is requested
while [ "$stop_requested" -eq 0 ]; do
  logfile="$(current_logfile)"
  write_header_if_needed "$logfile"

  # Check every host and append one CSV row per entry
  while IFS= read -r line; do
    [ -z "$line" ] && continue
    case "$line" in \#*) continue ;; esac
    # "expected" is read from the file but not evaluated yet
    IFS='|' read -r label url expected <<<"$line"
    ts="$(timestamp_iso)"
    set +o errexit
    code="$(curl -sS -o /dev/null -w "%{http_code}" --max-time 10 -L "$url" 2>/dev/null)" || code="000"
    set -o errexit
    printf "%s%s%s%s%s\n" "$ts" "$SEP" "$label" "$SEP" "$code" >>"$logfile"
  done <"$HOSTS_FILE"

  # Interruptible sleep: check the stop flag once per second
  waited=0
  while [ "$waited" -lt "$INTERVAL" ] && [ "$stop_requested" -eq 0 ]; do
    sleep 1 || true   # a signal may interrupt sleep; do not trip errexit here
    waited=$((waited+1))
  done
done

printf "exit\n"
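
# Example hosts.txt entries (a sketch only: the label|url|expected layout is
# inferred from the read call above, the hosts below are placeholders, and the
# expected column is read but not yet evaluated by this script):
#
#   # label|url|expected
#   example|https://example.com|200
#   docs|https://example.org/docs|200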