This is a simple script to check whether URLs are reachable over HTTP(S). It comes in handy when, for example, a project has many different (secondary) domains that redirect to the main domain.
#!/bin/bash
# URLs to check, one per line. Bash arrays are whitespace-separated,
# so no commas are needed between the entries.
urls=(
  "http://domain1.com"
  "https://domain1.com"
  "http://domain2.com"
  "https://domain2.com"
  "..."
)
# Uncomment to print the indexed list of URLs for debugging:
#for i in "${!urls[@]}"; do
#  echo "$i: ${urls[$i]}"
#done
#exit 0
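# Check each URL in turn; the script aborts with exit code 1 on the
# first URL that does not return HTTP 200.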
for i in "${!urls[@]}"; do
  echo "Checking status of ${urls[$i]}"
  # -s silences progress output, -L follows redirects (the secondary
  # domains redirect to the main one), and -w "%{http_code}" prints
  # only the final HTTP status code; the response body is discarded.
  code=$(curl -sL --connect-timeout 20 --max-time 30 -w "%{http_code}" -o /dev/null "${urls[$i]}")
  echo "Found code $code for '${urls[$i]}'"
if [ "$code" = "200" ]; then
echo "Website '${urls[$i]}' is online."
online=true
sleep 3
else
echo "Website '${urls[$i]}' seems to be offline. Waiting $timeout seconds."
echo "Monitor finished with failures, at least one website appears to be unreachable."
exit 1
fi
done
echo "Monitor finished, all good."
exit 0
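Because the script exits with 0 on success and 1 on the first failure, it is easy to schedule and monitor, e.g. via cron. A minimal sketch, assuming the script is saved under the hypothetical name check_urls.sh and made executable with chmod +x:

# Run the check every 15 minutes and append the output to a log file
*/15 * * * * /path/to/check_urls.sh >> /var/log/url_check.log 2>&1

The script path and log file location are placeholders; adjust them to your setup.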