#!/bin/bash

# Name of the file containing the URLs
input_file="overview.result"

# Check if the input file exists
if [ ! -f "$input_file" ]; then
    echo "File $input_file does not exist."
    exit 1
fi

# Ensure the output directory exists before downloading
mkdir -p par

# Counter for line number
line_number=1

# Iterate over each line in the file
while IFS= read -r url; do
    # Zero-pad the line number to four digits (e.g. 0001, 0002, ...)
    formatted_line_number=$(printf "%04d" "$line_number")

    # Use wget to download the content and save it as par/<line number>.xml
    wget -O "par/${formatted_line_number}.xml" "$url"

    # Increment the line number
    ((line_number++))
done < "$input_file"