Use retry mechanism for fetching data
Some checks failed
CI/CD Pipeline / test (push) Failing after 1m43s

This commit is contained in:
philipp 2024-02-18 17:00:15 +01:00
parent bdc6f87efa
commit 13c734019d
2 changed files with 35 additions and 2 deletions

View File

@ -110,3 +110,36 @@ fn delete_all_in_dir<P: AsRef<Path>>(dir_path: P) -> std::io::Result<()> {
Ok(())
}
/// Fetches the body of `url` as a `String`, retrying transient failures.
///
/// Transport-level errors (DNS failure, connection reset, timeout, ...)
/// are retried with exponential backoff: 1s, 2s, 4s, ... capped at
/// `MAX_BACKOFF_SECS`, for at most `MAX_ATTEMPTS` attempts. Any other
/// error — e.g. an HTTP status error from `ureq`, or a failure while
/// reading the response body — is returned immediately without retrying.
///
/// # Errors
///
/// Returns an [`Error`] when the retry budget is exhausted, when the
/// request fails with a non-transport error, or when the response body
/// cannot be read into a string.
pub(crate) fn fetch_with_retries(url: &str) -> Result<String, Error> {
    /// Upper bound on request attempts before giving up.
    const MAX_ATTEMPTS: u32 = 100;
    /// Ceiling for the exponential backoff delay, in seconds.
    const MAX_BACKOFF_SECS: u64 = 32;

    let mut attempts: u32 = 0;
    loop {
        match ureq::get(url).call() {
            // Success: read the body. A body-read failure is not a
            // transport error we can usefully retry, so propagate it.
            Ok(response) => return response.into_string().map_err(Into::into),
            // Transient network failure: back off, then try again.
            Err(ureq::Error::Transport(_)) => {
                attempts += 1;
                if attempts >= MAX_ATTEMPTS {
                    return Err(Error::new("Maximum attempts reached."));
                }
                // Exponential backoff (replaces the former fixed 1s sleep):
                // 2^(attempts-1) seconds, clamped so the shift cannot
                // overflow and the delay never exceeds MAX_BACKOFF_SECS.
                let secs = (1u64 << (attempts - 1).min(5)).min(MAX_BACKOFF_SECS);
                std::thread::sleep(std::time::Duration::from_secs(secs));
            }
            // Non-transport errors (e.g. HTTP 4xx/5xx) are not transient;
            // fail fast so the caller can react.
            Err(e) => return Err(e.into()),
        }
    }
}

View File

@ -29,7 +29,7 @@ use log::info;
use crate::{
law::LawBuilder,
misc::{get_cache_dir, Error},
misc::{fetch_with_retries, get_cache_dir, Error},
};
use self::parser::Risdok;
@ -138,7 +138,7 @@ fn fetch(url: &str) -> Result<String, Error> {
Ok(data) => Ok(data),
Err(_) => {
info!("Not finding url {url} in the cache, downloading...");
let data = ureq::get(url).call()?.into_string()?;
let data = fetch_with_retries(url)?;
let path = Path::new(&expected_filename);
if let Some(parent) = path.parent() {
// Try to create the directory (and any necessary parent directories)