remove temporary state (e.g. the last download date) and check the stored data directly; don't re-download a URL (e.g. on Sundays) that has already been downloaded
src/lib.rs (23 changed lines)
@@ -1,14 +1,23 @@
+use chrono::NaiveDate;
 use serde_json::Value;
 
-pub async fn newest_morning_journal_streaming_url() -> Result<String, Box<dyn std::error::Error>> {
-    let url = get_newest_morning_journal().await?;
-    get_streaming_url(url).await
+#[derive(Clone)]
+pub struct Episode {
+    pub url: String,
+    pub date: NaiveDate,
+}
+
+pub async fn newest_morning_journal_streaming_url() -> Result<Episode, Box<dyn std::error::Error>> {
+    let (date, url) = get_newest_morning_journal().await?;
+    let url = get_streaming_url(url).await?;
+
+    Ok(Episode { url, date })
 }
 
 // List of broadcasts: https://audioapi.orf.at/oe1/api/json/current/broadcasts
 //
 // ^ contains link, e.g. https://audioapi.orf.at/oe1/api/json/4.0/broadcast/797577/20250611
-async fn get_newest_morning_journal() -> Result<String, Box<dyn std::error::Error>> {
+async fn get_newest_morning_journal() -> Result<(NaiveDate, String), Box<dyn std::error::Error>> {
     let url = "https://audioapi.orf.at/oe1/api/json/current/broadcasts";
     let data: Value = reqwest::get(url).await?.json().await?;
 
@@ -19,7 +28,11 @@ async fn get_newest_morning_journal() -> Result<String, Box<dyn std::error::Erro
         if broadcast["title"] == "Ö1 Morgenjournal"
             && let Some(href) = broadcast["href"].as_str()
         {
-            return Ok(href.into());
+            let date = broadcast["broadcastDay"]
+                .as_str()
+                .expect("There needs to be a broadcastDay!");
+            let date = NaiveDate::parse_from_str(date, "%Y%m%d").expect("broadcastDay in https://audioapi.orf.at/oe1/api/json/current/broadcasts not in a valid format");
+            return Ok((date, href.into()));
         }
     }
 }
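
Note on the new date handling: the code expects broadcastDay as a string in the compact YYYYMMDD form (matching the 20250611 in the comment above) and parses it with chrono's "%Y%m%d". A minimal standalone sketch of just that parsing step, with the sample value taken from the comment rather than from a live API response:

    use chrono::NaiveDate;

    fn main() {
        // "%Y%m%d" matches the compact broadcastDay form, e.g. "20250611".
        let date = NaiveDate::parse_from_str("20250611", "%Y%m%d").unwrap();
        assert_eq!(date, NaiveDate::from_ymd_opt(2025, 6, 11).unwrap());
        println!("{date}"); // prints 2025-06-11
    }
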
src/state.rs (37 changed lines)
@@ -1,38 +1,45 @@
-use chrono::{Local, NaiveDate};
+use chrono::Local;
+use player::Episode;
 use std::sync::Arc;
 use tokio::sync::RwLock;
 
 pub struct AppState {
-    pub urls: RwLock<Vec<String>>,
-    pub last_download_on_day: RwLock<Option<NaiveDate>>,
+    pub episodes: RwLock<Vec<Episode>>,
 }
 
 impl AppState {
     pub fn new() -> Self {
         Self {
-            urls: RwLock::new(Vec::new()),
-            last_download_on_day: RwLock::new(None),
+            episodes: RwLock::new(Vec::new()),
         }
     }
 
     pub async fn check_update(self: Arc<Self>) {
-        let today = Local::now().date_naive();
-        if let Some(downloaded_on_day) = *self.last_download_on_day.read().await
-            && today == downloaded_on_day
-        {
+        if self.already_downloaded_today().await {
             return;
         }
 
-        *self.last_download_on_day.write().await = Some(today);
-
-        let latest_url = player::newest_morning_journal_streaming_url()
+        let latest_episode = player::newest_morning_journal_streaming_url()
             .await
             .unwrap();
 
-        let mut old = self.urls.read().await.clone();
-        old.push(latest_url);
+        if self.already_downloaded_url(&latest_episode.url).await {
+            return;
+        }
+
+        let mut old = self.episodes.read().await.clone();
+        old.push(latest_episode);
         let new = old.into_iter().rev().take(10).collect(); // only keep last 10
 
-        *self.urls.write().await = new;
+        *self.episodes.write().await = new;
+    }
+
+    async fn already_downloaded_today(self: &Arc<Self>) -> bool {
+        let today = Local::now().date_naive();
+        self.episodes.read().await.iter().any(|x| x.date == today)
+    }
+
+    async fn already_downloaded_url(self: &Arc<Self>, url: &str) -> bool {
+        self.episodes.read().await.iter().any(|x| x.url == url)
     }
 }
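
For reference, the two new helpers replace the last_download_on_day bookkeeping with plain scans over the stored episodes. A self-contained sketch of the same checks against a local Vec (the Episode struct is redeclared here only so the snippet compiles on its own; the URL and date are placeholders):

    use chrono::NaiveDate;

    #[derive(Clone)]
    struct Episode {
        url: String,
        date: NaiveDate,
    }

    fn main() {
        let episodes = vec![Episode {
            url: "https://example.org/morgenjournal.mp3".into(), // placeholder
            date: NaiveDate::from_ymd_opt(2025, 6, 11).unwrap(),
        }];

        // Same logic as already_downloaded_today / already_downloaded_url,
        // just without the RwLock-guarded state around it.
        let today = NaiveDate::from_ymd_opt(2025, 6, 11).unwrap();
        let downloaded_today = episodes.iter().any(|x| x.date == today);
        let downloaded_url = episodes
            .iter()
            .any(|x| x.url == "https://example.org/morgenjournal.mp3");
        assert!(downloaded_today && downloaded_url);
    }
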
@@ -1,19 +1,20 @@
 use crate::state::AppState;
 use axum::{extract::State, http::HeaderMap, response::IntoResponse};
+use player::Episode;
 use reqwest::header;
 use std::sync::Arc;
 
 pub async fn stream_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
     state.clone().check_update().await;
 
-    let content = feed(&state.urls.read().await.to_vec());
+    let content = feed(&state.episodes.read().await.to_vec());
 
     let mut headers = HeaderMap::new();
     headers.insert(header::CONTENT_TYPE, "application/rss+xml".parse().unwrap());
     (headers, content)
 }
 
-fn feed(urls: &Vec<String>) -> String {
+fn feed(episodes: &Vec<Episode>) -> String {
     let mut ret = String::new();
     ret.push_str(r#"<?xml version="1.0" encoding="UTF-8"?>"#);
     ret.push_str(r#"<rss version="2.0">"#);
@@ -22,15 +23,17 @@ fn feed(urls: &Vec<String>) -> String {
     ret.push_str("<link>https://news.hofer.link</link>");
     ret.push_str("<description>Feed für Ö1 Morgenjournal. Live.</description>");
 
-    for url in urls {
+    for episode in episodes {
         ret.push_str("<item>");
-        ret.push_str(&format!("<title>Morgenjournal</title>"));
-        ret.push_str(&format!("<link>{}</link>", quick_xml::escape::escape(url)));
+        ret.push_str(&format!(
+            "<title>Morgenjournal {}</title>",
+            &episode.date.format("%d. %m.")
+        ));
         ret.push_str(&format!(
             "<enclosure url=\"{}\" length=\"0\" type=\"audio/mpeg\"/>\n",
-            quick_xml::escape::escape(url)
+            quick_xml::escape::escape(&episode.url)
         ));
-        ret.push_str(&format!("<description>Morgenjournal</description>"));
+        ret.push_str("<description>Morgenjournal</description>");
         ret.push_str("</item>");
     }
 
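
With these changes, one generated feed item comes out roughly like this (pretty-printed here for readability; the real output is concatenated on one line except for the newline after the enclosure, and the date and streaming URL below are made-up placeholders):

    <item>
      <title>Morgenjournal 11. 06.</title>
      <enclosure url="https://example.org/morgenjournal.mp3" length="0" type="audio/mpeg"/>
      <description>Morgenjournal</description>
    </item>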