add station pdf generation; Fixes #26
parent 6b07772a18
commit 8c0746e585
Cargo.lock (generated): 1810 lines changed, diff suppressed because it is too large
Cargo.toml: 13 lines changed
@@ -23,6 +23,19 @@ tracing-subscriber = "0.3"
 futures = "0.3"
 rand = "0.9"
 argon2 = "0.5"
+typst = "0.13"
+comemo = "0.4.0"
+# Fetching and unzipping packages
+zune-inflate = { version = "0.2", default-features = false, features = [
+    "gzip",
+    "std",
+] }
+tar = "0.4"
+ureq = "2.9"
+time = "0.3"
+ttf-parser = "0.25"
+typst-kit = "0.13.1"
+typst-pdf = "0.13.1"


 [dev-dependencies]
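The new dependencies fall into two groups: typst, comemo, typst-kit, and typst-pdf drive compilation and PDF export, while ureq, zune-inflate, tar, time, and ttf-parser appear to back the package download and font/date handling in the typst.rs wrapper added later in this commit. As a rough sketch of the pipeline they enable (mirroring print.rs below; TypstWrapperWorld is the typst::World implementation this commit introduces, and the document string here is only illustrative):

    let world = TypstWrapperWorld::new("../".to_owned(), "= Hello".to_owned());
    let document = typst::compile(&world).output.expect("compile failed");
    let pdf: Vec<u8> =
        typst_pdf::pdf(&document, &typst_pdf::PdfOptions::default()).expect("export failed");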
@@ -39,6 +39,7 @@ station_delete_err_nonexisting: "Station mit ID %{id} konnte nicht gelöscht wer
 station_delete_err_already_used: "Station %{name} konnte nicht gelöscht werden, da sie bereits verwendet wird (%{err})"
 station_has_not_rated_team_yet: "Station hat Team noch nicht bewertet" # should be short -> tooltip
 station_move_up: "%{name} nach vor reihen" # should be short -> tooltip
+generate_station_pdf: "Stations PDF generieren"


 #
@@ -1,16 +1,18 @@
 use super::{generate_random_alphanumeric, team::Team};
 use crate::{
-    AppState,
     admin::route::Route,
     models::rating::{Rating, TeamsAtStationLocation},
+    AppState,
 };
 use axum::Router;
 use chrono::{DateTime, Local, NaiveDateTime, Utc};
-use futures::{StreamExt, stream};
-use maud::{Markup, Render, html};
+use futures::{stream, StreamExt};
+use maud::{html, Markup, Render};
 use serde::{Deserialize, Serialize};
 use sqlx::{FromRow, SqlitePool};

+pub(crate) mod print;
+mod typst;
 mod web;

 #[derive(FromRow, Debug, Serialize, Deserialize)]

@@ -488,5 +490,5 @@ pub struct TeamOnTheWay {
 }

 pub(super) fn routes() -> Router<AppState> {
-    web::routes()
+    web::routes().merge(print::routes())
 }
src/admin/station/print.rs: new file, 106 lines
@@ -0,0 +1,106 @@
use crate::{admin::station::typst::TypstWrapperWorld, url, AppState, Station};
use axum::{
    extract::State,
    http::{header, StatusCode},
    response::IntoResponse,
    routing::get,
    Router,
};
use sqlx::SqlitePool;
use std::{fmt::Write, sync::Arc};
use typst_pdf::PdfOptions;

pub(crate) async fn station_pdf(stations: Vec<Station>) -> Vec<u8> {
    let mut content = String::from(
        r#"
#import "@preview/cades:0.3.0": qr-code

#set page(paper: "a4")
#set page(margin: 0pt)

#let card_width = 105mm // A4 width (210mm) divided by 2
#let card_height = 74.25mm // A4 height (297mm) divided by 4

// Custom function to create a card with title and QR code for a URL
#let create_url_card(title, url) = {
  box(
    width: card_width,
    height: card_height,
    stroke: (
      paint: black,
      dash: "loosely-dashed",
      thickness: 0.5pt
    ),
    [
      #align(center + horizon)[
        #text(weight: "bold", size: 14pt)[Station #title]
        #qr-code(url, width: 4cm)
        #text(size: 8pt)[#link(url)]
      ]
    ]
  )
}

// Function to create a grid of cards
#let create_card_grid(cards) = {
  grid(
    columns: 2,
    rows: 4,
    gutter: 0pt,
    ..cards
  )
}

#let cards = (
"#,
    );

    for station in stations {
        if !station.crewless() {
            let name = station.name;
            let link = format!("{}/s/{}/{}", url(), station.id, station.pw);
            write!(content, "create_url_card(\"Station {name}\", \"{link}\"),").unwrap();
        }
    }

    write!(
        content,
        r#")

#create_card_grid(cards)"#
    )
    .unwrap();

    // Create world with content.
    let world = TypstWrapperWorld::new("../".to_owned(), content);

    // Render document
    let document = typst::compile(&world)
        .output
        .expect("Error compiling typst");

    // Output to pdf
    typst_pdf::pdf(&document, &PdfOptions::default()).expect("Error exporting PDF")
}

async fn index(State(db): State<Arc<SqlitePool>>) -> impl IntoResponse {
    let stations = Station::all(&db).await;
    let pdf = station_pdf(stations).await;

    (
        StatusCode::OK,
        [
            (header::CONTENT_TYPE, "application/pdf"),
            (
                header::CONTENT_DISPOSITION,
                "attachment; filename=\"stations.pdf\"",
            ),
        ],
        pdf,
    )
        .into_response()
}

pub(super) fn routes() -> Router<AppState> {
    Router::new().route("/print", get(index))
}
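A possible smoke test for the generator outside the HTTP handler; this is only a sketch and assumes a seeded SqlitePool such as the one the project's testdb! setup provides (the exact test harness is not part of this diff):

    async fn station_pdf_smoke(db: sqlx::SqlitePool) {
        let stations = Station::all(&db).await;
        let pdf = station_pdf(stations).await;
        // Every PDF begins with the "%PDF" magic bytes.
        assert!(pdf.starts_with(b"%PDF"));
    }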
src/admin/station/typst.rs: new file, 230 lines
@@ -0,0 +1,230 @@
use std::collections::HashMap;
use std::io::Read;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

use typst::diag::{eco_format, FileError, FileResult, PackageError, PackageResult};
use typst::foundations::{Bytes, Datetime};
use typst::syntax::package::PackageSpec;
use typst::syntax::{FileId, Source};
use typst::text::{Font, FontBook};
use typst::utils::LazyHash;
use typst::Library;
use typst_kit::fonts::{FontSearcher, FontSlot};

/// Main interface that determines the environment for Typst.
pub struct TypstWrapperWorld {
    /// Root path to which files will be resolved.
    root: PathBuf,

    /// The content of a source.
    source: Source,

    /// The standard library.
    library: LazyHash<Library>,

    /// Metadata about all known fonts.
    book: LazyHash<FontBook>,

    /// Slots for all known fonts, loaded lazily.
    fonts: Vec<FontSlot>,

    /// Map of all known files.
    files: Arc<Mutex<HashMap<FileId, FileEntry>>>,

    /// Cache directory (e.g. where packages are downloaded to).
    cache_directory: PathBuf,

    /// HTTP agent to download packages.
    http: ureq::Agent,

    /// Datetime.
    time: time::OffsetDateTime,
}

impl TypstWrapperWorld {
    pub fn new(root: String, source: String) -> Self {
        let root = PathBuf::from(root);
        let fonts = FontSearcher::new().include_system_fonts(true).search();

        Self {
            library: LazyHash::new(Library::default()),
            book: LazyHash::new(fonts.book),
            root,
            fonts: fonts.fonts,
            source: Source::detached(source),
            time: time::OffsetDateTime::now_utc(),
            cache_directory: std::env::var_os("CACHE_DIRECTORY")
                .map(|os_path| os_path.into())
                .unwrap_or(std::env::temp_dir()),
            http: ureq::Agent::new(),
            files: Arc::new(Mutex::new(HashMap::new())),
        }
    }
}

/// A file that will be stored in the HashMap.
#[derive(Clone, Debug)]
struct FileEntry {
    bytes: Bytes,
    source: Option<Source>,
}

impl FileEntry {
    fn new(bytes: Vec<u8>, source: Option<Source>) -> Self {
        Self {
            bytes: Bytes::new(bytes),
            source,
        }
    }

    fn source(&mut self, id: FileId) -> FileResult<Source> {
        let source = if let Some(source) = &self.source {
            source
        } else {
            let contents = std::str::from_utf8(&self.bytes).map_err(|_| FileError::InvalidUtf8)?;
            let contents = contents.trim_start_matches('\u{feff}');
            let source = Source::new(id, contents.into());
            self.source.insert(source)
        };
        Ok(source.clone())
    }
}

impl TypstWrapperWorld {
    /// Helper to handle file requests.
    ///
    /// Requests will be either in packages or a local file.
    fn file(&self, id: FileId) -> FileResult<FileEntry> {
        let mut files = self.files.lock().map_err(|_| FileError::AccessDenied)?;
        if let Some(entry) = files.get(&id) {
            return Ok(entry.clone());
        }
        let path = if let Some(package) = id.package() {
            // Fetching file from package
            let package_dir = self.download_package(package)?;
            id.vpath().resolve(&package_dir)
        } else {
            // Fetching file from disk
            id.vpath().resolve(&self.root)
        }
        .ok_or(FileError::AccessDenied)?;

        let content = std::fs::read(&path).map_err(|error| FileError::from_io(error, &path))?;
        Ok(files
            .entry(id)
            .or_insert(FileEntry::new(content, None))
            .clone())
    }

    /// Downloads the package and returns the system path of the unpacked package.
    fn download_package(&self, package: &PackageSpec) -> PackageResult<PathBuf> {
        let package_subdir = format!("{}/{}/{}", package.namespace, package.name, package.version);
        let path = self.cache_directory.join(package_subdir);

        if path.exists() {
            return Ok(path);
        }

        eprintln!("downloading {package}");
        let url = format!(
            "https://packages.typst.org/{}/{}-{}.tar.gz",
            package.namespace, package.name, package.version,
        );

        let response = retry(|| {
            let response = self
                .http
                .get(&url)
                .call()
                .map_err(|error| eco_format!("{error}"))?;

            let status = response.status();
            if !http_successful(status) {
                return Err(eco_format!(
                    "response returned unsuccessful status code {status}",
                ));
            }

            Ok(response)
        })
        .map_err(|error| PackageError::NetworkFailed(Some(error)))?;

        let mut compressed_archive = Vec::new();
        response
            .into_reader()
            .read_to_end(&mut compressed_archive)
            .map_err(|error| PackageError::NetworkFailed(Some(eco_format!("{error}"))))?;
        let raw_archive = zune_inflate::DeflateDecoder::new(&compressed_archive)
            .decode_gzip()
            .map_err(|error| PackageError::MalformedArchive(Some(eco_format!("{error}"))))?;
        let mut archive = tar::Archive::new(raw_archive.as_slice());
        archive.unpack(&path).map_err(|error| {
            _ = std::fs::remove_dir_all(&path);
            PackageError::MalformedArchive(Some(eco_format!("{error}")))
        })?;

        Ok(path)
    }
}

/// This is the interface we have to implement such that `typst` can compile it.
///
/// I have tried to keep it as minimal as possible.
impl typst::World for TypstWrapperWorld {
    /// Standard library.
    fn library(&self) -> &LazyHash<Library> {
        &self.library
    }

    /// Metadata about all known fonts.
    fn book(&self) -> &LazyHash<FontBook> {
        &self.book
    }

    /// Accessing the main source file.
    fn main(&self) -> FileId {
        self.source.id()
    }

    /// Accessing a specified source file (based on `FileId`).
    fn source(&self, id: FileId) -> FileResult<Source> {
        if id == self.source.id() {
            Ok(self.source.clone())
        } else {
            self.file(id)?.source(id)
        }
    }

    /// Accessing a specified file (non-source).
    fn file(&self, id: FileId) -> FileResult<Bytes> {
        self.file(id).map(|file| file.bytes.clone())
    }

    /// Accessing a specified font per index of font book.
    fn font(&self, id: usize) -> Option<Font> {
        self.fonts[id].get()
    }

    /// Get the current date.
    ///
    /// Optionally, an offset in hours is given.
    fn today(&self, offset: Option<i64>) -> Option<Datetime> {
        let offset = offset.unwrap_or(0);
        let offset = time::UtcOffset::from_hms(offset.try_into().ok()?, 0, 0).ok()?;
        let time = self.time.checked_to_offset(offset)?;
        Some(Datetime::Date(time.date()))
    }
}

fn retry<T, E>(mut f: impl FnMut() -> Result<T, E>) -> Result<T, E> {
    if let Ok(ok) = f() {
        Ok(ok)
    } else {
        f()
    }
}

fn http_successful(status: u16) -> bool {
    // 2XX
    status / 100 == 2
}
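For orientation, the package cache used by download_package resolves as sketched below, mirroring the fallback in TypstWrapperWorld::new(); the "@preview/cades:0.3.0" import from print.rs would then be unpacked under cache/preview/cades/0.3.0 (the helper name here is only illustrative):

    use std::path::PathBuf;

    fn package_cache_dir() -> PathBuf {
        // CACHE_DIRECTORY if set, otherwise the system temp dir, as in TypstWrapperWorld::new().
        std::env::var_os("CACHE_DIRECTORY")
            .map(PathBuf::from)
            .unwrap_or_else(std::env::temp_dir)
    }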
@@ -705,6 +705,11 @@ async fn index(State(db): State<Arc<SqlitePool>>, session: Session) -> Markup {
                     (t!("station_hint_create_first"))
                 }
             }
+            a href="/admin/station/print" {
+                button {
+                    (t!("generate_station_pdf"))
+                }
+            }
             h2 { (t!("station_new")) }
             form action="/admin/station" method="post" {
                 fieldset role="group" {
src/lib.rs: 19 lines changed
@@ -16,23 +16,23 @@ macro_rules! testdb {

 i18n!("locales", fallback = "de-AT");

-use admin::station::Station;
+use admin::station::{print::station_pdf, Station};
 use auth::{AuthSession, Backend, User};
 use axum::{
-    Form, Router,
     body::Body,
     extract::{FromRef, State},
     response::{IntoResponse, Redirect, Response},
     routing::{get, post},
+    Form, Router,
 };
 use axum_login::AuthManagerLayerBuilder;
-use maud::{Markup, html};
+use maud::{html, Markup};
 use partials::page;
 use serde::Deserialize;
 use sqlx::SqlitePool;
 use std::{env, sync::Arc};
 use tokio::net::TcpListener;
-use tower_sessions::{Expiry, Session, SessionManagerLayer, cookie::time::Duration};
+use tower_sessions::{cookie::time::Duration, Expiry, Session, SessionManagerLayer};
 use tower_sessions_sqlx_store_chrono::SqliteStore;

 pub(crate) mod admin;

@@ -44,6 +44,9 @@ pub(crate) mod station;
 pub(crate) fn test_version() -> bool {
     env::var("TEST_VERSION").is_ok()
 }
+pub fn url() -> String {
+    env::var("URL").unwrap()
+}

 pub(crate) fn pl(amount: usize, single: &str, append: &str) -> String {
     if amount == 1 {

@@ -303,7 +306,13 @@ fn router(db: SqlitePool) -> Router {

 /// Starts the main application.
 pub async fn start(listener: TcpListener, db: SqlitePool) {
-    let app = router(db);
+    let app = router(db.clone());
+
+    tokio::spawn(async move {
+        // Kick-off typst compilation, to reduce wait time for 1st load
+        let stations = Station::all(&db).await;
+        station_pdf(stations).await;
+    });

     axum::serve(listener, app).await.unwrap();
 }
@@ -1,5 +1,5 @@
 use dotenv::dotenv;
-use sqlx::{SqlitePool, pool::PoolOptions};
+use sqlx::{pool::PoolOptions, SqlitePool};
 use std::env;
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

@@ -12,6 +12,8 @@ async fn main() {
         .with(tracing_subscriber::fmt::layer())
         .init();

+    env::var("URL").expect("No URL env set");
+
     // DB
     let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
     let db: SqlitePool = PoolOptions::new().connect(&database_url).await.unwrap();