good enough for MVP
parent 8bfd102f79
commit bc99011bb6
2 changed files with 116 additions and 36 deletions
src/main.rs (112 changes)
@@ -37,6 +37,7 @@ struct Config {
     common_ninjas: Vec<wac_common_ninja::Config>,
     icals: Vec<wac_ical::Config>,
     output: output::Config,
+    feeds: Vec<SimpleDownload>,
 }
 
 impl Config {
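Note: SimpleDownload's definition is outside this diff. Judging from its uses (ical.dl.clone() in the next hunk, feed.file_path in read_feeds, and the temp-file download loop in do_everything), a plausible shape is sketched here; the url field name and the serde derive are assumptions:

    // Hypothetical reconstruction -- the real definition is not in this commit.
    #[derive(Clone, serde::Deserialize)]
    struct SimpleDownload {
        url: String,            // upstream URL to fetch (assumed field name)
        file_path: Utf8PathBuf, // local cache path, read back by read_feeds
    }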
@@ -52,10 +53,11 @@ impl Config {
             .chain(self.icals.iter().map(|ical| ical.dl.clone()))
     }
 
-    fn upstreams(&self) -> Vec<CalendarUi> {
+    fn upstream_calendars(&self) -> Vec<CalendarUi> {
         let Self {
             campfires,
             common_ninjas,
+            feeds: _,
             icals,
             output: _,
         } = self;
@@ -161,7 +163,7 @@ struct Data {
     icals: Vec<wac_ical::Calendar>,
 }
 
-fn read_data_from_disk(config: &Config) -> Result<Data> {
+fn read_calendars(config: &Config) -> Result<Data> {
     Ok(Data {
         campfires: config
             .campfires
@@ -256,9 +258,19 @@ async fn do_everything(cli: &CliAuto) -> Result<()> {
         std::fs::rename(&temp_path, &dl.file_path)?;
     }
 
-    let data = read_data_from_disk(&config)?;
-    let instances = process_data(&data, &config.output, now)?;
-    output::write_html(&config.output, &config.upstreams(), &instances, now)?;
+    let events = {
+        let cal_data = read_calendars(&config)?;
+        process_data(&cal_data, &config.output, now)?
+    };
+    let feed_items = read_feeds(&config)?;
+
+    output::write_html(
+        &config.output,
+        &feed_items,
+        &config.upstream_calendars(),
+        &events,
+        now,
+    )?;
     Ok(())
 }
 
@@ -280,31 +292,40 @@ fn main_auto(cli: CliAuto) -> Result<()> {
 }
 
 #[derive(clap::Parser)]
-struct CliDebugOutput {
+struct CliDebugEvents {
     #[arg(long)]
     config: Utf8PathBuf,
 }
 
-fn main_debug_output(cli: CliDebugOutput) -> Result<()> {
+fn main_debug_events(cli: CliDebugEvents) -> Result<()> {
     tracing_subscriber::fmt::init();
     tracing::info!("Started tracing");
     let config = std::fs::read_to_string(&cli.config).context("Failed to read config file")?;
     let config: Config = toml::from_str(&config).context("Failed to parse config file")?;
 
-    let data = read_data_from_disk(&config).context("Failed to read data from disk")?;
-
     let tz = &config.output.timezone;
     let now = Utc::now().with_timezone(tz);
-    let instances = process_data(&data, &config.output, now).context("Failed to process data")?;
-    output::write_html(&config.output, &config.upstreams(), &instances, now)
-        .context("Failed to output HTML")?;
+    let data = read_calendars(&config).context("Failed to read data from disk")?;
+
+    let events = process_data(&data, &config.output, now).context("Failed to process data")?;
+
+    std::fs::create_dir_all("output")?;
+    output::atomic_write(
+        "output/calendars.html",
+        &output::calendars_page(&config.upstream_calendars(), now),
+    )?;
+    output::atomic_write(
+        "output/index.html",
+        &output::index_page(&config.output, &events, now),
+    )?;
 
     Ok(())
 }
 
 #[derive(clap::Parser)]
-struct CliDebugRss {
-    paths: Vec<Utf8PathBuf>,
+struct CliDebugFeed {
+    #[arg(long)]
+    config: Utf8PathBuf,
 }
 
 /// Wraps rss::Item in our own type suitable for merging
@@ -314,12 +335,11 @@ pub(crate) struct FeedItem {
     inner: rss::Item,
 }
 
-fn main_debug_rss(cli: CliDebugRss) -> Result<()> {
+fn read_feeds(config: &Config) -> Result<Vec<FeedItem>> {
     let mut items = Vec::new();
     let now = Utc::now();
-
-    for path in &cli.paths {
-        let s = std::fs::read(path)?;
+    for feed in &config.feeds {
+        let s = std::fs::read(&feed.file_path)?;
         let channel = rss::Channel::read_from(std::io::BufReader::new(std::io::Cursor::new(s)))?;
 
         let channel_title = channel.title.clone();
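Note on the context line kept inside read_feeds: std::io::Cursor<Vec<u8>> already implements BufRead, so the BufReader wrapper is redundant (though harmless). The parse could be shortened to:

    let channel = rss::Channel::read_from(std::io::Cursor::new(s))?;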
@@ -341,21 +361,64 @@ fn main_debug_rss(cli: CliDebugRss) -> Result<()> {
     }
 
     items.sort_by_key(|item| std::cmp::Reverse(item.date));
+    Ok(items)
+}
+
+fn main_debug_feed(cli: CliDebugFeed) -> Result<()> {
+    tracing_subscriber::fmt::init();
+    tracing::info!("Started tracing");
+    let config = std::fs::read_to_string(&cli.config).context("Failed to read config file")?;
+    let config: Config = toml::from_str(&config).context("Failed to parse config file")?;
+
+    let tz = &config.output.timezone;
+    let now = Utc::now().with_timezone(tz);
+    let items = read_feeds(&config)?;
 
     std::fs::create_dir_all("output")?;
-    output::atomic_write(
-        "output/feed.html",
-        &output::feed_page(&items, now.with_timezone(&chrono_tz::UTC)),
-    )?;
+    output::atomic_write("output/feed.html", &output::feed_page(&items, now))?;
 
     Ok(())
 }
 
+#[derive(clap::Parser)]
+struct CliDebugOutput {
+    #[arg(long)]
+    config: Utf8PathBuf,
+}
+
+fn main_debug_output(cli: CliDebugOutput) -> Result<()> {
+    tracing_subscriber::fmt::init();
+    tracing::info!("Started tracing");
+    let config = std::fs::read_to_string(&cli.config).context("Failed to read config file")?;
+    let config: Config = toml::from_str(&config).context("Failed to parse config file")?;
+
+    let tz = &config.output.timezone;
+    let now = Utc::now().with_timezone(tz);
+
+    let events = {
+        let data = read_calendars(&config).context("Failed to read calendars from disk")?;
+        process_data(&data, &config.output, now).context("Failed to process data")?
+    };
+
+    let feed_items = read_feeds(&config)?;
+
+    std::fs::create_dir_all("output")?;
+    output::write_html(
+        &config.output,
+        &feed_items,
+        &config.upstream_calendars(),
+        &events,
+        now,
+    )?;
+    Ok(())
+}
+
 #[derive(clap::Subcommand)]
 enum Commands {
     Auto(CliAuto),
+    DebugEvents(CliDebugEvents),
+    DebugFeed(CliDebugFeed),
     DebugOutput(CliDebugOutput),
-    DebugRss(CliDebugRss),
 }
 
 #[derive(clap::Parser)]
@@ -370,7 +433,8 @@ fn main() -> Result<()> {
 
     match cli.command {
         Commands::Auto(x) => main_auto(x),
+        Commands::DebugEvents(x) => main_debug_events(x),
+        Commands::DebugFeed(x) => main_debug_feed(x),
         Commands::DebugOutput(x) => main_debug_output(x),
-        Commands::DebugRss(x) => main_debug_rss(x),
     }
 }
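With those match arms in place, the renamed and added subcommands follow clap's default kebab-case naming; assuming the binary is called wac (the real binary name does not appear in this diff), the debug flows run as:

    wac debug-events --config config.toml
    wac debug-feed --config config.toml
    wac debug-output --config config.toml

The old debug-rss subcommand is gone; its parsing loop now lives in read_feeds, which both debug-feed and debug-output reuse.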
src/output.rs (40 changes)

@@ -74,7 +74,10 @@ fn calendar_link(calendar_ui: &crate::CalendarUi) -> maud::PreEscaped<String> {
     }
 }
 
-fn calendars_page(upstreams: &[crate::CalendarUi], now: DateTime<chrono_tz::Tz>) -> String {
+pub(crate) fn calendars_page(
+    upstreams: &[crate::CalendarUi],
+    now: DateTime<chrono_tz::Tz>,
+) -> String {
     let description = "A list of upstream calendars used by this Wide-Angle Calendar instance";
     let title = "Upstream calendars";
 
@@ -98,13 +101,13 @@ fn calendars_page(upstreams: &[crate::CalendarUi], now: DateTime<chrono_tz::Tz>) -> String {
         }
         body {
             h1 { (title) }
-            p { "Written at: " (now.format("%F %T")) }
+            p {
+                a href="index.html" { "Wide-Angle Calendar" }
+                " / "
+                a href="calendars.html" { (title) }
+            }
 
+            p { "Written at: " (now.format("%F %T")) }
             p { "These are the calendars that Wide-Angle Calendar pulls from." }
 
             ol {
@@ -199,8 +202,12 @@ pub(crate) fn feed_page(feed_items: &[crate::FeedItem], now: DateTime<chrono_tz::Tz>) -> String {
         }
         body {
             h1 { (title) }
-            p { "Written at: " (now.format("%F %T")) }
-            p { a href = "calendars.html" { "Upstream calendars" } }
+            p { "Written at: " (now.format("%F %T")) " (But it may lag by several hours - Not for ICE spotting)" }
+            p {
+                a href="index.html" { "Wide-Angle Calendar" }
+                " / "
+                a href="feed.html" { (title) }
+            }
             @for entry in html_list {
                 (entry)
             }
@@ -210,7 +217,7 @@ pub(crate) fn feed_page(feed_items: &[crate::FeedItem], now: DateTime<chrono_tz::Tz>) -> String {
     .into_string()
 }
 
-fn index_page(
+pub(crate) fn index_page(
     config: &Config,
     instances: &[EventInstance],
     now: DateTime<chrono_tz::Tz>,
@@ -336,7 +343,13 @@ fn index_page(
             h1 { (title) }
             img src="hero.webp" width="700" height="233" {}
             p { "Written at: " (now.format("%F %T")) }
-            p { a href = "calendars.html" { "Upstream calendars" } }
+            p {
+                "Sub-pages:"
+                ul {
+                    li { a href = "calendars.html" { "Upstream calendars" } }
+                    li { a href = "feed.html" { "Feed" } }
+                }
+            }
             @for entry in html_list {
                 (entry)
             }
@@ -355,16 +368,19 @@ pub(crate) fn atomic_write(path: &str, content: &str) -> Result<()> {
 
 pub(crate) fn write_html(
     config: &Config,
-    // feed_items: &[crate::FeedItem],
-    upstreams: &[crate::CalendarUi],
-    instances: &[EventInstance],
+    feed_items: &[crate::FeedItem],
+    upstream_calendars: &[crate::CalendarUi],
+    events: &[EventInstance],
     now: DateTime<chrono_tz::Tz>,
 ) -> Result<()> {
     std::fs::create_dir_all("output")?;
 
-    atomic_write("output/calendars.html", &calendars_page(upstreams, now))?;
-    // atomic_write("output/feed.html", &feed_page(feed_items, now)?)?;
-    atomic_write("output/index.html", &index_page(config, instances, now))?;
+    atomic_write(
+        "output/calendars.html",
+        &calendars_page(upstream_calendars, now),
+    )?;
+    atomic_write("output/feed.html", &feed_page(feed_items, now))?;
+    atomic_write("output/index.html", &index_page(config, events, now))?;
 
     Ok(())
 }
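For reference, only atomic_write's signature appears here (in the last hunk header), but the name and the temp-file rename already used in do_everything suggest the usual write-aside-then-rename pattern. A minimal sketch, with the temp-path naming assumed:

    // Sketch only -- the real body is outside this diff.
    pub(crate) fn atomic_write(path: &str, content: &str) -> Result<()> {
        let temp_path = format!("{path}.tmp"); // assumed naming scheme
        std::fs::write(&temp_path, content)?;  // write the whole page aside
        std::fs::rename(&temp_path, path)?;    // then swap it into place
        Ok(())
    }

On a single filesystem the rename is atomic, so readers of output/index.html never see a half-written page.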