383 lines
10 KiB
Rust
383 lines
10 KiB
Rust
use chrono::DateTime;
|
|
use clap::Parser as _;
|
|
use std::time::Duration;
|
|
|
|
use prelude::*;
|
|
|
|
#[cfg(test)]
|
|
mod tests;
|
|
|
|
mod output;
|
|
mod prelude;
|
|
mod wac_campfire;
|
|
mod wac_common_ninja;
|
|
mod wac_ical;
|
|
|
|
/// One file to fetch over HTTP and cache on disk.
///
/// When `download_url` is `None` the download step is skipped and the
/// cached file at `file_path` is used as-is.
#[derive(Clone, Default, Deserialize)]
struct SimpleDownload {
    /// URL to scrape to download the file from
    download_url: Option<Url>,

    /// Disk location to cache the file for debugging
    file_path: Utf8PathBuf,
}
|
|
|
|
/// User-facing metadata for one upstream calendar, shown in the output.
#[derive(Clone, Default, Deserialize)]
struct CalendarUi {
    /// A canonical webpage we can direct users to
    html_url: Option<Url>,

    /// Very short name for putting on each event
    short_name: String,
}
|
|
|
|
/// Top-level application configuration, deserialized from the TOML config file.
#[derive(Deserialize)]
struct Config {
    /// Campfire calendar sources
    campfires: Vec<wac_campfire::Config>,
    /// Common Ninja calendar sources
    common_ninjas: Vec<wac_common_ninja::Config>,
    /// iCalendar sources
    icals: Vec<wac_ical::Config>,
    /// Output settings (includes the timezone and event filters)
    output: output::Config,
}
|
|
|
|
impl Config {
|
|
fn downloads(&self, now: DateTime<chrono_tz::Tz>) -> impl Iterator<Item = SimpleDownload> {
|
|
self.campfires
|
|
.iter()
|
|
.map(|cf| cf.dl.clone())
|
|
.chain(
|
|
self.common_ninjas
|
|
.iter()
|
|
.map(move |cn| cn.simple_download(now)),
|
|
)
|
|
.chain(self.icals.iter().map(|ical| ical.dl.clone()))
|
|
}
|
|
|
|
fn upstreams(&self) -> Vec<CalendarUi> {
|
|
let Self {
|
|
campfires,
|
|
common_ninjas,
|
|
icals,
|
|
output: _,
|
|
} = self;
|
|
|
|
let mut upstreams: Vec<_> = campfires
|
|
.iter()
|
|
.map(|cfg| &cfg.ui)
|
|
.cloned()
|
|
.chain(common_ninjas.iter().map(|cfg| &cfg.ui).cloned())
|
|
.chain(icals.iter().map(|cfg| &cfg.ui).cloned())
|
|
.collect();
|
|
upstreams.sort_by_key(|ui| ui.short_name.clone());
|
|
|
|
upstreams
|
|
}
|
|
}
|
|
|
|
// Arguments for the `auto` subcommand.
// NOTE: plain `//` comments on purpose — `///` doc comments on clap-derived
// items would change the generated `--help` text.
#[derive(clap::Parser)]
struct CliAuto {
    // Path to the TOML config file
    #[arg(long)]
    config: Utf8PathBuf,
}
|
|
|
|
/// Time-window parameters for event expansion, all derived from a single
/// "now" timestamp by [`Parameters::new`].
struct Parameters {
    /// Events before this time will be ignored if they cause an error
    ignore_before: DateTime<chrono_tz::Tz>,

    /// Events before this time will not be shown
    output_start: DateTime<chrono_tz::Tz>,

    /// Events after this time will not be shown
    output_stop: DateTime<chrono_tz::Tz>,

    /// Timezone all the above timestamps are expressed in
    tz: chrono_tz::Tz,
}
|
|
|
|
impl Parameters {
|
|
fn new(now: DateTime<chrono_tz::Tz>) -> Result<Self> {
|
|
// Snap the cutoffs to midnight so we won't present half of a day
|
|
let midnight = chrono::NaiveTime::default();
|
|
let output_start = (now - Duration::from_secs(86_400 * 2))
|
|
.with_time(midnight)
|
|
.single()
|
|
.context("output_start doesn't map to a single time in our timezone")?;
|
|
let output_stop = (now + Duration::from_secs(86_400 * 45))
|
|
.with_time(midnight)
|
|
.single()
|
|
.context("output_stop doesn't map to a single time in our timezone")?;
|
|
|
|
Ok(Parameters {
|
|
ignore_before: now - Duration::from_secs(86_400 * 365 * 2),
|
|
output_start,
|
|
output_stop,
|
|
tz: now.timezone(),
|
|
})
|
|
}
|
|
}
|
|
|
|
/// Similar to `icalendar::DatePerhapsTime` but doesn't allow Floating, and naive dates are stored as local midnight with an "all day" flag
#[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)]
struct DatePerhapsTime {
    /// The instant; for all-day events this is local midnight of the date
    dt: DateTime<chrono_tz::Tz>,

    /// True if the event has no specific time and takes all day on the given date
    ///
    /// Not implemented for Campfire because it hasn't shown up in the test data
    all_day: bool,
}
|
|
|
|
impl DatePerhapsTime {
|
|
fn date_naive(&self) -> chrono::NaiveDate {
|
|
self.dt.date_naive()
|
|
}
|
|
|
|
/// Returns None for all-day events
|
|
fn time(&self) -> Option<chrono::NaiveTime> {
|
|
if self.all_day {
|
|
None
|
|
} else {
|
|
Some(self.dt.time())
|
|
}
|
|
}
|
|
}
|
|
|
|
/// An event that's been duplicated according to its recurrence rules, so we can sort by datetimes
struct EventInstance {
    /// UI metadata of the calendar this instance came from
    calendar_ui: CalendarUi,

    /// Start date (and time, unless all-day) of this particular instance
    dtstart: DatePerhapsTime,

    /// Free-text location, if the source provided one
    location: Option<String>,

    /// Used internally to handle recurrence exceptions in ics
    ///
    /// Not implemented for Campfire
    recurrence_id: Option<DatePerhapsTime>,

    /// Event title, if any
    summary: Option<String>,

    /// Source-provided event identifier, if any
    uid: Option<String>,

    /// Event URL, if the source provided one
    url: Option<String>,
}
|
|
|
|
/// Parsed calendar data for every configured source, loaded from the
/// on-disk cache by [`read_data_from_disk`].
#[derive(Default)]
struct Data {
    campfires: Vec<wac_campfire::Calendar>,
    common_ninjas: Vec<wac_common_ninja::Calendar>,
    icals: Vec<wac_ical::Calendar>,
}
|
|
|
|
fn read_data_from_disk(config: &Config) -> Result<Data> {
|
|
Ok(Data {
|
|
campfires: config
|
|
.campfires
|
|
.iter()
|
|
.map(|cfg| wac_campfire::Calendar::read_from_config(cfg.clone()))
|
|
.collect::<Result<Vec<_>, _>>()?,
|
|
common_ninjas: config
|
|
.common_ninjas
|
|
.iter()
|
|
.map(|cfg| wac_common_ninja::Calendar::read_from_config(cfg.clone()))
|
|
.collect::<Result<Vec<_>, _>>()?,
|
|
icals: config
|
|
.icals
|
|
.iter()
|
|
.map(|cfg| wac_ical::Calendar::read_from_config(cfg.clone()))
|
|
.collect::<Result<Vec<_>, _>>()?,
|
|
})
|
|
}
|
|
|
|
fn process_data<'a>(
|
|
data: &'a Data,
|
|
config_output: &'a output::Config,
|
|
now: DateTime<chrono_tz::Tz>,
|
|
) -> Result<Vec<EventInstance>> {
|
|
let params = Parameters::new(now)?;
|
|
|
|
let mut instances = vec![];
|
|
|
|
for campfire in &data.campfires {
|
|
for ev in campfire
|
|
.event_instances(¶ms)?
|
|
.into_iter()
|
|
.filter(|x| config_output.filter(x))
|
|
{
|
|
instances.push(ev);
|
|
}
|
|
}
|
|
|
|
for common_ninja in &data.common_ninjas {
|
|
for ev in common_ninja
|
|
.event_instances(¶ms)?
|
|
.into_iter()
|
|
.filter(|x| config_output.filter(x))
|
|
{
|
|
instances.push(ev);
|
|
}
|
|
}
|
|
|
|
for ical in &data.icals {
|
|
for ev in ical
|
|
.event_instances(¶ms)?
|
|
.into_iter()
|
|
.filter(|x| config_output.filter(x))
|
|
{
|
|
instances.push(ev);
|
|
}
|
|
}
|
|
|
|
instances.sort_by_key(|ev| ev.dtstart);
|
|
Ok(instances)
|
|
}
|
|
|
|
/// User-Agent header value for outgoing HTTP requests:
/// `<crate name>_Z7FSRRA7/<crate version>`, assembled at compile time.
static APP_USER_AGENT: &str = concat!(
    env!("CARGO_PKG_NAME"),
    "_Z7FSRRA7/",
    env!("CARGO_PKG_VERSION"),
);
|
|
|
|
async fn do_everything(cli: &CliAuto) -> Result<()> {
|
|
let config = std::fs::read_to_string(&cli.config)?;
|
|
let config: Config = toml::from_str(&config)?;
|
|
let tz = &config.output.timezone;
|
|
let now = Utc::now().with_timezone(tz);
|
|
|
|
tracing::info!(?APP_USER_AGENT);
|
|
let client = reqwest::Client::builder()
|
|
.user_agent(APP_USER_AGENT)
|
|
.build()?;
|
|
for dl in config.downloads(now) {
|
|
let Some(download_url) = &dl.download_url else {
|
|
continue;
|
|
};
|
|
tracing::info!(url = download_url.to_string(), "requesting...");
|
|
let resp = client.get(download_url.clone()).send().await?;
|
|
if resp.status() != 200 {
|
|
bail!("Bad status {}", resp.status());
|
|
}
|
|
let bytes = resp.bytes().await?;
|
|
|
|
let temp_path = dl.file_path.with_extension(".wac_temp");
|
|
std::fs::write(&temp_path, &bytes)?;
|
|
std::fs::rename(&temp_path, &dl.file_path)?;
|
|
}
|
|
|
|
let data = read_data_from_disk(&config)?;
|
|
let instances = process_data(&data, &config.output, now)?;
|
|
output::write_html(&config.output, &config.upstreams(), &instances, now)?;
|
|
Ok(())
|
|
}
|
|
|
|
/// Seconds to sleep between auto cycles (9000 s = 2.5 hours)
const SLEEP_SECS: u64 = 9000;
|
|
|
|
fn main_auto(cli: CliAuto) -> Result<()> {
|
|
tracing_subscriber::fmt::init();
|
|
loop {
|
|
let rt = tokio::runtime::Runtime::new()?;
|
|
rt.block_on(async {
|
|
do_everything(&cli).await?;
|
|
Ok::<_, anyhow::Error>(())
|
|
})?;
|
|
rt.shutdown_timeout(Duration::from_secs(10));
|
|
tracing::info!("The service is eeping");
|
|
std::thread::sleep(Duration::from_secs(SLEEP_SECS));
|
|
}
|
|
}
|
|
|
|
// Arguments for the `debug-output` subcommand.
// NOTE: plain `//` comments on purpose — `///` doc comments on clap-derived
// items would change the generated `--help` text.
#[derive(clap::Parser)]
struct CliDebugOutput {
    // Path to the TOML config file
    #[arg(long)]
    config: Utf8PathBuf,
}
|
|
|
|
fn main_debug_output(cli: CliDebugOutput) -> Result<()> {
|
|
tracing_subscriber::fmt::init();
|
|
tracing::info!("Started tracing");
|
|
let config = std::fs::read_to_string(&cli.config).context("Failed to read config file")?;
|
|
let config: Config = toml::from_str(&config).context("Failed to parse config file")?;
|
|
|
|
let data = read_data_from_disk(&config).context("Failed to read data from disk")?;
|
|
|
|
let tz = &config.output.timezone;
|
|
let now = Utc::now().with_timezone(tz);
|
|
let instances = process_data(&data, &config.output, now).context("Failed to process data")?;
|
|
output::write_html(&config.output, &config.upstreams(), &instances, now)
|
|
.context("Failed to output HTML")?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
// Arguments for the `debug-rss` subcommand.
// NOTE: plain `//` comments on purpose — `///` doc comments on clap-derived
// items would change the generated `--help` text.
#[derive(clap::Parser)]
struct CliDebugRss {
    // Paths of RSS files to merge
    paths: Vec<Utf8PathBuf>,
}
|
|
|
|
/// Wraps rss::Item in our own type suitable for merging
pub(crate) struct FeedItem {
    /// Title of the channel the item came from
    channel_title: String,

    /// Parsed `pubDate`, used as the merge/sort key
    date: chrono::DateTime<chrono::FixedOffset>,

    /// The original RSS item
    inner: rss::Item,
}
|
|
|
|
fn main_debug_rss(cli: CliDebugRss) -> Result<()> {
|
|
let mut items = Vec::new();
|
|
let now = Utc::now();
|
|
|
|
for path in &cli.paths {
|
|
let s = std::fs::read(path)?;
|
|
let channel = rss::Channel::read_from(std::io::BufReader::new(std::io::Cursor::new(s)))?;
|
|
|
|
let channel_title = channel.title.clone();
|
|
for item in channel.into_items() {
|
|
let date = chrono::DateTime::parse_from_rfc2822(
|
|
item.pub_date()
|
|
.as_ref()
|
|
.context("Every RSS Item should have a pub_date")?,
|
|
)?;
|
|
|
|
let item = FeedItem {
|
|
channel_title: channel_title.clone(),
|
|
date,
|
|
inner: item,
|
|
};
|
|
|
|
items.push(item);
|
|
}
|
|
}
|
|
|
|
items.sort_by_key(|item| item.date);
|
|
|
|
for item in items.iter().rev() {
|
|
println!("{}", item.channel_title);
|
|
println!("{}", item.inner.title.as_ref().unwrap());
|
|
println!("{}", item.date.to_rfc3339());
|
|
println!();
|
|
}
|
|
|
|
std::fs::create_dir_all("output")?;
|
|
output::atomic_write(
|
|
"output/feed.html",
|
|
&output::feed_page(&items, now.with_timezone(&chrono_tz::UTC)),
|
|
)?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
// The CLI's subcommands; dispatched in `main`.
// NOTE: plain `//` comments on purpose — `///` doc comments on clap-derived
// items would change the generated `--help` text.
#[derive(clap::Subcommand)]
enum Commands {
    // Download, process, and render in an endless loop
    Auto(CliAuto),
    // Render from cached data only, no downloading
    DebugOutput(CliDebugOutput),
    // Merge and print RSS files
    DebugRss(CliDebugRss),
}
|
|
|
|
// Top-level CLI definition.
// NOTE: plain `//` comments on purpose — a `///` doc comment here would
// become the clap `about` text.
#[derive(clap::Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
|
|
|
|
fn main() -> Result<()> {
|
|
let cli = Cli::try_parse()?;
|
|
|
|
match cli.command {
|
|
Commands::Auto(x) => main_auto(x),
|
|
Commands::DebugOutput(x) => main_debug_output(x),
|
|
Commands::DebugRss(x) => main_debug_rss(x),
|
|
}
|
|
}
|