Compare commits
6 commits: 27470083dc...d789425e47

Commits (SHA1):
d789425e47
92c30167df
dfbf23ed6a
d4d0adaacc
49f48acaf1
3b79aa21d2
6 changed files with 615 additions and 321 deletions
Cargo.lock (generated): 11 changes

@@ -148,6 +148,15 @@ version = "1.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
 
+[[package]]
+name = "camino"
+version = "1.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d07aa9a93b00c76f71bc35d598bed923f6d4f3a9ca5c24b7737ae1a292841c0"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "cc"
 version = "1.2.32"
@@ -1860,6 +1869,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "base64",
+ "camino",
  "chrono",
  "chrono-tz",
  "clap",
@@ -1868,6 +1878,7 @@ dependencies = [
  "reqwest",
  "rrule",
  "serde",
+ "serde_json",
  "tokio",
  "toml",
  "tracing",
Cargo.toml

@@ -6,6 +6,7 @@ edition = "2024"
 [dependencies]
 anyhow = "1.0.98"
 base64 = "0.22.1"
+camino = { version = "1.1.11", features = ["serde1"] }
 chrono = "0.4.41"
 chrono-tz = { version = "0.10.4", features = ["serde"] }
 clap = { version = "4.5.43", features = ["derive"] }
@@ -14,6 +15,7 @@ maud = "0.27.0"
 reqwest = "0.12.22"
 rrule = "0.14.0"
 serde = { version = "1.0.219", features = ["derive"] }
+serde_json = "1.0.142"
 tokio = { version = "1.47.1", features = ["rt-multi-thread", "time"] }
 toml = "0.9.5"
 tracing = "0.1.41"
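Side note: camino is pulled in with its serde1 feature so that Utf8PathBuf config fields (the cached file_path values used below) can be deserialized straight from the config file. A minimal sketch of that pattern, using an illustrative struct rather than the crate's real config types:

use camino::Utf8PathBuf;
use serde::Deserialize;

// Illustrative only; the real config structs live in src/main.rs and the wac_* modules.
#[derive(Deserialize)]
struct Cache {
    file_path: Utf8PathBuf,
}

fn main() -> anyhow::Result<()> {
    // With camino's "serde1" feature, Utf8PathBuf deserializes like a plain string.
    let cache: Cache = toml::from_str(r#"file_path = "cache/events.json""#)?;
    println!("{}", cache.file_path);
    Ok(())
}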
src/main.rs: 415 changes

@@ -1,32 +1,33 @@
-use anyhow::{Context as _, Result, anyhow, bail};
-use base64::Engine as _;
-use chrono::{DateTime, TimeZone as _, Utc};
+use anyhow::{Context as _, Result, bail};
+use camino::Utf8PathBuf;
+use chrono::{DateTime, Utc};
 use clap::Parser as _;
-use icalendar::{Component as _, EventLike as _};
 use serde::Deserialize;
-use std::{
-    collections::BTreeSet, io::Write as _, path::PathBuf, str::FromStr as _, time::Duration,
-};
+use std::{collections::BTreeSet, io::Write as _, time::Duration};
+use url::Url;
 
 #[cfg(test)]
 mod tests;
+mod wac_campfire;
+mod wac_ical;
 
-#[derive(Clone, Deserialize)]
-struct ConfigIcal {
-    /// Disk location to cache the ics file for debugging
-    file_path: PathBuf,
+#[derive(Clone, Default, Deserialize)]
+struct Downloadable {
+    /// URL to scrape to download the JSON
+    download_url: Option<Url>,
 
-    /// Magical ID we pass to Google to deep-link to Google Calendar events
-    google_id: Option<String>,
+    /// Disk location to cache the JSON file for debugging
+    file_path: Utf8PathBuf,
+}
 
+#[derive(Clone, Default, Deserialize)]
+struct CalendarUi {
     /// A canonical webpage we can direct users to
-    html_url: Option<url::Url>,
+    html_url: Option<Url>,
 
     /// Very short name for putting on each event
     short_name: String,
-
-    /// URL to scrape to download the ics file
-    ics_url: Option<url::Url>,
 }
 
 #[derive(Deserialize)]
@@ -50,26 +51,27 @@ struct ConfigOutput {
 
 #[derive(Deserialize)]
 struct Config {
-    icals: Vec<ConfigIcal>,
+    campfires: Vec<wac_campfire::Config>,
+    icals: Vec<wac_ical::Config>,
     output: ConfigOutput,
 }
 
 #[derive(clap::Parser)]
 struct CliAuto {
     #[arg(long)]
-    config: PathBuf,
+    config: Utf8PathBuf,
 }
 
 #[derive(clap::Parser)]
-struct CliIcsDebug {
+struct CliDebugOutput {
     #[arg(long)]
-    config: PathBuf,
+    config: Utf8PathBuf,
 }
 
 #[derive(clap::Subcommand)]
 enum Commands {
     Auto(CliAuto),
-    IcsDebug(CliIcsDebug),
+    DebugOutput(CliDebugOutput),
 }
 
 #[derive(clap::Parser)]
@@ -118,6 +120,10 @@ impl Parameters {
 #[derive(Clone, Copy, Debug, Ord, PartialOrd, Eq, PartialEq)]
 struct DatePerhapsTime {
     dt: DateTime<chrono_tz::Tz>,
 
+    /// True if the event has no specific time and takes all day on the given date
+    ///
+    /// Not implemented for Campfire because it hasn't shown up in the test data
     all_day: bool,
 }
 
@@ -136,309 +142,93 @@ impl DatePerhapsTime {
     }
 }
 
-fn normalize_date_perhaps_time(
-    x: &icalendar::DatePerhapsTime,
-    tz: chrono_tz::Tz,
-) -> Result<DatePerhapsTime> {
-    Ok(match x {
-        icalendar::DatePerhapsTime::DateTime(x) => {
-            let dt = x
-                .try_into_utc()
-                .context("Data error - Could not convert event datetime to UTC")?
-                .with_timezone(&tz);
-            DatePerhapsTime { dt, all_day: false }
-        }
-        icalendar::DatePerhapsTime::Date(date) => {
-            let midnight = chrono::NaiveTime::default();
-            let dt = tz.from_local_datetime(&date.and_time(midnight)).single().context("DateTime doesn't map to a single unambiguous datetime when converting to our timezone")?;
-            DatePerhapsTime { dt, all_day: true }
-        }
-    })
-}
-
-fn recurring_dates_opt(
-    params: &Parameters,
-    ev: &icalendar::Event,
-    rrule: &icalendar::Property,
-) -> Result<Option<impl Iterator<Item = DatePerhapsTime>>> {
-    let dtstart = ev
-        .get_start()
-        .context("Data error - Event has no DTSTART")?;
-    let all_day = match &dtstart {
-        icalendar::DatePerhapsTime::Date(_) => true,
-        icalendar::DatePerhapsTime::DateTime(_) => false,
-    };
-    let dtstart_norm = normalize_date_perhaps_time(&dtstart, params.tz)?;
-
-    let rr = rrule::RRule::from_str(rrule.value())
-        .with_context(|| format!("RRule parse failed `{}`", rrule.value()))?;
-
-    if let Some(until) = rr.get_until()
-        && *until < params.output_start
-    {
-        // This skips over some bad data in our test set where we fail to parse a recurring event that's already ended before our output window starts
-        return Ok(None);
-    }
-
-    let rrule_tz = params.tz.into();
-
-    let rr = rr.build(dtstart_norm.dt.with_timezone(&rrule_tz))?;
-    let dates = rr
-        .after(params.output_start.with_timezone(&rrule_tz))
-        .before(params.output_stop.with_timezone(&rrule_tz))
-        .all(10)
-        .dates
-        .into_iter()
-        .map(move |dtstart| DatePerhapsTime {
-            dt: dtstart.with_timezone(&params.tz),
-            all_day,
-        });
-    Ok(Some(dates))
-}
-
-fn recurring_dates(
-    params: &Parameters,
-    ev: &icalendar::Event,
-    rrule: &icalendar::Property,
-) -> Result<impl Iterator<Item = DatePerhapsTime>> {
-    Ok(recurring_dates_opt(params, ev, rrule)?
-        .into_iter()
-        .flatten())
-}
-
 /// An event that's been duplicated according to its recurrence rules, so we can sort by datetimes
-struct EventInstance<'a> {
-    dtstart: DatePerhapsTime,
-    ev: &'a icalendar::Event,
-}
-
-impl EventInstance<'_> {
-    fn google_url(&self, google_id: &str) -> Result<Option<String>> {
-        let uid = self.ev.get_uid().context("No UID")?;
-        if uid.len() > 100 {
-            // There's one event in one of our test Google calendars which originates from Microsoft Exchange and has a totally different UID format from any other event. I was not able to reverse it, so I'm skipping it for now.
-            return Ok(None);
-        }
-
-        // Strip off the back part of the Google UID
-        let idx = uid.find(['@', '_']).unwrap_or(uid.len());
-        let uid_2 = &uid[..idx];
-        let utc_dtstart = self
-            .dtstart
-            .dt
-            .with_timezone(&chrono_tz::UTC)
-            .format("%Y%m%dT%H%M%SZ")
-            .to_string();
-        let eid_plain = if self.ev.properties().get("RRULE").is_some() {
-            // Recurring events have an extra timestamp in their base64 to disambiguiate
-            format!("{uid_2}_{utc_dtstart} {google_id}")
-        } else {
-            format!("{uid_2} {google_id}")
-        };
-        let eid = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&eid_plain);
-        let mut link = url::Url::parse("https://www.google.com/calendar/event").unwrap();
-        link.query_pairs_mut().append_pair("eid", &eid);
-        Ok(Some(link.to_string()))
-    }
-
-    fn url(&self, google_id: Option<&str>) -> Result<Option<String>> {
-        if let Some(url) = self.ev.get_url() {
-            return Ok(Some(url.to_string()));
-        }
-        if let Some(google_id) = google_id {
-            return self.google_url(google_id);
-        }
-        Ok(None)
-    }
-}
-
-struct EventWithUrl<'a> {
-    calendar: &'a ConfigIcal,
-    dtstart: DatePerhapsTime,
-    ev: &'a icalendar::Event,
-    url: Option<String>,
-}
-
-impl<'a> EventWithUrl<'a> {
-    fn from_ei(calendar: &'a ConfigIcal, ei: EventInstance<'a>) -> Result<EventWithUrl<'a>> {
-        let url = ei.url(calendar.google_id.as_deref())?;
-        Ok(Self {
-            calendar,
-            dtstart: ei.dtstart,
-            ev: ei.ev,
-            url,
-        })
-    }
-}
-
-fn event_instances<'a>(
-    params: &Parameters,
-    ev: &'a icalendar::Event,
-) -> Result<Vec<EventInstance<'a>>> {
-    let dates = if let Some(rrule) = ev.properties().get("RRULE") {
-        recurring_dates(params, ev, rrule)?.collect()
-    } else {
-        // Event that occurs once
-
-        let dtstart = ev.get_start().context("Data error - Event has no start")?;
-        let dtstart_normalized = normalize_date_perhaps_time(&dtstart, params.tz)?;
-        if dtstart_normalized.dt < params.output_start || dtstart_normalized.dt > params.output_stop
-        {
-            return Ok(vec![]);
-        }
-        vec![dtstart_normalized]
-    };
-
-    let instances = dates
-        .into_iter()
-        .map(|dtstart| EventInstance { dtstart, ev })
-        .collect();
-    Ok(instances)
-}
-
-struct ICal {
-    /// The parsed ics file
-    cal: icalendar::Calendar,
-}
-
-/// Used to link recurrence exceptions to the original events they replace
-#[derive(Eq, Ord, PartialOrd, PartialEq)]
-struct RecurrenceKey<'a> {
-    recurrence_id: DatePerhapsTime,
-    uid: &'a str,
-}
-
-impl ICal {
-    fn read_from_str(s: &str) -> Result<Self> {
-        let cal = s.parse().map_err(|s| anyhow!("parse error {s}"))?;
-        let cal = Self { cal };
-        Ok(cal)
-    }
-
-    fn read_from_config(config: &ConfigIcal) -> Result<Self> {
-        let s = std::fs::read_to_string(&config.file_path)?;
-        Self::read_from_str(&s)
-    }
-
-    fn events(&self) -> impl Iterator<Item = &icalendar::Event> {
-        self.cal.components.iter().filter_map(|comp| {
-            if let icalendar::CalendarComponent::Event(ev) = comp {
-                Some(ev)
-            } else {
-                None
-            }
-        })
-    }
-
-    /// Returns an unsorted list of event instances for this calendar
-    fn event_instances(&self, params: &Parameters) -> Result<Vec<EventInstance<'_>>> {
-        let mut instances = vec![];
-        let mut recurrence_exceptions = BTreeSet::new();
-
-        for ev in self.events() {
-            let eis = match event_instances(params, ev)
-                .with_context(|| format!("Failed to process event with UID '{:?}'", ev.get_uid()))
-            {
-                Ok(x) => x,
-                Err(e) => {
-                    if ev.get_last_modified().context("Event has no timestamp")?
-                        < params.ignore_before
-                    {
-                        tracing::warn!("Ignoring error from very old event {e:?}");
-                        continue;
-                    } else {
-                        Err(e)?
-                    }
-                }
-            };
-            for ei in eis {
-                instances.push(ei);
-            }
-
-            if let Some(recurrence_id) = ev.get_recurrence_id() {
-                // This is a recurrence exception and we must handle it specially by later deleting the original event it replaces
-                let recurrence_id = normalize_date_perhaps_time(&recurrence_id, params.tz)
-                    .context("We should be able to normalize recurrence IDs")?;
-                let uid = ev
-                    .get_uid()
-                    .context("Every recurrence exception should have a UID")?;
-
-                recurrence_exceptions.insert(RecurrenceKey { recurrence_id, uid });
-            }
-        }
-
-        // Find all recurring events that are replaced with recurrence exceptions and delete the originals.
-        // There is probably a not-linear-time way to do this, but this should be fine.
-
-        instances.retain(|ev| {
-            if ev.ev.get_recurrence_id().is_some() {
-                // This is a recurrence exception, exceptions never delete themselves
-                return true;
-            }
-
-            let Some(uid) = ev.ev.get_uid() else {
-                // If there's no UID, we can't apply recurrence exceptions
-                return true;
-            };
-            let key = RecurrenceKey {
-                recurrence_id: ev.dtstart,
-                uid,
-            };
-            !recurrence_exceptions.contains(&key)
-        });
-
-        Ok(instances)
-    }
-}
+struct EventInstance {
+    calendar_ui: CalendarUi,
+    dtstart: DatePerhapsTime,
+    location: Option<String>,
+    /// Used internally to handle recurrence exceptions in ics
+    ///
+    /// Not implemented for Campfire
+    recurrence_id: Option<DatePerhapsTime>,
+    summary: Option<String>,
+    uid: Option<String>,
+    url: Option<String>,
+}
+
+impl EventInstance {
+    fn filter(&self, config_output: &ConfigOutput) -> bool {
+        if let Some(uid) = &self.uid
+            && config_output.hide_uids.contains(uid)
+        {
+            return false;
+        }
+        if let Some(summary) = &self.summary
+            && config_output.hide_summaries.contains(summary)
+        {
+            return false;
+        }
+        true
+    }
+}
 
 #[derive(Default)]
 struct Data {
-    icals: Vec<(ICal, ConfigIcal)>,
+    campfires: Vec<wac_campfire::Calendar>,
+    icals: Vec<wac_ical::Calendar>,
 }
 
 fn read_data_from_disk(config: &Config) -> Result<Data> {
-    let mut data = Data::default();
-    for config in &config.icals {
-        let cal = ICal::read_from_config(config)?;
-        data.icals.push((cal, config.clone()));
-    }
-
-    Ok(data)
+    Ok(Data {
+        campfires: config
+            .campfires
+            .iter()
+            .map(|cfg| wac_campfire::Calendar::read_from_config(cfg.clone()))
+            .collect::<Result<Vec<_>, _>>()?,
+        icals: config
+            .icals
+            .iter()
+            .map(|cfg| wac_ical::Calendar::read_from_config(cfg.clone()))
+            .collect::<Result<Vec<_>, _>>()?,
+    })
 }
 
 fn process_data<'a>(
     data: &'a Data,
     config_output: &'a ConfigOutput,
     now: DateTime<chrono_tz::Tz>,
-) -> Result<Vec<EventWithUrl<'a>>> {
+) -> Result<Vec<EventInstance>> {
     let params = Parameters::new(now)?;
 
     let mut instances = vec![];
-    for (ical, config) in &data.icals {
-        for ei in ical.event_instances(&params)? {
-            if let Some(uid) = ei.ev.get_uid()
-                && config_output.hide_uids.contains(uid)
-            {
-                continue;
-            }
-            if let Some(summary) = ei.ev.get_summary()
-                && config_output.hide_summaries.contains(summary)
-            {
-                continue;
-            }
-            let ei = EventWithUrl::from_ei(config, ei)?;
-            instances.push(ei);
-        }
-    }
-
-    instances.sort_by_key(|ei| ei.dtstart);
+    for campfire in &data.campfires {
+        for ev in campfire
+            .event_instances(&params)?
+            .into_iter()
+            .filter(|x| x.filter(config_output))
+        {
+            instances.push(ev);
+        }
+    }
+
+    for ical in &data.icals {
+        for ev in ical
+            .event_instances(&params)?
+            .into_iter()
+            .filter(|x| x.filter(config_output))
+        {
+            instances.push(ev);
+        }
+    }
+
+    instances.sort_by_key(|ev| ev.dtstart);
     Ok(instances)
 }
 
-// FIXME: Don't print to stdout / stderr
 fn output_html(
     config: &ConfigOutput,
-    instances: &[EventWithUrl],
+    instances: &[EventInstance],
     now: DateTime<chrono_tz::Tz>,
 ) -> Result<()> {
     let today = now.date_naive();
@@ -447,11 +237,6 @@ fn output_html(
     let mut html_list = vec![];
     let mut day_list = vec![];
     for ei in instances {
-        let summary = ei
-            .ev
-            .get_summary()
-            .unwrap_or("Data error BXH45NAR - No summary in event");
-
         let date = ei.dtstart.date_naive();
         let past = date < today;
         let month = date.format("%B").to_string();
@@ -475,8 +260,6 @@ fn output_html(
             }
         }
         if last_date_printed != Some(date) {
-            // println!("{date}");
-
             // FIXME: De-dupe
             if !day_list.is_empty() {
                 html_list.push(maud::html! {
@@ -506,35 +289,36 @@ fn output_html(
             .map(|t| t.format("%l:%M %P").to_string())
             .unwrap_or_else(|| "All day".to_string());
 
-        // println!(" {time} - {summary}");
+        let summary = ei
+            .summary
+            .as_deref()
+            .unwrap_or("Data error BXH45NAR - No summary in event");
         let summary = if let Some(url) = &ei.url {
             maud::html! {a href=(url) {(summary)}}
         } else {
             maud::html! {(summary)}
         };
 
-        let location = ei.ev.get_location();
-
         if past {
             day_list.push(maud::html! {
                 li class="past" { (time) " - " (summary) }
             });
         } else {
-            let calendar_link = if let Some(html_url) = &ei.calendar.html_url {
-                maud::html! { a href=(html_url) { (ei.calendar.short_name) } }
+            let calendar_link = if let Some(html_url) = &ei.calendar_ui.html_url {
+                maud::html! { a href=(html_url) { (ei.calendar_ui.short_name) } }
             } else {
-                maud::html! { (ei.calendar.short_name)}
+                maud::html! { (ei.calendar_ui.short_name)}
            };
 
             // This is where the main stuff happens
 
-            tracing::debug!(uid = ei.ev.get_uid(), summary = ei.ev.get_summary());
+            tracing::debug!(uid = ei.uid, summary = ei.summary);
             day_list.push(maud::html! {
                 li { details {
                     summary { (time) " - " (summary) }
                     ul {
                         li { (calendar_link) " calendar" }
-                        @if let Some(location) = location {
+                        @if let Some(location) = &ei.location {
                             li { "Location: " (location) }
                         }
                     }
@@ -628,20 +412,25 @@ async fn do_everything(cli: &CliAuto) -> Result<()> {
     let client = reqwest::Client::builder()
         .user_agent(APP_USER_AGENT)
         .build()?;
-    for ical in &config.icals {
-        let Some(ics_url) = &ical.ics_url else {
+    for dl in config
+        .campfires
+        .iter()
+        .map(|cf| &cf.dl)
+        .chain(config.icals.iter().map(|ical| &ical.dl))
+    {
+        let Some(download_url) = &dl.download_url else {
             continue;
         };
-        tracing::info!(url = ics_url.to_string(), "requesting...");
-        let resp = client.get(ics_url.clone()).send().await?;
+        tracing::info!(url = download_url.to_string(), "requesting...");
+        let resp = client.get(download_url.clone()).send().await?;
         if resp.status() != 200 {
             bail!("Bad status {}", resp.status());
         }
        let bytes = resp.bytes().await?;
 
-        let temp_path = ical.file_path.with_extension(".ics.temp");
+        let temp_path = dl.file_path.with_extension(".wac_temp");
         std::fs::write(&temp_path, &bytes)?;
-        std::fs::rename(&temp_path, &ical.file_path)?;
+        std::fs::rename(&temp_path, &dl.file_path)?;
     }
 
     let data = read_data_from_disk(&config)?;
@@ -670,7 +459,7 @@ fn main_auto(cli: CliAuto) -> Result<()> {
     }
 }
 
-fn main_ics_debug(cli: CliIcsDebug) -> Result<()> {
+fn main_debug_output(cli: CliDebugOutput) -> Result<()> {
     tracing_subscriber::fmt::init();
     tracing::info!("Started tracing");
     let config = std::fs::read_to_string(&cli.config)?;
@@ -691,6 +480,6 @@ fn main() -> Result<()> {
 
     match cli.command {
         Commands::Auto(x) => main_auto(x),
-        Commands::IcsDebug(x) => main_ics_debug(x),
+        Commands::DebugOutput(x) => main_debug_output(x),
     }
 }
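Note: with this refactor the config file carries two lists, campfires and icals, each flattening the shared Downloadable (download_url, file_path) and CalendarUi (html_url, short_name) fields, with google_id only on ical entries. A hedged sketch of what a matching config might look like; the URLs, paths, and names are invented, and the [output] table is omitted because the diff only shows part of ConfigOutput:

// Hypothetical TOML exercising the new Config { campfires, icals, output } shape.
// Field names come from Downloadable, CalendarUi, and wac_ical::Config in this diff;
// the concrete values are made up.
const EXAMPLE_CONFIG: &str = r#"
[[campfires]]
download_url = "https://example.org/campfire/events.json"
file_path = "cache/campfire.json"
short_name = "Campfire"
html_url = "https://example.org/events"

[[icals]]
download_url = "https://calendar.google.com/calendar/ical/example/public/basic.ics"
file_path = "cache/google.ics"
short_name = "Club"
google_id = "example@group.calendar.google.com"
"#;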
src/tests.rs: 57 changes

@@ -1,4 +1,5 @@
 use super::*;
+use chrono::TimeZone as _;
 
 fn chicago_time(
     year: i32,
@@ -19,9 +20,49 @@ fn dt_from_ts(ts: i64) -> DateTime<chrono_tz::Tz> {
         .with_timezone(&chrono_tz::America::Chicago)
 }
 
+#[test]
+fn campfire() -> Result<()> {
+    use wac_campfire::{Calendar, Config};
+
+    let s = r#"{
+    "message": "Success",
+    "eventList": [
+        {
+            "urlToShare" : "https://example.com",
+            "timeZone" : "Central",
+            "startTime" : "7:00 AM",
+            "startDate" : "2025-09-13",
+            "location" : "Three Sisters Park, 17189 IL-29, Chillicothe",
+            "Id" : "701Po000011ncWKIAY",
+            "eventName" : "zero roman mummy hatch",
+            "endTime" : "12:00 PM",
+            "endDate" : "2025-09-13",
+            "description" : "Finally! It's just you, Marion, a division of one!"
+        }
+    ]
+}"#;
+
+    let cal = Calendar::read_from_str(Config::default(), s)?;
+    let now = dt_from_ts(1755000000);
+    let params = Parameters::new(now)?;
+    let instances = cal.event_instances(&params)?;
+    assert_eq!(instances.len(), 1);
+
+    let event = &instances[0];
+    let expected_time = DatePerhapsTime {
+        dt: chicago_time(2025, 9, 13, 7, 0, 0),
+        all_day: false,
+    };
+    assert_eq!(event.dtstart, expected_time);
+    assert_eq!(event.summary.as_deref(), Some("zero roman mummy hatch"));
+    Ok(())
+}
+
 /// Expect that parsing a calendar works
 #[test]
 fn calendar_from_str() -> Result<()> {
+    use wac_ical::{Calendar, Config};
+
     // Blank lines added for clarity
     let s = r#"
 BEGIN:VCALENDAR
@@ -64,10 +105,10 @@ END:VEVENT
 END:VCALENDAR
 "#;
 
-    let ical = ICal::read_from_str(s)?;
+    let cal = Calendar::read_from_str(Config::default(), s)?;
     let now = dt_from_ts(1755000000);
     let params = Parameters::new(now)?;
-    let instances = ical.event_instances(&params)?;
+    let instances = cal.event_instances(&params)?;
     assert_eq!(instances.len(), 1);
 
     let event = &instances[0];
@@ -76,12 +117,14 @@ END:VCALENDAR
         all_day: false,
     };
     assert_eq!(event.dtstart, expected_time);
-    assert_eq!(event.ev.get_summary(), Some("zero roman mummy hatch"));
+    assert_eq!(event.summary.as_deref(), Some("zero roman mummy hatch"));
     Ok(())
 }
 
 #[test]
 fn hand_written() -> Result<()> {
+    use wac_ical::{Calendar, Config};
+
     let s = r#"
 BEGIN:VCALENDAR
@@ -102,7 +145,7 @@ END:VEVENT
 END:VCALENDAR
 "#;
 
-    let ical = ICal::read_from_str(s)?;
+    let ical = Calendar::read_from_str(Config::default(), s)?;
     let params = Parameters {
         ignore_before: chicago_time(2025, 1, 1, 0, 0, 0),
         output_start: chicago_time(2025, 7, 1, 0, 0, 0),
@@ -132,6 +175,8 @@ END:VCALENDAR
 /// Expect that recurrent exceptions work correctly and don't duplicate events
 #[test]
 fn recurrence_exceptions() -> Result<()> {
+    use wac_ical::{Calendar, Config};
+
     let s = r#"
 BEGIN:VCALENDAR
@@ -188,7 +233,7 @@ END:VEVENT
 END:VCALENDAR
 "#;
 
-    let ical = ICal::read_from_str(s)?;
+    let ical = Calendar::read_from_str(Config::default(), s)?;
     let params = Parameters {
         ignore_before: chicago_time(2025, 1, 1, 0, 0, 0),
         output_start: chicago_time(2025, 7, 1, 0, 0, 0),
@@ -220,7 +265,7 @@ END:VCALENDAR
     );
 
     for instance in &instances {
-        assert_eq!(instance.ev.get_summary(), Some("coil perm brush zippy"));
+        assert_eq!(instance.summary.as_deref(), Some("coil perm brush zippy"));
     }
 
     assert_eq!(instances.len(), 3);
src/wac_campfire.rs (new file, 173 lines):

//! Structs and functions specific to gathering input from Campfire, the special thing that Sierra Club uses for their events.
//!
//! Luckily it puts out JSON in a good format
//!
//! Note that recurring events aren't implemented for this cause I don't know how they work

use super::{CalendarUi, DatePerhapsTime, Downloadable, EventInstance, Parameters};
use anyhow::{Context as _, Result, bail};
use serde::Deserialize;

#[derive(Clone, Default, Deserialize)]
pub(crate) struct Config {
    #[serde(flatten)]
    pub(crate) dl: Downloadable,

    #[serde(flatten)]
    pub(crate) ui: CalendarUi,
}

#[derive(Deserialize)]
struct Event {
    description: String,
    #[serde(alias = "endDate")]
    end_date: Option<String>,
    #[serde(alias = "endTime")]
    end_time: Option<String>,
    #[serde(alias = "eventName")]
    event_name: String,
    location: String,
    #[serde(alias = "Id")]
    id: String,
    #[serde(alias = "startDate")]
    start_date: String,
    #[serde(alias = "startTime")]
    start_time: String,
    #[serde(alias = "timeZone")]
    time_zone: String,
    #[serde(alias = "urlToShare")]
    url_to_share: String,
}

/// The bit that we deserialize directly from JSON
#[derive(Deserialize)]
struct CalendarInner {
    #[serde(alias = "eventList")]
    event_list: Vec<Event>,
}

pub(crate) struct Calendar {
    config: Config,
    inner: CalendarInner,
}

fn parse_campfire_datetime(date: &str, time: &str, tz: &str) -> Result<DatePerhapsTime> {
    // Campfire only uses American timezones apparently, because they don't follow tzdata. We'll compensate for that slightly here

    let tz = match tz {
        "Central" => chrono_tz::US::Central,
        "Eastern" => chrono_tz::US::Eastern,
        "Mountain" => chrono_tz::US::Mountain,
        "Pacific" => chrono_tz::US::Pacific,
        _ => bail!("Can't recognize this timezone"),
    };

    let date = chrono::NaiveDate::parse_from_str(date, "%F").context("Couldn't parse date")?;
    let time =
        chrono::NaiveTime::parse_from_str(time, "%-I:%M %p").context("Couldn't parse time")?;

    let dt = date
        .and_time(time)
        .and_local_timezone(tz)
        .single()
        .context("Couldn't map timezones unambiguously")?;
    Ok(DatePerhapsTime { dt, all_day: false })
}

impl Calendar {
    pub(crate) fn event_instances(&self, params: &Parameters) -> Result<Vec<EventInstance>> {
        self.inner
            .event_list
            .iter()
            .filter_map(|ev| {
                let dtstart =
                    match parse_campfire_datetime(&ev.start_date, &ev.start_time, &ev.time_zone)
                        .context("Couldn't parse start time")
                    {
                        Ok(x) => x,
                        Err(e) => return Some(Err(e)),
                    };
                if dtstart.dt < params.output_start || dtstart.dt > params.output_stop {
                    return None;
                }

                Some(Ok(EventInstance {
                    calendar_ui: self.config.ui.clone(),
                    dtstart,
                    location: Some(ev.location.clone()),
                    recurrence_id: None,
                    summary: Some(ev.event_name.clone()),
                    uid: Some(ev.id.clone()),
                    url: Some(ev.url_to_share.clone()),
                }))
            })
            .collect()
    }

    pub(crate) fn read_from_str(config: Config, s: &str) -> Result<Self> {
        let inner = serde_json::from_str(s)?;
        Ok(Self { config, inner })
    }

    pub(crate) fn read_from_config(config: Config) -> Result<Self> {
        let s = std::fs::read_to_string(&config.dl.file_path)?;
        Self::read_from_str(config, &s)
    }
}

#[cfg(test)]
mod tests {
    use chrono::{DateTime, TimeZone as _};

    fn chicago_time(
        year: i32,
        month: u32,
        day: u32,
        hour: u32,
        minute: u32,
        second: u32,
    ) -> DateTime<chrono_tz::Tz> {
        chrono_tz::America::Chicago
            .with_ymd_and_hms(year, month, day, hour, minute, second)
            .unwrap()
    }

    #[test]
    fn parse_campfire_datetime() {
        for (date, time, tz, expected) in [
            (
                "2025-08-02",
                "7:00 AM",
                "Central",
                chicago_time(2025, 8, 2, 7, 0, 0),
            ),
            (
                "2025-08-09",
                "11:00 AM",
                "Central",
                chicago_time(2025, 8, 9, 11, 0, 0),
            ),
            (
                "2025-08-12",
                "3:15 PM",
                "Central",
                chicago_time(2025, 8, 12, 15, 15, 0),
            ),
        ] {
            assert_eq!(
                super::parse_campfire_datetime(date, time, tz).unwrap().dt,
                expected
            );
        }

        // Negative cases

        for (date, time, tz) in [
            ("2025-08-02", "7:00 AM", "Alaska"),
            ("2025-08-02", "", "Central"),
            ("2025-08-02", "All day", "Central"),
        ] {
            assert!(super::parse_campfire_datetime(date, time, tz).is_err());
        }
    }
}
src/wac_ical.rs (new file, 274 lines):

//! Structs and functions specific to gathering input from ics files, which is a popular format that Google Calendar happens to put out

use super::{CalendarUi, DatePerhapsTime, Downloadable, EventInstance, Parameters};
use anyhow::{Context as _, Result, anyhow};
use base64::Engine as _;
use chrono::TimeZone as _;
use icalendar::{Component as _, EventLike as _};
use serde::Deserialize;
use std::{collections::BTreeSet, str::FromStr as _};

/// Google Calendar has a public ics endpoint that we scrape for all upstream Google Calendars
#[derive(Clone, Default, Deserialize)]
pub(crate) struct Config {
    #[serde(flatten)]
    pub(crate) dl: Downloadable,

    /// Magical ID we pass to Google to deep-link to Google Calendar events
    google_id: Option<String>,

    #[serde(flatten)]
    pub(crate) ui: CalendarUi,
}

pub(crate) struct Calendar {
    /// The parsed ics file
    cal: icalendar::Calendar,

    /// The config used to load this calendar
    config: Config,
}

fn normalize_date_perhaps_time(
    x: &icalendar::DatePerhapsTime,
    tz: chrono_tz::Tz,
) -> Result<DatePerhapsTime> {
    Ok(match x {
        icalendar::DatePerhapsTime::DateTime(x) => {
            let dt = x
                .try_into_utc()
                .context("Data error - Could not convert event datetime to UTC")?
                .with_timezone(&tz);
            DatePerhapsTime { dt, all_day: false }
        }
        icalendar::DatePerhapsTime::Date(date) => {
            let midnight = chrono::NaiveTime::default();
            let dt = tz.from_local_datetime(&date.and_time(midnight)).single().context("DateTime doesn't map to a single unambiguous datetime when converting to our timezone")?;
            DatePerhapsTime { dt, all_day: true }
        }
    })
}

fn recurring_dates_opt(
    params: &Parameters,
    ev: &icalendar::Event,
    rrule: &icalendar::Property,
) -> Result<Option<impl Iterator<Item = DatePerhapsTime>>> {
    let dtstart = ev
        .get_start()
        .context("Data error - Event has no DTSTART")?;
    let all_day = match &dtstart {
        icalendar::DatePerhapsTime::Date(_) => true,
        icalendar::DatePerhapsTime::DateTime(_) => false,
    };
    let dtstart_norm = normalize_date_perhaps_time(&dtstart, params.tz)?;

    let rr = rrule::RRule::from_str(rrule.value())
        .with_context(|| format!("RRule parse failed `{}`", rrule.value()))?;

    if let Some(until) = rr.get_until()
        && *until < params.output_start
    {
        // This skips over some bad data in our test set where we fail to parse a recurring event that's already ended before our output window starts
        return Ok(None);
    }

    let rrule_tz = params.tz.into();

    let rr = rr.build(dtstart_norm.dt.with_timezone(&rrule_tz))?;
    let dates = rr
        .after(params.output_start.with_timezone(&rrule_tz))
        .before(params.output_stop.with_timezone(&rrule_tz))
        .all(10)
        .dates
        .into_iter()
        .map(move |dtstart| DatePerhapsTime {
            dt: dtstart.with_timezone(&params.tz),
            all_day,
        });
    Ok(Some(dates))
}

fn recurring_dates(
    params: &Parameters,
    ev: &icalendar::Event,
    rrule: &icalendar::Property,
) -> Result<impl Iterator<Item = DatePerhapsTime>> {
    Ok(recurring_dates_opt(params, ev, rrule)?
        .into_iter()
        .flatten())
}

fn google_url(
    dtstart: DatePerhapsTime,
    has_rrule: bool,
    uid: Option<&str>,
    google_id: &str,
) -> Result<Option<String>> {
    let uid = uid.context("No UID")?;
    if uid.len() > 100 {
        // There's one event in one of our test Google calendars which originates from Microsoft Exchange and has a totally different UID format from any other event. I was not able to reverse it, so I'm skipping it for now.
        return Ok(None);
    }

    // Strip off the back part of the Google UID
    let idx = uid.find(['@', '_']).unwrap_or(uid.len());
    let uid_2 = &uid[..idx];
    let utc_dtstart = dtstart
        .dt
        .with_timezone(&chrono_tz::UTC)
        .format("%Y%m%dT%H%M%SZ")
        .to_string();
    let eid_plain = if has_rrule {
        // Recurring events have an extra timestamp in their base64 to disambiguiate
        format!("{uid_2}_{utc_dtstart} {google_id}")
    } else {
        format!("{uid_2} {google_id}")
    };
    let eid = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&eid_plain);
    let mut link = url::Url::parse("https://www.google.com/calendar/event").unwrap();
    link.query_pairs_mut().append_pair("eid", &eid);
    Ok(Some(link.to_string()))
}

fn ical_event_instances(
    config_ical: &Config,
    params: &Parameters,
    ev: &icalendar::Event,
) -> Result<Vec<EventInstance>> {
    let dates = if let Some(rrule) = ev.properties().get("RRULE") {
        recurring_dates(params, ev, rrule)?.collect()
    } else {
        // Event that occurs once

        let dtstart = ev.get_start().context("Data error - Event has no start")?;
        let dtstart_normalized = normalize_date_perhaps_time(&dtstart, params.tz)?;
        if dtstart_normalized.dt < params.output_start || dtstart_normalized.dt > params.output_stop
        {
            return Ok(vec![]);
        }
        vec![dtstart_normalized]
    };

    let instances = dates
        .into_iter()
        .map(|dtstart| {
            let has_rrule = ev.properties().get("RRULE").is_some();
            let uid = ev.get_uid().map(|s| s.to_string());
            let url = if let Some(url) = ev.get_url() {
                Some(url.to_string())
            } else if let Some(google_id) = &config_ical.google_id {
                google_url(dtstart, has_rrule, uid.as_deref(), google_id)?
            } else {
                None
            };

            let recurrence_id = ev
                .get_recurrence_id()
                .as_ref()
                .map(|x| normalize_date_perhaps_time(x, params.tz))
                .transpose()?;

            Ok::<_, anyhow::Error>(EventInstance {
                calendar_ui: config_ical.ui.clone(),
                dtstart,
                location: ev.get_location().map(|s| s.to_string()),
                recurrence_id,
                summary: ev.get_summary().map(|s| s.to_string()),
                uid,
                url,
            })
        })
        .collect();
    instances
}

/// Used to link recurrence exceptions to the original events they replace
#[derive(Eq, Ord, PartialOrd, PartialEq)]
struct RecurrenceKey<'a> {
    recurrence_id: DatePerhapsTime,
    uid: &'a str,
}

impl Calendar {
    fn events(&self) -> impl Iterator<Item = &icalendar::Event> {
        self.cal.components.iter().filter_map(|comp| {
            if let icalendar::CalendarComponent::Event(ev) = comp {
                Some(ev)
            } else {
                None
            }
        })
    }

    /// Returns an unsorted list of event instances for this calendar
    pub(crate) fn event_instances(&self, params: &Parameters) -> Result<Vec<EventInstance>> {
        let mut instances = vec![];
        let mut recurrence_exceptions = BTreeSet::new();

        for ev in self.events() {
            let eis = match ical_event_instances(&self.config, params, ev)
                .with_context(|| format!("Failed to process event with UID '{:?}'", ev.get_uid()))
            {
                Ok(x) => x,
                Err(e) => {
                    if ev.get_last_modified().context("Event has no timestamp")?
                        < params.ignore_before
                    {
                        tracing::warn!("Ignoring error from very old event {e:?}");
                        continue;
                    } else {
                        Err(e)?
                    }
                }
            };
            for ei in eis {
                instances.push(ei);
            }

            if let Some(recurrence_id) = ev.get_recurrence_id() {
                // This is a recurrence exception and we must handle it specially by later deleting the original event it replaces
                let recurrence_id = normalize_date_perhaps_time(&recurrence_id, params.tz)
                    .context("We should be able to normalize recurrence IDs")?;
                let uid = ev
                    .get_uid()
                    .context("Every recurrence exception should have a UID")?;

                recurrence_exceptions.insert(RecurrenceKey { recurrence_id, uid });
            }
        }

        // Find all recurring events that are replaced with recurrence exceptions and delete the originals.
        // There is probably a not-linear-time way to do this, but this should be fine.

        instances.retain(|ev| {
            if ev.recurrence_id.is_some() {
                // This is a recurrence exception, exceptions never delete themselves
                return true;
            }

            let Some(uid) = &ev.uid else {
                // If there's no UID, we can't apply recurrence exceptions
                return true;
            };
            let key = RecurrenceKey {
                recurrence_id: ev.dtstart,
                uid,
            };
            !recurrence_exceptions.contains(&key)
        });

        Ok(instances)
    }

    pub(crate) fn read_from_str(config: Config, s: &str) -> Result<Self> {
        let cal = s.parse().map_err(|s| anyhow!("parse error {s}"))?;
        let cal = Self { cal, config };
        Ok(cal)
    }

    pub(crate) fn read_from_config(config: Config) -> Result<Self> {
        let s = std::fs::read_to_string(&config.dl.file_path)?;
        Self::read_from_str(config, &s)
    }
}
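Note: the deep link built by google_url reduces to base64url-encoding "<uid prefix>[_<UTC DTSTART>] <google_id>" and passing that as the eid query parameter. A standalone sketch of the non-recurring case, with made-up identifiers:

use base64::Engine as _;

fn main() {
    // Hypothetical values; the real ones come from the event UID and the configured google_id.
    let uid_prefix = "abc123def456";
    let google_id = "example@group.calendar.google.com";

    // Non-recurring events: no "_<UTC DTSTART>" suffix on the UID prefix.
    let eid_plain = format!("{uid_prefix} {google_id}");
    let eid = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&eid_plain);

    let mut link = url::Url::parse("https://www.google.com/calendar/event").unwrap();
    link.query_pairs_mut().append_pair("eid", &eid);
    println!("{link}");
}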