Make sure feeds get downloaded
This commit is contained in:
parent
bc99011bb6
commit
135b9e7333
1 changed files with 49 additions and 43 deletions
92
src/main.rs
92
src/main.rs
|
@ -42,15 +42,20 @@ struct Config {
|
|||
|
||||
impl Config {
|
||||
/// Collects every `SimpleDownload` this configuration requires, in a fixed
/// order: campfires, common-ninja calendars, feeds, then icals.
///
/// `self` is destructured instead of accessed field-by-field so that adding
/// a new field to `Config` produces a compile error here until the new field
/// is either chained in or explicitly ignored (as `output` is) — this is how
/// the `feeds` field ended up being downloaded at all.
fn downloads(&self, now: DateTime<chrono_tz::Tz>) -> impl Iterator<Item = SimpleDownload> {
    let Self {
        campfires,
        common_ninjas,
        feeds,
        icals,
        // Not a download source; ignored deliberately.
        output: _,
    } = self;

    campfires
        .iter()
        .map(|cf| cf.dl.clone())
        // `move` captures `now` by value so the closure can outlive this frame.
        .chain(common_ninjas.iter().map(move |cn| cn.simple_download(now)))
        .chain(feeds.iter().cloned())
        .chain(icals.iter().map(|ical| ical.dl.clone()))
}
|
||||
|
||||
fn upstream_calendars(&self) -> Vec<CalendarUi> {
|
||||
|
@ -226,6 +231,43 @@ fn process_data<'a>(
|
|||
Ok(instances)
|
||||
}
|
||||
|
||||
/// Wraps rss::Item in our own type suitable for merging
|
||||
pub(crate) struct FeedItem {
|
||||
channel_title: String,
|
||||
date: chrono::DateTime<chrono::FixedOffset>,
|
||||
inner: rss::Item,
|
||||
}
|
||||
|
||||
fn read_feeds(config: &Config) -> Result<Vec<FeedItem>> {
|
||||
let mut items = Vec::new();
|
||||
|
||||
for feed in &config.feeds {
|
||||
let path = &feed.file_path;
|
||||
let s = std::fs::read(path).with_context(|| format!("Cannot read {path}"))?;
|
||||
let channel = rss::Channel::read_from(std::io::BufReader::new(std::io::Cursor::new(s)))?;
|
||||
|
||||
let channel_title = channel.title.clone();
|
||||
for item in channel.into_items() {
|
||||
let date = chrono::DateTime::parse_from_rfc2822(
|
||||
item.pub_date()
|
||||
.as_ref()
|
||||
.context("Every RSS Item should have a pub_date")?,
|
||||
)?;
|
||||
|
||||
let item = FeedItem {
|
||||
channel_title: channel_title.clone(),
|
||||
date,
|
||||
inner: item,
|
||||
};
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
items.sort_by_key(|item| std::cmp::Reverse(item.date));
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
static APP_USER_AGENT: &str = concat!(
|
||||
env!("CARGO_PKG_NAME"),
|
||||
"_Z7FSRRA7/",
|
||||
|
@ -328,42 +370,6 @@ struct CliDebugFeed {
|
|||
config: Utf8PathBuf,
|
||||
}
|
||||
|
||||
/// Wraps rss::Item in our own type suitable for merging
|
||||
pub(crate) struct FeedItem {
|
||||
channel_title: String,
|
||||
date: chrono::DateTime<chrono::FixedOffset>,
|
||||
inner: rss::Item,
|
||||
}
|
||||
|
||||
fn read_feeds(config: &Config) -> Result<Vec<FeedItem>> {
|
||||
let mut items = Vec::new();
|
||||
|
||||
for feed in &config.feeds {
|
||||
let s = std::fs::read(&feed.file_path)?;
|
||||
let channel = rss::Channel::read_from(std::io::BufReader::new(std::io::Cursor::new(s)))?;
|
||||
|
||||
let channel_title = channel.title.clone();
|
||||
for item in channel.into_items() {
|
||||
let date = chrono::DateTime::parse_from_rfc2822(
|
||||
item.pub_date()
|
||||
.as_ref()
|
||||
.context("Every RSS Item should have a pub_date")?,
|
||||
)?;
|
||||
|
||||
let item = FeedItem {
|
||||
channel_title: channel_title.clone(),
|
||||
date,
|
||||
inner: item,
|
||||
};
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
items.sort_by_key(|item| std::cmp::Reverse(item.date));
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
fn main_debug_feed(cli: CliDebugFeed) -> Result<()> {
|
||||
tracing_subscriber::fmt::init();
|
||||
tracing::info!("Started tracing");
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue