Compare commits: 5271c4c9aa...90f29bd2a4

4 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 90f29bd2a4 |  |
|  | 6ee952133d |  |
|  | 6e288b3525 |  |
|  | 001ca4879a |  |
@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{bail, Result};
 use camino::{Utf8Path, Utf8PathBuf};
 use chrono::{DateTime, Duration, Utc};
 use feed_rs::model::Entry;
@@ -99,33 +99,33 @@ impl FeedStoreFeed {
         self.info.fetch_data.as_ref()
     }

-    pub fn load_feed(&self, sanitize: bool) -> Option<Feed> {
+    pub fn load_feed(&self, sanitize: bool) -> Result<Feed> {
         if let Some(raw_feed) = &self.raw_feed {
             let parser = feed_rs::parser::Builder::new()
                 .sanitize_content(sanitize)
                 .build();
-            Some(parser.parse(raw_feed.as_bytes()).unwrap())
+            Ok(parser.parse(raw_feed.as_bytes())?)
         } else {
-            None
+            bail!("Feed not loaded yet: {}", self.url);
         }
     }

-    pub fn has_changed(&self, new_feed: &Feed) -> bool {
-        let Some(old_feed) = self.load_feed(false) else {
-            return true;
+    pub fn has_changed(&self, new_feed: &Feed) -> Result<bool> {
+        let Ok(old_feed) = self.load_feed(false) else {
+            return Ok(true);
         };

         let mut old_iter = old_feed.entries.iter();
         for new in &new_feed.entries {
             let Some(old) = old_iter.next() else {
-                return true;
+                return Ok(true);
             };
             if old != new {
-                return true;
+                return Ok(true);
             }
         }
         // ignoring any entries left in old_iter
-        false
+        Ok(false)
     }

     pub fn store(&mut self, mut response: Response<Body>) -> Result<bool> {
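The hunk above replaces Option-returning helpers with anyhow Results. A minimal standalone sketch of the same pattern, using made-up functions rather than the project's types: bail! turns the missing-data case into an error, and a let-else on the Result treats a failed load as "changed".

```rust
use anyhow::{bail, Result};

// Stand-in for load_feed: error out instead of returning None when nothing is loaded.
fn load(raw: Option<&str>) -> Result<String> {
    if let Some(raw) = raw {
        Ok(raw.to_string())
    } else {
        bail!("not loaded yet");
    }
}

// Stand-in for has_changed: a failed load simply counts as "changed".
fn has_changed(old_raw: Option<&str>, new: &str) -> Result<bool> {
    let Ok(old) = load(old_raw) else {
        return Ok(true);
    };
    Ok(old != new)
}

fn main() -> Result<()> {
    assert!(has_changed(None, "x")?);
    assert!(!has_changed(Some("x"), "x")?);
    Ok(())
}
```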
@@ -149,7 +149,7 @@ impl FeedStoreFeed {
         self.info.fetch_data = Some(fetchdata);
         Self::write(&self.path_settings, toml::to_string(&self.info)?)?;

-        if !self.has_changed(&feed) {
+        if !self.has_changed(&feed)? {
             return Ok(false);
         }
         debug!("Storing feed for {}.", self.url);
@@ -235,7 +235,7 @@ pub struct FeedStore {
 }

 impl FeedStore {
-    pub fn new(dir: &str, feedlist: &Vec<super::FeedConfig>) -> Self {
+    pub fn new(dir: &Utf8Path, feedlist: &Vec<super::FeedConfig>) -> Self {
         let dir = super::to_checked_pathbuf(dir);
         let mut feeds: BTreeMap<Url, FeedStoreFeed> = BTreeMap::new();

@@ -248,20 +248,25 @@ impl FeedStore {
     }

     pub fn collect(&mut self, max_entries: usize) -> (HashMap<String, Feed>, Vec<Entry>) {
+        debug!("Collecting feeds");
         let mut feeds = HashMap::new();
         let mut entries = Vec::new();

         for (feed_url, feed_store_feed) in self.feeds.iter_mut() {
-            let Some(mut feed) = feed_store_feed.load_feed(true) else {
-                warn!("Problem parsing feed file for feed {}", feed_url);
+            debug!("Collecting {feed_url}");
+            let mut feed = match feed_store_feed.load_feed(true) {
+                Ok(feed) => feed,
+                Err(e) => {
+                    warn!("Problem parsing feed file for feed {}: {}", feed_url, e);
                     continue;
+                }
             };

             for entry in &mut feed.entries {
                 entry.source = Some(feed_url.to_string());
             }

-            entries.append(&mut std::mem::take(&mut feed.entries));
+            entries.extend(feed.entries.clone());
             if entries.len() > 4 * max_entries {
                 entries = trim_entries(entries, max_entries);
             }
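On the switch from append(&mut std::mem::take(..)) to extend(..clone()): a tiny sketch with plain integer vectors (not feed entries) showing the behavioural difference, namely that the source vector now keeps its items. Presumably this is so each feed in the returned map still carries its own entries.

```rust
fn main() {
    // Old approach: append(&mut std::mem::take(..)) drains the source vector.
    let mut src = vec![1, 2, 3];
    let mut collected: Vec<i32> = Vec::new();
    collected.append(&mut std::mem::take(&mut src));
    assert!(src.is_empty());

    // New approach: extend(src.clone()) copies the items and leaves the source intact.
    let src = vec![1, 2, 3];
    let mut collected: Vec<i32> = Vec::new();
    collected.extend(src.clone());
    assert_eq!(src.len(), 3);
    assert_eq!(collected, vec![1, 2, 3]);
}
```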
src/main.rs (38 changed lines)
@@ -26,10 +26,11 @@ extern crate log;
 use crate::feed_store::FeedStore;
 use crate::fetcher::Fetcher;
 use anyhow::Result;
-use camino::Utf8PathBuf;
+use camino::{Utf8Path, Utf8PathBuf};
 use clap::Parser;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use std::fs;
+use url::Url;

 //mod atom_serializer;
 mod feed_store;
@@ -48,6 +49,8 @@ struct Args {
     config: String,
     #[arg(long, default_value_t = false)]
     no_fetch: bool,
+    #[arg(long, default_value_t = false)]
+    force: bool,
 }

 /// Config to be parsed from toml file given as cmdline option
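A minimal clap sketch of the new --force flag next to the existing --no-fetch, assuming clap's derive feature; only these two flags are shown and the help comments are illustrative, not the project's.

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// Skip fetching and only rebuild the output (illustrative help text).
    #[arg(long, default_value_t = false)]
    no_fetch: bool,
    /// Force a rebuild even if no feed changed (illustrative help text).
    #[arg(long, default_value_t = false)]
    force: bool,
}

fn main() {
    // Running the binary with `--force` sets args.force to true.
    let args = Args::parse();
    println!("no_fetch={} force={}", args.no_fetch, args.force);
}
```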
@@ -56,29 +59,42 @@ struct Config {
     /// to be used as part of the fetchers username header
     bot_name: String,
     /// where to store downloaded feeds and their metadata
-    feed_dir: String,
+    feed_dir: Utf8PathBuf,
     /// feeds to be agregated
     feeds: Vec<FeedConfig>,
     /// Email adress to use for the from header when fetching feeds
     from: String,
     /// where to build the output files
-    out_dir: String,
+    out_dir: Utf8PathBuf,
     /// templates folder
-    templates_dir: String,
+    templates_dir: Utf8PathBuf,
     /// How many feed entries should be included in the planet
     max_entries: usize,
     /// How soon to refresh, in hours
     refresh: usize,
+    /// A theme to apply, if any.
+    ///
+    /// This is a folder in the templates_dir. If an assets directory
+    /// exists within, the contents will be copied over to the out_dir.
+    theme: Option<String>,
+    /// List of languages for translations
+    #[serde(default)]
+    lang: Vec<Lang>,
 }

-pub fn to_checked_pathbuf(dir: &str) -> Utf8PathBuf {
-    let dir = Utf8PathBuf::from(dir);
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub struct Lang {
+    code: String,
+    name: String,
+    link: Url,
+}
+
+pub fn to_checked_pathbuf(dir: &Utf8Path) -> Utf8PathBuf {
     let m = dir
         .metadata()
         .unwrap_or_else(|_| panic!("Could not get metadata of dir: {dir}"));
     assert!(m.is_dir(), "Not a dir: {dir}");
-    dir
+    dir.to_path_buf()
 }

 /// Config for one individual input feed
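A hedged sketch of how the new theme and lang settings could look in the TOML config file and how serde picks them up. The struct below is a trimmed stand-in containing only the new fields, the values are invented, and link is a plain String here instead of url::Url to keep the sketch dependency-light.

```rust
use serde::Deserialize;

// Trimmed stand-in for the real Config: only the newly added fields.
#[derive(Debug, Deserialize)]
struct ThemeAndLang {
    theme: Option<String>,
    #[serde(default)]
    lang: Vec<Lang>,
}

#[derive(Debug, Deserialize)]
struct Lang {
    code: String,
    name: String,
    link: String, // the real struct uses url::Url
}

fn main() -> anyhow::Result<()> {
    // Invented example values; field names match the diff above.
    let toml_src = r#"
        theme = "default"

        [[lang]]
        code = "de"
        name = "Deutsch"
        link = "https://example.org/de/"
    "#;
    let cfg: ThemeAndLang = toml::from_str(toml_src)?;
    println!("{cfg:?}");
    Ok(())
}
```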
@@ -91,10 +107,10 @@ struct FeedConfig {
     url: String,
 }

-fn fetch(config: &Config, feed_store: &mut FeedStore) -> Result<bool> {
+fn fetch(config: &Config, feed_store: &mut FeedStore, force_rebuild: bool) -> Result<bool> {
     let fetcher = Fetcher::new(&config.bot_name, &config.from);
     let rebuild = feed_store.fetch(&fetcher, config.refresh)?;
-    Ok(rebuild)
+    Ok(rebuild || force_rebuild)
 }

 fn main() -> Result<()> {
@@ -116,7 +132,7 @@ fn main() -> Result<()> {
     let should_build = if args.no_fetch {
         true
     } else {
-        fetch(&config, &mut feed_store)?
+        fetch(&config, &mut feed_store, args.force)?
     };

     if should_build {
@@ -2,19 +2,25 @@ use crate::feed_store::FeedStore;
 use crate::to_checked_pathbuf;
 use crate::Config;
 use anyhow::Result;
+use camino::Utf8Path;
 use feed_rs::model::Feed;
 use std::collections::HashMap;
-use std::fs::File;
+use std::fs::{copy, create_dir_all, File};
 use tera::{from_value, Tera};

 pub fn build(config: &Config, feed_store: &mut FeedStore) -> Result<()> {
-    let mut tera = create_tera(&config.templates_dir)?;
+    let mut tera = if let Some(theme) = &config.theme {
+        create_tera(&config.templates_dir.join(theme))?
+    } else {
+        create_tera(&config.templates_dir)?
+    };
     let out_dir = to_checked_pathbuf(&config.out_dir);

     let mut context = tera::Context::new();
     let (feeds, entries): (HashMap<String, Feed>, _) = feed_store.collect(config.max_entries);
     context.insert("feeds", &feeds);
     context.insert("entries", &entries);
+    context.insert("lang", &config.lang);
     context.insert("PKG_AUTHORS", env!("CARGO_PKG_AUTHORS"));
     context.insert("PKG_HOMEPAGE", env!("CARGO_PKG_HOMEPAGE"));
     context.insert("PKG_NAME", env!("CARGO_PKG_NAME"));
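The theme handling above only changes which directory Tera's glob points at. A small sketch of that selection with a hypothetical helper; directory names are illustrative.

```rust
use anyhow::Result;
use camino::{Utf8Path, Utf8PathBuf};
use tera::Tera;

// Hypothetical helper mirroring the if-let in build(): pick the theme subfolder when set.
fn theme_dir(templates_dir: &Utf8Path, theme: Option<&str>) -> Utf8PathBuf {
    match theme {
        Some(theme) => templates_dir.join(theme),
        None => templates_dir.to_path_buf(),
    }
}

fn main() -> Result<()> {
    let dir = theme_dir(Utf8Path::new("templates"), Some("default"));
    let tera = Tera::new(&format!("{dir}/*"))?;
    println!("{} templates loaded", tera.get_template_names().count());
    Ok(())
}
```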
@@ -26,10 +32,39 @@ pub fn build(config: &Config, feed_store: &mut FeedStore) -> Result<()> {
         let file = File::create(format!("{out_dir}/{name}"))?;
         tera.render_to(name, &context, file)?;
     }

+    // Copy static assets from theme, if any
+    if let Some(theme) = &config.theme {
+        let assets_dir = config.templates_dir.join(theme).join("assets");
+        if assets_dir.is_dir() {
+            copy_assets(&assets_dir, &out_dir)?;
+        }
+    }
+
     Ok(())
 }

-fn create_tera(templates_dir: &str) -> Result<Tera> {
+/// Recursively copy assets from one dir to another
+///
+/// Symlinks are ignored.
+fn copy_assets(orig: &Utf8Path, dest: &Utf8Path) -> Result<()> {
+    if orig.is_dir() {
+        if !dest.is_dir() {
+            create_dir_all(dest)?;
+        }
+
+        for entry in orig.read_dir_utf8()? {
+            let entry = entry?;
+            copy_assets(entry.path(), &dest.join(entry.file_name()))?;
+        }
+    } else if orig.is_file() {
+        copy(orig, dest)?;
+    }
+
+    Ok(())
+}
+
+fn create_tera(templates_dir: &Utf8Path) -> Result<Tera> {
     let dir = to_checked_pathbuf(templates_dir);
     let mut tera = tera::Tera::new(&format!("{dir}/*"))?;
     // disable autoescape as this would corrupt urls or the entriy contents. todo check this!
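A self-contained usage sketch for the recursive copy_assets added above: it repeats the helper, builds a tiny invented assets tree under target/, and mirrors it into an output directory, assuming the anyhow and camino crates.

```rust
use anyhow::Result;
use camino::{Utf8Path, Utf8PathBuf};
use std::fs::{copy, create_dir_all, write};

// Same logic as the copy_assets added in the diff above.
fn copy_assets(orig: &Utf8Path, dest: &Utf8Path) -> Result<()> {
    if orig.is_dir() {
        if !dest.is_dir() {
            create_dir_all(dest)?;
        }
        for entry in orig.read_dir_utf8()? {
            let entry = entry?;
            copy_assets(entry.path(), &dest.join(entry.file_name()))?;
        }
    } else if orig.is_file() {
        copy(orig, dest)?;
    }
    Ok(())
}

fn main() -> Result<()> {
    // Invented paths for the demo tree.
    let base = Utf8PathBuf::from("target/copy_assets_demo");
    let assets = base.join("templates/default/assets");
    create_dir_all(assets.join("css"))?;
    write(assets.join("css/style.css"), "body { margin: 0 }")?;

    let out = base.join("out");
    copy_assets(&assets, &out)?;
    assert!(out.join("css/style.css").is_file());
    println!("assets copied to {out}");
    Ok(())
}
```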