code cleaning, added year pager

master
Ondřej Hruška 5 years ago
parent af687cb8df
commit 6bcebd115c
Signed by: MightyPork
GPG Key ID: 2C5FD5035250423D
9 files changed (lines changed per file):
- Cargo.lock: 791
- Cargo.toml: 14
- build.sh: 4
- src/bread.rs: 285
- src/logging.rs: 85
- src/main.rs: 471
- web/assets/style.css: 41
- web/templates/detail.html: 6
- web/templates/index.html: 4

Cargo.lock (generated): 791 lines changed; file diff suppressed because it is too large.

@@ -9,9 +9,19 @@ markdown = { git = "https://github.com/johannhof/markdown.rs" }
chrono = "0.4"
rss = "1.6.1"
percent-encoding = "1.0.1"
image-utils = "0.2.0"
image = "0.21.0"
blake2 = "0.8.0"
base64 = "0.10.1"
failure = "0.1.5"
smart-default = "0.6.0"
env_logger = "0.7.1"
log = "0.4.8"
clap = "2.33.0"
serde = "1.0.105"
serde_json = "1.0.48"
serde_derive = "1.0.105"
itertools = "0.9.0"
[dependencies.image]
version = "0.21.0"
default-features = false
features = ["jpeg","jpeg_rayon"]

@@ -1,5 +1,5 @@
#!/bin/bash
#cargo run --release
target/release/bread
cargo run --release
#target/release/bread
cp -R web/* /home/ondra/devel/ondrovo/blog/base/bread/

@@ -0,0 +1,285 @@
use crate::GalleryInfo;
use std::path::{PathBuf, Path};
use percent_encoding::utf8_percent_encode;
use std::{fs, io};
use blake2::{Blake2b, Digest};
use rss::{ItemBuilder, Guid};
use chrono::{TimeZone, Date, Utc, NaiveDate, Datelike};
use std::fs::{OpenOptions, DirEntry, File};
use std::io::{Read, Write};
use failure::Fallible;
use std::borrow::Cow;
#[derive(Debug)]
pub struct BreadRendered {
detail: String,
title: String,
url: String,
detail_fname: String,
pub thumb: String,
pub rss_item: Option<rss::Item>,
}
#[derive(Debug)]
pub struct Bread {
path: PathBuf,
rel_path: PathBuf,
pub date: chrono::NaiveDate,
note: String,
rss_note: String,
images: Vec<PathBuf>,
pub rendered: BreadRendered,
}
#[derive(Debug)]
pub struct BreadLink {
label: String,
url: String,
}
impl Bread {
pub fn compile(&mut self, config: &mut GalleryInfo, prev : Option<BreadLink>, next : Option<BreadLink>) -> Fallible<()> {
let date = self.date.format("%Y/%m/%d").to_string();
let date_slug = self.date.format("%Y-%m-%d").to_string();
let detail_file = date_slug.clone() + ".html";
println!("+ {}", date_slug);
self.rendered.title = date.clone();
self.rendered.detail_fname = detail_file.clone();
// figure out the thumbnail pic
let (img_path, img_alt) = {
let mut first_img: &PathBuf = self.images.get(0).expect(&format!("No images for bread {}", date_slug));
for im in &self.images {
if im.file_name().unwrap().to_string_lossy().contains("cover") {
first_img = im;
break;
}
}
(
first_img.to_str().unwrap().to_owned(),
first_img.file_name().unwrap().to_string_lossy().to_owned(),
)
};
let (note, note_html) = if self.note.is_empty() {
(Cow::Owned(String::new()), "<!-- There's no note about this bread. -->".to_string())
} else {
(Cow::Borrowed(&self.note), format!(r#"<div class="note">{}</div>"#, self.note.trim()))
};
let thumb_fname = date_slug.clone() + "." + Path::new(&img_path).extension().unwrap().to_str().unwrap();
let thumb_path = config.thumbs_path.join(&thumb_fname);
let thumb_relpath = thumb_path.strip_prefix(&config.web_path)?;
let image_path_encoded = urlencode(thumb_relpath.to_string_lossy());
let image_real_path = config.web_path.join(img_path);
// Create the thumb
{
let mut img_file = fs::File::open(&image_real_path)?;
let mut hasher = Blake2b::new();
io::copy(&mut img_file, &mut hasher)?;
let hash = base64::encode(&hasher.result());
let hash_key = thumb_path.to_str().unwrap();
let old_hash = config.image_hashes.get(hash_key);
if old_hash.is_none() || !old_hash.unwrap().eq(&hash) {
println!("building thumb...");
let im = image::open(&image_real_path)?;
let im = im.thumbnail(500, 500);
im.save(&thumb_path)?;
config.image_hashes.put(hash_key.to_string(), hash);
}
}
// Prepare the thumb card for the gallery page
{
self.rendered.thumb = config
.template("_thumb.html")?
.replace("{detail_url}", &detail_file)
.replace("{img_src}", &image_path_encoded)
.replace("{img_alt}", &img_alt)
.replace("{title}", &date);
}
// Add to RSS
{
let image_url: String = config.base_url.to_owned() + "/" + &image_path_encoded;
let link: String = config.base_url.to_owned() + "/" + &detail_file;
let mut guid = Guid::default();
guid.set_value(link.clone());
guid.set_permalink(true);
let date_formatted: Date<Utc> = chrono::Utc.from_local_date(&self.date).unwrap();
let dt = date_formatted.and_hms(12, 0, 0);
let mut descr = String::new();
if !self.rss_note.is_empty() {
descr.push_str(&self.rss_note);
descr.push_str("<hr>");
}
descr.push_str(&note);
descr.push_str(&format!(
"<img src=\"{}\" alt=\"{}\"><p><i>Open the link for full-res photos ({} total)</i>",
image_url,
img_alt,
self.images.len()
));
self.rendered.url = link.clone();
let item: rss::Item = ItemBuilder::default()
.title(date.clone())
.link(link)
.description(descr)
.guid(guid)
.pub_date(dt.to_rfc2822())
.build().unwrap();
self.rendered.rss_item = Some(item);
}
let head_tpl = config.template("_head.html")?;
// Generate the detail page
{
let win_title = format!("Bread from {}", date);
let byear = self.date.year();
let detail = config
.template("detail.html")?
.replace("{head}", &head_tpl.replace("{title}", &win_title))
.replace("{title}", &win_title)
.replace("{date}", &date_slug);
let detail = if byear == config.latest_year {
detail.replace("{gallery_url}", "index.html")
} else {
detail.replace("{gallery_url}", &format!("{}.html", byear))
};
let detail = detail
.replace("{url}", &format!("{}/{}", config.base_url, detail_file))
.replace(
"{thumb_url}",
&format!("{}/thumbs/{}", config.base_url, thumb_fname),
)
.replace("{heading}", &date)
.replace("{prev}", &(match prev {
Some(b) => format!(r##"<a class="prev" href="{}" title="{}">&lt;</a>"##, b.url, b.label),
None => "".to_string()
}))
.replace("{next}", &(match next {
Some(b) => format!(r##"<a class="next" href="{}" title="{}">&gt;</a>"##, b.url, b.label),
None => "".to_string()
}))
.replace("{note}", &note_html);
let mut pics = String::new();
for img in &self.images {
let src = urlencode(img.to_string_lossy());
pics.push_str(&format!(
" <a href=\"{src}\"><img alt=\"Bread photo {src}\" src=\"{src}\"></a>\n",
src=&src
))
}
let detail = detail.replace("{images}", &pics.trim());
let mut f = OpenOptions::new().write(true).truncate(true).create(true).open(config.web_path.join(detail_file)).unwrap();
f.write(detail.as_bytes()).unwrap();
self.rendered.detail = detail;
}
Ok(())
}
pub fn to_link(&self) -> BreadLink {
BreadLink {
label: self.date.format("%Y/%m/%d").to_string(),
url: self.date.format("%Y-%m-%d.html").to_string(),
}
}
pub fn parse(base_dir: &PathBuf, bread_dir: &DirEntry) -> Fallible<Bread> {
let bpath = bread_dir.path();
let mut note = String::new();
let mut rss_note = String::new();
let mut note_path = bpath.join("note.txt");
let mut rss_note_path = bpath.join("rss.txt");
// try a md one as a fallback
if !note_path.exists() {
note_path = bpath.join("note.md");
}
if !rss_note_path.exists() {
rss_note_path = bpath.join("rss.md");
}
if note_path.exists() {
let mut note_file = File::open(note_path)?;
note_file.read_to_string(&mut note)?;
note = markdown::to_html(&note);
}
if rss_note_path.exists() {
let mut note_file = File::open(rss_note_path)?;
note_file.read_to_string(&mut rss_note)?;
rss_note = markdown::to_html(&rss_note);
}
let mut bread_files: Vec<DirEntry> = fs::read_dir(&bpath)?
.map(|e| e.unwrap())
.collect();
bread_files.sort_by(|x, y| {
x.file_name().cmp(&y.file_name())
});
let images = bread_files
.iter()
.filter(|&f| {
let fname = f.file_name();
let name = fname.to_string_lossy();
return name.ends_with(".jpg") || name.ends_with(".jpeg");
})
.map(|x| {
x.path().strip_prefix(base_dir).unwrap().to_path_buf()
})
.collect();
return Ok(Bread {
date: NaiveDate::parse_from_str(
&bpath.file_name().unwrap().to_string_lossy(),
"%Y-%m-%d",
)
.unwrap(),
rel_path: bpath.strip_prefix(base_dir)?.to_path_buf(),
path: bpath,
note,
rss_note,
images,
rendered: BreadRendered {
thumb: "".to_string(),
detail: "".to_string(),
rss_item: None,
title: "".to_string(),
url: "".to_string(),
detail_fname: "".to_string()
},
});
}
}
pub fn urlencode<'a>(url : impl Into<Cow<'a, str>>) -> String {
utf8_percent_encode(url.into().as_ref(), percent_encoding::DEFAULT_ENCODE_SET).to_string()
}
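The thumbnail block above only redoes the resize when the source image's Blake2b hash no longer matches the value cached in the hash dictionary. A minimal, self-contained sketch of that skip-if-unchanged pattern, using std's DefaultHasher and a plain HashMap as stand-ins for Blake2b and HashDict (all names here are illustrative, not part of the committed file):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::Hasher;
use std::{fs, io};

/// Rebuild `thumb` from `source` only when the source file's content hash
/// differs from the one remembered in `cache` (a stand-in for HashDict).
fn rebuild_if_changed(source: &str, thumb: &str, cache: &mut HashMap<String, String>) -> io::Result<bool> {
    // Hash the source bytes; the real code streams the file into Blake2b and base64-encodes the digest.
    let mut hasher = DefaultHasher::new();
    hasher.write(&fs::read(source)?);
    let hash = format!("{:016x}", hasher.finish());

    if cache.get(thumb).map(String::as_str) == Some(hash.as_str()) {
        return Ok(false); // unchanged: keep the existing thumbnail
    }

    // Placeholder for `image::open(source)?.thumbnail(500, 500).save(thumb)?`.
    fs::copy(source, thumb)?;
    cache.insert(thumb.to_string(), hash);
    Ok(true)
}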

@@ -0,0 +1,85 @@
use failure::{bail, Fallible};
pub const LOG_LEVELS: [&str; 5] = ["error", "warn", "info", "debug", "trace"];
/// 3rd-party libraries that produce log spam - we set these to a fixed higher level
/// to allow using e.g. TRACE without drowning out our custom messages
pub const SPAMMY_LIBS: [&str; 5] = ["tokio_reactor", "hyper", "reqwest", "mio", "want"];
pub fn add_clap_args<'b, 'a: 'b>(clap : clap::App<'a, 'b>) -> clap::App<'a, 'b> {
clap
.arg(clap::Arg::with_name("verbose")
.short("v").multiple(true)
.help("Increase logging verbosity (repeat to increase)"))
.arg(clap::Arg::with_name("log-level")
.short("l").long("log")
.takes_value(true).value_name("LEVEL")
.validator(|s| {
if LOG_LEVELS.contains(&s.as_str()) { return Ok(()); }
Err(format!("Bad log level: {}", s))
})
.help("Set logging verbosity (error,warning,info,debug,trace)"))
}
/// Initialize logging, using `level` as the base if not changed via command-line
/// arguments
pub fn init<'a, 'b>(mut level: &'a str,
argv: &'a clap::ArgMatches,
suppress: Option<&'b [&str]>) -> Fallible<&'a str> {
if !LOG_LEVELS.contains(&level) {
bail!("Invalid default log level: {}", level);
}
/* env RUST_LOG overrides default if set, but can be changed by CLI args */
let env_level = option_env!("RUST_LOG").unwrap_or(""); //env_logger::DEFAULT_FILTER_ENV
if !env_level.is_empty() {
level = env_level;
if !LOG_LEVELS.contains(&level) {
bail!("Invalid env log level: {}", level);
}
}
/* Explicitly requested level */
if let Some(l) = argv.value_of("log-level") {
level = l; // validated by clap
}
/* Verbosity increased */
if argv.is_present("verbose") {
// bump verbosity if -v's are present
let pos = LOG_LEVELS
.iter()
.position(|x| x == &level)
.unwrap();
level = match LOG_LEVELS
.iter()
.nth(pos + argv.occurrences_of("verbose") as usize)
{
Some(new_level) => new_level,
None => "trace",
};
}
let env = env_logger::Env::default().default_filter_or(level);
let mut builder = env_logger::Builder::from_env(env);
builder.format_timestamp_millis();
// set logging level for spammy libs. Ensure the configured log level is not exceeded
let mut lib_level = log::LevelFilter::Info;
if level == "warn" {
lib_level = log::LevelFilter::Warn;
} else if level == "error" {
lib_level = log::LevelFilter::Error;
}
for lib in suppress.unwrap_or(&SPAMMY_LIBS) {
builder.filter_module(lib, lib_level);
}
builder.init();
Ok(level)
}
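A short sketch of how these two helpers are meant to be wired into a clap app, mirroring what main.rs does further down (the app name is illustrative and `mod logging;` is assumed to be in scope):

use failure::Fallible;

fn main() -> Fallible<()> {
    // Register -v / --log on top of the application's own arguments.
    let app = logging::add_clap_args(clap::App::new("example"));
    let argv = app.get_matches();

    // "info" is the compiled-in default; RUST_LOG or --log override it and repeated -v flags raise it.
    let level = logging::init("info", &argv, None)?;
    log::info!("logging initialized at level {}", level);
    Ok(())
}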

@@ -1,291 +1,41 @@
use blake2::{Blake2b, Digest};
use chrono;
use chrono::offset::TimeZone;
use chrono::Date;
use chrono::NaiveDate;
use chrono::Utc;
use failure::Fallible;
use image_utils;
use markdown;
use percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
use rss::{Channel, ChannelBuilder, Guid, ItemBuilder};
use std::env;
use std::fs;
use std::fs::DirEntry;
use std::fs::File;
use std::fs::OpenOptions;
use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use base64;
// #[global_allocator]
// static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
#[macro_use]
extern crate smart_default;
#[macro_use]
extern crate failure;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate log;
use std::path::{PathBuf, Path};
use std::collections::HashMap;
use std::fs::{File, DirEntry, OpenOptions};
use std::io::{Read, Write};
use std::{env, fs, io};
use failure::Fallible;
use clap;
mod hash_dict;
#[derive(Debug)]
struct BreadRendered {
thumb: String,
detail: String,
rss_item: Option<rss::Item>,
title: String,
url: String,
detail_fname: String,
}
#[derive(Debug)]
struct Bread {
path: PathBuf,
rel_path: PathBuf,
date: chrono::NaiveDate,
note: String,
rss_note: String,
images: Vec<PathBuf>,
rendered: BreadRendered,
}
//impl From<std::option::NoneError> for failure::Error {
// fn from(x: std::option::NoneError) -> Self {
// failure::err_msg("Expected something, found nothing.")
// }
//}
#[derive(Debug)]
struct BreadLink {
label: String,
url: String,
}
impl Bread {
fn compile(&mut self, config: &mut GalleryConfig, prev : Option<BreadLink>, next : Option<BreadLink>) -> Fallible<()> {
let date = self.date.format("%Y/%m/%d").to_string();
let date_slug = self.date.format("%Y-%m-%d").to_string();
let detail_file = date_slug.clone() + ".html";
println!("+ {}", date_slug);
self.rendered.title = date.clone();
self.rendered.detail_fname = detail_file.clone();
// figure out the thumbnail pic
let (img_path, img_alt) = {
let mut first_img: &PathBuf = self.images.get(0).expect(&format!("No images for bread {}", date_slug));
for im in &self.images {
if im.file_name().unwrap().to_str().unwrap().contains("cover") {
first_img = im;
break;
}
}
(
first_img.to_str().unwrap().to_owned(),
first_img.file_name().unwrap().to_str().unwrap().to_owned(),
)
};
let note = if self.note.is_empty() {
"<p><i>There's no note about this bread.</i></p>"
} else {
&self.note
};
let thumb_fname = date_slug.clone() + "." + Path::new(&img_path).extension().unwrap().to_str().unwrap();
let thumb_path = config.thumbs_path.join(&thumb_fname);
let thumb_relpath = thumb_path.strip_prefix(&config.web_path)?;
let image_path_encoded =
utf8_percent_encode(thumb_relpath.to_str().unwrap(), DEFAULT_ENCODE_SET).to_string();
let image_real_path = config.web_path.join(img_path);
// Create the thumb
{
let mut img_file = fs::File::open(&image_real_path)?;
let mut hasher = Blake2b::new();
io::copy(&mut img_file, &mut hasher)?;
let hash = base64::encode(&hasher.result());
let hash_key = thumb_path.to_str().unwrap();
let old_hash = config.image_hashes.get(hash_key);
if old_hash.is_none() || !old_hash.unwrap().eq(&hash) {
println!("building thumb...");
let im = image::open(&image_real_path)?;
let im = im.thumbnail(500, 500);
im.save(&thumb_path)?;
config.image_hashes.put(hash_key.to_string(), hash);
}
}
// Prepare the thumb card for the gallery page
{
self.rendered.thumb = config
.template("_thumb.html")?
.replace("{detail_url}", &detail_file)
.replace("{img_src}", &image_path_encoded)
.replace("{img_alt}", &img_alt)
.replace("{title}", &date);
}
// Add to RSS
{
let image_url: String = config.base_url.to_owned() + "/" + &image_path_encoded;
let link: String = config.base_url.to_owned() + "/" + &detail_file;
let mut guid = Guid::default();
guid.set_value(link.clone());
guid.set_permalink(true);
let date_formatted: Date<Utc> = chrono::Utc.from_local_date(&self.date).unwrap();
let dt = date_formatted.and_hms(12, 0, 0);
let mut descr = String::new();
if !self.rss_note.is_empty() {
descr.push_str(&self.rss_note);
descr.push_str("<hr>");
}
descr.push_str(note);
descr.push_str(&format!(
"<img src=\"{}\" alt=\"{}\"><p><i>Open the link for full-res photos ({} total)</i>",
image_url,
img_alt,
self.images.len()
));
self.rendered.url = link.clone();
let item: rss::Item = ItemBuilder::default()
.title(date.clone())
.link(link)
.description(descr)
.guid(guid)
.pub_date(dt.to_rfc2822())
.build().unwrap();
self.rendered.rss_item = Some(item);
}
let head_tpl = config.template("_head.html")?;
// Generate the detail page
{
let win_title = format!("Bread from {}", date);
let detail = config
.template("detail.html")?
.replace("{head}", &head_tpl.replace("{title}", &win_title))
.replace("{title}", &win_title)
.replace("{date}", &date_slug)
.replace("{url}", &format!("{}/{}", config.base_url, detail_file))
.replace(
"{thumb_url}",
&format!("{}/thumbs/{}", config.base_url, thumb_fname),
)
.replace("{heading}", &date)
.replace("{prev}", &(match prev {
Some(b) => format!("<a class=\"prev\" href=\"{}\" title=\"{}\">&lt;</a>", b.url, b.label),
None => "".to_string()
}))
.replace("{next}", &(match next {
Some(b) => format!("<a class=\"next\" href=\"{}\" title=\"{}\">&gt;</a>", b.url, b.label),
None => "".to_string()
}))
.replace("{note}", note.trim());
let mut pics = String::new();
for img in &self.images {
pics.push_str(&format!(
" <a href=\"{src}\"><img src=\"{src}\"></a>\n",
src = &utf8_percent_encode(img.to_str().unwrap(), DEFAULT_ENCODE_SET).to_string()
))
}
let detail = detail.replace("{images}", &pics.trim());
let mut f = OpenOptions::new().write(true).truncate(true).create(true).open(config.web_path.join(detail_file)).unwrap();
f.write(detail.as_bytes()).unwrap();
self.rendered.detail = detail;
}
Ok(())
}
fn to_link(&self) -> BreadLink {
BreadLink {
label: self.date.format("%Y/%m/%d").to_string(),
url: self.date.format("%Y-%m-%d.html").to_string(),
}
}
fn parse(base_dir: &PathBuf, bread_dir: &DirEntry) -> Result<Bread, std::io::Error> {
let bpath = bread_dir.path();
let mut note = String::new();
let mut rss_note = String::new();
let mut note_path = bpath.join("note.txt");
let mut rss_note_path = bpath.join("rss.txt");
// try a md one as a fallback
if !note_path.exists() {
note_path = bpath.join("note.md");
}
if !rss_note_path.exists() {
rss_note_path = bpath.join("rss.md");
}
if note_path.exists() {
let mut note_file = File::open(note_path)?;
note_file.read_to_string(&mut note)?;
note = markdown::to_html(&note);
}
if rss_note_path.exists() {
let mut note_file = File::open(rss_note_path)?;
note_file.read_to_string(&mut rss_note)?;
rss_note = markdown::to_html(&rss_note);
}
let mut bread_files: Vec<DirEntry> = fs::read_dir(&bpath)?.map(|e| e.unwrap()).collect();
bread_files.sort_by(|x, y| x.file_name().cmp(&y.file_name()));
let images = bread_files
.iter()
.filter(|&f| {
let fname = f.file_name();
let name = fname.to_str().unwrap();
return name.ends_with(".png")
|| name.ends_with(".jpg")
|| name.ends_with(".jpeg")
|| name.ends_with(".gif");
})
.map(|x| x.path().strip_prefix(base_dir).unwrap().to_path_buf())
.collect();
return Ok(Bread {
date: NaiveDate::parse_from_str(
bpath.file_name().unwrap().to_str().unwrap(),
"%Y-%m-%d",
)
.unwrap(),
rel_path: bpath.strip_prefix(base_dir).unwrap().to_path_buf(),
path: bpath,
note,
rss_note,
images,
rendered: BreadRendered {
thumb: "".to_string(),
detail: "".to_string(),
rss_item: None,
title: "".to_string(),
url: "".to_string(),
detail_fname: "".to_string()
},
});
}
}
struct GalleryConfig {
mod logging;
mod bread;
use bread::Bread;
use crate::hash_dict::HashDict;
use chrono::Datelike;
use itertools::Itertools;
#[derive(Serialize, Deserialize, Debug, SmartDefault, Clone)]
#[serde(default)]
pub struct AppConfig {
#[default = "info"]
logging: String,
#[default = "https://www.ondrovo.com/bread"]
web_url : String,
}
pub struct GalleryInfo {
web_path: PathBuf,
data_path: PathBuf,
tpl_path: PathBuf,
@@ -293,9 +43,12 @@ struct GalleryConfig {
base_url: String,
templates: HashMap<String, String>,
image_hashes: hash_dict::HashDict,
latest_year: i32,
oldest_year: i32,
}
impl GalleryConfig {
impl GalleryInfo {
/// Read a named template from file, reusing from cache if possible
fn template(&mut self, name: &str) -> Fallible<String> {
if let Some(text) = self.templates.get(name) {
return Ok(text.clone());
@@ -308,34 +61,78 @@ impl GalleryConfig {
}
}
const CONFIG_FILE: &str = "breadgen.json";
fn main() -> Fallible<()> {
let version = clap::crate_version!();
let clap =
clap::App::new("Flowbox RT")
.version(version)
.arg(
clap::Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Sets a custom config file (default: breadgen.json)")
.takes_value(true),
);
let clap = logging::add_clap_args(clap);
let argv = clap.get_matches();
/* Load config */
let appcfg = {
let confile = argv.value_of("config")
.unwrap_or(CONFIG_FILE);
println!("Bread gallery builder {}\nrun with -h for help", version);
println!("config file: {}", confile);
let buf = read_file(confile)
.unwrap_or("{}".to_string());
let config: AppConfig = serde_json::from_str(&buf)?;
let _ = logging::init(&config.logging, &argv, None);
config
};
let mut config = {
let cwd = env::current_dir().unwrap();
let mut ginfo = {
let cwd = env::current_dir()?;
let web_path = cwd.join("web");
let data_path = web_path.join("data");
let tpl_path = web_path.join("templates");
let thumbs_path = web_path.join("thumbs");
GalleryConfig {
GalleryInfo {
web_path,
data_path,
tpl_path,
thumbs_path,
base_url: "https://www.ondrovo.com/bread".into(),
base_url: appcfg.web_url.clone(),
templates: HashMap::new(),
image_hashes: hash_dict::HashDict::load(cwd.join(".hashes.txt"))?,
image_hashes: HashDict::load(cwd.join(".hashes.txt"))?,
latest_year: 0,
oldest_year: 0,
}
};
let mut bread_dirs: Vec<DirEntry> = fs::read_dir(&config.data_path)?.filter_map(|e| e.ok()).collect();
bread_dirs.sort_by(|x, y| x.file_name().cmp(&y.file_name()));
let mut bread_dirs: Vec<DirEntry> = fs::read_dir(&ginfo.data_path)?
.filter_map(Result::ok)
.collect();
bread_dirs.sort_by(|x, y| {
x.file_name().cmp(&y.file_name())
});
let mut breads: Vec<Bread> = bread_dirs
.iter()
.filter_map(|p| Bread::parse(&config.web_path, &p).ok())
.filter_map(|p| Bread::parse(&ginfo.web_path, &p).ok())
.collect();
ginfo.latest_year = breads.last().unwrap().date.year();
ginfo.oldest_year = breads.first().unwrap().date.year();
for i in 0..breads.len() {
let preceding = if i <= 0 { None } else {
match breads.get(i - 1) {
@@ -351,32 +148,34 @@ fn main() -> Fallible<()> {
let cur = breads.get_mut(i).unwrap();
cur.compile(&mut config, preceding, following)?;
cur.compile(&mut ginfo, preceding, following)?;
}
let mut channel: Channel = ChannelBuilder::default()
let mut channel: rss::Channel = rss::ChannelBuilder::default()
.title("Piggo's Bread Gallery")
.link("https://www.ondrovo.com/bread")
.description("Sourdough feed")
.build()
.unwrap();
config.image_hashes.save();
let mut start = breads.len() as i32 - 10;
if start < 0 {
start = 0;
}
ginfo.image_hashes.save();
// rss
{
let start = (breads.len() as i32 - 10).max(0) as usize;
let mut channel_items = vec![];
for b in &breads[start as usize..] {
channel_items.push(b.rendered.rss_item.clone().unwrap());
for b in &mut breads[start..] {
channel_items.push(b.rendered.rss_item.take().unwrap());
}
println!("Generating feed...");
let f = OpenOptions::new().write(true).truncate(true).create(true).open(config.web_path.join("feed.xml")).unwrap();
info!("Generating feed...");
let f = OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(ginfo.web_path.join("feed.xml"))
.unwrap();
channel.set_items(channel_items);
channel.pretty_write_to(f, b' ', 2).unwrap();
}
@@ -386,18 +185,68 @@ fn main() -> Fallible<()> {
// make thumbs go from the newest to the oldest
breads.reverse();
let oldest = ginfo.oldest_year;
let latest = ginfo.latest_year;
let base_url = ginfo.base_url.clone();
let year_pager = |year| {
let mut buf = String::new();
for y in oldest..=latest {
if year == y {
buf.push_str(&format!(" <li class=\"active\"><a href=\"#\">{y}</a>\n", y=y));
} else {
if y == latest {
buf.push_str(&format!(" <li><a href=\"{u}/index.html\">{y}</a>\n", u=base_url, y=y));
} else {
buf.push_str(&format!(" <li><a href=\"{u}/{y}.html\">{y}</a>\n", u=base_url, y=y));
}
}
}
buf
};
for (year, year_breads) in &breads.iter().group_by(|b| b.date.year()) {
let mut thumbs_buf = String::new();
for b in &breads {
for b in year_breads {
thumbs_buf.push_str(&b.rendered.thumb);
}
println!("Building the gallery page");
let main = config.template("index.html")?.replace("{breads}", &thumbs_buf.trim())
.replace("{head}", config.template("_head.html")?.replace("{title}", "Piggo's breads").trim());
info!("Building the gallery page");
let head = ginfo.template("_head.html")?
.replace("{title}", "Piggo's breads");
let main = ginfo.template("index.html")?
.replace("{breads}", thumbs_buf.trim())
.replace("{year_pager}", year_pager(year).trim())
.replace("{head}", head.trim());
let fname = if year == ginfo.latest_year {
"index.html".to_string()
} else {
format!("{}.html", year)
};
let mut f = OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(ginfo.web_path.join(fname))
.unwrap();
let mut f = OpenOptions::new().write(true).truncate(true).create(true).open(config.web_path.join("index.html")).unwrap();
f.write(main.as_bytes()).unwrap();
}
}
Ok(())
}
pub fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
let path = path.as_ref();
let mut file = File::open(path)?;
let mut buf = String::new();
file.read_to_string(&mut buf)?;
Ok(buf)
}
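The year pager closure above emits one <li> per year from the oldest to the latest bread, linking the newest year to index.html and every earlier year to {year}.html. The same logic restated as a free function with a tiny demo (the URL is a placeholder; this code is illustrative, not part of the commit):

/// Same logic as the `year_pager` closure in main(), lifted out for illustration.
fn year_pager(current: i32, oldest: i32, latest: i32, base_url: &str) -> String {
    let mut buf = String::new();
    for y in oldest..=latest {
        if y == current {
            // The page being rendered gets an inert, highlighted entry.
            buf.push_str(&format!("  <li class=\"active\"><a href=\"#\">{}</a>\n", y));
        } else if y == latest {
            // The newest year is the main gallery page.
            buf.push_str(&format!("  <li><a href=\"{}/index.html\">{}</a>\n", base_url, y));
        } else {
            buf.push_str(&format!("  <li><a href=\"{}/{}.html\">{}</a>\n", base_url, y, y));
        }
    }
    buf
}

fn main() {
    // Rendering the 2019 page of a 2018-2020 gallery links 2018 to 2018.html,
    // marks 2019 as active, and links 2020 to index.html.
    print!("{}", year_pager(2019, 2018, 2020, "https://example.com/bread"));
}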

@@ -100,3 +100,44 @@ a {
padding: 0 1em;
}
#YearPager {
font-size: 120%;
list-style: none;
display: block;
text-align: center;
padding: 0;
}
#YearPager li {
display: inline-block;
margin: 0;
padding: 0;
background: #333;
border: 1px solid #999;
border-right-width: 0;
}
#YearPager a {
display: inline-block;
padding: .5rem .8rem;
}
#YearPager li:hover {
background: #444;
}
#YearPager li.active {
font-weight: bold;
}
#YearPager li:last-child {
border-right-width: 1px;
border-top-right-radius: 7px;
border-bottom-right-radius: 7px;
}
#YearPager li:first-child {
border-top-left-radius: 7px;
border-bottom-left-radius: 7px;
}

@@ -25,18 +25,16 @@
<meta itemprop="image" content="{thumb_url}">
</head>
<body>
<h1>{prev}<a href="index.html">{heading}</a>{next}</h1>
<h1>{prev}<a href="{gallery_url}">{heading}</a>{next}</h1>
<section class="BreadDetail">
<div class="note">
{note}
</div>
<div class="images">
{images}
</div>
</section>
<p id="crumb"><a href="index.html">Back to bread gallery</a></p>
<p id="crumb"><a href="{gallery_url}">Back to bread gallery</a></p>
</body>
</html>

@@ -8,6 +8,10 @@
<p id="crumb">Like and <a href="feed.xml">subscribe</a></p>
<ul id="YearPager">
{year_pager}
</ul>
<div class="MainGallery">
{breads}
</div>
