Compare commits

..

No commits in common. "main" and "v0.1.0" have entirely different histories.
main ... v0.1.0

3 changed files with 173 additions and 199 deletions

View File

@ -1,92 +0,0 @@
use std::{path::PathBuf, io::Write};
use chrono::{serde::ts_seconds, Utc, DateTime};
use serde::{Serialize, Deserialize};
use crate::html::HTMLStringToVec;
/// Load cached menu data from `<cache_dir>/n58-kantine/data.json`.
///
/// Falls back to the current directory when no platform cache directory is
/// available. Returns an error when the file is missing or unreadable, or
/// when its contents are not valid JSON for `CachedData`.
pub(crate) fn get_cached_data() -> anyhow::Result<CachedData> {
    // `unwrap_or_else` avoids building the fallback PathBuf eagerly.
    let mut cache = dirs::cache_dir().unwrap_or_else(|| PathBuf::from("."));
    cache.push("n58-kantine");
    cache.push("data.json");
    let s = std::fs::read_to_string(cache)?;
    let cached_data: CachedData = serde_json::from_str(&s)?;
    Ok(cached_data)
}
/// Persist `data` as pretty-printed JSON to
/// `<cache_dir>/n58-kantine/data.json`, creating the directory if needed.
///
/// Returns an error when serialization or any filesystem operation fails.
pub(crate) fn set_cached_data(data: &CachedData) -> anyhow::Result<()> {
    // `unwrap_or_else` avoids building the fallback PathBuf eagerly.
    let mut cache = dirs::cache_dir().unwrap_or_else(|| PathBuf::from("."));
    cache.push("n58-kantine");
    let dir = cache.clone();
    cache.push("data.json");
    let json = serde_json::to_string_pretty(data)?;
    std::fs::create_dir_all(dir)?;
    // `fs::write` creates/truncates and writes in one call, replacing the
    // manual `File::create` + `write_all` pair.
    std::fs::write(cache, json.as_bytes())?;
    Ok(())
}
// Base URLs for the weekly menu and soup endpoints; the requested date is
// appended to the `dato=` query parameter by the caller.
// (`'static` is implied on `const` items, so it is omitted.)
const MENU_URL: &str =
    "http://kantinemeny.azurewebsites.net/ukesmeny?lokasjon=toro@albatross-as.no&dato=";
const SOUP_URL: &str =
    "http://kantinemeny.azurewebsites.net/ukesmenysuppe?lokasjon=toro@albatross-as.no&dato=";
/// Fetch the menu and soup pages, parse them into fresh `CachedData`
/// stamped with `now`, and persist the result to the cache.
fn fetch_fresh_data(now: DateTime<Utc>) -> anyhow::Result<CachedData> {
    let data = CachedData {
        // `DateTime<Utc>` is `Copy`; no clone needed.
        timestamp: now,
        items: ureq::get(MENU_URL)
            .call()?
            .into_string()?
            .html_string_to_vec()?,
        soup_items: ureq::get(SOUP_URL)
            .call()?
            .into_string()?
            .html_string_to_vec()?,
    };
    set_cached_data(&data)?;
    Ok(data)
}

/// Return the cached data when it is at most 60 minutes old; otherwise —
/// or when the cache is missing/corrupt — fetch fresh data, update the
/// cache, and return that.
pub(crate) fn get_cached_or_fetch_data(now: DateTime<Utc>) -> anyhow::Result<CachedData> {
    match get_cached_data() {
        // Fresh cache hit: use it as-is.
        Ok(data) if (now - data.timestamp).num_minutes() <= 60 => Ok(data),
        // Stale hit or any cache error: re-fetch and persist.
        // (The original duplicated the fetch block in both arms.)
        _ => fetch_fresh_data(now),
    }
}
/// A single day's menu entry parsed from the canteen web page.
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct MenuItem {
    /// Dish name (from the `.dagsrett` element); `None` when absent.
    pub title: Option<String>,
    /// Garnish/side text (from `.dagsrettgarnityr`); `None` when absent.
    pub additional: Option<String>,
}
/// Menu data persisted to disk together with the time it was fetched,
/// used to decide whether a re-fetch is needed.
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct CachedData {
    /// Fetch time; serialized as Unix seconds via `ts_seconds`.
    #[serde(with = "ts_seconds")]
    pub timestamp: chrono::DateTime<Utc>,
    /// Main dishes, one entry per parsed `.dagsinfo` block.
    pub items: Vec<MenuItem>,
    /// Soups, one entry per parsed `.dagsinfo` block on the soup page.
    pub soup_items: Vec<MenuItem>,
}

View File

@ -1,71 +0,0 @@
use crate::data::MenuItem;
// NOTE(review): trait name has a typo ("Deocde" -> "Decode"); renaming
// would touch the impl and all call sites, so it is left unchanged here.
/// Decodes HTML entities (e.g. `&amp;` -> `&`) in a string-like value.
trait DeocdeHTMLEnts {
    /// Return a copy of `self` with HTML entities decoded.
    fn decode_html_ents(&self) -> Self;
}
impl DeocdeHTMLEnts for String {
    /// Decode HTML entities via `html_escape`, returning an owned copy.
    fn decode_html_ents(&self) -> Self {
        // `decode_html_entities` is the Cow-returning twin of
        // `decode_html_entities_to_string`; `into_owned` yields the String.
        html_escape::decode_html_entities(self).into_owned()
    }
}
/// Parses a canteen HTML page into a list of menu items.
pub(crate) trait HTMLStringToVec {
    /// Parse `self` as HTML and extract one `MenuItem` per day block.
    fn html_string_to_vec(&self) -> anyhow::Result<Vec<MenuItem>>;
}
/// Decoded inner text of the last `.dagsrett` element under `tag`, or
/// `None` when the selector matches nothing or the node is not a tag.
fn get_item(p: &tl::Parser, tag: &tl::HTMLTag) -> Option<String> {
    let handle = tag.query_selector(p, ".dagsrett")?.last()?;
    let element = handle.get(p)?.as_tag()?;
    Some(element.inner_text(p).to_string().decode_html_ents())
}
/// Decoded inner text of the last `.dagsrettgarnityr` element under `tag`,
/// or `None` when the selector matches nothing or the node is not a tag.
fn get_item_additional(p: &tl::Parser, tag: &tl::HTMLTag) -> Option<String> {
    let handle = tag.query_selector(p, ".dagsrettgarnityr")?.last()?;
    let element = handle.get(p)?.as_tag()?;
    Some(element.inner_text(p).to_string().decode_html_ents())
}
impl HTMLStringToVec for String {
    /// Parse a canteen HTML page into one `MenuItem` per `.dagsinfo` block.
    ///
    /// Returns an error when the HTML cannot be parsed or the selector is
    /// rejected; nodes that are not element tags are skipped instead of
    /// panicking (the original `unwrap()`ed both cases).
    fn html_string_to_vec(&self) -> anyhow::Result<Vec<MenuItem>> {
        let dom = tl::parse(self, tl::ParserOptions::default())?;
        let p = dom.parser();
        // `query_selector` returns None for an unparseable selector; turn
        // that into an error rather than unwrapping.
        let matches = dom
            .query_selector(".dagsinfo")
            .ok_or_else(|| anyhow::anyhow!("invalid selector \".dagsinfo\""))?;
        // Stream straight from the selector iterator — no intermediate Vec.
        let items = matches
            .filter_map(|handle| handle.get(p))
            // Skip non-tag nodes (e.g. text nodes) instead of panicking.
            .filter_map(|node| node.as_tag())
            .map(|tag| MenuItem {
                title: get_item(p, tag),
                additional: get_item_additional(p, tag),
            })
            .collect();
        Ok(items)
    }
}

View File

@ -1,9 +1,10 @@
mod html;
mod data;
use crate::data::{get_cached_or_fetch_data, MenuItem};
use owo_colors::{AnsiColors, OwoColorize, Stream::Stdout}; use owo_colors::{AnsiColors, OwoColorize, Stream::Stdout};
use chrono::Datelike; use serde::{Deserialize, Serialize};
use std::{path::PathBuf, io::Write};
use dirs;
use chrono::{serde::ts_seconds, Datelike, Utc};
use clap::{Parser, ValueEnum}; use clap::{Parser, ValueEnum};
use lazy_static::lazy_static; use lazy_static::lazy_static;
@ -45,6 +46,10 @@ pub enum ColoriseOutput {
False, False,
} }
const MENU_URL: &'static str =
"http://kantinemeny.azurewebsites.net/ukesmeny?lokasjon=toro@albatross-as.no&dato=";
const SOUP_URL: &'static str =
"http://kantinemeny.azurewebsites.net/ukesmenysuppe?lokasjon=toro@albatross-as.no&dato=";
fn main() -> anyhow::Result<()> { fn main() -> anyhow::Result<()> {
let args = Cli::parse(); let args = Cli::parse();
@ -54,11 +59,41 @@ fn main() -> anyhow::Result<()> {
.weekday() .weekday()
.num_days_from_monday(); .num_days_from_monday();
let Ok(data) = get_cached_or_fetch_data(now) else { let cache = match get_cached_data() {
panic!("Failed to get data"); Ok(data) => {
if (now - data.timestamp).num_minutes() > 60 {
let data = CachedData {
timestamp: now.clone(),
items: ureq::get(MENU_URL).call()?
.into_string()?
.html_string_to_vec()?,
soup_items: ureq::get(SOUP_URL).call()?
.into_string()?
.html_string_to_vec()?,
}; };
let items = cond!(args.soup => data.soup_items; data.items); set_cached_data(&data)?;
data
} else {
data
}
},
Err(_) => {
let data = CachedData {
timestamp: now.clone(),
items: ureq::get(MENU_URL).call()?
.into_string()?
.html_string_to_vec()?,
soup_items: ureq::get(SOUP_URL).call()?
.into_string()?
.html_string_to_vec()?,
};
set_cached_data(&data)?;
data
}
};
let items = cond!(args.soup => cache.soup_items; cache.items);
match args.color { match args.color {
ColoriseOutput::True => { ColoriseOutput::True => {
@ -71,15 +106,33 @@ fn main() -> anyhow::Result<()> {
}; };
if args.today { if args.today {
if weekday_offset as usize >= (&items).len() {
return Ok(())
}
let item = &items[weekday_offset as usize];
let title: String = match item.title.clone() {
Some(t) => t,
None => String::new(),
};
let additional = item.additional.clone().unwrap_or_default();
let mut final_s = String::new();
final_s.push_str(&title);
if !additional.is_empty() {
let f = format!(" - {}", additional);
final_s.push_str(&f);
}
println!("{}", final_s);
display_today(items, weekday_offset);
return Ok(()) return Ok(())
} }
if args.json { if args.json {
let Ok(json) = serde_json::to_string_pretty(&items) else { let json =serde_json::to_string_pretty(&items)?;
panic!("Failed to convert into JSON.")
};
println!("{}", json); println!("{}", json);
return Ok(()) return Ok(())
@ -90,6 +143,33 @@ fn main() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
fn get_cached_data() -> anyhow::Result<CachedData> {
let mut cache = dirs::cache_dir().unwrap_or(PathBuf::from("."));
cache.push("n58-kantine");
cache.push("data.json");
let s = std::fs::read_to_string(cache)?;
let cached_data: CachedData = serde_json::from_str(&s)?;
Ok(cached_data)
}
fn set_cached_data(data: &CachedData) -> anyhow::Result<()> {
let mut cache = dirs::cache_dir().unwrap_or(PathBuf::from("."));
cache.push("n58-kantine");
let dir = cache.clone();
cache.push("data.json");
let json = serde_json::to_string_pretty(data)?;
std::fs::create_dir_all(dir)?;
let mut f = std::fs::File::create(cache)?;
f.write_all(json.as_bytes())?;
Ok(())
}
lazy_static! { lazy_static! {
static ref WEEKDAYS: Vec<&'static str> = static ref WEEKDAYS: Vec<&'static str> =
@ -121,29 +201,86 @@ fn display_week(items: Vec<MenuItem>, day: u32) {
} }
} }
fn display_today(items: Vec<MenuItem>, weekday_offset: u32) { trait DeocdeHTMLEnts {
fn decode_html_ents(&self) -> Self;
if weekday_offset as usize >= (&items).len() {
return
} }
let item = &items[weekday_offset as usize]; impl DeocdeHTMLEnts for String {
fn decode_html_ents(&self) -> Self {
let title: String = match item.title.clone() { let mut s = String::new();
Some(t) => t, html_escape::decode_html_entities_to_string(self, &mut s);
None => String::new(), s
}; }
let additional = item.additional.clone().unwrap_or_default();
let mut final_s = String::new();
final_s.push_str(&title);
if !additional.is_empty() {
let f = format!(" - {}", additional);
final_s.push_str(&f);
} }
println!("{}", final_s); trait HTMLStringToVec {
fn html_string_to_vec(&self) -> anyhow::Result<Vec<MenuItem>>;
} }
fn get_item(p: &tl::Parser, tag: &tl::HTMLTag) -> Option<String> {
Some(
tag.query_selector(p, ".dagsrett")?
.last()?
.get(p)?
.as_tag()?
.inner_text(p)
.to_string()
.decode_html_ents(),
)
}
fn get_item_additional(p: &tl::Parser, tag: &tl::HTMLTag) -> Option<String> {
Some(
tag.query_selector(p, ".dagsrettgarnityr")?
.last()?
.get(p)?
.as_tag()?
.inner_text(p)
.to_string()
.decode_html_ents(),
)
}
impl HTMLStringToVec for String {
fn html_string_to_vec(&self) -> anyhow::Result<Vec<MenuItem>> {
let dom = tl::parse(self, tl::ParserOptions::default())?;
let p = dom.parser();
let nodes: Vec<&tl::Node> = Vec::from_iter(
dom.query_selector(".dagsinfo")
.unwrap()
.filter_map(|x| x.get(p)),
);
let items = nodes
.into_iter()
.map(|i| {
let tag = i.as_tag().unwrap();
let title = get_item(p, tag);
let additional = get_item_additional(p, tag);
MenuItem {
title,
additional,
}
})
.collect::<Vec<MenuItem>>();
Ok(items)
}
}
#[derive(Debug, Serialize, Deserialize)]
struct MenuItem {
pub title: Option<String>,
pub additional: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
struct CachedData {
#[serde(with = "ts_seconds")]
pub timestamp: chrono::DateTime<Utc>,
pub items: Vec<MenuItem>,
pub soup_items: Vec<MenuItem>,
}