First working version
commit 8bc7556cf8
6 changed files with 246 additions and 0 deletions
.gitignore (vendored, new file, 2 additions)
@@ -0,0 +1,2 @@
/target
/feeds
Cargo.toml (new file, 23 additions)
@@ -0,0 +1,23 @@
[package]
name = "rssserver"
version = "0.1.0"
edition = "2024"

[profile.release]
strip = true
opt-level = "s"
lto = "on"
codegen-units = 1

[dependencies]
anyhow = { version = "1.0.98", default-features = false, features = ["std"] }
axum = { version = "0.8.4", features = ["http2"] }
chrono = "0.4.41"
env_logger = "0.11.8"
feed-rs = "2.3.1"
handlebars = "6.3.2"
log = "0.4.27"
reqwest = { version = "0.12.22", features = ["charset", "h2", "http2", "rustls-tls", "system-proxy"], default-features = false }
serde = { version = "1.0.219", features = ["derive"] }
tokio = { version = "1.46.0", features = ["full"] }
toml = "0.8.23"
assets/index.html (new file, 80 additions)
@@ -0,0 +1,80 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>MyReadingRSS</title>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 0;
            padding: 20px;
            background-color: #f4f4f4;
            color: #333;
        }
        h1 {
            text-align: center;
        }
        .tree {
            list-style-type: none;
            padding-left: 20px;
        }
        .folder {
            cursor: pointer;
            margin: 5px 0;
            font-weight: bold;
        }
        .folder.show::before {
            content: "▼";
        }
        .folder::before {
            content: "▶";
        }
        .feed {
            display: none; /* Hidden by default */
            margin: 5px 0 10px 20px;
            padding: 10px;
            background: white;
            border-radius: 5px;
            box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
        }
        .feed.show {
            display: block;
        }
        .feed-item {
            border-bottom: 1px solid #eaeaea;
            padding: 5px 0 0 10px;
        }
        .feed-item:last-child {
            border-bottom: none;
        }
        .feed-item h2 {
            margin: 0;
            font-size: 1.2em;
        }
        .feed-item a {
            text-decoration: none;
            color: #0073e6;
        }
        .feed-item a:hover {
            text-decoration: underline;
        }
    </style>
</head>
<body>
    <h1>RSS feeds I read</h1>
    <ul class="tree">
        {{{content}}}
    </ul>

    <script defer>
        // Delegate clicks: toggling a .folder heading shows or hides the .feed block that follows it.
        window.addEventListener('click', (e) => {
            if (!e.target.classList.contains("folder")) return true;
            e.target.classList.toggle('show');
            e.target.nextElementSibling.classList.toggle('show');
        });
    </script>
</body>
</html>
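A note on the {{{content}}} placeholder above: triple braces tell Handlebars to insert the value without HTML-escaping it, which is what lets the server drop pre-rendered feed markup straight into the list; a plain {{content}} would escape the tags and they would appear as literal text. A minimal standalone sketch of the difference (template names and data here are illustrative, not part of the commit):

use std::collections::BTreeMap;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut hb = handlebars::Handlebars::new();
    // Same idea as the "page" template: triple braces keep raw HTML intact.
    hb.register_template_string("raw", "<ul>{{{content}}}</ul>")?;
    hb.register_template_string("escaped", "<ul>{{content}}</ul>")?;

    let mut data = BTreeMap::new();
    data.insert("content", "<li>hello</li>".to_string());

    assert_eq!(hb.render("raw", &data)?, "<ul><li>hello</li></ul>");
    assert_eq!(hb.render("escaped", &data)?, "<ul>&lt;li&gt;hello&lt;/li&gt;</ul>");
    Ok(())
}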
assets/item.html (new file, 11 additions)
@@ -0,0 +1,11 @@
<div class="folder">{{title.content}}</div>
<div id="subfeed1" class="feed">
    {{#each entries}}
    <div class="feed-item">
        <h2><a href="{{this.id}}">{{this.title.content}}</a></h2>
        <p>By: {{this.authors.[0].name}}</p>
        <p>Published on: <time datetime="{{this.updated}}">{{this.updated}}</time></p>
        <p>{{this.summary.content}}</p>
    </div>
    {{/each}}
</div>
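The paths this template dereferences (title.content, entries, this.authors.[0].name, this.updated, this.summary.content) mirror the shape of the serialized feed_rs::model::Feed that src/main.rs later hands to the renderer. One way to try the template in isolation is to feed it a JSON value of the same shape; in the sketch below the serde_json dependency, the include_str! path, and all sample values are assumptions for illustration only:

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut hb = handlebars::Handlebars::new();
    hb.register_template_string("item", include_str!("../assets/item.html"))?;

    // Stand-in for a parsed feed; only the fields the template reads are filled in.
    let feed = serde_json::json!({
        "title": { "content": "Example feed" },
        "entries": [{
            "id": "https://example.com/first-post",
            "title": { "content": "First post" },
            "authors": [{ "name": "Example Author" }],
            "updated": "2025-01-01T00:00:00Z",
            "summary": { "content": "A short summary." }
        }]
    });

    println!("{}", hb.render("item", &feed)?);
    Ok(())
}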
config.toml (new file, 7 additions)
@@ -0,0 +1,7 @@
bind = "127.0.0.1:8085"
item_tpl = "./assets/item.html"
page_tpl = "./assets/index.html"

[feeds.tc]
url = "https://www.youtube.com/feeds/videos.xml?channel_id=UCy0tKL1T7wFoYcxCe0xjN6Q"
interval_secs = 43200
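Every [feeds.<name>] table in this file becomes one entry in the feeds map of the AppConfig struct defined in src/main.rs below, so adding a feed means adding another table; interval_secs is optional, and when it is omitted the server re-fetches that feed on every request. A small sketch of the mapping, using trimmed-down copies of the structs and a made-up second feed (the [feeds.blog] table and its URL are purely illustrative):

use std::collections::HashMap;

#[derive(Debug, serde::Deserialize)]
struct FeedConfig {
    interval_secs: Option<u64>,
    url: String,
}

#[derive(Debug, serde::Deserialize)]
struct AppConfig {
    bind: String,
    item_tpl: String,
    page_tpl: String,
    feeds: HashMap<String, FeedConfig>,
}

fn main() -> anyhow::Result<()> {
    let cfg: AppConfig = toml::from_str(r#"
        bind = "127.0.0.1:8085"
        item_tpl = "./assets/item.html"
        page_tpl = "./assets/index.html"

        [feeds.tc]
        url = "https://www.youtube.com/feeds/videos.xml?channel_id=UCy0tKL1T7wFoYcxCe0xjN6Q"
        interval_secs = 43200

        [feeds.blog]
        url = "https://example.com/atom.xml"
    "#)?;

    // Two tables, two entries; the second one falls back to "always re-fetch".
    assert_eq!(cfg.feeds.len(), 2);
    assert_eq!(cfg.feeds["blog"].interval_secs, None);
    println!("{:#?}", cfg);
    Ok(())
}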
src/main.rs (new file, 123 additions)
@@ -0,0 +1,123 @@
#[derive(Debug, Clone, serde::Deserialize)]
struct FeedConfig {
    /// How long a cached copy of the feed stays fresh; when omitted,
    /// the feed is re-fetched on every request.
    interval_secs: Option<u64>,
    url: String,
}

#[derive(Debug, Clone, serde::Deserialize)]
struct AppConfig {
    bind: String,
    item_tpl: String,
    page_tpl: String,
    feeds: std::collections::HashMap<String, FeedConfig>,
}

#[derive(Debug, Clone)]
struct AppState<'a> {
    config: AppConfig,
    templater: handlebars::Handlebars<'a>,
}

fn read_config() -> anyhow::Result<AppConfig> {
    // The config path can be overridden via the RSS_CONFIG environment variable.
    let config_path = std::env::var("RSS_CONFIG").unwrap_or("./config.toml".into());
    let cfg = std::fs::read_to_string(&config_path)?;
    let cfg: AppConfig = toml::from_str(&cfg)?;
    Ok(cfg)
}

fn init_template_engine(cfg: AppConfig) -> anyhow::Result<AppState<'static>> {
    let mut hb = handlebars::Handlebars::new();

    let page_tpl = std::fs::read_to_string(&cfg.page_tpl)?;
    hb.register_template_string("page", page_tpl)?;

    let item_tpl = std::fs::read_to_string(&cfg.item_tpl)?;
    hb.register_template_string("item", item_tpl)?;

    Ok(AppState {
        config: cfg,
        templater: hb,
    })
}

/// Returns true when the cached file is older than `expiration_timeout`.
async fn file_needs_update(
    path: &std::path::Path,
    expiration_timeout: std::time::Duration,
) -> anyhow::Result<bool> {
    let meta = tokio::fs::metadata(path).await?;
    let time = meta.modified()?;
    Ok(time.elapsed()?.as_secs() >= expiration_timeout.as_secs())
}

async fn get_rss(name: &str, cfg: &FeedConfig) -> anyhow::Result<feed_rs::model::Feed> {
    // Cached copies live under ./feeds/, which must already exist:
    // tokio::fs::write does not create parent directories.
    let filename = std::path::PathBuf::from(format!("./feeds/{}.last_feed", name));
    let data;
    // A missing cache file makes file_needs_update fail, so unwrap_or(true) forces a fetch.
    if file_needs_update(&filename, std::time::Duration::from_secs(cfg.interval_secs.unwrap_or(0))).await.unwrap_or(true) {
        data = reqwest::get(cfg.url.clone()).await?.text().await?;
        tokio::fs::write(&filename, &data).await?;
    } else {
        data = tokio::fs::read_to_string(&filename).await?;
    }

    let feed = feed_rs::parser::parse(data.as_bytes())?;
    Ok(feed)
}

async fn render_rss_feeds(axum::extract::State(state): axum::extract::State<std::sync::Arc<AppState<'_>>>) -> impl axum::response::IntoResponse {
    let mut res = Vec::new();
    for (name, cfg) in state.config.feeds.iter() {
        let feed = match get_rss(name, cfg).await {
            Err(e) => {
                res.push(format!("Error fetching feed: {}", &name));
                log::error!("Error fetching feed: {}", e);
                continue;
            },
            Ok(feed) => feed,
        };

        let html = match render_rss_feed(feed, state.clone()).await {
            Err(e) => {
                res.push(format!("Error rendering feed: {}", &name));
                log::error!("Error rendering feed: {}", e);
                continue;
            },
            Ok(html) => html,
        };

        res.push(html);
    }

    // Join the per-feed fragments and drop them into the page template's {{{content}}} slot.
    let res = res.join("");
    let mut hm = std::collections::BTreeMap::new();
    hm.insert("content", res);
    let html = state.templater.render("page", &hm).unwrap();

    axum::response::Response::builder()
        .status(axum::http::StatusCode::OK)
        .header("Content-Type", "text/html")
        .body(axum::body::Body::from(html))
        .unwrap()
}

async fn render_rss_feed(feed: feed_rs::model::Feed, state: std::sync::Arc<AppState<'_>>) -> anyhow::Result<String> {
    Ok(state.templater.render("item", &feed)?)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    env_logger::init();
    let conf = read_config()?;
    let state = init_template_engine(conf)?;
    let bind = &state.config.bind;
    let list = tokio::net::TcpListener::bind(bind).await?;
    let serv = axum::Router::new()
        .route("/", axum::routing::get(render_rss_feeds))
        .with_state(std::sync::Arc::new(state));

    axum::serve(list, serv).await?;

    Ok(())
}
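One operational detail implied by get_rss: cached feeds are written to ./feeds/<name>.last_feed, and .gitignore excludes /feeds, so a fresh checkout has no such directory and the first tokio::fs::write fails, surfacing as an "Error fetching feed" entry on the page. A tiny startup guard along these lines would avoid that (the helper below is a suggestion, not part of the commit):

/// Sketch: make sure the feed cache directory exists before handling requests.
/// create_dir_all is a no-op if ./feeds is already there.
fn ensure_cache_dir() -> std::io::Result<()> {
    std::fs::create_dir_all("./feeds")
}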