From 6848cdd062391fd05e492b15b14c500b979eacf0 Mon Sep 17 00:00:00 2001 From: F4 Date: Fri, 20 Sep 2024 19:22:01 +0100 Subject: [PATCH] Initial commit --- .env | 4 + .gitignore | 5 + Cargo.toml | 25 ++++ README.md | 49 ++++++++ assets/templates/bulk_creation_form.html | 33 +++++ assets/templates/linkslist.html | 46 +++++++ assets/templates/main.html | 8 ++ migrations/001_create_links_table.sql | 9 ++ src/main.rs | 69 ++++++++++ src/models/error.rs | 13 ++ src/models/link.rs | 144 +++++++++++++++++++++ src/models/mod.rs | 4 + src/models/provider.rs | 32 +++++ src/models/url.rs | 21 ++++ src/routes/index.rs | 28 +++++ src/routes/links.rs | 153 +++++++++++++++++++++++ src/routes/mod.rs | 3 + 17 files changed, 646 insertions(+) create mode 100644 .env create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 README.md create mode 100644 assets/templates/bulk_creation_form.html create mode 100644 assets/templates/linkslist.html create mode 100644 assets/templates/main.html create mode 100644 migrations/001_create_links_table.sql create mode 100644 src/main.rs create mode 100644 src/models/error.rs create mode 100644 src/models/link.rs create mode 100644 src/models/mod.rs create mode 100644 src/models/provider.rs create mode 100644 src/models/url.rs create mode 100644 src/routes/index.rs create mode 100644 src/routes/links.rs create mode 100644 src/routes/mod.rs diff --git a/.env b/.env new file mode 100644 index 0000000..303a330 --- /dev/null +++ b/.env @@ -0,0 +1,4 @@ +LOGLEVEL="info" +DATABASE="sqlite://data/lib.sqlite" +LISTEN="0.0.0.0:3000" +ASSETS_PATH="./assets" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a66336a --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +/.sqlx +/.env +/Cargo.lock +/target +/data diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..deecebe --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "manager" +version = "0.1.0" +edition = "2021" + +[dependencies] +actix = "0.13.5" +actix-web = { version = "4.9.0", features = ["rustls"] } +#anyhow = "1.0.89" +#axum = { version = "0.7.5", features = ["form", "http2", "multipart"] } +#axum-login = "0.16.0" +chrono = { version = "0.4.38", features = ["serde"] } +#const_format = "0.2.33" +dotenvy = "0.15.7" +env_logger = "0.11.5" +handlebars = "6.0.0" +log = "0.4.22" +serde = { version = "1.0.209", features = ["derive"] } +serde_json = "1.0.127" +sqlx = { version = "0.8.2", features = ["chrono", "derive", "json", "macros", "runtime-tokio-rustls", "sqlite", "uuid"] } +thiserror = "1.0.63" +tokio = { version = "1.39.3", features = ["full"] } +url = { version = "2.5.2", features = ["serde"] } +#url_serde = "0.2.0" +uuid = { version = "1.10.0", features = ["serde", "v4"] } diff --git a/README.md b/README.md new file mode 100644 index 0000000..6d40809 --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ +# appname here +================ + +Yet Another Not-Invented-Here-Proof Productivity Tool + +## Table of Contents +----------------- + +* [Getting Started](#getting-started) +* [Features](#features) +* [Configuration](#configuration) +* [Usage](#usage) +* [Caveats/Todos](#caveats) + +## Getting Started +--------------- + +To get started with this project, you will need to have Rust installed on your system. You can download the latest version of Rust from the official [Rust installation page](https://www.rust-lang.org/tools/install) or use an automatic rust toolchain installer [rustup](https://rustup.rs). 
+
+Once you have Rust installed, you can clone this repository and build the project using the following commands:
+
+```bash
+git clone https://github.com/semubico/app.git
+cd app
+cargo build
+```
+
+## Configuration
+-----
+
+A limited set of configuration parameters can be set via environment variables.
+A convenience .env file containing all of them (LOGLEVEL, DATABASE, LISTEN, ASSETS_PATH) is provided in the root of the project.
+
+
+## Usage
+
+```bash
+cd app
+cargo run
+```
+
+## Caveats/Todos
+-----
+
+* The /links endpoint accepts several GET parameters that do not yet have corresponding form elements: offset, count, and query (see the example after this list).
+* The log level can be raised to DEBUG or TRACE for better debugging, which in turn causes a significant slowdown in all database-related activity, most notably on the /links endpoint. Consider lowering the number of records per page with "?count=".
+* Even though .env uses the 'sqlite://' scheme, that does not (yet) mean it is interchangeable with MySQL or Postgres. The AsyncProviders need to be rewritten over a generic sqlx connection pool to support that; SQLite is the only supported backend for now. Alternative backends may be provided by implementing the AsyncProviders for them with a different parameter. (So yes, plain-file storage can also be supported this way.)
+* Add a Handlebars helper to format datetimes as something readable.
+* There is a myriad of other TODOs in the source files (you can safely ignore those).
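For reference, the pagination parameters from the first caveat can be exercised directly over HTTP. The sketch below is illustrative only: it assumes a locally running instance on the default LISTEN port from the provided .env (0.0.0.0:3000), and the query values are made up.

```bash
# Illustrative only: assumes the server is running on the default port from .env.
# offset, count and query are the GET parameters described in the caveat above.
curl "http://localhost:3000/links?query=rust&offset=0&count=20"
```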
diff --git a/assets/templates/bulk_creation_form.html b/assets/templates/bulk_creation_form.html
new file mode 100644
index 0000000..1e09118
--- /dev/null
+++ b/assets/templates/bulk_creation_form.html
@@ -0,0 +1,33 @@
+
+
+
+ Add links from onetab
+
+
+
+
+
+
+
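The form above is served at GET /links/bulk. Judging from `do_bulk_insert` in src/routes/links.rs further down in this patch, it submits two form fields: `created_at`, parsed with the `%Y-%m-%dT%H:%M` format (falling back to the current time when parsing fails), and `hrefs`, where each line is split on `|` into href, title and description. A hedged sketch of an equivalent request, assuming the default listen address from .env:

```bash
# Hypothetical request mirroring what the bulk-creation form submits; values are made up.
curl -X POST "http://localhost:3000/links/bulk" \
  --data-urlencode "created_at=2024-09-20T19:00" \
  --data-urlencode $'hrefs=https://example.com|Example site|just a sample\nhttps://www.rust-lang.org|Rust'
```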
diff --git a/assets/templates/linkslist.html b/assets/templates/linkslist.html
new file mode 100644
index 0000000..419da36
--- /dev/null
+++ b/assets/templates/linkslist.html
@@ -0,0 +1,46 @@
+
+
+
+
+ Selection stats
+
+ + + + + + + + +{{#each links}} + + + + + +{{/each}} diff --git a/assets/templates/main.html b/assets/templates/main.html new file mode 100644 index 0000000..2e51fab --- /dev/null +++ b/assets/templates/main.html @@ -0,0 +1,8 @@ + + + {{title}} + + + SUM text + + diff --git a/migrations/001_create_links_table.sql b/migrations/001_create_links_table.sql new file mode 100644 index 0000000..ddd250a --- /dev/null +++ b/migrations/001_create_links_table.sql @@ -0,0 +1,9 @@ + +create table if not exists Links ( + uuid text not null, + href text not null, + created_at datetime not null, + updated_at datetime not null, + title text, + description text +); diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..3ad5046 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,69 @@ +use tokio::net::TcpListener; +use sqlx::migrate::MigrateDatabase; +use std::sync::Arc; +use actix_web::{get, App, HttpRequest, HttpServer, HttpResponse, Responder, web}; +use actix_web::{middleware::{Logger, NormalizePath}}; +use handlebars::Handlebars; +use std::path::PathBuf; + +mod models; +mod routes; + +pub struct GlobalData { + db: sqlx::Pool, + assets_path: std::path::PathBuf, + tpl: Handlebars<'static> +} + +async fn init_db(path: &str) -> Result { + + if !sqlx::Sqlite::database_exists(&path).await.unwrap_or(false) { + sqlx::Sqlite::create_database(&path).await?; + let pool = sqlx::sqlite::SqlitePool::connect(&path).await?; + sqlx::migrate!("./migrations").run(&pool).await?; + Ok(pool) + } + else { + sqlx::sqlite::SqlitePool::connect(&path).await + } +} + + +#[tokio::main] +async fn main() -> std::io::Result<()> { + + let loglevel = dotenvy::var("LOGLEVEL").unwrap_or("info".to_string()); + let db = dotenvy::var("DATABASE").expect("No DATABASE configured in env, aborting"); + let bind = dotenvy::var("LISTEN").expect("No address:port to bind provided, aborting"); + let ap = dotenvy::var("ASSETS_PATH").expect("No path to assets provided, aborting"); + + env_logger::init_from_env(env_logger::Env::new().default_filter_or(loglevel)); + + let db = init_db(&db).await.expect("Failed to open database"); + let hb = Handlebars::<'static>::new(); + + let data = GlobalData { + db: db, + tpl: hb, + assets_path: ap.into() + }; + + let data = Arc::new(data); // TODO: should suffice for now, as there are no mutable state here + + + HttpServer::new(move || { + + let appdata = web::Data::new(data.clone()); + + App::new() + .wrap(NormalizePath::trim()) + .wrap(Logger::default()) + .configure(crate::routes::links::build_router) + .configure(crate::routes::index::build_router) + .app_data(appdata) + }) + .bind(bind)? 
+ .run() + .await +} + diff --git a/src/models/error.rs b/src/models/error.rs new file mode 100644 index 0000000..a0575a0 --- /dev/null +++ b/src/models/error.rs @@ -0,0 +1,13 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub(crate) enum Error { + #[error("This is a dummy error, so that the compiler would shut up about unused types")] + Todo, + + #[error("Failed to query the backend: {0}")] + DatabaseError(#[from] sqlx::Error) +} + +// TODO: Implement Http error response for actix for this struct +// NOTE: though, the impl should prob go under routes/ as it is webframework-specific diff --git a/src/models/link.rs b/src/models/link.rs new file mode 100644 index 0000000..ecba2d5 --- /dev/null +++ b/src/models/link.rs @@ -0,0 +1,144 @@ +use thiserror::Error; +use serde::{Serialize, Deserialize}; +use sqlx::{SqlitePool, FromRow, types::{Uuid, uuid::fmt::Hyphenated, chrono::{DateTime, Utc, FixedOffset}}}; + +use crate::models::error::Error; +use crate::models::url::Url; +use crate::models::provider::{AsyncCrudProvider, AsyncListFetchProvider}; + +#[derive(Debug, Clone, FromRow, Deserialize, Serialize)] +pub(crate) struct Link { + #[serde(default)] + #[sqlx(try_from = "Hyphenated")] + uuid: Uuid, + + #[sqlx(try_from = "String")] + href: Url, + + title: Option, + description: Option, + created_at: Option>, + updated_at: Option> +} + +impl Link { + pub fn new( + href: &str, + title: Option<&str>, + description: Option<&str>, + created_at: Option<&DateTime> + ) -> Result { + + let time_now = sqlx::types::chrono::Utc::now().into(); + + Ok(Self { + uuid: Uuid::new_v4(), + href: href.to_string().try_into()?, + title: title.and_then(|s| Some(s.trim().to_string())), + description: description.and_then(|s| Some(s.trim().to_string())), + created_at: created_at.or_else(|| Some(&time_now) ).copied(), + updated_at: Some(time_now) + }) + } +} + + + + + +// TODO: (prob never) make sql-backend agnostic +pub(crate) struct LinksProvider { pool: T } + +impl LinksProvider { + pub fn with_db(pool: T) -> Self { + Self { pool } + } +} + + +impl AsyncCrudProvider for LinksProvider { + + type Item = Link; + type Uid = uuid::Uuid; + type Error = Error; + + async fn create(&self, item: &Self::Item) -> Result { + let r = sqlx::query("insert into Links (uuid, href, title, description, created_at, updated_at) values (?, ?, ?, ?, ?, ?)") + .bind(format!("{}", item.uuid)) + .bind(item.href.as_str()) + .bind(&item.title) + .bind(&item.description) + .bind(&item.created_at) + .bind(&item.updated_at) + .execute(&self.pool) + .await + .map_err(|e| Error::DatabaseError(e)) + ?; + + Ok(item.uuid.clone()) + } + + async fn read(&self, uid: &Self::Uid) -> Result { + sqlx::query_as::<_, Link>("select * from Links where uuid like ? limit 0, 1") + .bind(&format!("{}", uid)) + .fetch_one(&self.pool) + .await + .map_err(|e| Error::DatabaseError(e)) + } + + async fn update(&self, uid: &Self::Uid, new: &Self::Item) -> Result { + let res = sqlx::query("update Links set (href, title, description, created_at, updated_at) values (?, ?, ?, ?, ?) where uuid = ? 
limit 1") + .bind(new.href.as_str()) + .bind(&new.title) + .bind(&new.description) + .bind(&new.created_at) + .bind(&new.updated_at.unwrap_or_else(|| sqlx::types::chrono::Utc::now().into())) + .bind(format!("{}", uid)) + .execute(&self.pool) + .await + .map_err(|e| Error::DatabaseError(e)) + ?; + + Ok(new.clone()) // TODO: In theory it should return previous value that was just updated, but that + // requires another select and the transaction so No + } + + async fn delete(&self, uid: &Self::Uid) -> Result<(), Self::Error> { + sqlx::query("delete from Links where uuid like ?") + .bind(&format!("{}", uid)) + .execute(&self.pool) + .await + .and_then(|_| Ok(())) + .map_err(|e| Error::DatabaseError(e)) + } +} + + +impl AsyncListFetchProvider for LinksProvider { + + type Item = Link; + type Error = Error; + + async fn fetch(&self, filters: &str, offset: Option, count: Option) -> Result, Self::Error> { + sqlx::query_as::<_, Link>("select * from Links where href like ? or title like ? or description like ? limit ?, ?") + .bind(&format!("{}", filters)) + .bind(&format!("{}", filters)) + .bind(&format!("{}", filters)) + .bind(&format!("{}", offset.unwrap_or(0))) + .bind(&format!("{}", count.unwrap_or(0))) + .fetch_all(&self.pool) + .await + .map_err(|e| Error::DatabaseError(e)) + } + + async fn count(&self, filters: &str) -> Result { + sqlx::query_scalar("select count(*) from Links where href like ? or title like ? or description like ? limit 0, 1") + .bind(&format!("{}", filters)) + .bind(&format!("{}", filters)) + .bind(&format!("{}", filters)) + .fetch_one(&self.pool) + .await + .map_err(|e| Error::DatabaseError(e)) + } +} + diff --git a/src/models/mod.rs b/src/models/mod.rs new file mode 100644 index 0000000..fba1130 --- /dev/null +++ b/src/models/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod error; +pub(crate) mod url; +pub(crate) mod provider; +pub(crate) mod link; diff --git a/src/models/provider.rs b/src/models/provider.rs new file mode 100644 index 0000000..0853bea --- /dev/null +++ b/src/models/provider.rs @@ -0,0 +1,32 @@ + +pub(crate) trait AsyncCrudProvider { + + type Uid; + type Item; + type Error; + + async fn create(&self, new: &Self::Item) -> Result; + + async fn read(&self, uid: &Self::Uid) -> Result; + + async fn update(&self, uid: &Self::Uid, new: &Self::Item) -> Result; + + async fn delete(&self, uid: &Self::Uid) -> Result<(), Self::Error>; +} + +pub(crate) trait AsyncListFetchProvider { + + type Item; + type Error; + + // TODO: replace sql: &str with an &[SearchFilter] , and add SearchFilter and + // SearchFilterBuilder + + async fn fetch(&self, sql: &str, offset: Option, count: Option) -> Result, Self::Error>; + + async fn count(&self, sql: &str) -> Result; + // TODO: maybe move Vec<> to type Container + // and have it be parametrised w/ +} + + diff --git a/src/models/url.rs b/src/models/url.rs new file mode 100644 index 0000000..dd37967 --- /dev/null +++ b/src/models/url.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; + +use crate::models::error::Error; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub(crate) struct Url(url::Url); + +impl TryFrom for Url { + type Error = Error; + + fn try_from(val: String) -> Result { + let d = url::Url::parse(&val); + Ok(Self { 0: d.map_err(|_| Self::Error::Todo )? 
} ) + } +} + +impl Url { + pub(crate) fn as_str(&self) -> &str { + self.0.as_str() + } +} diff --git a/src/routes/index.rs b/src/routes/index.rs new file mode 100644 index 0000000..842e4ad --- /dev/null +++ b/src/routes/index.rs @@ -0,0 +1,28 @@ +use actix_web::{web, HttpResponse}; +use std::sync::Arc; + +use crate::GlobalData; + +pub(crate) fn build_router(cfg: &mut web::ServiceConfig) { + cfg + .service( + web::resource("/") + .route(web::get().to(get_main_page)) + ); +} + + + +pub async fn get_main_page(app: web::Data>) -> HttpResponse { + + let path = app.assets_path.join("templates/main.html"); + let file = std::fs::read_to_string(path).unwrap(); // TODO: Error handling + let vars = serde_json::json!({"title": "Main page"}); // TODO + let html = app.tpl.render_template(&file, &vars).unwrap(); // TODO: Error handling + + HttpResponse::Ok() + .content_type("text/html; charset=utf8") + .body(html) +} + + diff --git a/src/routes/links.rs b/src/routes/links.rs new file mode 100644 index 0000000..8b2b6e9 --- /dev/null +++ b/src/routes/links.rs @@ -0,0 +1,153 @@ +use serde::{Deserialize}; +use crate::App; +use std::sync::Arc; +use actix_web::{ Scope, Result, web, Responder, Resource, HttpResponse, HttpResponseBuilder }; +use uuid::Uuid; +use chrono::{DateTime, Utc}; + +use crate::GlobalData; +use crate::models::provider::{AsyncCrudProvider, AsyncListFetchProvider}; +use crate::models::link::{Link, LinksProvider}; + +pub(crate) fn build_router(cfg: &mut web::ServiceConfig) { + cfg + .service( + web::resource("/links") + .route(web::get().to(get_links_list)) + ) + .service( + web::resource("/links/bulk") + .route(web::get().to(get_bulk_insert_form)) + .route(web::post().to(do_bulk_insert)) + ) + .service( + web::resource("/link/{uuid}") + .route(web::get().to(read_link)) + .route(web::post().to(upsert_link)) + .route(web::delete().to(remove_link)) + ); +} + + +// --- crud --- + +pub async fn read_link(uuid: web::Path, app: web::Data>) -> String { + LinksProvider::with_db(app.db.clone()) + .read(&uuid) + .await + .and_then(|r| Ok(format!("{:?}", r))) + .unwrap_or_default() +} + +pub async fn upsert_link(uuid: web::Path) -> Result { + todo!() +} + +pub async fn remove_link( + uuid: web::Path, + app: web::Data> +) -> HttpResponse { + let r = LinksProvider::with_db(app.db.clone()) + .delete(&uuid) + .await; + + match r { + Ok(_) => HttpResponse::Ok().into(), + Err(_) => HttpResponse::InternalServerError().into() + } +} + +pub async fn get_bulk_insert_form( + app: web::Data> +) -> impl Responder { + + let path = app.assets_path.join("templates/bulk_creation_form.html"); + let file = std::fs::read_to_string(path); + + HttpResponse::Ok() + .content_type("text/html") + .body(file.unwrap_or_default()) +} + +#[derive(Debug, Deserialize)] +struct BulkInsertData { + created_at: String, + hrefs : String +} + +pub async fn do_bulk_insert( + app: web::Data>, + formdata: web::Form +) -> HttpResponse { // TODO: error handling // FIXME: REFACTOR THE F OUT OF IT + + let datetime = DateTime::parse_from_str(&formdata.created_at, "%Y-%m-%dT%H:%M").ok(); + + let lp = LinksProvider::with_db(app.db.clone()); + let mut result = Vec::new(); + + for line in formdata.hrefs.lines() { + + let mut it = line.split("|"); + let Some(href) = it.next() else { continue }; + let title = it.next(); + let desc = it.next(); + + // TODO: error-handling + + if let Some(link) = Link::new(href, title, desc, datetime.as_ref()).ok() { + if let Ok(uuid) = lp.create(&link).await { + result.push(uuid); + } + } + } + + // TODO; + 
HttpResponse::Created() + .content_type("text/html") + .body(format!("{:?}", result)) +} + + + + +// --- List --- + +#[derive(Debug, Deserialize)] +struct Pagination { + offset: Option, + count: Option, + query: Option +} + +pub async fn get_links_list( + query: web::Query, + app: web::Data> +) -> HttpResponse { + + let filters = match query { + web::Query(Pagination { query: Some(ref q), .. }) => format!("%{}%", &q), + _ => "%".to_string() + }; + + // NOTE: since we don't utilise transactions here, count and data.len() may end up different + // for the same query + // (given the same offset, and count) + + let lp = LinksProvider::with_db(app.db.clone()); + + let count = lp.count(&filters).await.unwrap_or(0); + let data = if count != 0 { lp.fetch(&filters, query.offset, query.count).await.unwrap() } else { Vec::default() }; + + let vars = serde_json::json!({"links": data, "total": count}); // TODO: refactor + + let path = app.assets_path.join("templates/linkslist.html"); + println!("{}", path.display()); + let template = std::fs::read_to_string(path).unwrap(); + + let html = app.tpl.render_template(&template, &vars).unwrap(); // TODO: Error handling + + HttpResponse::Ok() + .content_type("text/html; charset=utf8") + .body(html) +} + diff --git a/src/routes/mod.rs b/src/routes/mod.rs new file mode 100644 index 0000000..0810f10 --- /dev/null +++ b/src/routes/mod.rs @@ -0,0 +1,3 @@ +pub(crate) mod links; +pub(crate) mod index; +
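The per-link routes registered in `build_router` above (/link/{uuid} with GET, POST and DELETE) can be exercised the same way. A sketch under the same assumptions; the UUID is a placeholder, and note that the POST handler (`upsert_link`) is still `todo!()`:

```bash
# Hypothetical session; the UUID below is a placeholder returned by a previous insert.
curl "http://localhost:3000/link/0b9f2a6e-3c7d-4d1e-9a2b-1234567890ab"            # read: Debug-formatted link, or empty body on error
curl -X DELETE "http://localhost:3000/link/0b9f2a6e-3c7d-4d1e-9a2b-1234567890ab"  # delete: 200 on success, 500 on failure
```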
[Table cells displaced here from linkslist.html above: header "#", "Title", "Created at"; row template "{{#if title}}{{title}}{{else}}{{href}}{{/if}}", "{{created_at}}"]