Initial commit
commit 6848cdd062
17 changed files with 646 additions and 0 deletions
4 .env Normal file
@@ -0,0 +1,4 @@
LOGLEVEL="info"
DATABASE="sqlite://data/lib.sqlite"
LISTEN="0.0.0.0:3000"
ASSETS_PATH="./assets"
5 .gitignore vendored Normal file
@@ -0,0 +1,5 @@
/.sqlx
/.env
/Cargo.lock
/target
/data
25 Cargo.toml Normal file
@@ -0,0 +1,25 @@
[package]
name = "manager"
version = "0.1.0"
edition = "2021"

[dependencies]
actix = "0.13.5"
actix-web = { version = "4.9.0", features = ["rustls"] }
#anyhow = "1.0.89"
#axum = { version = "0.7.5", features = ["form", "http2", "multipart"] }
#axum-login = "0.16.0"
chrono = { version = "0.4.38", features = ["serde"] }
#const_format = "0.2.33"
dotenvy = "0.15.7"
env_logger = "0.11.5"
handlebars = "6.0.0"
log = "0.4.22"
serde = { version = "1.0.209", features = ["derive"] }
serde_json = "1.0.127"
sqlx = { version = "0.8.2", features = ["chrono", "derive", "json", "macros", "runtime-tokio-rustls", "sqlite", "uuid"] }
thiserror = "1.0.63"
tokio = { version = "1.39.3", features = ["full"] }
url = { version = "2.5.2", features = ["serde"] }
#url_serde = "0.2.0"
uuid = { version = "1.10.0", features = ["serde", "v4"] }
49 README.md Normal file
@@ -0,0 +1,49 @@
# appname here
================

Yet Another Not-Invented-Here-Proof Productivity Tool

## Table of Contents
-----------------

* [Getting Started](#getting-started)
* [Features](#features)
* [Configuration](#configuration)
* [Usage](#usage)
* [Caveats/Todos](#caveats)

## Getting Started
---------------

To get started with this project, you will need Rust installed on your system. You can download the latest version of Rust from the official [Rust installation page](https://www.rust-lang.org/tools/install) or use the automatic Rust toolchain installer [rustup](https://rustup.rs).

Once you have Rust installed, clone this repository and build the project with the following commands:

```bash
git clone https://github.com/semubico/app.git
cd app
cargo build
```

## Configuration
-----

A limited set of configuration parameters is configurable via environment variables.
A convenience .env file containing all of them is provided in the root of the project.


## Usage

```bash
cd app
cargo run
```

## Caveats/Todos
-----

* The /links endpoint accepts several GET parameters that do not yet have corresponding form elements: offset, count, and query.
* The log level can be raised to DEBUG or TRACE for better debugging, which in turn causes a significant slowdown in all database-related activity, most notably on the /links endpoint. Consider lowering the number of records per page with "?count=<amount>".
* Even though the .env uses the 'sqlite://' scheme, that does not (yet) mean it is interchangeable with MySQL or Postgres. The AsyncProviders need to be rewritten over a generic sqlx connection pool to support that; SQLite is the only supported backend for now. Alternative backends may be provided by implementing the AsyncProviders for them with a different <T> parameter. (So yes, a plain-file storage backend is also possible.)
* Add a Handlebars helper to format datetimes as something readable.
* A myriad of other TODOs live in the source files (you can safely ignore those).
33 assets/templates/bulk_creation_form.html Normal file
@@ -0,0 +1,33 @@
<style>
form > * {
    width: 100%;
}
form > textarea {
    min-height: 20em;
}
fieldset {
    width: 50%;
    margin: 5em auto;
    font-family: sans-serif;
}
fieldset > form > div.flex,
fieldset > form > div.flex > * {
    width: 100%;
}
.flex {
    display: flex;
}
</style>

<fieldset>
    <legend>Add links from onetab</legend>
    <form method="POST" action="/links/bulk">
        <input type="datetime-local" name="created_at" value="" />
        <br>
        <textarea maxlength="1000000" name="hrefs" placeholder="<href> | <title>"></textarea>
        <div class="flex">
            <input type="reset" value="Reset" />
            <input type="submit" value="Send" />
        </div>
    </form>
</fieldset>
46 assets/templates/linkslist.html Normal file
@@ -0,0 +1,46 @@
<script src="https://unpkg.com/htmx.org@2.0.2"></script>
<style>
fieldset {
    width: 68%;
    margin: 0 auto;
    border-radius: 1em;
}
table {
    font-family: sans-serif;
    border: 1px solid darkblue;
    border-radius: 1em;
    padding: 0 .5em;
    width: 70%;
    margin: 0 auto;
}
table tr td:nth-child(1) {
    width: 2em;
}
table tr td:nth-child(3) {
    text-align: right;
    width: 10em;
}
table tr:nth-child(even) {
    background: rgba(0, 0, 0, .1);
}
</style>

<fieldset>
    <legend>Selection stats</legend>
    <label>Found: {{total}}</label>
</fieldset>

<table id="linkies">
    <tr>
        <th>#</th>
        <th>Title</th>
        <th>Created at</th>
    </tr>

    {{#each links}}
    <tr id="link{{uuid}}">
        <td><button hx-confirm="Are you sure?" hx-trigger="click" hx-target="#link{{uuid}}" hx-delete="/link/{{uuid}}">&times;</button></td>
        <td><a rel="nofollow noindex" href="{{href}}">{{#if title}}{{title}}{{else}}{{href}}{{/if}}</a></td>
        <td>{{created_at}}</td>
    </tr>
    {{/each}}
</table>
8 assets/templates/main.html Normal file
@@ -0,0 +1,8 @@
<html>
<head>
<title>{{title}}</title>
</head>
<body>
SUM text
</body>
</html>
9 migrations/001_create_links_table.sql Normal file
@@ -0,0 +1,9 @@

create table if not exists Links (
    uuid text not null,
    href text not null,
    created_at datetime not null,
    updated_at datetime not null,
    title text,
    description text
);
69 src/main.rs Normal file
@@ -0,0 +1,69 @@
use tokio::net::TcpListener;
use sqlx::migrate::MigrateDatabase;
use std::sync::Arc;
use actix_web::{get, App, HttpRequest, HttpServer, HttpResponse, Responder, web};
use actix_web::{middleware::{Logger, NormalizePath}};
use handlebars::Handlebars;
use std::path::PathBuf;

mod models;
mod routes;

pub struct GlobalData {
    db: sqlx::Pool<sqlx::sqlite::Sqlite>,
    assets_path: std::path::PathBuf,
    tpl: Handlebars<'static>
}

async fn init_db(path: &str) -> Result<sqlx::sqlite::SqlitePool, sqlx::Error> {

    if !sqlx::Sqlite::database_exists(&path).await.unwrap_or(false) {
        sqlx::Sqlite::create_database(&path).await?;
        let pool = sqlx::sqlite::SqlitePool::connect(&path).await?;
        sqlx::migrate!("./migrations").run(&pool).await?;
        Ok(pool)
    }
    else {
        sqlx::sqlite::SqlitePool::connect(&path).await
    }
}


#[tokio::main]
async fn main() -> std::io::Result<()> {

    let loglevel = dotenvy::var("LOGLEVEL").unwrap_or("info".to_string());
    let db = dotenvy::var("DATABASE").expect("No DATABASE configured in env, aborting");
    let bind = dotenvy::var("LISTEN").expect("No address:port to bind provided, aborting");
    let ap = dotenvy::var("ASSETS_PATH").expect("No path to assets provided, aborting");

    env_logger::init_from_env(env_logger::Env::new().default_filter_or(loglevel));

    let db = init_db(&db).await.expect("Failed to open database");
    let hb = Handlebars::<'static>::new();

    let data = GlobalData {
        db: db,
        tpl: hb,
        assets_path: ap.into()
    };

    let data = Arc::new(data); // TODO: should suffice for now, as there is no mutable state here


    HttpServer::new(move || {

        let appdata = web::Data::new(data.clone());

        App::new()
            .wrap(NormalizePath::trim())
            .wrap(Logger::default())
            .configure(crate::routes::links::build_router)
            .configure(crate::routes::index::build_router)
            .app_data(appdata)
    })
    .bind(bind)?
    .run()
    .await
}
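The handlers later in this commit read a template file from disk and call `render_template` on every request, while `main()` builds an empty `Handlebars` registry. A possible refinement, not part of this commit, is to register the templates once at startup and render them by name. The sketch below assumes that approach; `build_template_registry` and the template names are illustrative, not existing code.

```rust
// Sketch only (not in this commit): register templates once at startup instead of
// re-reading and re-parsing them on every request. Names here are assumptions.
use std::path::Path;

use handlebars::Handlebars;

fn build_template_registry(assets: &Path) -> Result<Handlebars<'static>, Box<dyn std::error::Error>> {
    let mut hb = Handlebars::new();
    hb.register_template_file("main", assets.join("templates/main.html"))?;
    hb.register_template_file("linkslist", assets.join("templates/linkslist.html"))?;
    hb.register_template_file("bulk_creation_form", assets.join("templates/bulk_creation_form.html"))?;
    Ok(hb)
}

// main() could then store this registry in GlobalData, and handlers would call
// app.tpl.render("linkslist", &vars) instead of render_template(&file, &vars).
```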
13 src/models/error.rs Normal file
@@ -0,0 +1,13 @@
use thiserror::Error;

#[derive(Debug, Error)]
pub(crate) enum Error {
    #[error("This is a dummy error, so that the compiler would shut up about unused types")]
    Todo,

    #[error("Failed to query the backend: {0}")]
    DatabaseError(#[from] sqlx::Error)
}

// TODO: Implement an HTTP error response for actix for this enum
// NOTE: though, the impl should prob go under routes/ as it is webframework-specific
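The TODO above asks for an actix HTTP error response for this enum. A minimal sketch of what that could look like follows; as the NOTE says, it would live under routes/ since it is framework-specific, and the status-code mapping here is only an assumption.

```rust
// Sketch only (not in this commit): the actix-web ResponseError impl the TODO in
// src/models/error.rs refers to. Status-code choices are illustrative.
use actix_web::{http::StatusCode, HttpResponse, ResponseError};

use crate::models::error::Error;

impl ResponseError for Error {
    fn status_code(&self) -> StatusCode {
        match self {
            Error::DatabaseError(sqlx::Error::RowNotFound) => StatusCode::NOT_FOUND,
            Error::DatabaseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
            Error::Todo => StatusCode::NOT_IMPLEMENTED,
        }
    }

    fn error_response(&self) -> HttpResponse {
        // thiserror's Display impl provides the message text
        HttpResponse::build(self.status_code()).body(self.to_string())
    }
}
```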
144 src/models/link.rs Normal file
@@ -0,0 +1,144 @@
use thiserror::Error;
use serde::{Serialize, Deserialize};
use sqlx::{SqlitePool, FromRow, types::{Uuid, uuid::fmt::Hyphenated, chrono::{DateTime, Utc, FixedOffset}}};

use crate::models::error::Error;
use crate::models::url::Url;
use crate::models::provider::{AsyncCrudProvider, AsyncListFetchProvider};

#[derive(Debug, Clone, FromRow, Deserialize, Serialize)]
pub(crate) struct Link {
    #[serde(default)]
    #[sqlx(try_from = "Hyphenated")]
    uuid: Uuid,

    #[sqlx(try_from = "String")]
    href: Url,

    title: Option<String>,
    description: Option<String>,
    created_at: Option<DateTime<FixedOffset>>,
    updated_at: Option<DateTime<FixedOffset>>
}

impl Link {
    pub fn new(
        href: &str,
        title: Option<&str>,
        description: Option<&str>,
        created_at: Option<&DateTime<FixedOffset>>
    ) -> Result<Self, Error> {

        let time_now = sqlx::types::chrono::Utc::now().into();

        Ok(Self {
            uuid: Uuid::new_v4(),
            href: href.to_string().try_into()?,
            title: title.map(|s| s.trim().to_string()),
            description: description.map(|s| s.trim().to_string()),
            created_at: created_at.or(Some(&time_now)).copied(),
            updated_at: Some(time_now)
        })
    }
}




// TODO: (prob never) make sql-backend agnostic
pub(crate) struct LinksProvider<T> { pool: T }

impl<T> LinksProvider<T> {
    pub fn with_db(pool: T) -> Self {
        Self { pool }
    }
}


impl AsyncCrudProvider for LinksProvider<SqlitePool> {

    type Item = Link;
    type Uid = uuid::Uuid;
    type Error = Error;

    async fn create(&self, item: &Self::Item) -> Result<Self::Uid, Self::Error> {
        let r = sqlx::query("insert into Links (uuid, href, title, description, created_at, updated_at) values (?, ?, ?, ?, ?, ?)")
            .bind(format!("{}", item.uuid))
            .bind(item.href.as_str())
            .bind(&item.title)
            .bind(&item.description)
            .bind(&item.created_at)
            .bind(&item.updated_at)
            .execute(&self.pool)
            .await
            .map_err(|e| Error::DatabaseError(e))?;

        Ok(item.uuid.clone())
    }

    async fn read(&self, uid: &Self::Uid) -> Result<Self::Item, Self::Error> {
        sqlx::query_as::<_, Link>("select * from Links where uuid like ? limit 0, 1")
            .bind(&format!("{}", uid))
            .fetch_one(&self.pool)
            .await
            .map_err(|e| Error::DatabaseError(e))
    }

    async fn update(&self, uid: &Self::Uid, new: &Self::Item) -> Result<Self::Item, Self::Error> {
        let res = sqlx::query("update Links set href = ?, title = ?, description = ?, created_at = ?, updated_at = ? where uuid = ?")
            .bind(new.href.as_str())
            .bind(&new.title)
            .bind(&new.description)
            .bind(&new.created_at)
            .bind(&new.updated_at.unwrap_or_else(|| sqlx::types::chrono::Utc::now().into()))
            .bind(format!("{}", uid))
            .execute(&self.pool)
            .await
            .map_err(|e| Error::DatabaseError(e))?;

        Ok(new.clone()) // TODO: In theory this should return the previous value that was just updated,
                        // but that requires another select and a transaction, so no
    }

    async fn delete(&self, uid: &Self::Uid) -> Result<(), Self::Error> {
        sqlx::query("delete from Links where uuid like ?")
            .bind(&format!("{}", uid))
            .execute(&self.pool)
            .await
            .and_then(|_| Ok(()))
            .map_err(|e| Error::DatabaseError(e))
    }
}


impl AsyncListFetchProvider for LinksProvider<SqlitePool> {

    type Item = Link;
    type Error = Error;

    async fn fetch(&self, filters: &str, offset: Option<u64>, count: Option<u64>) -> Result<Vec<Self::Item>, Self::Error> {
        sqlx::query_as::<_, Link>("select * from Links where href like ? or title like ? or description like ? limit ?, ?")
            .bind(&format!("{}", filters))
            .bind(&format!("{}", filters))
            .bind(&format!("{}", filters))
            .bind(&format!("{}", offset.unwrap_or(0)))
            .bind(&format!("{}", count.unwrap_or(0))) // NOTE: with no count given this binds LIMIT 0, i.e. an empty page
            .fetch_all(&self.pool)
            .await
            .map_err(|e| Error::DatabaseError(e))
    }

    async fn count(&self, filters: &str) -> Result<u64, Self::Error> {
        sqlx::query_scalar("select count(*) from Links where href like ? or title like ? or description like ? limit 0, 1")
            .bind(&format!("{}", filters))
            .bind(&format!("{}", filters))
            .bind(&format!("{}", filters))
            .fetch_one(&self.pool)
            .await
            .map_err(|e| Error::DatabaseError(e))
    }
}

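A small test sketch, not part of this commit, documenting what `Link::new` does with its optional arguments: it trims the title and description and defaults both timestamps to "now" when no created_at is given. It would sit at the bottom of src/models/link.rs.

```rust
// Sketch only: unit tests for Link::new.
#[cfg(test)]
mod tests {
    use super::Link;

    #[test]
    fn new_trims_title_and_defaults_timestamps() {
        let link = Link::new("https://example.com/", Some("  Example  "), None, None).unwrap();
        assert_eq!(link.title.as_deref(), Some("Example")); // whitespace trimmed
        assert!(link.created_at.is_some());                 // defaults to "now"
        assert_eq!(link.created_at, link.updated_at);       // both set from the same instant
    }

    #[test]
    fn new_rejects_invalid_hrefs() {
        assert!(Link::new("not a url", None, None, None).is_err());
    }
}
```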
4 src/models/mod.rs Normal file
@@ -0,0 +1,4 @@
pub(crate) mod error;
pub(crate) mod url;
pub(crate) mod provider;
pub(crate) mod link;
32 src/models/provider.rs Normal file
@@ -0,0 +1,32 @@

pub(crate) trait AsyncCrudProvider {

    type Uid;
    type Item;
    type Error;

    async fn create(&self, new: &Self::Item) -> Result<Self::Uid, Self::Error>;

    async fn read(&self, uid: &Self::Uid) -> Result<Self::Item, Self::Error>;

    async fn update(&self, uid: &Self::Uid, new: &Self::Item) -> Result<Self::Item, Self::Error>;

    async fn delete(&self, uid: &Self::Uid) -> Result<(), Self::Error>;
}

pub(crate) trait AsyncListFetchProvider {

    type Item;
    type Error;

    // TODO: replace sql: &str with an &[SearchFilter], and add SearchFilter and
    // SearchFilterBuilder

    async fn fetch(&self, sql: &str, offset: Option<u64>, count: Option<u64>) -> Result<Vec<Self::Item>, Self::Error>;

    async fn count(&self, sql: &str) -> Result<u64, Self::Error>;
    // TODO: maybe move Vec<> to type Container
    // and have it be parametrised w/ <Item>
}

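The README caveat about alternative backends comes down to providing another implementation of these traits. A rough, purely illustrative sketch of an in-memory backend shows the shape; `Note`, `MemProvider`, and the file it would live in are made-up names, and a real implementation would instead reuse `Link` and a differently parameterised `LinksProvider<T>`.

```rust
// Sketch only (not in this commit): an in-memory implementation of AsyncCrudProvider,
// e.g. in a hypothetical src/models/memstore.rs. Names are illustrative.
use std::collections::HashMap;
use std::sync::Mutex;

use uuid::Uuid;

use crate::models::error::Error;
use crate::models::provider::AsyncCrudProvider;

#[derive(Debug, Clone)]
pub(crate) struct Note { pub uuid: Uuid, pub text: String }

pub(crate) struct MemProvider { store: Mutex<HashMap<Uuid, Note>> }

impl AsyncCrudProvider for MemProvider {
    type Item = Note;
    type Uid = Uuid;
    type Error = Error;

    async fn create(&self, new: &Note) -> Result<Uuid, Error> {
        self.store.lock().unwrap().insert(new.uuid, new.clone());
        Ok(new.uuid)
    }

    async fn read(&self, uid: &Uuid) -> Result<Note, Error> {
        // reusing the dummy Todo variant as a stand-in "not found" error
        self.store.lock().unwrap().get(uid).cloned().ok_or(Error::Todo)
    }

    async fn update(&self, uid: &Uuid, new: &Note) -> Result<Note, Error> {
        self.store.lock().unwrap().insert(*uid, new.clone());
        Ok(new.clone())
    }

    async fn delete(&self, uid: &Uuid) -> Result<(), Error> {
        self.store.lock().unwrap().remove(uid);
        Ok(())
    }
}
```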
21 src/models/url.rs Normal file
@@ -0,0 +1,21 @@
use serde::{Deserialize, Serialize};

use crate::models::error::Error;

#[derive(Debug, Clone, Deserialize, Serialize)]
pub(crate) struct Url(url::Url);

impl TryFrom<String> for Url {
    type Error = Error;

    fn try_from(val: String) -> Result<Self, Self::Error> {
        let d = url::Url::parse(&val);
        Ok(Self(d.map_err(|_| Self::Error::Todo)?))
    }
}

impl Url {
    pub(crate) fn as_str(&self) -> &str {
        self.0.as_str()
    }
}
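A quick sketch, not part of this commit, of how the newtype behaves, written as unit tests that would sit at the bottom of src/models/url.rs.

```rust
// Sketch only: unit tests for the Url newtype.
#[cfg(test)]
mod tests {
    use super::Url;

    #[test]
    fn accepts_absolute_urls() {
        let url = Url::try_from("https://example.com/page".to_string()).unwrap();
        assert_eq!(url.as_str(), "https://example.com/page");
    }

    #[test]
    fn rejects_relative_or_garbage_input() {
        // url::Url::parse has no base URL here, so relative or malformed input fails
        assert!(Url::try_from("not a url".to_string()).is_err());
    }
}
```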
28 src/routes/index.rs Normal file
@@ -0,0 +1,28 @@
use actix_web::{web, HttpResponse};
use std::sync::Arc;

use crate::GlobalData;

pub(crate) fn build_router(cfg: &mut web::ServiceConfig) {
    cfg
        .service(
            web::resource("/")
                .route(web::get().to(get_main_page))
        );
}



pub async fn get_main_page(app: web::Data<Arc<GlobalData>>) -> HttpResponse {

    let path = app.assets_path.join("templates/main.html");
    let file = std::fs::read_to_string(path).unwrap(); // TODO: Error handling
    let vars = serde_json::json!({"title": "Main page"}); // TODO
    let html = app.tpl.render_template(&file, &vars).unwrap(); // TODO: Error handling

    HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(html)
}

153 src/routes/links.rs Normal file
@@ -0,0 +1,153 @@
use serde::{Deserialize};
use crate::App;
use std::sync::Arc;
use actix_web::{ Scope, Result, web, Responder, Resource, HttpResponse, HttpResponseBuilder };
use uuid::Uuid;
use chrono::{DateTime, Utc};

use crate::GlobalData;
use crate::models::provider::{AsyncCrudProvider, AsyncListFetchProvider};
use crate::models::link::{Link, LinksProvider};

pub(crate) fn build_router(cfg: &mut web::ServiceConfig) {
    cfg
        .service(
            web::resource("/links")
                .route(web::get().to(get_links_list))
        )
        .service(
            web::resource("/links/bulk")
                .route(web::get().to(get_bulk_insert_form))
                .route(web::post().to(do_bulk_insert))
        )
        .service(
            web::resource("/link/{uuid}")
                .route(web::get().to(read_link))
                .route(web::post().to(upsert_link))
                .route(web::delete().to(remove_link))
        );
}


// --- crud ---

pub async fn read_link(uuid: web::Path<Uuid>, app: web::Data<Arc<GlobalData>>) -> String {
    LinksProvider::with_db(app.db.clone())
        .read(&uuid)
        .await
        .and_then(|r| Ok(format!("{:?}", r)))
        .unwrap_or_default()
}

pub async fn upsert_link(uuid: web::Path<Uuid>) -> Result<String> {
    todo!()
}

pub async fn remove_link(
    uuid: web::Path<Uuid>,
    app: web::Data<Arc<GlobalData>>
) -> HttpResponse {
    let r = LinksProvider::with_db(app.db.clone())
        .delete(&uuid)
        .await;

    match r {
        Ok(_) => HttpResponse::Ok().into(),
        Err(_) => HttpResponse::InternalServerError().into()
    }
}

pub async fn get_bulk_insert_form(
    app: web::Data<Arc<GlobalData>>
) -> impl Responder {

    let path = app.assets_path.join("templates/bulk_creation_form.html");
    let file = std::fs::read_to_string(path);

    HttpResponse::Ok()
        .content_type("text/html")
        .body(file.unwrap_or_default())
}

#[derive(Debug, Deserialize)]
struct BulkInsertData {
    created_at: String,
    hrefs: String
}

pub async fn do_bulk_insert(
    app: web::Data<Arc<GlobalData>>,
    formdata: web::Form<BulkInsertData>
) -> HttpResponse { // TODO: error handling // FIXME: REFACTOR THE F OUT OF IT

    // datetime-local inputs carry no UTC offset, so parse as a naive datetime first
    let datetime = chrono::NaiveDateTime::parse_from_str(&formdata.created_at, "%Y-%m-%dT%H:%M")
        .ok()
        .map(|naive| naive.and_utc().fixed_offset());

    let lp = LinksProvider::with_db(app.db.clone());
    let mut result = Vec::new();

    for line in formdata.hrefs.lines() {

        let mut it = line.split("|");
        let Some(href) = it.next() else { continue };
        let title = it.next();
        let desc = it.next();

        // TODO: error-handling

        if let Ok(link) = Link::new(href, title, desc, datetime.as_ref()) {
            if let Ok(uuid) = lp.create(&link).await {
                result.push(uuid);
            }
        }
    }

    // TODO;
    HttpResponse::Created()
        .content_type("text/html")
        .body(format!("{:?}", result))
}




// --- List ---

#[derive(Debug, Deserialize)]
struct Pagination {
    offset: Option<u64>,
    count: Option<u64>,
    query: Option<String>
}

pub async fn get_links_list(
    query: web::Query<Pagination>,
    app: web::Data<Arc<GlobalData>>
) -> HttpResponse {

    let filters = match query {
        web::Query(Pagination { query: Some(ref q), .. }) => format!("%{}%", &q),
        _ => "%".to_string()
    };

    // NOTE: since we don't utilise transactions here, count and data.len() may end up different
    // for the same query
    // (given the same offset and count)

    let lp = LinksProvider::with_db(app.db.clone());

    let count = lp.count(&filters).await.unwrap_or(0);
    let data = if count != 0 { lp.fetch(&filters, query.offset, query.count).await.unwrap() } else { Vec::default() };

    let vars = serde_json::json!({"links": data, "total": count}); // TODO: refactor

    let path = app.assets_path.join("templates/linkslist.html");
    println!("{}", path.display());
    let template = std::fs::read_to_string(path).unwrap();

    let html = app.tpl.render_template(&template, &vars).unwrap(); // TODO: Error handling

    HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(html)
}
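`upsert_link` above is still `todo!()`. One way it could eventually look, reusing the provider's `update`, is sketched below; it assumes the replacement Link arrives as a JSON body (an assumption, the commit does not specify the payload format), and error handling is elided as elsewhere in this file.

```rust
// Sketch only (the real handler is todo!() above): replace an existing link in place,
// mirroring read_link/remove_link. Assumes a JSON body; uses the imports already in links.rs.
pub async fn upsert_link(
    uuid: web::Path<Uuid>,
    app: web::Data<Arc<GlobalData>>,
    payload: web::Json<Link>,
) -> HttpResponse {
    let lp = LinksProvider::with_db(app.db.clone());

    match lp.update(&uuid, &payload).await {
        Ok(updated) => HttpResponse::Ok().body(format!("{:?}", updated)),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}
```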
3 src/routes/mod.rs Normal file
@@ -0,0 +1,3 @@
pub(crate) mod links;
pub(crate) mod index;
