From 62e8a872d3d0e9de1fc4caa4b57367b6b475d7bf Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:33:55 +0700 Subject: [PATCH 01/49] chore: bump version and move config to internal --- cmd/Cargo.toml | 4 ++-- cmd/src/main.rs | 6 ++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/cmd/Cargo.toml b/cmd/Cargo.toml index ac34576..017e399 100644 --- a/cmd/Cargo.toml +++ b/cmd/Cargo.toml @@ -1,10 +1,10 @@ [package] name = "cmd" -version = "0.1.3" +version = "0.2.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -internal = { path = "../internal", version = "0.1.3"} +internal = { path = "../internal", version = "0.2.0"} tokio = { version = "1.0", features = ["full"] } diff --git a/cmd/src/main.rs b/cmd/src/main.rs index 3696605..aefb300 100644 --- a/cmd/src/main.rs +++ b/cmd/src/main.rs @@ -1,8 +1,6 @@ -use internal::{self, config::Config, handler::handler}; +use internal::app::app; #[tokio::main] async fn main() -> std::io::Result<()> { - let config = Config::from_envar(); - handler(config).await; - Ok(()) + Ok(app().await) } From ff74146bc8437126df69fc52610e267d90c6e8e4 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:35:15 +0700 Subject: [PATCH 02/49] chore: ignore sqlite files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 7cfd474..f3e9c3f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ **/target **/Cargo.lock **/.env +husni-portfolio.db** From 36eacfa497e688153926987689f69f2930c298f6 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:35:41 +0700 Subject: [PATCH 03/49] feat: initial migrations for sqlite --- .../20240901103916_initial_migration.sql | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 internal/migrations/20240901103916_initial_migration.sql diff --git a/internal/migrations/20240901103916_initial_migration.sql b/internal/migrations/20240901103916_initial_migration.sql new file mode 100644 index 0000000..69df56a --- /dev/null +++ b/internal/migrations/20240901103916_initial_migration.sql @@ -0,0 +1,17 @@ +-- Add migration script here +CREATE TABLE IF NOT EXISTS blogs ( + id INTEGER PRIMARY KEY NOT NULL, + name TEXT NOT NULL, + source TEXT NOT NULL, + filename TEXT NOT NULL, + body TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS github_trees ( + id INTEGER PRIMARY KEY NOT NULL, + tree_path TEXT NOT NULL, + tree_mode TEXT NOT NULL, + tree_type TEXT NOT NULL, + sha TEXT NOT NULL, + url TEXT NOT NULL +); From fd756542a49d48354e7b8f71acd3e0cdf15c8c7c Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:36:58 +0700 Subject: [PATCH 04/49] feat: implement hexagonal architecture for husni-portfolio --- internal/src/port/blog/command.rs | 13 ++++++++ internal/src/port/blog/mod.rs | 2 ++ internal/src/port/blog/query.rs | 7 +++++ internal/src/port/mod.rs | 1 + internal/src/repo/blog.rs | 19 ++++++++++++ internal/src/repo/mod.rs | 1 + internal/src/usecase/blog.rs | 50 +++++++++++++++++++++++++++++++ internal/src/usecase/mod.rs | 1 + 8 files changed, 94 insertions(+) create mode 100644 internal/src/port/blog/command.rs create mode 100644 internal/src/port/blog/mod.rs create mode 100644 internal/src/port/blog/query.rs create mode 100644 internal/src/port/mod.rs create mode 100644 internal/src/repo/blog.rs create mode 100644 internal/src/repo/mod.rs create mode 100644 
internal/src/usecase/blog.rs create mode 100644 internal/src/usecase/mod.rs diff --git a/internal/src/port/blog/command.rs b/internal/src/port/blog/command.rs new file mode 100644 index 0000000..328792a --- /dev/null +++ b/internal/src/port/blog/command.rs @@ -0,0 +1,13 @@ +use crate::model::blog::{Blog, BlogBody, BlogDeleted, BlogFilename, BlogId, BlogName, BlogSource}; + +pub trait BlogQueryCommand { + fn update( + &mut self, + id: BlogId, + name: Option, + filename: Option, + source: Option, + body: Option, + ) -> Blog; + fn delete(&mut self, id: BlogId) -> BlogDeleted; +} diff --git a/internal/src/port/blog/mod.rs b/internal/src/port/blog/mod.rs new file mode 100644 index 0000000..49a772c --- /dev/null +++ b/internal/src/port/blog/mod.rs @@ -0,0 +1,2 @@ +pub mod command; +pub mod query; diff --git a/internal/src/port/blog/query.rs b/internal/src/port/blog/query.rs new file mode 100644 index 0000000..c0b3e76 --- /dev/null +++ b/internal/src/port/blog/query.rs @@ -0,0 +1,7 @@ +use crate::model::blog::{Blog, BlogEndPage, BlogId, BlogStartPage}; + +pub trait BlogQueryPort { + fn find(&self, id: BlogId) -> Blog; + fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; + fn find_all(&self) -> Vec; +} diff --git a/internal/src/port/mod.rs b/internal/src/port/mod.rs new file mode 100644 index 0000000..21aa2c6 --- /dev/null +++ b/internal/src/port/mod.rs @@ -0,0 +1 @@ +pub mod blog; diff --git a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs new file mode 100644 index 0000000..1e1bc3e --- /dev/null +++ b/internal/src/repo/blog.rs @@ -0,0 +1,19 @@ +use crate::model::blog::{ + Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, + BlogStartPage, +}; + +pub trait BlogRepo { + fn find(&self, id: BlogId) -> Blog; + fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; + fn find_all(&self) -> Vec; + fn update( + &mut self, + id: BlogId, + name: Option, + filename: Option, + source: Option, + body: Option, + ) -> Blog; + fn delete(&mut self, id: BlogId) -> BlogDeleted; +} diff --git a/internal/src/repo/mod.rs b/internal/src/repo/mod.rs new file mode 100644 index 0000000..21aa2c6 --- /dev/null +++ b/internal/src/repo/mod.rs @@ -0,0 +1 @@ +pub mod blog; diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs new file mode 100644 index 0000000..0e4c054 --- /dev/null +++ b/internal/src/usecase/blog.rs @@ -0,0 +1,50 @@ +use crate::model::blog::{ + Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, + BlogStartPage, +}; +use crate::port::blog::{command::BlogQueryCommand, query::BlogQueryPort}; +use crate::repo::blog::BlogRepo; + +pub struct BlogUseCase { + pub blog_repo: Box, +} + +impl Clone for BlogUseCase { + fn clone(&self) -> Self { + self.clone() + } +} + +impl BlogQueryPort for BlogUseCase { + fn find(&self, id: BlogId) -> Blog { + self.blog_repo.find(id) + } + fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { + self.blog_repo.find_blogs(start, end) + } + fn find_all(&self) -> Vec { + self.blog_repo.find_all() + } +} + +impl BlogQueryCommand for BlogUseCase { + fn update( + &mut self, + id: BlogId, + name: Option, + filename: Option, + source: Option, + body: Option, + ) -> Blog { + self.blog_repo.update(id, name, filename, source, body) + } + fn delete(&mut self, id: BlogId) -> BlogDeleted { + self.blog_repo.delete(id) + } +} + +impl BlogUseCase { + pub fn new(blog_repo: Box) -> BlogUseCase { + BlogUseCase { blog_repo } + } +} diff --git 
a/internal/src/usecase/mod.rs b/internal/src/usecase/mod.rs new file mode 100644 index 0000000..21aa2c6 --- /dev/null +++ b/internal/src/usecase/mod.rs @@ -0,0 +1 @@ +pub mod blog; From 3c60d89286381bf8d695b2ee8ffca331260e9148 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:37:48 +0700 Subject: [PATCH 05/49] feat: update data model and add memory as database --- internal/src/database/memory.rs | 172 +++++++++++++++++++ internal/src/database/mod.rs | 1 + internal/src/model/axum.rs | 10 ++ internal/src/model/blog.rs | 286 ++++++++++++++++++++++++++++++++ internal/src/model/data.rs | 222 ------------------------- internal/src/model/github.rs | 84 ++++++++++ internal/src/model/mod.rs | 5 +- internal/src/model/templates.rs | 14 +- internal/src/model/version.rs | 10 ++ 9 files changed, 574 insertions(+), 230 deletions(-) create mode 100644 internal/src/database/memory.rs create mode 100644 internal/src/database/mod.rs create mode 100644 internal/src/model/axum.rs create mode 100644 internal/src/model/blog.rs delete mode 100644 internal/src/model/data.rs create mode 100644 internal/src/model/github.rs create mode 100644 internal/src/model/version.rs diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs new file mode 100644 index 0000000..8aa33b0 --- /dev/null +++ b/internal/src/database/memory.rs @@ -0,0 +1,172 @@ +use crate::api::github::get_gh_blogs; +use crate::model::blog::{ + Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, + BlogStartPage, +}; +use crate::repo::blog::BlogRepo; +use crate::utils::{capitalize, md_to_html}; +use log::{debug, info}; +use std::fs; + +pub struct MemoryBlogRepo { + pub blogs: Vec, +} + +impl BlogRepo for MemoryBlogRepo { + fn find(&self, id: BlogId) -> Blog { + let result = self + .blogs + .iter() + .filter(|blog| &blog.id == &id) + .next() + .unwrap(); + info!("Blog {} processed.", &result.id); + debug!("Blog HTML {}.", &result.body); + + result.clone() + } + fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { + let start_seq = start.0 as usize; + let end_seq = end.0 as usize; + let result = &self.blogs[start_seq..end_seq]; + result.to_vec() + } + fn find_all(&self) -> Vec { + let result = &self.blogs; + result.to_vec() + } + fn delete(&mut self, id: BlogId) -> BlogDeleted { + let index = self.blogs.iter().position(|blog| &blog.id == &id).unwrap(); + info!("Deleting Blog with Id {}", &index); + + self.blogs.remove(index); + info!("Deleted Blog with Id {}", &index); + BlogDeleted(true) + } + fn update( + &mut self, + id: BlogId, + name: Option, + filename: Option, + source: Option, + body: Option, + ) -> Blog { + let result: &mut Blog = self + .blogs + .iter_mut() + .filter(|blog| &blog.id == &id) + .next() + .unwrap(); + match name { + Some(val) => { + debug!("Update Blog {} name from {} to {}", &id, &result.name, &val); + result.update_name(val) + } + None => (), + } + match filename { + Some(val) => { + debug!( + "Update Blog {} filename from {} to {}", + &id, &result.filename, &val + ); + result.filename = val + } + None => (), + } + match source { + Some(val) => { + debug!( + "Update Blog {} source from {} to {}", + &id, &result.source, &val + ); + result.source = val + } + None => (), + } + match body { + Some(val) => { + debug!("Update Blog {} body from {} to {}", &id, &result.body, &val); + result.body = val + } + None => (), + } + result.clone() + } +} + +impl MemoryBlogRepo { + pub fn new() -> MemoryBlogRepo { + let dir = 
Some("./statics/blogs/".to_string()); + Self::from_dir(dir) + } + + /// Async function to get BlogsData from github + /// Borrowed `owner`, `repo`, and `branch` String + pub async fn from_github(owner: &String, repo: &String, branch: &String) -> Self { + let dir = Some("./statics/blogs/".to_string()); + let mut blog_data = Self::from_dir(dir).blogs; + let mut gh_blog_data = + get_gh_blogs(owner.to_string(), repo.to_string(), branch.to_string()) + .await + .expect("Failed to get github blog data"); + blog_data.append(&mut gh_blog_data); + Self { blogs: blog_data } + } + + /// Create MemoryBlogRepo from directory + pub fn from_dir(dir: Option) -> Self { + let directory = dir.clone().expect("Failed to get directory"); + let static_path = fs::read_dir(directory.as_str()).unwrap(); + + let blogs_paths: Vec = static_path + .filter_map(|blog_path| { + let path = blog_path.ok().expect("Failed to get blog path").path(); + if path.is_file() { + path.file_name() + .expect("Failed to get filename") + .to_str() + .map(|s| s.to_owned()) + } else { + None + } + }) + .collect(); + + let blogs: Vec = blogs_paths + .iter() + .map(|blog_path| { + let (id, name_init) = blog_path + .split_once("-") + .expect("Failed to split filename into id and name"); + let name_formated = name_init.replace("_", " "); + let (name_lower, _) = name_formated + .split_once(".") + .expect("Failed to remove file extension"); + let name = capitalize(name_lower); + let fullpath = format!("{}{}", directory, blog_path); + + info!("markdown loaded: {}", fullpath); + + let body = md_to_html(fullpath).expect("Failed to convert markdown to html"); + Blog { + id: BlogId(id.to_string()), + name: BlogName(name.to_string()), + source: BlogSource::FileSystem, + filename: BlogFilename(blog_path.to_owned()), + body: BlogBody(body), + } + }) + .collect(); + + debug!("Blogs: {:?}", blogs); + + Self { blogs } + } +} + +impl Default for MemoryBlogRepo { + fn default() -> Self { + MemoryBlogRepo::new() + } +} diff --git a/internal/src/database/mod.rs b/internal/src/database/mod.rs new file mode 100644 index 0000000..eb29191 --- /dev/null +++ b/internal/src/database/mod.rs @@ -0,0 +1 @@ +pub mod memory; diff --git a/internal/src/model/axum.rs b/internal/src/model/axum.rs new file mode 100644 index 0000000..1d3241e --- /dev/null +++ b/internal/src/model/axum.rs @@ -0,0 +1,10 @@ +use crate::config::Config; +use crate::usecase::blog::BlogUseCase; + +/// Axum state +/// Consist of Config and BlogUseCase +#[derive(Clone)] +pub struct AppState { + pub config: Config, + pub blog_usecase: BlogUseCase, +} diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs new file mode 100644 index 0000000..17204c0 --- /dev/null +++ b/internal/src/model/blog.rs @@ -0,0 +1,286 @@ +use crate::api::github::get_gh_blogs; +use crate::utils::{capitalize, md_to_html}; +use log::{debug, info}; +use serde::{Deserialize, Serialize}; +use std::fmt::Display; +use std::fs; + +/// BlogId +/// Identifier of Blog +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogId(pub String); + +impl BlogId { + pub fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl Display for BlogId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// BlogName +/// Name of the Blog +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogName(pub String); + +impl BlogName { + pub fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl Display for BlogName { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// BlogFilename +/// Filename of the Blog +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogFilename(pub String); + +impl BlogFilename { + pub fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl Display for BlogFilename { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// BlogBody +/// HTML body of the Blog +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogBody(pub String); + +impl BlogBody { + pub fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl Display for BlogBody { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// BlogDeleted +/// Blog Deleted or not +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogDeleted(pub bool); + +/// BlogType +/// Type of Blog source +/// Can be: +/// - FileSystem: Blog markdown come from filesystem +/// - Github: Blog markdown come from github repository +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub enum BlogSource { + FileSystem, + Github, +} + +impl Display for BlogSource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self { + Self::FileSystem => { + write!(f, "FileSystem") + } + Self::Github => { + write!(f, "Github") + } + } + } +} + +/// Blog +/// Blog data with fields: +/// - id: Blog Identifier +/// - name: Blog name +/// - source: Blog source +/// - filename: Blog Filename or Source +/// - body: Blog HTML body +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct Blog { + pub id: BlogId, + pub name: BlogName, + pub source: BlogSource, + pub filename: BlogFilename, + pub body: BlogBody, +} + +impl Blog { + pub fn update_name(&mut self, new_name: BlogName) { + self.name = new_name + } + pub fn update_source(&mut self, new_source: BlogSource) { + self.source = new_source + } + pub fn update_filename(&mut self, new_filename: BlogFilename) { + self.filename = new_filename + } + pub fn update_body(&mut self, new_body: BlogBody) { + self.body = new_body + } +} + +/// BlogStartPage +/// Start page of Blog Pagination +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogStartPage(pub i32); + +/// BlogEndPage +/// End page of Blog Pagination +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogEndPage(pub i32); + +// /// Blogs +// /// Vector of Blog in range of start page and end page +// #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +// pub struct Blogs { +// pub blogs: Vec, +// pub blog_start_page: BlogStartPage, +// pub blog_end_page: BlogEndPage, +// } +// +// impl Default for Blogs { +// fn default() -> Self { +// let dir = Some("./statics/blogs/".to_string()); +// Self::from_dir(dir) +// } +// } +// +// impl Blogs { +// /// Async function to get BlogsData from github +// /// Borrowed `owner`, `repo`, and `branch` String +// pub async fn with_gh(owner: &String, repo: &String, branch: &String) -> Self { +// let dir = Some("./statics/blogs/".to_string()); +// let mut blog_data = Self::from_dir(dir).blogs; +// let mut gh_blog_data = +// get_gh_blogs(owner.to_string(), repo.to_string(), branch.to_string()) +// .await +// .expect("Failed to get github blog data"); +// blog_data.append(&mut gh_blog_data); +// Self { +// blogs: blog_data, +// blog_start_page: BlogStartPage(0), +// blog_end_page: BlogEndPage(10), +// } +// } +// +// pub fn 
from_dir(dir: Option) -> Self { +// let directory = dir.clone().expect("Failed to get directory"); +// let static_path = fs::read_dir(directory.as_str()).unwrap(); +// +// let blogs_paths: Vec = static_path +// .filter_map(|blog_path| { +// let path = blog_path.ok().expect("Failed to get blog path").path(); +// if path.is_file() { +// path.file_name() +// .expect("Failed to get filename") +// .to_str() +// .map(|s| s.to_owned()) +// } else { +// None +// } +// }) +// .collect(); +// +// let blogs: Vec = blogs_paths +// .iter() +// .map(|blog_path| { +// let (id, name_init) = blog_path +// .split_once("-") +// .expect("Failed to split filename into id and name"); +// let name_formated = name_init.replace("_", " "); +// let (name_lower, _) = name_formated +// .split_once(".") +// .expect("Failed to remove file extension"); +// let name = capitalize(name_lower); +// let fullpath = format!("{}{}", directory, blog_path); +// +// info!("markdown loaded: {}", fullpath); +// +// let body = md_to_html(fullpath).expect("Failed to convert markdown to html"); +// Blog { +// id: BlogId(id.to_string()), +// name: BlogName(name.to_string()), +// source: BlogSource::FileSystem, +// filename: BlogFilename(blog_path.to_owned()), +// body: BlogBody(body), +// } +// }) +// .collect(); +// +// debug!("Blogs: {:?}", blogs); +// +// Self { +// blogs, +// blog_start_page: BlogStartPage(0), +// blog_end_page: BlogEndPage(10), +// } +// } +// } +// +// #[cfg(test)] +// mod test { +// use super::*; +// use std::env::current_dir; +// use std::io::Write; +// use test_log::test; +// +// #[test] +// fn test_blogs_data_from_dir() { +// // Preparation +// let test_id = "999"; +// let test_name = "Test blog"; +// let test_body = "# Testing Blog for Unit Test"; +// let test_body_html = "

<h1>Testing Blog for Unit Test</h1>

"; +// let test_filename = "999-test_blog.md"; +// let test_path = "../statics/blogs/999-test_blog.md"; +// +// // Get current directory +// debug!( +// "Curent Directory: {}", +// current_dir().expect("Failed to get current dir").display() +// ); +// +// // Create a blog markdown +// let mut md_file = fs::File::create(test_path).expect("Failed to create File Write buffer"); +// md_file +// .write_all(test_body.as_bytes()) +// .expect("Failed to write buffer to"); +// +// // Call create_blogs function +// let dir = Some("../statics/blogs/".to_string()); +// let blogs = Blogs::from_dir(dir); +// +// // Check blogs data +// debug!("Check BlogsData: {:?}", blogs); +// +// let blog_test = blogs +// .blogs +// .iter() +// .filter(|blog| blog.id == BlogId(test_id.to_string())) +// .next() +// .expect("Failed to get test blog data"); +// +// // Compare if new blog markdown is available +// assert_eq!(blog_test.id, BlogId(test_id.to_string())); +// assert_eq!(blog_test.name, BlogName(test_name.to_string())); +// assert_eq!(blog_test.body, BlogBody(test_body_html.to_string())); +// assert_eq!(blog_test.filename, BlogFilename(test_filename.to_string())); +// +// // Delete test blog markdown +// fs::remove_file(test_path).expect("Failed to delete test blog markdown"); +// } +// } diff --git a/internal/src/model/data.rs b/internal/src/model/data.rs deleted file mode 100644 index 9c7b711..0000000 --- a/internal/src/model/data.rs +++ /dev/null @@ -1,222 +0,0 @@ -use crate::api::github::get_gh_blog_data; -use crate::config::Config; -use crate::utils::{capitalize, md_to_html}; -use log::{debug, info}; -use serde::{Deserialize, Serialize}; -use std::fs; - -#[derive(Debug, Clone, PartialEq)] -pub struct ProfileData; - -#[derive(Debug, Clone, PartialEq)] -pub struct BlogsData { - pub blogs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub enum BlogDataType { - FileSystem, - Github, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct BlogData { - pub id: String, - pub name: String, - pub source: BlogDataType, - pub filename: String, - pub body: String, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct VersionData { - pub version: String, - pub build_hash: String, - pub build_date: String, -} - -impl Default for BlogsData { - fn default() -> Self { - let dir = Some("./statics/blogs/".to_string()); - Self::from_dir(dir) - } -} - -impl BlogsData { - pub async fn with_gh(owner: &String, repo: &String, branch: &String) -> Self { - let dir = Some("./statics/blogs/".to_string()); - let mut blog_data = Self::from_dir(dir).blogs; - let mut gh_blog_data = - get_gh_blog_data(owner.to_string(), repo.to_string(), branch.to_string()) - .await - .expect("Failed to get github blog data"); - blog_data.append(&mut gh_blog_data); - Self { blogs: blog_data } - } - - pub fn from_dir(dir: Option) -> Self { - let directory = dir.clone().expect("Failed to get directory"); - let static_path = fs::read_dir(directory.as_str()).unwrap(); - - let blogs_paths: Vec = static_path - .filter_map(|blog_path| { - let path = blog_path.ok().expect("Failed to get blog path").path(); - if path.is_file() { - path.file_name() - .expect("Failed to get filename") - .to_str() - .map(|s| s.to_owned()) - } else { - None - } - }) - .collect(); - - let blogs: Vec = blogs_paths - .iter() - .map(|blog_path| { - let (id, name_init) = blog_path - .split_once("-") - .expect("Failed to split filename into id and name"); - let name_formated = name_init.replace("_", " "); - let (name_lower, _) = name_formated - 
.split_once(".") - .expect("Failed to remove file extension"); - let name = capitalize(name_lower); - let fullpath = format!("{}{}", directory, blog_path); - - info!("markdown loaded: {}", fullpath); - - let body = - md_to_html(Some(fullpath), None).expect("Failed to convert markdown to html"); - BlogData { - id: id.to_string(), - name: name.to_string(), - source: BlogDataType::FileSystem, - filename: blog_path.to_owned(), - body, - } - }) - .collect(); - - debug!("Blogs: {:?}", blogs); - - BlogsData { blogs } - } -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct Trees { - pub sha: String, - pub url: String, - pub tree: Vec, -} - -/// The file mode one of -/// 100644 for file (blob) -/// 100755 for executable (blob) -/// 040000 for subdirectory (tree) -/// 160000 for submodule (commit) -/// 120000 for a blob that specifies the path of a symlink. -/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 -#[derive(Deserialize, Serialize, Debug, Clone)] -pub enum TreeMode { - #[serde(rename(deserialize = "100644"))] - File, - #[serde(rename(deserialize = "100755"))] - Executable, - #[serde(rename(deserialize = "040000"))] - SubDir, - #[serde(rename(deserialize = "160000"))] - SubModeule, - #[serde(rename(deserialize = "120000"))] - Symlink, -} - -/// Either blob, tree, or commit. -/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 -#[derive(Deserialize, Serialize, Debug, Clone)] -pub enum TreeType { - #[serde(rename(deserialize = "blob"))] - Blob, - #[serde(rename(deserialize = "tree"))] - Tree, - #[serde(rename(deserialize = "commit"))] - Commit, -} - -/// Tree structure of git -/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct Tree { - pub path: String, - #[serde(rename(deserialize = "mode"))] - pub tree_mode: TreeMode, - #[serde(rename(deserialize = "type"))] - pub tree_type: TreeType, - pub sha: String, - pub size: Option, - pub url: String, -} - -/// Axum state -/// Consist of Config and BlogsData -#[derive(Debug, Clone)] -pub struct AppState { - pub config: Config, - pub blogs_data: BlogsData, -} - -#[cfg(test)] -mod test { - use super::*; - use std::env::current_dir; - use std::io::Write; - use test_log::test; - - #[test] - fn test_blogs_data_from_dir() { - // Preparation - let test_id = "999"; - let test_name = "Test blog"; - let test_body = "# Testing Blog for Unit Test"; - let test_body_html = "

<h1>Testing Blog for Unit Test</h1>

"; - let test_filename = "999-test_blog.md"; - let test_path = "../statics/blogs/999-test_blog.md"; - - // Get current directory - debug!( - "Curent Directory: {}", - current_dir().expect("Failed to get current dir").display() - ); - - // Create a blog markdown - let mut md_file = fs::File::create(test_path).expect("Failed to create File Write buffer"); - md_file - .write_all(test_body.as_bytes()) - .expect("Failed to write buffer to"); - - // Call create_blogs function - let dir = Some("../statics/blogs/".to_string()); - let blogs = BlogsData::from_dir(dir); - - // Check blogs data - debug!("Check BlogsData: {:?}", blogs); - - let blog_test = blogs - .blogs - .iter() - .filter(|blog| blog.id == test_id) - .next() - .expect("Failed to get test blog data"); - - // Compare if new blog markdown is available - assert_eq!(blog_test.id.as_str(), test_id); - assert_eq!(blog_test.name.as_str(), test_name); - assert_eq!(blog_test.body.as_str(), test_body_html); - assert_eq!(blog_test.filename.as_str(), test_filename); - - // Delete test blog markdown - fs::remove_file(test_path).expect("Failed to delete test blog markdown"); - } -} diff --git a/internal/src/model/github.rs b/internal/src/model/github.rs new file mode 100644 index 0000000..2a6b314 --- /dev/null +++ b/internal/src/model/github.rs @@ -0,0 +1,84 @@ +use serde::{Deserialize, Serialize}; +use std::fmt::Display; + +/// Github Owner Name +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubOwner(String); + +impl Display for GithubOwner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Github Owner Repository +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubRepository(String); + +impl Display for GithubRepository { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Github Owner Branch +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubBranch(String); + +impl Display for GithubBranch { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Collection of Tree of github blog data +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubTrees { + pub trees: Vec, +} + +/// The file mode one of +/// 100644 for file (blob) +/// 100755 for executable (blob) +/// 040000 for subdirectory (tree) +/// 160000 for submodule (commit) +/// 120000 for a blob that specifies the path of a symlink. +/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 +#[derive(Deserialize, Serialize, Debug, Clone)] +pub enum GithubTreeMode { + #[serde(rename(deserialize = "100644"))] + File, + #[serde(rename(deserialize = "100755"))] + Executable, + #[serde(rename(deserialize = "040000"))] + SubDir, + #[serde(rename(deserialize = "160000"))] + SubModeule, + #[serde(rename(deserialize = "120000"))] + Symlink, +} + +/// Either blob, tree, or commit. 
+/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 +#[derive(Deserialize, Serialize, Debug, Clone)] +pub enum GithubTreeType { + #[serde(rename(deserialize = "blob"))] + Blob, + #[serde(rename(deserialize = "tree"))] + Tree, + #[serde(rename(deserialize = "commit"))] + Commit, +} + +/// Tree structure of git +/// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubTree { + pub path: String, + #[serde(rename(deserialize = "mode"))] + pub tree_mode: GithubTreeMode, + #[serde(rename(deserialize = "type"))] + pub tree_type: GithubTreeType, + pub sha: String, + pub url: String, +} diff --git a/internal/src/model/mod.rs b/internal/src/model/mod.rs index 5003124..3d45e46 100644 --- a/internal/src/model/mod.rs +++ b/internal/src/model/mod.rs @@ -1,2 +1,5 @@ -pub mod data; +pub mod axum; +pub mod blog; +pub mod github; pub mod templates; +pub mod version; diff --git a/internal/src/model/templates.rs b/internal/src/model/templates.rs index e44d65a..d23c501 100644 --- a/internal/src/model/templates.rs +++ b/internal/src/model/templates.rs @@ -2,17 +2,17 @@ use askama::Template; #[derive(Template, Debug)] #[template(path = "profile.html")] -pub struct Profile; +pub struct ProfileTemplate; #[derive(Template, Debug)] #[template(path = "blogs.html")] -pub struct Blogs<'a> { - pub blogs: &'a Vec>, +pub struct BlogsTemplate<'a> { + pub blogs: &'a Vec>, } #[derive(Template, Debug)] #[template(path = "blog.html")] -pub struct Blog<'a> { +pub struct BlogTemplate<'a> { pub id: &'a str, pub name: &'a str, pub filename: &'a str, @@ -21,7 +21,7 @@ pub struct Blog<'a> { #[derive(Template, Debug)] #[template(path = "version.html")] -pub struct Version<'a> { +pub struct VersionTemplate<'a> { pub version: &'a str, pub environment: &'a str, pub build_hash: &'a str, @@ -30,8 +30,8 @@ pub struct Version<'a> { #[derive(Template, Debug)] #[template(path = "404_not_found.html")] -pub struct NotFound; +pub struct NotFoundTemplate; #[derive(Template, Debug)] #[template(path = "500_internal_server_error.html")] -pub struct InternalServerError; +pub struct InternalServerErrorTemplate; diff --git a/internal/src/model/version.rs b/internal/src/model/version.rs new file mode 100644 index 0000000..27b3cf2 --- /dev/null +++ b/internal/src/model/version.rs @@ -0,0 +1,10 @@ +use serde::{Deserialize, Serialize}; + +/// Version +/// Store version, build hash, and buld date +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct Version { + pub version: String, + pub build_hash: String, + pub build_date: String, +} From ecf039f57e86fd90d0fcd370395f412c772a050d Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 02:38:50 +0700 Subject: [PATCH 06/49] feat: update router, handler, github api, and utils --- internal/Cargo.toml | 5 +- internal/src/api/github.rs | 314 +++++++++++++++++++++++++------------ internal/src/app.rs | 58 +++++++ internal/src/handler.rs | 181 ++++++++++++++++----- internal/src/lib.rs | 6 +- internal/src/router.rs | 157 ------------------- internal/src/utils.rs | 73 ++------- 7 files changed, 436 insertions(+), 358 deletions(-) create mode 100644 internal/src/app.rs delete mode 100644 internal/src/router.rs diff --git a/internal/Cargo.toml b/internal/Cargo.toml index 195a4a5..0c7f2e4 100644 --- a/internal/Cargo.toml +++ b/internal/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "internal" -version = "0.1.3" +version = "0.2.0" edition = "2021" build = "build.rs" @@ -23,6 
+23,9 @@ test-log = "0.2.16" octocrab = "0.39.0" http-body-util = "0.1.2" regex = "1.10.6" +sqlx = { version = "=0.8.1", features = ["sqlite", "runtime-tokio"] } +async-trait = "0.1.81" +# rusqlite = "=0.32.1" [build-dependencies] anyhow = "1.0.86" diff --git a/internal/src/api/github.rs b/internal/src/api/github.rs index 3d21617..0098670 100644 --- a/internal/src/api/github.rs +++ b/internal/src/api/github.rs @@ -1,28 +1,84 @@ -use crate::model::data::{BlogData, BlogDataType, Trees}; -use crate::utils::{capitalize, md_to_html, replace_gh_link}; +use crate::model::blog::{Blog, BlogBody, BlogFilename, BlogId, BlogName, BlogSource}; +use crate::model::github::{GithubTree, GithubTrees}; +use crate::utils::capitalize; use http_body_util::BodyExt; use log::{debug, error, info}; +use markdown::{to_html_with_options, Options}; use octocrab; +use regex::Regex; use serde_json; use std::num::IntErrorKind; -/// get_gh_blog_data() +// pub struct MemoryGithubRepo {} +// +// impl MemoryGithubRepo { +// pub fn new() -> MemoryGithubRepo { +// MemoryGithubRepo {} +// } +// } +// +// impl Default for MemoryGithubRepo { +// fn default() -> Self { +// MemoryGithubRepo::new() +// } +// } + +// #[async_trait] +// impl GithubRepo for MemoryGithubRepo { +/// find all() /// An async function that /// take String of repository owner /// and String of repository repo /// and String of repository branch -/// Return an Option of Vector of BlogData +/// Return an Option of GithubTrees /// /// Example: /// let owner = "husni-zuhdi".to_string(); /// let repo = "husni-blog-resources".to_string(); /// let branch = "main".to_string(); -/// let gh_blog_data = get_gh_blog_list(owner, repo, branch).await?; -pub async fn get_gh_blog_data( - owner: String, - repo: String, - branch: String, -) -> Option> { +/// let gh_trees = MemoryGithubRepo::new().find_all(owner, repo, branch).await?; +// async fn find_all( +// &self, +// owner: GithubOwner, +// repo: GithubRepository, +// branch: GithubBranch, +// ) -> Option { +// let tree_endpoint = format!( +// "https://api.github.com/repos/{}/{}/git/trees/{}", +// &owner, &repo, &branch +// ); +// let gh_trees = octocrab::instance()._get(tree_endpoint).await; +// +// let trees_result = match gh_trees { +// Ok(val) => { +// let body_bytes = val.into_body().collect().await.unwrap().to_bytes(); +// let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); +// let result: GithubTrees = serde_json::from_str(&body_json).unwrap(); +// Some(result) +// } +// Err(err) => { +// error!("Failed to parse Github Trees result: {}", err); +// None +// } +// }; +// +// trees_result +// } +// } + +/// get_gh_blogs() +/// An async function that +/// take String of repository owner +/// and String of repository repo +/// and String of repository branch +/// Return an Option of GithubTrees +/// +/// Example: +/// let owner = "husni-zuhdi".to_string(); +/// let repo = "husni-blog-resources".to_string(); +/// let branch = "main".to_string(); +/// let gh_trees = get_gh_blogs(owner, repo, branch).await?; +pub async fn get_gh_blogs(owner: String, repo: String, branch: String) -> Option> { let tree_endpoint = format!( "https://api.github.com/repos/{}/{}/git/trees/{}", &owner, &repo, &branch @@ -33,7 +89,7 @@ pub async fn get_gh_blog_data( Ok(val) => { let body_bytes = val.into_body().collect().await.unwrap().to_bytes(); let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); - let result: Trees = serde_json::from_str(&body_json).unwrap(); + let result: GithubTrees = 
serde_json::from_str(&body_json).unwrap(); Some(result) } Err(err) => { @@ -42,87 +98,17 @@ pub async fn get_gh_blog_data( } }; - let mut blog_trees: Vec = Vec::new(); + let mut blog_trees: Vec = Vec::new(); match trees_result { Some(val) => { - for tree in val.tree { - let blog_path = tree.path; - - // Check to make sure the path doesn't have a extention - if !blog_path.contains(".") { - // Get blog id with specification of 3 digit integer - let blog_id = blog_path.get(0..3).unwrap(); - let blog_name = blog_path.get(4..).unwrap(); - - match blog_id.parse::() { - Ok(_) => { - if &blog_id != &"000" { - info!("Blog Name: {}", &blog_name); - let blog_readme_path = format!("{}/README.md", &blog_path); - let blog_content = octocrab::instance() - .repos(&owner, &repo) - .get_content() - .path(&blog_readme_path) - .r#ref(&branch) - .send() - .await; - match blog_content { - Ok(mut res) => { - let content = res.take_items(); - let decoded_content = - &content[0].decoded_content().unwrap().clone(); - - let name_formated = blog_name.replace("-", " "); - let name = capitalize(&name_formated); - info!("Markdown of {} loaded", &blog_name); - - let raw_body = - md_to_html(None, Some(decoded_content.to_string())) - .expect("Failed to convert markdown to html"); - debug!("HTML Body of {}: {}", &blog_name, &raw_body); - - let gh_blog_link = format!( - "https://github.com/{}/{}/tree/{}/{}", - &owner, &repo, &branch, &blog_path - ); - let gh_raw_blog_link = format!( - "https://raw.githubusercontent.com/{}/{}/{}/{}", - &owner, &repo, &branch, &blog_path - ); - let body = replace_gh_link( - raw_body, - gh_blog_link, - gh_raw_blog_link, - ); - - blog_trees.push(BlogData { - id: format!("{}-g", blog_id).to_string(), - name, - source: BlogDataType::Github, - filename: format!( - "https://api.github.com/repos/{}/{}/contents/{}", - &owner, &repo, &blog_readme_path - ) - .to_string(), - body, - }) - } - Err(err) => { - error!( - "Failed to get Blog content with Blog ID {} and Name {}: {}", - &blog_id, &blog_name, err - ) - } - } - } - } - Err(err) => { - if err.kind() == &IntErrorKind::InvalidDigit { - continue; - } - println!("Failed to parse Blog ID: {}", err); - } - }; + for tree in val.trees { + let blog_res = + get_gh_blog(tree.clone(), owner.clone(), repo.clone(), branch.clone()).await; + match blog_res { + Some(val) => blog_trees.push(val), + None => { + debug!("Skipped tree {:?}", &tree) + } } } } @@ -133,13 +119,141 @@ pub async fn get_gh_blog_data( Some(blog_trees) } -// Test n -// Nge get semua markdown yang ada di repo -// pub async fn get_github_blogs() -> Vec { -// let repo = octocrab::instance() -// .repos("husni-zuhdi", "husni-blog-resources") -// .get() -// .await -// .expect("Failed to fetch blog resources repo"); -// repo.contents_url.expect("Failed to get contents url") -// } +async fn get_gh_blog( + tree: GithubTree, + owner: String, + repo: String, + branch: String, +) -> Option { + let blog_path = tree.path; + let gh_blog_link = format!( + "https://github.com/{}/{}/tree/{}/{}", + &owner, &repo, &branch, &blog_path + ); + let gh_raw_blog_link = format!( + "https://raw.githubusercontent.com/{}/{}/{}/{}", + &owner, &repo, &branch, &blog_path + ); + + // Check to make sure the path doesn't have a extention + if !blog_path.contains(".") { + // Get blog id with specification of 3 digit integer + let blog_id = blog_path.get(0..3).unwrap(); + let blog_name = blog_path.get(4..).unwrap(); + + match blog_id.parse::() { + Ok(_) => { + if &blog_id != &"000" { + info!("Blog Name: {}", &blog_name); + let 
blog_readme_path = format!("{}/README.md", &blog_path); + let blog_content = octocrab::instance() + .repos(&owner, &repo) + .get_content() + .path(&blog_readme_path) + .r#ref(&branch) + .send() + .await; + match blog_content { + Ok(mut res) => { + let content = res.take_items(); + let decoded_content = &content[0].decoded_content().unwrap().clone(); + + let name_formated = blog_name.replace("-", " "); + let name = capitalize(&name_formated); + info!("Markdown of {} loaded", &blog_name); + + let body = process_gh_markdown( + decoded_content.to_string(), + gh_blog_link, + gh_raw_blog_link, + ); + debug!("HTML Body of {}: {}", &blog_name, &body); + + let id = format!("{}-g", blog_id).to_string(); + let filename = format!( + "https://api.github.com/repos/{}/{}/contents/{}", + &owner, &repo, &blog_readme_path + ) + .to_string(); + + Some(Blog { + id: BlogId(id), + name: BlogName(name), + source: BlogSource::Github, + filename: BlogFilename(filename), + body: BlogBody(body), + }) + } + Err(err) => { + error!( + "Failed to get Blog content with Blog ID {} and Name {}: {}", + &blog_id, &blog_name, err + ); + None + } + } + } else { + debug!("Folder prefix is 000. Skip this folder"); + None + } + } + Err(err) => { + if err.kind() == &IntErrorKind::InvalidDigit { + debug!("Error Kind {:?}. Safe to ignore.", err.kind()); + } + error!("Failed to parse Blog ID: {}", err); + None + } + } + } else { + info!("This is not a folder. Skip this tree"); + None + } +} + +fn process_gh_markdown(markdown: String, gh_blog_link: String, gh_raw_blog_link: String) -> String { + let raw_body = to_html_with_options(&markdown, &Options::gfm()) + .expect("Failed to convert html with options"); + let body = replace_gh_link(raw_body, gh_blog_link, gh_raw_blog_link); + body +} + +/// replace_gh_link +/// Replace Github Blog relative links +/// with full github content links +/// Take String of markdown body +/// and String of github blog endpoint +/// then return String of updated body +fn replace_gh_link(body: String, gh_blog_link: String, gh_raw_blog_link: String) -> String { + // Regex href=.\.\/ mean + // find string with character 'href=' + // then followed by any character (I tried to use '"' but didn't work) + // then followed by '.' (must use escape character) + // then followed by '/' (must use escape character) + let re_href = Regex::new(r"href=.\.\/").expect("Failed to build regex href"); + + let replaced_str_href = format!("href=\"{}/", gh_blog_link); + debug!("Replaced str: {}", &replaced_str_href); + + let res_href = re_href + .replace_all(body.as_str(), replaced_str_href.as_str()) + .to_string(); + debug!("Replaced Body: {}", &res_href); + + // Regex src=.\.\/ mean + // find string with character 'src=' + // then followed by any character (I tried to use '"' but didn't work) + // then followed by '.' 
(must use escape character) + // then followed by '/' (must use escape character) + let re_src = Regex::new(r"src=.\.\/").expect("Failed to build regex src"); + + let replaced_str_src = format!("src=\"{}/", gh_raw_blog_link); + debug!("Replaced str: {}", &replaced_str_src); + + let res = re_src + .replace_all(res_href.as_str(), replaced_str_src.as_str()) + .to_string(); + debug!("Replaced Body: {}", &res); + + res +} diff --git a/internal/src/app.rs b/internal/src/app.rs new file mode 100644 index 0000000..298db3c --- /dev/null +++ b/internal/src/app.rs @@ -0,0 +1,58 @@ +use crate::database::memory::MemoryBlogRepo; +use crate::handler; +use crate::model::axum::AppState; +use crate::{config::Config, usecase::blog::BlogUseCase}; +use axum::{ + routing::{get, get_service}, + Router, +}; +use log::info; +use tower_http::services::{ServeDir, ServeFile}; + +pub async fn app() -> () { + // Setup Config + let config = Config::from_envar(); + let endpoint = format!("{}:{}", &config.svc_endpoint, &config.svc_port); + + // Initialize Logger + env_logger::init_from_env(env_logger::Env::new().default_filter_or(config.log_level.clone())); + + // Init app state + let app_state = state_factory(config).await; + + info!("Starting HTTP Server at http://{}", endpoint); + + // Axum Application + let app = Router::new() + .route("/", get(handler::get_profile)) + .route("/not-found", get(handler::get_404_not_found)) + .route("/version", get(handler::get_version)) + .route("/blogs", get(handler::get_blogs)) + .route("/blogs/:blog_id", get(handler::get_blog)) + .nest_service("/statics", get_service(ServeDir::new("./statics/favicon/"))) + .nest_service( + "/statics/styles.css", + get_service(ServeFile::new("./statics/styles.css")), + ) + .with_state(app_state) + .fallback(get(handler::get_404_not_found)); + + // Start Axum Application + let listener = tokio::net::TcpListener::bind(endpoint).await.unwrap(); + axum::serve(listener, app).await.unwrap(); +} + +async fn state_factory(config: Config) -> AppState { + // Setup config and blogs_data states + let mut blog_repo = MemoryBlogRepo::new(); + if !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty() { + blog_repo = + MemoryBlogRepo::from_github(&config.gh_owner, &config.gh_repo, &config.gh_branch).await; + } + let blog_usecase = BlogUseCase::new(Box::new(blog_repo)); + let app_state = AppState { + config, + blog_usecase, + }; + app_state +} diff --git a/internal/src/handler.rs b/internal/src/handler.rs index 40701a2..79e4c24 100644 --- a/internal/src/handler.rs +++ b/internal/src/handler.rs @@ -1,44 +1,139 @@ -use crate::config::Config; -use crate::model::data::{AppState, BlogsData}; -use crate::router::*; -use axum::{ - routing::{get, get_service}, - Router, -}; -use log::info; -use tower_http::services::{ServeDir, ServeFile}; - -pub async fn handler(cfg: Config) -> () { - // Initialize Logger - env_logger::init_from_env(env_logger::Env::new().default_filter_or(cfg.log_level.clone())); - - // Setup config and blogs_data states - let config = cfg.clone(); - let mut blogs_data = BlogsData::default(); - if !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty() { - blogs_data = BlogsData::with_gh(&config.gh_owner, &config.gh_repo, &config.gh_branch).await; - } - let app_state = AppState { config, blogs_data }; - - let endpoint = format!("{}:{}", cfg.svc_endpoint, cfg.svc_port); - info!("Starting HTTP Server at http://{}", endpoint); - - // Axum Application - let app = Router::new() - .route("/", 
get(get_profile)) - .route("/not-found", get(get_404_not_found)) - .route("/version", get(get_version)) - .route("/blogs", get(get_blogs)) - .route("/blogs/:blog_id", get(get_blog)) - .nest_service("/statics", get_service(ServeDir::new("./statics/favicon/"))) - .nest_service( - "/statics/styles.css", - get_service(ServeFile::new("./statics/styles.css")), - ) - .with_state(app_state) - .fallback(get(get_404_not_found)); - - // Start Axum Application - let listener = tokio::net::TcpListener::bind(endpoint).await.unwrap(); - axum::serve(listener, app).await.unwrap(); +use crate::model::blog::BlogId; +use crate::model::{axum::AppState, templates::*}; +use crate::utils::read_version_manifest; +use askama::Template; +use axum::extract::{Path, State}; +use axum::response::Html; +use log::{debug, error, info, warn}; + +/// Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) +/// They show an example to return Html<&'static str> +/// Instaed of Html. But using static give me a headache :") + +/// get_profile +/// Serve Profile/Biography HTML file +pub async fn get_profile() -> Html { + let profile = ProfileTemplate.render(); + match profile { + Ok(res) => { + info!("Profile askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render profile.html. {}", err); + get_500_internal_server_error() + } + } +} + +/// get_blogs +/// Serve get_blogs HTML file +/// List our blogs title and id +pub async fn get_blogs(State(app_state): State) -> Html { + // Copy data to Template struct + let blogs_data = app_state.blog_usecase.blog_repo.find_all(); + let blogs: Vec = blogs_data + .iter() + .map(|blog| BlogTemplate { + id: &blog.id.as_str(), + name: &blog.name.as_str(), + filename: &blog.filename.as_str(), + body: &blog.body.as_str(), + }) + .collect(); + debug!("Blogs: {:?}", &blogs); + + let blogs_res = BlogsTemplate { blogs: &blogs }.render(); + match blogs_res { + Ok(res) => { + info!("Blogs askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render get_blogs.html. {}", err); + get_500_internal_server_error() + } + } +} + +/// get_blog +/// Serve get_blog HTML file +/// Render our blog +pub async fn get_blog(Path(path): Path, State(app_state): State) -> Html { + let state = app_state.blog_usecase.blog_repo.find(BlogId(path.clone())); + + let blog = BlogTemplate { + id: path.clone().as_str(), + name: &state.name.as_str(), + filename: &state.filename.as_str(), + body: &state.body.as_str(), + } + .render(); + + match blog { + Ok(res) => { + info!("Blog ID {} askama template rendered.", &path); + Html(res) + } + Err(err) => { + error!("Failed to render blog.html. {}", err); + get_500_internal_server_error() + } + } +} + +/// get_version +/// Serve get_version HTML file +pub async fn get_version(State(app_state): State) -> Html { + let version_json = read_version_manifest().expect("Failed to get version manifest"); + let version = VersionTemplate { + version: version_json.version.as_str(), + environment: app_state.config.environment.as_str(), + build_hash: version_json.build_hash.as_str(), + build_date: version_json.build_date.as_str(), + } + .render(); + + match version { + Ok(res) => { + info!("Version askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render version.html. 
{}", err); + get_500_internal_server_error() + } + } +} + +/// get_404_not_found +/// Serve 404 Not found HTML file +pub async fn get_404_not_found() -> Html { + let not_found = NotFoundTemplate.render(); + match not_found { + Ok(res) => { + info!("NotFound askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render 404_not_found.html. {}", err); + get_500_internal_server_error() + } + } +} + +/// get_500_internal_server_error +/// Serve 500 Internal Server Error HTML file +fn get_500_internal_server_error() -> Html { + let internal_server_error = InternalServerErrorTemplate.render(); + match internal_server_error { + Ok(res) => { + info!("InternalServerError askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render 500_internal_server_error.html. {}", err); + Html("We're fucked up.".to_string()) + } + } } diff --git a/internal/src/lib.rs b/internal/src/lib.rs index 8521a1b..5f1566a 100644 --- a/internal/src/lib.rs +++ b/internal/src/lib.rs @@ -1,6 +1,10 @@ pub mod api; +pub mod app; pub mod config; +pub mod database; pub mod handler; pub mod model; -pub mod router; +pub mod port; +pub mod repo; +pub mod usecase; pub mod utils; diff --git a/internal/src/router.rs b/internal/src/router.rs deleted file mode 100644 index 9bb16de..0000000 --- a/internal/src/router.rs +++ /dev/null @@ -1,157 +0,0 @@ -use crate::model::{data::*, templates::*}; -use crate::utils::read_version_manifest; -use askama::Template; -use axum::extract::{Path, State}; -use axum::response::Html; -use log::{debug, error, info, warn}; - -/// Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) -/// They show an example to return Html<&'static str> -/// Instaed of Html. But using static give me a headache :") - -/// get_profile -/// Serve Profile/Biography HTML file -pub async fn get_profile() -> Html { - let profile = Profile.render(); - match profile { - Ok(res) => { - info!("Profile askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render profile.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_blogs -/// Serve get_blogs HTML file -/// List our blogs title and id -pub async fn get_blogs(State(app_state): State) -> Html { - // Copy data to Template struct - let blogs: Vec = app_state - .blogs_data - .blogs - .iter() - .map(|blog| Blog { - id: &blog.id, - name: &blog.name, - filename: &blog.filename, - body: &blog.body, - }) - .collect(); - debug!("Blogs: {:?}", &blogs); - - let blogs_res = Blogs { blogs: &blogs }.render(); - match blogs_res { - Ok(res) => { - info!("Blogs askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render get_blogs.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_blog -/// Serve get_blog HTML file -/// Render our blog -pub async fn get_blog(Path(path): Path, State(app_state): State) -> Html { - let state = app_state - .blogs_data - .blogs - .iter() - .filter(|blog| &blog.id == &path) - .next(); - debug!("BlogData: {:?}", &state); - - match state { - Some(_) => {} - None => { - warn!( - "Failed to get blog with ID {}. 
Retunre 404 Not Found", - &path - ); - return get_404_not_found().await; - } - } - - let blog_data = state.unwrap(); - let blog = Blog { - id: path.clone().as_str(), - name: &blog_data.name, - filename: &blog_data.filename, - body: &blog_data.body, - } - .render(); - - match blog { - Ok(res) => { - info!("Blog ID {} askama template rendered.", &path); - Html(res) - } - Err(err) => { - error!("Failed to render blog.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_version -/// Serve get_version HTML file -pub async fn get_version(State(app_state): State) -> Html { - let version_json = read_version_manifest().expect("Failed to get version manifest"); - let version = Version { - version: version_json.version.as_str(), - environment: app_state.config.environment.as_str(), - build_hash: version_json.build_hash.as_str(), - build_date: version_json.build_date.as_str(), - } - .render(); - - match version { - Ok(res) => { - info!("Version askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render version.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_404_not_found -/// Serve 404 Not found HTML file -pub async fn get_404_not_found() -> Html { - let not_found = NotFound.render(); - match not_found { - Ok(res) => { - info!("NotFound askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render 404_not_found.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_500_internal_server_error -/// Serve 500 Internal Server Error HTML file -fn get_500_internal_server_error() -> Html { - let internal_server_error = InternalServerError.render(); - match internal_server_error { - Ok(res) => { - info!("InternalServerError askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render 500_internal_server_error.html. 
{}", err); - Html("We're fucked up.".to_string()) - } - } -} diff --git a/internal/src/utils.rs b/internal/src/utils.rs index aa91abd..0c6c25c 100644 --- a/internal/src/utils.rs +++ b/internal/src/utils.rs @@ -1,26 +1,13 @@ -use crate::model::data::VersionData; -use log::debug; +use crate::model::version::Version; use markdown::{to_html_with_options, Options}; -use regex::Regex; use std::fs; use std::io::BufReader; /// md_to_html: Markdown to HTML /// take String of filename /// return String of converted markdown in html or String of error -pub fn md_to_html(filename: Option, body: Option) -> Result { - let mut body_md = String::new(); - match filename { - Some(val) => { - body_md = fs::read_to_string(val).expect("Failed to read markdown blog file"); - } - None => (), - } - - match body { - Some(val) => body_md = val, - None => (), - } +pub fn md_to_html(filename: String) -> Result { + let body_md = fs::read_to_string(filename).expect("Failed to read markdown blog file"); let html = to_html_with_options(&body_md, &Options::gfm()) .expect("Failed to convert html with options"); Ok(html) @@ -31,11 +18,11 @@ pub fn md_to_html(filename: Option, body: Option) -> Result Result { +pub fn read_version_manifest() -> Result { let file = fs::File::open("version.json").expect("Failed to open version.json"); let reader = BufReader::new(file); - let json: VersionData = serde_json::from_reader(reader).expect("Failed to parse version.json"); + let json: Version = serde_json::from_reader(reader).expect("Failed to parse version.json"); Ok(json) } @@ -51,42 +38,16 @@ pub fn capitalize(s: &str) -> String { } } -/// replace_gh_link -/// Replace Github Blog relative links -/// with full github content links -/// Take String of markdown body -/// and String of github blog endpoint -/// then return String of updated body -pub fn replace_gh_link(body: String, gh_blog_link: String, gh_raw_blog_link: String) -> String { - // Regex href=.\.\/ mean - // find string with character 'href=' - // then followed by any character (I tried to use '"' but didn't work) - // then followed by '.' (must use escape character) - // then followed by '/' (must use escape character) - let re_href = Regex::new(r"href=.\.\/").expect("Failed to build regex href"); - - let replaced_str_href = format!("href=\"{}/", gh_blog_link); - debug!("Replaced str: {}", &replaced_str_href); - - let res_href = re_href - .replace_all(body.as_str(), replaced_str_href.as_str()) - .to_string(); - debug!("Replaced Body: {}", &res_href); - - // Regex src=.\.\/ mean - // find string with character 'src=' - // then followed by any character (I tried to use '"' but didn't work) - // then followed by '.' 
(must use escape character) - // then followed by '/' (must use escape character) - let re_src = Regex::new(r"src=.\.\/").expect("Failed to build regex src"); - - let replaced_str_src = format!("src=\"{}/", gh_raw_blog_link); - debug!("Replaced str: {}", &replaced_str_src); - - let res = re_src - .replace_all(res_href.as_str(), replaced_str_src.as_str()) - .to_string(); - debug!("Replaced Body: {}", &res); - - res +#[cfg(test)] +mod test { + use super::*; + use test_log::test; + + #[test] + fn test_capitalize() { + let test = "lorem ipsum dolor sit amet".to_string(); + let expected = "Lorem ipsum dolor sit amet".to_string(); + let result = capitalize(test.as_str()); + assert_eq!(result, expected); + } } From 07d00e81e8eafc2f219b57635461aac2af64236c Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:27:08 +0700 Subject: [PATCH 07/49] chore: update sqlx task related and update env.example --- Taskfile.yml | 19 ++++++++++++++++--- env.example | 1 + 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/Taskfile.yml b/Taskfile.yml index 16deb9a..9e7c2ad 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -8,9 +8,22 @@ tasks: run: summary: Run application with hot-reload cmds: - # Add tailwindcss build for hot reloading - - tailwindcss -i ./statics/input.css -o ./statics/styles.css - - cargo watch -s 'export $(cat .env | xargs) && cargo run -- -release' + - cargo watch -s 'tailwindcss -i ./statics/input.css -o ./statics/styles.css && export $(cat .env | xargs) && cargo run -- -release' + + sqlx-create: + summary: Create db with sqlx + cmds: + - sqlx db create --database-url $DATABASE_URL + sqlx-migrate-run: + summary: Migrate db with sqlx + cmds: + - sqlx migrate run --source internal/migrations --database_url $DATABASE_URL + sqlx-migrate-add: + summary: Creata new migration db with sqlx. 
Please pass migration description too + vars: + DESCRIPTION: '{{index .MATCH 0}}' + cmds: + - sqlx migrate add --source internal/migrations --database_url $DATABASE_URL {{.DESCRIPTION}} docker-build: summary: Build Docker Image diff --git a/env.example b/env.example index c40c004..413fce0 100644 --- a/env.example +++ b/env.example @@ -2,6 +2,7 @@ SVC_ENDPOINT="127.0.0.1" SVC_PORT="8080" LOG_LEVEL="info" ENVIRONMENT="dev" +DATABASE_URL="sqlite:husni-portfolio.db" POSTGRES_USER="admin" POSTGRES_PASSWORD="admin-password" POSTGRES_DB="testing" From 3545dc7da440d82d0429332ba5cfff97dafe3eca Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:27:39 +0700 Subject: [PATCH 08/49] chore: add dyn-clone dependencies --- internal/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/Cargo.toml b/internal/Cargo.toml index 0c7f2e4..0563610 100644 --- a/internal/Cargo.toml +++ b/internal/Cargo.toml @@ -25,6 +25,7 @@ http-body-util = "0.1.2" regex = "1.10.6" sqlx = { version = "=0.8.1", features = ["sqlite", "runtime-tokio"] } async-trait = "0.1.81" +dyn-clone = "1.0.17" # rusqlite = "=0.32.1" [build-dependencies] From 7c4b179487e9c7ba2b685abe69f990845fcf3609 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:29:10 +0700 Subject: [PATCH 09/49] chore: remove commented code, remove unused functions, and trimming lib usage --- internal/src/model/axum.rs | 3 +- internal/src/model/blog.rs | 161 ----------------------------------- internal/src/model/github.rs | 1 + 3 files changed, 3 insertions(+), 162 deletions(-) diff --git a/internal/src/model/axum.rs b/internal/src/model/axum.rs index 1d3241e..55476db 100644 --- a/internal/src/model/axum.rs +++ b/internal/src/model/axum.rs @@ -1,10 +1,11 @@ use crate::config::Config; use crate::usecase::blog::BlogUseCase; +use std::sync::{Arc, Mutex}; /// Axum state /// Consist of Config and BlogUseCase #[derive(Clone)] pub struct AppState { pub config: Config, - pub blog_usecase: BlogUseCase, + pub blog_usecase: Arc>, } diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs index 17204c0..6f5eeea 100644 --- a/internal/src/model/blog.rs +++ b/internal/src/model/blog.rs @@ -1,9 +1,5 @@ -use crate::api::github::get_gh_blogs; -use crate::utils::{capitalize, md_to_html}; -use log::{debug, info}; use serde::{Deserialize, Serialize}; use std::fmt::Display; -use std::fs; /// BlogId /// Identifier of Blog @@ -118,21 +114,6 @@ pub struct Blog { pub body: BlogBody, } -impl Blog { - pub fn update_name(&mut self, new_name: BlogName) { - self.name = new_name - } - pub fn update_source(&mut self, new_source: BlogSource) { - self.source = new_source - } - pub fn update_filename(&mut self, new_filename: BlogFilename) { - self.filename = new_filename - } - pub fn update_body(&mut self, new_body: BlogBody) { - self.body = new_body - } -} - /// BlogStartPage /// Start page of Blog Pagination #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -142,145 +123,3 @@ pub struct BlogStartPage(pub i32); /// End page of Blog Pagination #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct BlogEndPage(pub i32); - -// /// Blogs -// /// Vector of Blog in range of start page and end page -// #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -// pub struct Blogs { -// pub blogs: Vec, -// pub blog_start_page: BlogStartPage, -// pub blog_end_page: BlogEndPage, -// } -// -// impl Default for Blogs { -// fn default() -> Self { -// let dir = Some("./statics/blogs/".to_string()); -// 
Self::from_dir(dir) -// } -// } -// -// impl Blogs { -// /// Async function to get BlogsData from github -// /// Borrowed `owner`, `repo`, and `branch` String -// pub async fn with_gh(owner: &String, repo: &String, branch: &String) -> Self { -// let dir = Some("./statics/blogs/".to_string()); -// let mut blog_data = Self::from_dir(dir).blogs; -// let mut gh_blog_data = -// get_gh_blogs(owner.to_string(), repo.to_string(), branch.to_string()) -// .await -// .expect("Failed to get github blog data"); -// blog_data.append(&mut gh_blog_data); -// Self { -// blogs: blog_data, -// blog_start_page: BlogStartPage(0), -// blog_end_page: BlogEndPage(10), -// } -// } -// -// pub fn from_dir(dir: Option) -> Self { -// let directory = dir.clone().expect("Failed to get directory"); -// let static_path = fs::read_dir(directory.as_str()).unwrap(); -// -// let blogs_paths: Vec = static_path -// .filter_map(|blog_path| { -// let path = blog_path.ok().expect("Failed to get blog path").path(); -// if path.is_file() { -// path.file_name() -// .expect("Failed to get filename") -// .to_str() -// .map(|s| s.to_owned()) -// } else { -// None -// } -// }) -// .collect(); -// -// let blogs: Vec = blogs_paths -// .iter() -// .map(|blog_path| { -// let (id, name_init) = blog_path -// .split_once("-") -// .expect("Failed to split filename into id and name"); -// let name_formated = name_init.replace("_", " "); -// let (name_lower, _) = name_formated -// .split_once(".") -// .expect("Failed to remove file extension"); -// let name = capitalize(name_lower); -// let fullpath = format!("{}{}", directory, blog_path); -// -// info!("markdown loaded: {}", fullpath); -// -// let body = md_to_html(fullpath).expect("Failed to convert markdown to html"); -// Blog { -// id: BlogId(id.to_string()), -// name: BlogName(name.to_string()), -// source: BlogSource::FileSystem, -// filename: BlogFilename(blog_path.to_owned()), -// body: BlogBody(body), -// } -// }) -// .collect(); -// -// debug!("Blogs: {:?}", blogs); -// -// Self { -// blogs, -// blog_start_page: BlogStartPage(0), -// blog_end_page: BlogEndPage(10), -// } -// } -// } -// -// #[cfg(test)] -// mod test { -// use super::*; -// use std::env::current_dir; -// use std::io::Write; -// use test_log::test; -// -// #[test] -// fn test_blogs_data_from_dir() { -// // Preparation -// let test_id = "999"; -// let test_name = "Test blog"; -// let test_body = "# Testing Blog for Unit Test"; -// let test_body_html = "

<h1>Testing Blog for Unit Test</h1>
"; -// let test_filename = "999-test_blog.md"; -// let test_path = "../statics/blogs/999-test_blog.md"; -// -// // Get current directory -// debug!( -// "Curent Directory: {}", -// current_dir().expect("Failed to get current dir").display() -// ); -// -// // Create a blog markdown -// let mut md_file = fs::File::create(test_path).expect("Failed to create File Write buffer"); -// md_file -// .write_all(test_body.as_bytes()) -// .expect("Failed to write buffer to"); -// -// // Call create_blogs function -// let dir = Some("../statics/blogs/".to_string()); -// let blogs = Blogs::from_dir(dir); -// -// // Check blogs data -// debug!("Check BlogsData: {:?}", blogs); -// -// let blog_test = blogs -// .blogs -// .iter() -// .filter(|blog| blog.id == BlogId(test_id.to_string())) -// .next() -// .expect("Failed to get test blog data"); -// -// // Compare if new blog markdown is available -// assert_eq!(blog_test.id, BlogId(test_id.to_string())); -// assert_eq!(blog_test.name, BlogName(test_name.to_string())); -// assert_eq!(blog_test.body, BlogBody(test_body_html.to_string())); -// assert_eq!(blog_test.filename, BlogFilename(test_filename.to_string())); -// -// // Delete test blog markdown -// fs::remove_file(test_path).expect("Failed to delete test blog markdown"); -// } -// } diff --git a/internal/src/model/github.rs b/internal/src/model/github.rs index 2a6b314..a8f6f3f 100644 --- a/internal/src/model/github.rs +++ b/internal/src/model/github.rs @@ -34,6 +34,7 @@ impl Display for GithubBranch { /// Collection of Tree of github blog data #[derive(Deserialize, Serialize, Debug, Clone)] pub struct GithubTrees { + #[serde(rename(deserialize = "tree"))] pub trees: Vec, } From e22bde58ccc585390cf5cbc726d18beb00c1ba4d Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:31:40 +0700 Subject: [PATCH 10/49] feat: implement dyn-clone lib --- internal/src/database/memory.rs | 3 ++- internal/src/repo/blog.rs | 5 ++++- internal/src/usecase/blog.rs | 11 +++-------- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index 8aa33b0..511c815 100644 --- a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -8,6 +8,7 @@ use crate::utils::{capitalize, md_to_html}; use log::{debug, info}; use std::fs; +#[derive(Clone)] pub struct MemoryBlogRepo { pub blogs: Vec, } @@ -60,7 +61,7 @@ impl BlogRepo for MemoryBlogRepo { match name { Some(val) => { debug!("Update Blog {} name from {} to {}", &id, &result.name, &val); - result.update_name(val) + result.name = val } None => (), } diff --git a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs index 1e1bc3e..fe30b34 100644 --- a/internal/src/repo/blog.rs +++ b/internal/src/repo/blog.rs @@ -2,8 +2,9 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, BlogStartPage, }; +use dyn_clone::{clone_trait_object, DynClone}; -pub trait BlogRepo { +pub trait BlogRepo: DynClone { fn find(&self, id: BlogId) -> Blog; fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; fn find_all(&self) -> Vec; @@ -17,3 +18,5 @@ pub trait BlogRepo { ) -> Blog; fn delete(&mut self, id: BlogId) -> BlogDeleted; } + +clone_trait_object!(BlogRepo); diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs index 0e4c054..58eae5f 100644 --- a/internal/src/usecase/blog.rs +++ b/internal/src/usecase/blog.rs @@ -5,14 +5,9 @@ use crate::model::blog::{ use 
crate::port::blog::{command::BlogQueryCommand, query::BlogQueryPort}; use crate::repo::blog::BlogRepo; +#[derive(Clone)] pub struct BlogUseCase { - pub blog_repo: Box, -} - -impl Clone for BlogUseCase { - fn clone(&self) -> Self { - self.clone() - } + pub blog_repo: Box, } impl BlogQueryPort for BlogUseCase { @@ -44,7 +39,7 @@ impl BlogQueryCommand for BlogUseCase { } impl BlogUseCase { - pub fn new(blog_repo: Box) -> BlogUseCase { + pub fn new(blog_repo: Box) -> BlogUseCase { BlogUseCase { blog_repo } } } From 76557fd74563f94890ac105c31986b609c7ad1d3 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:32:16 +0700 Subject: [PATCH 11/49] feat: update naming and implement Arc Mutex --- internal/src/api/github.rs | 26 +++++++++++++++----------- internal/src/app.rs | 3 ++- internal/src/handler.rs | 16 ++++++++++------ 3 files changed, 27 insertions(+), 18 deletions(-) diff --git a/internal/src/api/github.rs b/internal/src/api/github.rs index 0098670..9299a9b 100644 --- a/internal/src/api/github.rs +++ b/internal/src/api/github.rs @@ -2,7 +2,7 @@ use crate::model::blog::{Blog, BlogBody, BlogFilename, BlogId, BlogName, BlogSou use crate::model::github::{GithubTree, GithubTrees}; use crate::utils::capitalize; use http_body_util::BodyExt; -use log::{debug, error, info}; +use log::{debug, error, info, warn}; use markdown::{to_html_with_options, Options}; use octocrab; use regex::Regex; @@ -125,27 +125,27 @@ async fn get_gh_blog( repo: String, branch: String, ) -> Option { - let blog_path = tree.path; + let tree_path = tree.path; let gh_blog_link = format!( "https://github.com/{}/{}/tree/{}/{}", - &owner, &repo, &branch, &blog_path + &owner, &repo, &branch, &tree_path ); let gh_raw_blog_link = format!( "https://raw.githubusercontent.com/{}/{}/{}/{}", - &owner, &repo, &branch, &blog_path + &owner, &repo, &branch, &tree_path ); // Check to make sure the path doesn't have a extention - if !blog_path.contains(".") { + if !tree_path.contains(".") { // Get blog id with specification of 3 digit integer - let blog_id = blog_path.get(0..3).unwrap(); - let blog_name = blog_path.get(4..).unwrap(); + let blog_id = tree_path.get(0..3).unwrap(); + let blog_name = tree_path.get(4..).unwrap(); match blog_id.parse::() { Ok(_) => { if &blog_id != &"000" { info!("Blog Name: {}", &blog_name); - let blog_readme_path = format!("{}/README.md", &blog_path); + let blog_readme_path = format!("{}/README.md", &tree_path); let blog_content = octocrab::instance() .repos(&owner, &repo) .get_content() @@ -199,14 +199,18 @@ async fn get_gh_blog( } Err(err) => { if err.kind() == &IntErrorKind::InvalidDigit { - debug!("Error Kind {:?}. Safe to ignore.", err.kind()); + debug!("Error Kind {:?}. Skipped.", err.kind()); } - error!("Failed to parse Blog ID: {}", err); + warn!( + "Failed to parse Tree Path {}. Error {:?}. Skipped", + &tree_path, + err.kind() + ); None } } } else { - info!("This is not a folder. Skip this tree"); + info!("Tree {} is not a folder. 
Skipped.", &tree_path); None } } diff --git a/internal/src/app.rs b/internal/src/app.rs index 298db3c..b616102 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -7,6 +7,7 @@ use axum::{ Router, }; use log::info; +use std::sync::{Arc, Mutex}; use tower_http::services::{ServeDir, ServeFile}; pub async fn app() -> () { @@ -49,7 +50,7 @@ async fn state_factory(config: Config) -> AppState { blog_repo = MemoryBlogRepo::from_github(&config.gh_owner, &config.gh_repo, &config.gh_branch).await; } - let blog_usecase = BlogUseCase::new(Box::new(blog_repo)); + let blog_usecase = Arc::new(Mutex::new(BlogUseCase::new(Box::new(blog_repo)))); let app_state = AppState { config, blog_usecase, diff --git a/internal/src/handler.rs b/internal/src/handler.rs index 79e4c24..3efafcf 100644 --- a/internal/src/handler.rs +++ b/internal/src/handler.rs @@ -4,7 +4,7 @@ use crate::utils::read_version_manifest; use askama::Template; use axum::extract::{Path, State}; use axum::response::Html; -use log::{debug, error, info, warn}; +use log::{debug, error, info}; /// Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) /// They show an example to return Html<&'static str> @@ -30,8 +30,10 @@ pub async fn get_profile() -> Html { /// Serve get_blogs HTML file /// List our blogs title and id pub async fn get_blogs(State(app_state): State) -> Html { + // Locking Mutex + let data = app_state.blog_usecase.lock().expect("Mutex was poisoned"); // Copy data to Template struct - let blogs_data = app_state.blog_usecase.blog_repo.find_all(); + let blogs_data = data.blog_repo.find_all(); let blogs: Vec = blogs_data .iter() .map(|blog| BlogTemplate { @@ -60,13 +62,15 @@ pub async fn get_blogs(State(app_state): State) -> Html { /// Serve get_blog HTML file /// Render our blog pub async fn get_blog(Path(path): Path, State(app_state): State) -> Html { - let state = app_state.blog_usecase.blog_repo.find(BlogId(path.clone())); + // Locking Mutex + let data = app_state.blog_usecase.lock().expect("Mutex was poisoned"); + let blog_data = data.blog_repo.find(BlogId(path.clone())); let blog = BlogTemplate { id: path.clone().as_str(), - name: &state.name.as_str(), - filename: &state.filename.as_str(), - body: &state.body.as_str(), + name: &blog_data.name.as_str(), + filename: &blog_data.filename.as_str(), + body: &blog_data.body.as_str(), } .render(); From fbbcaf1e395936c83764e89a90c1446ebe820cbe Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 05:36:21 +0700 Subject: [PATCH 12/49] chore: update readme to replace actix -> axum --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8e79300..7bc8b74 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,10 @@ My Portfolio webiste ## Tools I use in this repo * [Rust Programming Language](https://www.rust-lang.org/) -* [Actix](https://actix.rs/) +* [Axum](https://github.com/tokio-rs/axum/tree/main) * [Askama](https://github.com/djc/askama) * [Markdown-rs](https://github.com/wooorm/markdown-rs) +* [Octocrab](https://github.com/XAMPPRocky/octocrab) * [Taskfile](https://taskfile.dev/) * [TailwindCSS](https://tailwindcss.com/) From a1e65f42b760ac46071b46136a5fe379a052b7a5 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Mon, 2 Sep 2024 09:07:47 +0700 Subject: [PATCH 13/49] chore: exploring markdown-rs frontmatter construct related to md yaml metadata section --- internal/src/utils.rs | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 
deletions(-) diff --git a/internal/src/utils.rs b/internal/src/utils.rs index 0c6c25c..6afcdf8 100644 --- a/internal/src/utils.rs +++ b/internal/src/utils.rs @@ -1,5 +1,6 @@ use crate::model::version::Version; -use markdown::{to_html_with_options, Options}; +use log::debug; +use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions}; use std::fs; use std::io::BufReader; @@ -7,9 +8,24 @@ use std::io::BufReader; /// take String of filename /// return String of converted markdown in html or String of error pub fn md_to_html(filename: String) -> Result { - let body_md = fs::read_to_string(filename).expect("Failed to read markdown blog file"); - let html = to_html_with_options(&body_md, &Options::gfm()) - .expect("Failed to convert html with options"); + let body_md = fs::read_to_string(filename.clone()).expect("Failed to read markdown blog file"); + debug!("Markdown Body for filename {}: {}", &filename, body_md); + + let html = to_html_with_options( + &body_md, + &Options { + parse: ParseOptions { + constructs: Constructs { + // In case you want to activeat frontmatter in the future + // frontmatter: true, + ..Constructs::gfm() + }, + ..ParseOptions::gfm() + }, + compile: CompileOptions::gfm(), + }, + ) + .expect("Failed to convert html with options"); Ok(html) } From 73ef3d2819d7a48c33d4f085d17f714120679894 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:29:47 +0700 Subject: [PATCH 14/49] fix: typo in arg --- Taskfile.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Taskfile.yml b/Taskfile.yml index 9e7c2ad..f18d11b 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -1,4 +1,6 @@ version: '3' +dotenv: + - '.env' tasks: test: @@ -17,7 +19,7 @@ tasks: sqlx-migrate-run: summary: Migrate db with sqlx cmds: - - sqlx migrate run --source internal/migrations --database_url $DATABASE_URL + - sqlx migrate run --source internal/migrations --database-url $DATABASE_URL sqlx-migrate-add: summary: Creata new migration db with sqlx. 
Please pass migration description too vars: From e10f9acfbdf246895a9ede5bf26114ce6b271353 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:30:36 +0700 Subject: [PATCH 15/49] chore: remove postgre envar and add sqlite/data envar --- env.example | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/env.example b/env.example index 413fce0..51e1198 100644 --- a/env.example +++ b/env.example @@ -2,12 +2,8 @@ SVC_ENDPOINT="127.0.0.1" SVC_PORT="8080" LOG_LEVEL="info" ENVIRONMENT="dev" +DATA_SOURCE=sqlite DATABASE_URL="sqlite:husni-portfolio.db" -POSTGRES_USER="admin" -POSTGRES_PASSWORD="admin-password" -POSTGRES_DB="testing" -POSTGRES_HOST="127.0.0.1" -POSTGRES_PORT="5432" GITHUB_OWNER=husni-zuhdi GITHUB_REPO=husni-blog-resources GITHUB_BRANCH=main From 609d59cb0f69e05d572147308a66dc1dcaf2f617 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:31:18 +0700 Subject: [PATCH 16/49] feat: add axum macro feature to help debug handler --- internal/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/Cargo.toml b/internal/Cargo.toml index 0563610..73286e9 100644 --- a/internal/Cargo.toml +++ b/internal/Cargo.toml @@ -7,7 +7,7 @@ build = "build.rs" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -axum = "0.7.5" +axum = { version = "0.7.5", features = ["macros"] } tokio = { version = "1.0", features = ["full"] } # tracing = "0.1" # Might not need this # tracing-subscriber = { version = "0.3", features = ["env-filter"] } # Might not need this From 732374b73d49f0a15fb4444e9dc57148898a9264 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:32:31 +0700 Subject: [PATCH 17/49] feat: update blog hexagonal arch to support async_trait --- internal/src/port/blog/command.rs | 14 +++++++++++-- internal/src/port/blog/query.rs | 7 ++++--- internal/src/repo/blog.rs | 23 ++++++++++++++------- internal/src/usecase/blog.rs | 34 +++++++++++++++++++++---------- 4 files changed, 55 insertions(+), 23 deletions(-) diff --git a/internal/src/port/blog/command.rs b/internal/src/port/blog/command.rs index 328792a..042e957 100644 --- a/internal/src/port/blog/command.rs +++ b/internal/src/port/blog/command.rs @@ -1,7 +1,17 @@ use crate::model::blog::{Blog, BlogBody, BlogDeleted, BlogFilename, BlogId, BlogName, BlogSource}; +use async_trait::async_trait; +#[async_trait] pub trait BlogQueryCommand { - fn update( + async fn add( + &mut self, + id: BlogId, + name: BlogName, + filename: BlogFilename, + source: BlogSource, + body: BlogBody, + ) -> Blog; + async fn update( &mut self, id: BlogId, name: Option, @@ -9,5 +19,5 @@ pub trait BlogQueryCommand { source: Option, body: Option, ) -> Blog; - fn delete(&mut self, id: BlogId) -> BlogDeleted; + async fn delete(&mut self, id: BlogId) -> BlogDeleted; } diff --git a/internal/src/port/blog/query.rs b/internal/src/port/blog/query.rs index c0b3e76..956ec4c 100644 --- a/internal/src/port/blog/query.rs +++ b/internal/src/port/blog/query.rs @@ -1,7 +1,8 @@ use crate::model::blog::{Blog, BlogEndPage, BlogId, BlogStartPage}; +use async_trait::async_trait; +#[async_trait] pub trait BlogQueryPort { - fn find(&self, id: BlogId) -> Blog; - fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; - fn find_all(&self) -> Vec; + async fn find(&mut self, id: BlogId) -> Blog; + async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec; } diff --git 
a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs index fe30b34..953be6e 100644 --- a/internal/src/repo/blog.rs +++ b/internal/src/repo/blog.rs @@ -2,13 +2,24 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, BlogStartPage, }; +use async_trait::async_trait; use dyn_clone::{clone_trait_object, DynClone}; +clone_trait_object!(BlogRepo); + +#[async_trait] pub trait BlogRepo: DynClone { - fn find(&self, id: BlogId) -> Blog; - fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; - fn find_all(&self) -> Vec; - fn update( + async fn add( + &mut self, + id: BlogId, + name: BlogName, + filename: BlogFilename, + source: BlogSource, + body: BlogBody, + ) -> Blog; + async fn find(&mut self, id: BlogId) -> Blog; + async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec; + async fn update( &mut self, id: BlogId, name: Option, @@ -16,7 +27,5 @@ pub trait BlogRepo: DynClone { source: Option, body: Option, ) -> Blog; - fn delete(&mut self, id: BlogId) -> BlogDeleted; + async fn delete(&mut self, id: BlogId) -> BlogDeleted; } - -clone_trait_object!(BlogRepo); diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs index 58eae5f..08069e4 100644 --- a/internal/src/usecase/blog.rs +++ b/internal/src/usecase/blog.rs @@ -4,26 +4,36 @@ use crate::model::blog::{ }; use crate::port::blog::{command::BlogQueryCommand, query::BlogQueryPort}; use crate::repo::blog::BlogRepo; +use async_trait::async_trait; #[derive(Clone)] pub struct BlogUseCase { pub blog_repo: Box, } +#[async_trait] impl BlogQueryPort for BlogUseCase { - fn find(&self, id: BlogId) -> Blog { - self.blog_repo.find(id) + async fn find(&mut self, id: BlogId) -> Blog { + self.blog_repo.find(id).await } - fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { - self.blog_repo.find_blogs(start, end) - } - fn find_all(&self) -> Vec { - self.blog_repo.find_all() + async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { + self.blog_repo.find_blogs(start, end).await } } +#[async_trait] impl BlogQueryCommand for BlogUseCase { - fn update( + async fn add( + &mut self, + id: BlogId, + name: BlogName, + filename: BlogFilename, + source: BlogSource, + body: BlogBody, + ) -> Blog { + self.blog_repo.add(id, name, filename, source, body).await + } + async fn update( &mut self, id: BlogId, name: Option, @@ -31,10 +41,12 @@ impl BlogQueryCommand for BlogUseCase { source: Option, body: Option, ) -> Blog { - self.blog_repo.update(id, name, filename, source, body) + self.blog_repo + .update(id, name, filename, source, body) + .await } - fn delete(&mut self, id: BlogId) -> BlogDeleted { - self.blog_repo.delete(id) + async fn delete(&mut self, id: BlogId) -> BlogDeleted { + self.blog_repo.delete(id).await } } From 4c2f6c335b873b0cfc9424c89a8ce718392b1e25 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:34:39 +0700 Subject: [PATCH 18/49] feat: implement FromRow for Blog and add BlogPagination for query param --- internal/src/model/blog.rs | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs index 6f5eeea..226b3b7 100644 --- a/internal/src/model/blog.rs +++ b/internal/src/model/blog.rs @@ -1,4 +1,5 @@ use serde::{Deserialize, Serialize}; +use sqlx::{sqlite::SqliteRow, Decode, FromRow}; use std::fmt::Display; /// BlogId @@ -114,6 +115,29 @@ pub struct Blog { pub body: BlogBody, } +/// I don't knwo 
how it work +/// Ref: https://stackoverflow.com/questions/78615649/how-do-i-load-sqlx-rows-to-a-struct-with-a-vector-of-structs +impl<'r> FromRow<'r, SqliteRow> for Blog { + fn from_row(row: &'r SqliteRow) -> Result { + use sqlx::Row; + let id = row.try_get("id")?; + let name = row.try_get("name")?; + let source = match row.try_get("source")? { + "github" => BlogSource::Github, + "filesystem" => BlogSource::FileSystem, + }; + let filename = row.try_get("filename")?; + let body = row.try_get("body")?; + Ok(Blog { + id: BlogId(id), + name: BlogName(name), + source, + filename: BlogFilename(filename), + body: BlogBody(body), + }) + } +} + /// BlogStartPage /// Start page of Blog Pagination #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -123,3 +147,11 @@ pub struct BlogStartPage(pub i32); /// End page of Blog Pagination #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct BlogEndPage(pub i32); + +/// BlogPagination +/// Axum Query struct for Blog Pagination +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogPagination { + pub start: Option, + pub end: Option, +} From 4444bdc84d21c535f77bae28db1f688f953c7a4d Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:35:49 +0700 Subject: [PATCH 19/49] feat: add sqlite db layer and update memory db to use async fn --- internal/src/database/memory.rs | 32 ++++-- internal/src/database/mod.rs | 1 + internal/src/database/sqlite.rs | 190 ++++++++++++++++++++++++++++++++ 3 files changed, 216 insertions(+), 7 deletions(-) create mode 100644 internal/src/database/sqlite.rs diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index 511c815..a4e1133 100644 --- a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -5,6 +5,7 @@ use crate::model::blog::{ }; use crate::repo::blog::BlogRepo; use crate::utils::{capitalize, md_to_html}; +use async_trait::async_trait; use log::{debug, info}; use std::fs; @@ -13,8 +14,9 @@ pub struct MemoryBlogRepo { pub blogs: Vec, } +#[async_trait] impl BlogRepo for MemoryBlogRepo { - fn find(&self, id: BlogId) -> Blog { + async fn find(&mut self, id: BlogId) -> Blog { let result = self .blogs .iter() @@ -26,17 +28,33 @@ impl BlogRepo for MemoryBlogRepo { result.clone() } - fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { + async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { let start_seq = start.0 as usize; let end_seq = end.0 as usize; let result = &self.blogs[start_seq..end_seq]; result.to_vec() } - fn find_all(&self) -> Vec { - let result = &self.blogs; - result.to_vec() + async fn add( + &mut self, + id: BlogId, + name: BlogName, + filename: BlogFilename, + source: BlogSource, + body: BlogBody, + ) -> Blog { + let result = Blog { + id, + name, + source, + filename, + body, + }; + self.blogs.push(result.clone()); + info!("Blog {} added.", &result.id); + debug!("Blog HTML {}.", &result.body); + result } - fn delete(&mut self, id: BlogId) -> BlogDeleted { + async fn delete(&mut self, id: BlogId) -> BlogDeleted { let index = self.blogs.iter().position(|blog| &blog.id == &id).unwrap(); info!("Deleting Blog with Id {}", &index); @@ -44,7 +62,7 @@ impl BlogRepo for MemoryBlogRepo { info!("Deleted Blog with Id {}", &index); BlogDeleted(true) } - fn update( + async fn update( &mut self, id: BlogId, name: Option, diff --git a/internal/src/database/mod.rs b/internal/src/database/mod.rs index eb29191..ae663f6 100644 --- a/internal/src/database/mod.rs +++ 
b/internal/src/database/mod.rs @@ -1 +1,2 @@ pub mod memory; +pub mod sqlite; diff --git a/internal/src/database/sqlite.rs b/internal/src/database/sqlite.rs new file mode 100644 index 0000000..b60960b --- /dev/null +++ b/internal/src/database/sqlite.rs @@ -0,0 +1,190 @@ +use crate::api::github::get_gh_blogs; +use crate::model::blog::{ + Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, + BlogStartPage, +}; +use crate::repo::blog::BlogRepo; +use async_trait::async_trait; +use log::{debug, error, info}; +use sqlx::sqlite::SqlitePool; +use sqlx::{query, query_as, Row}; + +#[derive(Clone)] +pub struct SqliteBlogRepo { + pub pool: SqlitePool, +} + +#[async_trait] +impl BlogRepo for SqliteBlogRepo { + async fn find(&mut self, id: BlogId) -> Blog { + let blog_id = id.0; + let prep_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; + debug!("Executing query {} for id {}", &prep_query, &blog_id); + + let row: Blog = query_as(&prep_query) + .bind(&blog_id) + .fetch_one(&self.pool) + .await + .expect("Failed to execute get query"); + info!("Blog {} processed.", &row.id); + debug!("Blog HTML {}.", &row.body); + row + } + async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { + let start_seq = start.0; + let end_seq = end.0; + let limit = end_seq - start_seq; + let prep_query = "SELECT * FROM blogs ORDER BY id LIMIT $1 OFFSET $2"; + debug!( + "Executing query {} for start {}, end {}, limit {}", + &prep_query, &start_seq, &end_seq, &limit + ); + + let rows: Vec = query_as(&prep_query) + .bind(&limit) + .bind(&start_seq) + .fetch_all(&self.pool) + .await + .expect("Failed to execute get query"); + info!("Blogs from {} to {} processed.", &start_seq, &end_seq); + for row in rows { + info!("Blog {} processed.", &row.id); + debug!("Blog HTML {}.", &row.body); + } + rows + } + async fn add( + &mut self, + id: BlogId, + name: BlogName, + filename: BlogFilename, + source: BlogSource, + body: BlogBody, + ) -> Blog { + let blog_id = id.0; + let blog_name = name.0; + let blog_filename = filename.0; + let blog_source = format!("{}", source); + let blog_body = body.0; + let prep_add_query = + "INSERT INTO blogs (id, name, filename, source, body) VALUES ($1, $2, $3, $4, $5)"; + debug!("Executing query {} for id {}", &prep_add_query, &blog_id); + + let query_res = query(&prep_add_query) + .bind(&blog_id) + .bind(&blog_name) + .bind(&blog_filename) + .bind(&blog_source) + .bind(&blog_body) + .execute(&self.pool) + .await + .expect("Failed to execute add query"); + info!("Blog {} was added.", &blog_id); + + let prep_get_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; + debug!("Executing query {} for id {}", &prep_get_query, &blog_id); + + let row: Blog = query_as(&prep_get_query) + .bind(&blog_id) + .fetch_one(&self.pool) + .await + .expect("Failed to execute get query"); + info!("Blog {} processed.", &row.id); + debug!("Blog HTML {}.", &row.body); + row + } + async fn delete(&mut self, id: BlogId) -> BlogDeleted { + let blog_id = id.0; + let prep_query = "DELETE FROM blogs WHERE id = $1"; + debug!("Executing query {} for id {}", &prep_query, &blog_id); + + let query_res = query(&prep_query) + .bind(&blog_id) + .execute(&self.pool) + .await + .expect("Failed to execute delete query"); + info!( + "Blog {} in row {} was deleted.", + &blog_id, + &query_res.rows_affected() + ); + BlogDeleted(true) + } + async fn update( + &mut self, + id: BlogId, + name: Option, + filename: Option, + source: Option, + body: Option, + ) -> Blog { + let blog_id = 
id.0; + let mut affected_col = "".to_string(); + match name { + Some(val) => { + affected_col = format!("{} name = {} ", &affected_col, val); + debug!("Affected Column: '{}'", &affected_col) + } + None => { + debug!("Skipped update name field") + } + } + match filename { + Some(val) => { + affected_col = format!("{} filename = {} ", &affected_col, val); + debug!("Affected Column: '{}'", &affected_col) + } + None => { + debug!("Skipped update name field") + } + } + match source { + Some(val) => { + affected_col = format!("{} source = {} ", &affected_col, val); + debug!("Affected Column: '{}'", &affected_col) + } + None => { + debug!("Skipped update name field") + } + } + match body { + Some(val) => { + affected_col = format!("{} body = {} ", &affected_col, val); + debug!("Affected Column: '{}'", &affected_col) + } + None => { + debug!("Skipped update name field") + } + } + let prep_update_query = format!("UPDATE blogs SET{}WHERE id = $1", &affected_col).as_str(); + debug!("Executing query {} for id {}", &prep_update_query, &blog_id); + + let query_res = query(&prep_update_query) + .bind(&blog_id) + .execute(&self.pool) + .await + .expect("Failed to execute update query"); + info!("Blog {} was updated.", &blog_id); + + let prep_get_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; + debug!("Executing query {} for id {}", &prep_get_query, &blog_id); + + let row: Blog = query_as(&prep_get_query) + .bind(&blog_id) + .fetch_one(&self.pool) + .await + .expect("Failed to execute get query"); + info!("Blog {} processed.", &row.id); + debug!("Blog HTML {}.", &row.body); + row + } +} + +impl SqliteBlogRepo { + pub async fn new(database_url: String) -> SqliteBlogRepo { + let pool = SqlitePool::connect(database_url.as_str()) + .await + .expect("Failed to start sqlite pool"); + SqliteBlogRepo { pool } + } +} From 75d2f54b3402cbf5861826873bca79b65371d43f Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:36:25 +0700 Subject: [PATCH 20/49] feat: remove postgre envar and add sqlite/data envar --- internal/src/config.rs | 131 +++++++++++++---------------------------- 1 file changed, 40 insertions(+), 91 deletions(-) diff --git a/internal/src/config.rs b/internal/src/config.rs index 1ec390c..daebafc 100644 --- a/internal/src/config.rs +++ b/internal/src/config.rs @@ -8,11 +8,8 @@ pub struct Config { pub svc_port: String, pub log_level: String, pub environment: String, - pub postgres_user: String, - pub postgres_password: String, - pub postgres_db: String, - pub postgres_host: String, - pub postgres_port: String, + pub data_source: String, + pub database_url: String, pub gh_owner: String, pub gh_repo: String, pub gh_branch: String, @@ -24,11 +21,8 @@ impl Default for Config { let svc_port: String = "8080".to_string(); let log_level: String = "info".to_string(); let environment: String = "prod".to_string(); - let postgres_user: String = "".to_string(); - let postgres_password: String = "".to_string(); - let postgres_db: String = "".to_string(); - let postgres_host: String = "".to_string(); - let postgres_port: String = "".to_string(); + let data_source: String = "memory".to_string(); + let database_url: String = "".to_owned(); let gh_owner: String = "".to_string(); let gh_repo: String = "".to_string(); let gh_branch: String = "".to_string(); @@ -38,11 +32,8 @@ impl Default for Config { svc_port, log_level, environment, - postgres_user, - postgres_password, - postgres_db, - postgres_host, - postgres_port, + data_source, + database_url, gh_owner, gh_repo, gh_branch, @@ 
-69,24 +60,12 @@ impl Config { warn!("Failed to load ENVIRONMENT environment variable. Set default to 'prod'"); "prod".to_string() }); - let postgres_user: String = env::var("POSTGRES_USER").unwrap_or_else(|_| { - warn!("Failed to load POSTGRES_USER environment variable. Set default to ''"); - "".to_string() - }); - let postgres_password: String = env::var("POSTGRES_PASSWORD").unwrap_or_else(|_| { - warn!("Failed to load POSTGRES_PASSWORD environment variable. Set default to ''"); - "".to_string() + let data_source: String = env::var("DATA_SOURCE").unwrap_or_else(|_| { + warn!("Failed to load DATA_SOURCE environment variable. Set default to 'memory'"); + "memory".to_string() }); - let postgres_db: String = env::var("POSTGRES_DB").unwrap_or_else(|_| { - warn!("Failed to load POSTGRES_DB environment variable. Set default to ''"); - "".to_string() - }); - let postgres_host: String = env::var("POSTGRES_HOST").unwrap_or_else(|_| { - warn!("Failed to load POSTGRES_HOST environment variable. Set default to ''"); - "".to_string() - }); - let postgres_port: String = env::var("POSTGRES_PORT").unwrap_or_else(|_| { - warn!("Failed to load POSTGRES_PORT environment variable. Set default to ''"); + let database_url: String = env::var("DATABASE_URL").unwrap_or_else(|_| { + warn!("Failed to load DATABASE_URL environment variable. Set default to ''"); "".to_string() }); let gh_owner: String = env::var("GITHUB_OWNER").unwrap_or_else(|_| { @@ -107,11 +86,8 @@ impl Config { svc_port, log_level, environment, - postgres_user, - postgres_password, - postgres_db, - postgres_host, - postgres_port, + data_source, + database_url, gh_owner, gh_repo, gh_branch, @@ -129,11 +105,8 @@ mod test { let svc_port: String = "8080".to_string(); let log_level: String = "info".to_string(); let environment: String = "prod".to_string(); - let postgres_user: String = "".to_string(); - let postgres_password: String = "".to_string(); - let postgres_db: String = "".to_string(); - let postgres_host: String = "".to_string(); - let postgres_port: String = "".to_string(); + let data_source: String = "memory".to_string(); + let database_url: String = "".to_string(); let gh_owner: String = "".to_string(); let gh_repo: String = "".to_string(); let gh_branch: String = "".to_string(); @@ -144,11 +117,8 @@ mod test { assert_eq!(result.svc_port, svc_port); assert_eq!(result.log_level, log_level); assert_eq!(result.environment, environment); - assert_eq!(result.postgres_user, postgres_user); - assert_eq!(result.postgres_password, postgres_password); - assert_eq!(result.postgres_db, postgres_db); - assert_eq!(result.postgres_host, postgres_host); - assert_eq!(result.postgres_port, postgres_port); + assert_eq!(result.data_source, data_source); + assert_eq!(result.database_url, database_url); assert_eq!(result.gh_owner, gh_owner); assert_eq!(result.gh_repo, gh_repo); assert_eq!(result.gh_branch, gh_branch); @@ -158,13 +128,13 @@ mod test { fn test_from_envar_without_optionals() { let svc_endpoint = "127.0.0.1"; let svc_port = "8080"; - let log_level = "info"; - let environment = "dev"; - let postgres_user = ""; - let postgres_password = ""; - let postgres_db = ""; - let postgres_host = ""; - let postgres_port = ""; + let log_level = ""; + let expected_log_level = "info"; + let environment = ""; + let expected_environment = "prod"; + let data_source = ""; + let expected_data_source = "memory"; + let database_url = ""; let gh_owner = ""; let gh_repo = ""; let gh_branch = ""; @@ -173,11 +143,8 @@ mod test { env::set_var("SVC_PORT", svc_port); 
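// Reviewer note, illustrative only and not part of this patch: these tests mutate
// process-wide environment variables via env::set_var, and cargo test runs test
// functions on several threads by default, so the two from_envar tests can race on
// SVC_ENDPOINT, DATA_SOURCE, and friends. A minimal sketch of one way to serialize
// them, assuming a shared static lock is acceptable here (ENV_LOCK is a hypothetical
// name, not something defined in this repo):
//
//     use std::sync::Mutex;
//
//     static ENV_LOCK: Mutex<()> = Mutex::new(());
//
//     #[test]
//     fn test_from_envar_without_optionals() {
//         let _guard = ENV_LOCK.lock().unwrap();
//         // set the variables, call Config::from_envar(), assert, then clean up
//     }
//
// Crates such as serial_test offer the same guarantee through an attribute macro.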
env::set_var("LOG_LEVEL", log_level); env::set_var("ENVIRONMENT", environment); - env::set_var("POSTGRES_USER", postgres_user); - env::set_var("POSTGRES_PASSWORD", postgres_password); - env::set_var("POSTGRES_DB", postgres_db); - env::set_var("POSTGRES_HOST", postgres_host); - env::set_var("POSTGRES_PORT", postgres_port); + env::set_var("DATA_SOURCE", data_source); + env::set_var("DATABASE_URL", database_url); env::set_var("GITHUB_OWNER", gh_owner); env::set_var("GITHUB_REPO", gh_repo); env::set_var("GITHUB_BRANCH", gh_branch); @@ -186,13 +153,10 @@ mod test { assert_eq!(result.svc_endpoint, svc_endpoint); assert_eq!(result.svc_port, svc_port); - assert_eq!(result.log_level, log_level); - assert_eq!(result.environment, environment); - assert_eq!(result.postgres_user, postgres_user); - assert_eq!(result.postgres_password, postgres_password); - assert_eq!(result.postgres_db, postgres_db); - assert_eq!(result.postgres_host, postgres_host); - assert_eq!(result.postgres_port, postgres_port); + assert_eq!(result.log_level, expected_log_level); + assert_eq!(result.environment, expected_environment); + assert_eq!(result.data_source, expected_data_source); + assert_eq!(result.database_url, database_url); assert_eq!(result.gh_owner, gh_owner); assert_eq!(result.gh_repo, gh_repo); assert_eq!(result.gh_branch, gh_branch); @@ -201,11 +165,8 @@ mod test { env::remove_var("SVC_PORT"); env::remove_var("LOG_LEVEL"); env::remove_var("ENVIRONMENT"); - env::remove_var("POSTGRES_USER"); - env::remove_var("POSTGRES_PASSWORD"); - env::remove_var("POSTGRES_DB"); - env::remove_var("POSTGRES_HOST"); - env::remove_var("POSTGRES_PORT"); + env::remove_var("DATA_SOURCE"); + env::remove_var("DATABASE_URL"); env::remove_var("GITHUB_OWNER"); env::remove_var("GITHUB_REPO"); env::remove_var("GITHUB_BRANCH"); @@ -217,11 +178,8 @@ mod test { let svc_port = "8080"; let log_level = "info"; let environment = "dev"; - let postgres_user = "admin"; - let postgres_password = "admin-password"; - let postgres_db = "testing"; - let postgres_host = "127.0.0.1"; - let postgres_port = "5432"; + let data_source = "sqlite"; + let database_url = "sqlite:husni-portfolio.db"; let gh_owner = "husni-zuhdi"; let gh_repo = "husni-blog-resources"; let gh_branch = "main"; @@ -230,11 +188,8 @@ mod test { env::set_var("SVC_PORT", svc_port); env::set_var("LOG_LEVEL", log_level); env::set_var("ENVIRONMENT", environment); - env::set_var("POSTGRES_USER", postgres_user); - env::set_var("POSTGRES_PASSWORD", postgres_password); - env::set_var("POSTGRES_DB", postgres_db); - env::set_var("POSTGRES_HOST", postgres_host); - env::set_var("POSTGRES_PORT", postgres_port); + env::set_var("DATA_SOURCE", data_source); + env::set_var("DATABASE_URL", database_url); env::set_var("GITHUB_OWNER", gh_owner); env::set_var("GITHUB_REPO", gh_repo); env::set_var("GITHUB_BRANCH", gh_branch); @@ -245,11 +200,8 @@ mod test { assert_eq!(result.svc_port, svc_port); assert_eq!(result.log_level, log_level); assert_eq!(result.environment, environment); - assert_eq!(result.postgres_user, postgres_user); - assert_eq!(result.postgres_password, postgres_password); - assert_eq!(result.postgres_db, postgres_db); - assert_eq!(result.postgres_host, postgres_host); - assert_eq!(result.postgres_port, postgres_port); + assert_eq!(result.data_source, data_source); + assert_eq!(result.database_url, database_url); assert_eq!(result.gh_owner, gh_owner); assert_eq!(result.gh_repo, gh_repo); assert_eq!(result.gh_branch, gh_branch); @@ -258,11 +210,8 @@ mod test { env::remove_var("SVC_PORT"); 
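// Reviewer note, illustrative only and not part of this patch: the env::remove_var
// cleanup here only runs when every assert_eq! above passes; a failing assertion
// panics first and leaves the variables set for whichever test runs next. An RAII
// guard is one way to make the cleanup unconditional. A sketch, assuming the key
// list is known up front (EnvGuard is a hypothetical helper, not defined in this
// repo):
//
//     struct EnvGuard(&'static [&'static str]);
//
//     impl Drop for EnvGuard {
//         fn drop(&mut self) {
//             for key in self.0 {
//                 std::env::remove_var(key);
//             }
//         }
//     }
//
//     // constructed at the top of the test, for example:
//     // let _cleanup = EnvGuard(&["SVC_ENDPOINT", "SVC_PORT", "DATA_SOURCE", "DATABASE_URL"]);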
env::remove_var("LOG_LEVEL"); env::remove_var("ENVIRONMENT"); - env::remove_var("POSTGRES_USER"); - env::remove_var("POSTGRES_PASSWORD"); - env::remove_var("POSTGRES_DB"); - env::remove_var("POSTGRES_HOST"); - env::remove_var("POSTGRES_PORT"); + env::remove_var("DATA_SOURCE"); + env::remove_var("DATABASE_URL"); env::remove_var("GITHUB_OWNER"); env::remove_var("GITHUB_REPO"); env::remove_var("GITHUB_BRANCH"); From 4263ba1b9650c0740a4d3f2454c40a5ea4d1a875 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 01:37:12 +0700 Subject: [PATCH 21/49] chore: improve build state fn --- internal/src/app.rs | 34 ++++++++++++++++++++++++---------- 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/internal/src/app.rs b/internal/src/app.rs index b616102..79ff213 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -1,4 +1,5 @@ use crate::database::memory::MemoryBlogRepo; +use crate::database::sqlite::SqliteBlogRepo; use crate::handler; use crate::model::axum::AppState; use crate::{config::Config, usecase::blog::BlogUseCase}; @@ -44,16 +45,29 @@ pub async fn app() -> () { } async fn state_factory(config: Config) -> AppState { - // Setup config and blogs_data states - let mut blog_repo = MemoryBlogRepo::new(); - if !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty() { - blog_repo = - MemoryBlogRepo::from_github(&config.gh_owner, &config.gh_repo, &config.gh_branch).await; - } - let blog_usecase = Arc::new(Mutex::new(BlogUseCase::new(Box::new(blog_repo)))); - let app_state = AppState { + // Setup blog use case + let blog_usecase = if config.data_source == "sqlite" && config.database_url != "" { + // Use SqliteBlogRepo + let repo = SqliteBlogRepo::new(config.database_url.clone()).await; + Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) + } else { + // Use MemoryBlogRepo + if !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty() + { + // Use from_github method + let repo = + MemoryBlogRepo::from_github(&config.gh_owner, &config.gh_repo, &config.gh_branch) + .await; + Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) + } else { + // Use Default method + let repo = MemoryBlogRepo::default(); + Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) + } + }; + + AppState { config, blog_usecase, - }; - app_state + } } From b50d111292f8e17a8043f3dcef2491655bac4cbd Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:29:11 +0700 Subject: [PATCH 22/49] chore: change &mut self to &self for queries and add Sync trait --- internal/src/port/blog/query.rs | 4 ++-- internal/src/repo/blog.rs | 4 ++-- internal/src/usecase/blog.rs | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/internal/src/port/blog/query.rs b/internal/src/port/blog/query.rs index 956ec4c..e387a43 100644 --- a/internal/src/port/blog/query.rs +++ b/internal/src/port/blog/query.rs @@ -3,6 +3,6 @@ use async_trait::async_trait; #[async_trait] pub trait BlogQueryPort { - async fn find(&mut self, id: BlogId) -> Blog; - async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec; + async fn find(&self, id: BlogId) -> Blog; + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; } diff --git a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs index 953be6e..320e219 100644 --- a/internal/src/repo/blog.rs +++ b/internal/src/repo/blog.rs @@ -17,8 +17,8 @@ pub trait BlogRepo: DynClone { source: BlogSource, body: BlogBody, ) -> Blog; - 
async fn find(&mut self, id: BlogId) -> Blog; - async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec; + async fn find(&self, id: BlogId) -> Blog; + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; async fn update( &mut self, id: BlogId, diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs index 08069e4..00604d4 100644 --- a/internal/src/usecase/blog.rs +++ b/internal/src/usecase/blog.rs @@ -8,15 +8,15 @@ use async_trait::async_trait; #[derive(Clone)] pub struct BlogUseCase { - pub blog_repo: Box, + pub blog_repo: Box, } #[async_trait] impl BlogQueryPort for BlogUseCase { - async fn find(&mut self, id: BlogId) -> Blog { + async fn find(&self, id: BlogId) -> Blog { self.blog_repo.find(id).await } - async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { self.blog_repo.find_blogs(start, end).await } } @@ -51,7 +51,7 @@ impl BlogQueryCommand for BlogUseCase { } impl BlogUseCase { - pub fn new(blog_repo: Box) -> BlogUseCase { + pub fn new(blog_repo: Box) -> BlogUseCase { BlogUseCase { blog_repo } } } From 37e73d596bc2f797335c86f2c929abd88a1da6f3 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:30:57 +0700 Subject: [PATCH 23/49] feat: change std mutex to tokio mutex and update FromRow BlogSource implementation --- internal/src/app.rs | 3 ++- internal/src/model/axum.rs | 3 ++- internal/src/model/blog.rs | 6 +++++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/internal/src/app.rs b/internal/src/app.rs index 79ff213..1ae3f0a 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -8,7 +8,8 @@ use axum::{ Router, }; use log::info; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; +use tokio::sync::Mutex; use tower_http::services::{ServeDir, ServeFile}; pub async fn app() -> () { diff --git a/internal/src/model/axum.rs b/internal/src/model/axum.rs index 55476db..7d025ff 100644 --- a/internal/src/model/axum.rs +++ b/internal/src/model/axum.rs @@ -1,6 +1,7 @@ use crate::config::Config; use crate::usecase::blog::BlogUseCase; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; +use tokio::sync::Mutex; /// Axum state /// Consist of Config and BlogUseCase diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs index 226b3b7..ef098b9 100644 --- a/internal/src/model/blog.rs +++ b/internal/src/model/blog.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use sqlx::{sqlite::SqliteRow, Decode, FromRow}; +use sqlx::{sqlite::SqliteRow, FromRow}; use std::fmt::Display; /// BlogId @@ -125,6 +125,10 @@ impl<'r> FromRow<'r, SqliteRow> for Blog { let source = match row.try_get("source")? 
{ "github" => BlogSource::Github, "filesystem" => BlogSource::FileSystem, + &_ => { + // Default to FileSystem + BlogSource::FileSystem + } }; let filename = row.try_get("filename")?; let body = row.try_get("body")?; From 3cade23306791a5093e5290acef8cf0294ff3fd3 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:33:44 +0700 Subject: [PATCH 24/49] chore: change &mut self to &self for db repo and fix several sqlite queries + logging --- internal/src/database/memory.rs | 4 ++-- internal/src/database/sqlite.rs | 25 ++++++++++++++++--------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index a4e1133..8839831 100644 --- a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -16,7 +16,7 @@ pub struct MemoryBlogRepo { #[async_trait] impl BlogRepo for MemoryBlogRepo { - async fn find(&mut self, id: BlogId) -> Blog { + async fn find(&self, id: BlogId) -> Blog { let result = self .blogs .iter() @@ -28,7 +28,7 @@ impl BlogRepo for MemoryBlogRepo { result.clone() } - async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { let start_seq = start.0 as usize; let end_seq = end.0 as usize; let result = &self.blogs[start_seq..end_seq]; diff --git a/internal/src/database/sqlite.rs b/internal/src/database/sqlite.rs index b60960b..e3fccff 100644 --- a/internal/src/database/sqlite.rs +++ b/internal/src/database/sqlite.rs @@ -1,13 +1,12 @@ -use crate::api::github::get_gh_blogs; use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, BlogStartPage, }; use crate::repo::blog::BlogRepo; use async_trait::async_trait; -use log::{debug, error, info}; +use log::{debug, info}; use sqlx::sqlite::SqlitePool; -use sqlx::{query, query_as, Row}; +use sqlx::{query, query_as}; #[derive(Clone)] pub struct SqliteBlogRepo { @@ -16,7 +15,7 @@ pub struct SqliteBlogRepo { #[async_trait] impl BlogRepo for SqliteBlogRepo { - async fn find(&mut self, id: BlogId) -> Blog { + async fn find(&self, id: BlogId) -> Blog { let blog_id = id.0; let prep_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; debug!("Executing query {} for id {}", &prep_query, &blog_id); @@ -30,7 +29,7 @@ impl BlogRepo for SqliteBlogRepo { debug!("Blog HTML {}.", &row.body); row } - async fn find_blogs(&mut self, start: BlogStartPage, end: BlogEndPage) -> Vec { + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { let start_seq = start.0; let end_seq = end.0; let limit = end_seq - start_seq; @@ -47,7 +46,7 @@ impl BlogRepo for SqliteBlogRepo { .await .expect("Failed to execute get query"); info!("Blogs from {} to {} processed.", &start_seq, &end_seq); - for row in rows { + for row in &rows { info!("Blog {} processed.", &row.id); debug!("Blog HTML {}.", &row.body); } @@ -79,7 +78,11 @@ impl BlogRepo for SqliteBlogRepo { .execute(&self.pool) .await .expect("Failed to execute add query"); - info!("Blog {} was added.", &blog_id); + info!( + "Blog {} in row {} was added.", + &blog_id, + &query_res.rows_affected() + ); let prep_get_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; debug!("Executing query {} for id {}", &prep_get_query, &blog_id); @@ -156,7 +159,7 @@ impl BlogRepo for SqliteBlogRepo { debug!("Skipped update name field") } } - let prep_update_query = format!("UPDATE blogs SET{}WHERE id = $1", &affected_col).as_str(); + let 
prep_update_query = format!("UPDATE blogs SET{}WHERE id = $1", &affected_col); debug!("Executing query {} for id {}", &prep_update_query, &blog_id); let query_res = query(&prep_update_query) @@ -164,7 +167,11 @@ impl BlogRepo for SqliteBlogRepo { .execute(&self.pool) .await .expect("Failed to execute update query"); - info!("Blog {} was updated.", &blog_id); + info!( + "Blog {} in row {} was updated.", + &blog_id, + &query_res.rows_affected() + ); let prep_get_query = "SELECT * FROM blogs WHERE id = $1 ORDER BY id"; debug!("Executing query {} for id {}", &prep_get_query, &blog_id); From b9dd825c19f48f7b25b9aba47e20c0486b3010b2 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:34:28 +0700 Subject: [PATCH 25/49] feat: update config::from_envar implementation --- internal/src/config.rs | 50 +++++++++++++++++++----------------------- 1 file changed, 22 insertions(+), 28 deletions(-) diff --git a/internal/src/config.rs b/internal/src/config.rs index daebafc..d576fb2 100644 --- a/internal/src/config.rs +++ b/internal/src/config.rs @@ -42,6 +42,21 @@ impl Default for Config { } impl Config { + fn parse_optional_envar(envar: &str, default: &str) -> String { + match env::var(&envar) { + Err(e) => { + warn!( + "Failed to load {} environment variable. Set default to '{}'. Error {}", + &envar, &default, e + ); + default.to_string() + } + Ok(val) => match val.as_str() { + "" => default.to_string(), + _ => val, + }, + } + } /// from_envar /// Setup config from environment variables pub fn from_envar() -> Self { @@ -52,34 +67,13 @@ impl Config { .expect("failed to load SVC_PORT environment variable. Double check your config"); // Optional - let log_level: String = env::var("LOG_LEVEL").unwrap_or_else(|_| { - warn!("Failed to load LOG_LEVEL environment variable. Set default to 'info'"); - "info".to_string() - }); - let environment: String = env::var("ENVIRONMENT").unwrap_or_else(|_| { - warn!("Failed to load ENVIRONMENT environment variable. Set default to 'prod'"); - "prod".to_string() - }); - let data_source: String = env::var("DATA_SOURCE").unwrap_or_else(|_| { - warn!("Failed to load DATA_SOURCE environment variable. Set default to 'memory'"); - "memory".to_string() - }); - let database_url: String = env::var("DATABASE_URL").unwrap_or_else(|_| { - warn!("Failed to load DATABASE_URL environment variable. Set default to ''"); - "".to_string() - }); - let gh_owner: String = env::var("GITHUB_OWNER").unwrap_or_else(|_| { - warn!("Failed to load GITHUB_OWNER environment variable. Set default to ''"); - "".to_string() - }); - let gh_repo: String = env::var("GITHUB_REPO").unwrap_or_else(|_| { - warn!("Failed to load GITHUB_REPO environment variable. Set default to ''"); - "".to_string() - }); - let gh_branch: String = env::var("GITHUB_BRANCH").unwrap_or_else(|_| { - warn!("Failed to load GITHUB_BRANCH environment variable. 
Set default to ''"); - "".to_string() - }); + let log_level: String = Self::parse_optional_envar("LOG_LEVEL", "info"); + let environment: String = Self::parse_optional_envar("ENVIRONMENT", "prod"); + let data_source: String = Self::parse_optional_envar("DATA_SOURCE", "memory"); + let database_url: String = Self::parse_optional_envar("DATABASE_URL", ""); + let gh_owner: String = Self::parse_optional_envar("GITHUB_OWNER", ""); + let gh_repo: String = Self::parse_optional_envar("GITHUB_REPO", ""); + let gh_branch: String = Self::parse_optional_envar("GITHUB_BRANCH", ""); Self { svc_endpoint, From bc4e16dde4265fe0271cd4804228706c80c894d7 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:35:18 +0700 Subject: [PATCH 26/49] feat: add debug_handler, improve pagination, and apply tokio mutex --- internal/src/handler.rs | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/internal/src/handler.rs b/internal/src/handler.rs index 3efafcf..a4ea502 100644 --- a/internal/src/handler.rs +++ b/internal/src/handler.rs @@ -1,8 +1,9 @@ -use crate::model::blog::BlogId; +use crate::model::blog::{BlogEndPage, BlogId, BlogPagination, BlogStartPage}; use crate::model::{axum::AppState, templates::*}; use crate::utils::read_version_manifest; use askama::Template; -use axum::extract::{Path, State}; +use axum::debug_handler; +use axum::extract::{Path, Query, State}; use axum::response::Html; use log::{debug, error, info}; @@ -29,11 +30,33 @@ pub async fn get_profile() -> Html { /// get_blogs /// Serve get_blogs HTML file /// List our blogs title and id -pub async fn get_blogs(State(app_state): State) -> Html { +#[debug_handler] +pub async fn get_blogs( + State(app_state): State, + pagination: Query, +) -> Html { // Locking Mutex - let data = app_state.blog_usecase.lock().expect("Mutex was poisoned"); + let data = app_state.blog_usecase.lock().await; + + // Setup Pagination + debug!("Pagination {:?}", &pagination); + let start = match pagination.0.start { + Some(val) => val, + None => { + info!("Set default start to 0"); + BlogStartPage(0) + } + }; + let end = match pagination.0.end { + Some(val) => val, + None => { + info!("Set default end to 10"); + BlogEndPage(10) + } + }; + // Copy data to Template struct - let blogs_data = data.blog_repo.find_all(); + let blogs_data = data.blog_repo.find_blogs(start, end).await; let blogs: Vec = blogs_data .iter() .map(|blog| BlogTemplate { @@ -61,11 +84,13 @@ pub async fn get_blogs(State(app_state): State) -> Html { /// get_blog /// Serve get_blog HTML file /// Render our blog +#[debug_handler] pub async fn get_blog(Path(path): Path, State(app_state): State) -> Html { // Locking Mutex - let data = app_state.blog_usecase.lock().expect("Mutex was poisoned"); - let blog_data = data.blog_repo.find(BlogId(path.clone())); + let data = app_state.blog_usecase.lock().await; + // Copy data to Template struct + let blog_data = data.blog_repo.find(BlogId(path.clone())).await; let blog = BlogTemplate { id: path.clone().as_str(), name: &blog_data.name.as_str(), From 3c34a7bfe72d69fd9f84b0f456aeac3536852666 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 03:35:38 +0700 Subject: [PATCH 27/49] feat: add inital sqlite migration --- .../migrations/20240901103916_initial_migration.sql | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/internal/migrations/20240901103916_initial_migration.sql b/internal/migrations/20240901103916_initial_migration.sql index 
69df56a..adb12f3 100644 --- a/internal/migrations/20240901103916_initial_migration.sql +++ b/internal/migrations/20240901103916_initial_migration.sql @@ -1,17 +1,8 @@ -- Add migration script here CREATE TABLE IF NOT EXISTS blogs ( - id INTEGER PRIMARY KEY NOT NULL, + id TEXT PRIMARY KEY NOT NULL, name TEXT NOT NULL, source TEXT NOT NULL, filename TEXT NOT NULL, body TEXT NOT NULL ); - -CREATE TABLE IF NOT EXISTS github_trees ( - id INTEGER PRIMARY KEY NOT NULL, - tree_path TEXT NOT NULL, - tree_mode TEXT NOT NULL, - tree_type TEXT NOT NULL, - sha TEXT NOT NULL, - url TEXT NOT NULL -); From ca27c157158e6a56dd858198b8083a7cc1ca1a9f Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 08:58:42 +0700 Subject: [PATCH 28/49] chore: add docs --- internal/src/app.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/src/app.rs b/internal/src/app.rs index 1ae3f0a..37a98b4 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -12,6 +12,7 @@ use std::sync::Arc; use tokio::sync::Mutex; use tower_http::services::{ServeDir, ServeFile}; +/// Run the axum web application pub async fn app() -> () { // Setup Config let config = Config::from_envar(); @@ -45,6 +46,7 @@ pub async fn app() -> () { axum::serve(listener, app).await.unwrap(); } +/// Build App State for Axum Application async fn state_factory(config: Config) -> AppState { // Setup blog use case let blog_usecase = if config.data_source == "sqlite" && config.database_url != "" { From 5625ecf79efe0553a0f97bbceb83c9659591875e Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 08:59:26 +0700 Subject: [PATCH 29/49] chore: change default endpoint and environment naming --- internal/src/config.rs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/internal/src/config.rs b/internal/src/config.rs index d576fb2..842d5b9 100644 --- a/internal/src/config.rs +++ b/internal/src/config.rs @@ -16,11 +16,13 @@ pub struct Config { } impl Default for Config { + /// By default running on localhost:8080 in release + /// with log-level info and data from memory fn default() -> Self { - let svc_endpoint: String = "127.0.0.1".to_string(); + let svc_endpoint: String = "localhost".to_string(); let svc_port: String = "8080".to_string(); let log_level: String = "info".to_string(); - let environment: String = "prod".to_string(); + let environment: String = "release".to_string(); let data_source: String = "memory".to_string(); let database_url: String = "".to_owned(); let gh_owner: String = "".to_string(); @@ -42,6 +44,7 @@ impl Default for Config { } impl Config { + /// Parse optional environment variable to setup the envar and set default fn parse_optional_envar(envar: &str, default: &str) -> String { match env::var(&envar) { Err(e) => { @@ -68,7 +71,7 @@ impl Config { // Optional let log_level: String = Self::parse_optional_envar("LOG_LEVEL", "info"); - let environment: String = Self::parse_optional_envar("ENVIRONMENT", "prod"); + let environment: String = Self::parse_optional_envar("ENVIRONMENT", "release"); let data_source: String = Self::parse_optional_envar("DATA_SOURCE", "memory"); let database_url: String = Self::parse_optional_envar("DATABASE_URL", ""); let gh_owner: String = Self::parse_optional_envar("GITHUB_OWNER", ""); @@ -95,10 +98,10 @@ mod test { #[test] fn test_default() { - let svc_endpoint: String = "127.0.0.1".to_string(); + let svc_endpoint: String = "localhost".to_string(); let svc_port: String = "8080".to_string(); let log_level: String = 
"info".to_string(); - let environment: String = "prod".to_string(); + let environment: String = "release".to_string(); let data_source: String = "memory".to_string(); let database_url: String = "".to_string(); let gh_owner: String = "".to_string(); @@ -120,12 +123,12 @@ mod test { #[test] fn test_from_envar_without_optionals() { - let svc_endpoint = "127.0.0.1"; + let svc_endpoint = "localhost"; let svc_port = "8080"; let log_level = ""; let expected_log_level = "info"; let environment = ""; - let expected_environment = "prod"; + let expected_environment = "release"; let data_source = ""; let expected_data_source = "memory"; let database_url = ""; @@ -168,7 +171,7 @@ mod test { #[test] fn test_from_envar_with_optionals() { - let svc_endpoint = "127.0.0.1"; + let svc_endpoint = "localhost"; let svc_port = "8080"; let log_level = "info"; let environment = "dev"; From 0650bdc5260dba29e4fe3964613d666029b63c82 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 09:01:20 +0700 Subject: [PATCH 30/49] feat: improve pagionation on memory repo --- internal/src/database/memory.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index 8839831..1c6caf4 100644 --- a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -6,7 +6,7 @@ use crate::model::blog::{ use crate::repo::blog::BlogRepo; use crate::utils::{capitalize, md_to_html}; use async_trait::async_trait; -use log::{debug, info}; +use log::{debug, info, warn}; use std::fs; #[derive(Clone)] @@ -29,8 +29,23 @@ impl BlogRepo for MemoryBlogRepo { result.clone() } async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { - let start_seq = start.0 as usize; - let end_seq = end.0 as usize; + let start_seq = if start.0 as usize > self.blogs.len() { + warn!("BlogStartPage is greater than Blogs count. Will reset to 0."); + 0 + } else { + start.0 as usize + }; + + let end_seq = if (end.0 as usize > self.blogs.len()) && self.blogs.len() > 10 { + warn!("BlogEndPage is greater than Blogs count. Will reset to Blogs count or 10, whichever is lesser."); + 10 + } else if (end.0 as usize > self.blogs.len()) && self.blogs.len() < 10 { + warn!("BlogEndPage is greater than Blogs count. 
Will reset to Blogs count or 10, whichever is lesser."); + self.blogs.len() + } else { + end.0 as usize + }; + let result = &self.blogs[start_seq..end_seq]; result.to_vec() } From f0e5f002f7db6d3fa071114b6ecd101916b4c27b Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 09:03:04 +0700 Subject: [PATCH 31/49] fix: update command port naming --- internal/src/port/blog/command.rs | 2 +- internal/src/repo/blog.rs | 4 ++-- internal/src/usecase/blog.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/src/port/blog/command.rs b/internal/src/port/blog/command.rs index 042e957..92e4cff 100644 --- a/internal/src/port/blog/command.rs +++ b/internal/src/port/blog/command.rs @@ -2,7 +2,7 @@ use crate::model::blog::{Blog, BlogBody, BlogDeleted, BlogFilename, BlogId, Blog use async_trait::async_trait; #[async_trait] -pub trait BlogQueryCommand { +pub trait BlogCommandPort { async fn add( &mut self, id: BlogId, diff --git a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs index 320e219..460ab1b 100644 --- a/internal/src/repo/blog.rs +++ b/internal/src/repo/blog.rs @@ -9,6 +9,8 @@ clone_trait_object!(BlogRepo); #[async_trait] pub trait BlogRepo: DynClone { + async fn find(&self, id: BlogId) -> Blog; + async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; async fn add( &mut self, id: BlogId, @@ -17,8 +19,6 @@ pub trait BlogRepo: DynClone { source: BlogSource, body: BlogBody, ) -> Blog; - async fn find(&self, id: BlogId) -> Blog; - async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; async fn update( &mut self, id: BlogId, diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs index 00604d4..5b1e777 100644 --- a/internal/src/usecase/blog.rs +++ b/internal/src/usecase/blog.rs @@ -2,7 +2,7 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, BlogStartPage, }; -use crate::port::blog::{command::BlogQueryCommand, query::BlogQueryPort}; +use crate::port::blog::{command::BlogCommandPort, query::BlogQueryPort}; use crate::repo::blog::BlogRepo; use async_trait::async_trait; @@ -22,7 +22,7 @@ impl BlogQueryPort for BlogUseCase { } #[async_trait] -impl BlogQueryCommand for BlogUseCase { +impl BlogCommandPort for BlogUseCase { async fn add( &mut self, id: BlogId, From 95f13f906189709273c216c0400281981d7fade5 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 09:03:57 +0700 Subject: [PATCH 32/49] chore: tidy up Version model --- internal/src/model/version.rs | 18 ++++++++++++++++++ internal/src/utils.rs | 15 --------------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/internal/src/model/version.rs b/internal/src/model/version.rs index 27b3cf2..fa9a2da 100644 --- a/internal/src/model/version.rs +++ b/internal/src/model/version.rs @@ -1,4 +1,6 @@ use serde::{Deserialize, Serialize}; +use std::fs; +use std::io::BufReader; /// Version /// Store version, build hash, and buld date @@ -8,3 +10,19 @@ pub struct Version { pub build_hash: String, pub build_date: String, } + +impl Version { + /// read_version_manifest + /// read version manifest on root repository to get this configuration + /// * version + /// * git build hash + /// * build date + pub fn new() -> Result { + let file = fs::File::open("version.json").expect("Failed to open version.json"); + let reader = BufReader::new(file); + + let version: Version = + serde_json::from_reader(reader).expect("Failed to parse version.json"); + 
Ok(version) + } +} diff --git a/internal/src/utils.rs b/internal/src/utils.rs index 6afcdf8..e3a1d23 100644 --- a/internal/src/utils.rs +++ b/internal/src/utils.rs @@ -1,8 +1,6 @@ -use crate::model::version::Version; use log::debug; use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions}; use std::fs; -use std::io::BufReader; /// md_to_html: Markdown to HTML /// take String of filename @@ -29,19 +27,6 @@ pub fn md_to_html(filename: String) -> Result { Ok(html) } -/// read_version_manifest -/// read version manifest on root repository to get this configuration -/// * version -/// * git build hash -/// * build date -pub fn read_version_manifest() -> Result { - let file = fs::File::open("version.json").expect("Failed to open version.json"); - let reader = BufReader::new(file); - - let json: Version = serde_json::from_reader(reader).expect("Failed to parse version.json"); - Ok(json) -} - /// capitalize /// Capitalize the first character in s. /// Take borrowed str of s From 7f00bc22db66f6c9b47807f765313d139e4db443 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 09:04:40 +0700 Subject: [PATCH 33/49] chore: separate BlogsTemplate from BlogTemplate data --- internal/src/model/templates.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/internal/src/model/templates.rs b/internal/src/model/templates.rs index d23c501..ff67c09 100644 --- a/internal/src/model/templates.rs +++ b/internal/src/model/templates.rs @@ -7,7 +7,13 @@ pub struct ProfileTemplate; #[derive(Template, Debug)] #[template(path = "blogs.html")] pub struct BlogsTemplate<'a> { - pub blogs: &'a Vec>, + pub blogs: &'a Vec>, +} + +#[derive(Debug)] +pub struct BlogsTemplateBlog<'a> { + pub id: &'a str, + pub name: &'a str, } #[derive(Template, Debug)] From cf37c3b970aa30ba80b7ff412b14eaf5b17124a7 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Tue, 3 Sep 2024 09:05:43 +0700 Subject: [PATCH 34/49] chore: update Version and BlogsTemplate implementation --- internal/src/handler.rs | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/internal/src/handler.rs b/internal/src/handler.rs index a4ea502..db4308f 100644 --- a/internal/src/handler.rs +++ b/internal/src/handler.rs @@ -1,6 +1,6 @@ use crate::model::blog::{BlogEndPage, BlogId, BlogPagination, BlogStartPage}; +use crate::model::version::Version; use crate::model::{axum::AppState, templates::*}; -use crate::utils::read_version_manifest; use askama::Template; use axum::debug_handler; use axum::extract::{Path, Query, State}; @@ -29,7 +29,7 @@ pub async fn get_profile() -> Html { /// get_blogs /// Serve get_blogs HTML file -/// List our blogs title and id +/// List our blogs id and name #[debug_handler] pub async fn get_blogs( State(app_state): State, @@ -55,18 +55,19 @@ pub async fn get_blogs( } }; - // Copy data to Template struct + // Construct BlogsTemplate Struct let blogs_data = data.blog_repo.find_blogs(start, end).await; - let blogs: Vec = blogs_data + let blogs: Vec = blogs_data .iter() - .map(|blog| BlogTemplate { - id: &blog.id.as_str(), - name: &blog.name.as_str(), - filename: &blog.filename.as_str(), - body: &blog.body.as_str(), + .map(|blog| { + info!("Construct BlogsTemplateBlog for Blog Id {}", &blog.id); + BlogsTemplateBlog { + id: &blog.id.as_str(), + name: &blog.name.as_str(), + } }) .collect(); - debug!("Blogs: {:?}", &blogs); + debug!("BlogsTemplate blogs : {:?}", &blogs); let blogs_res = BlogsTemplate { blogs: &blogs 
}.render(); match blogs_res { @@ -89,7 +90,7 @@ pub async fn get_blog(Path(path): Path, State(app_state): State, State(app_state): State) -> Html { - let version_json = read_version_manifest().expect("Failed to get version manifest"); + let version_data = Version::new().expect("Failed to generate Version struct"); let version = VersionTemplate { - version: version_json.version.as_str(), + version: version_data.version.as_str(), environment: app_state.config.environment.as_str(), - build_hash: version_json.build_hash.as_str(), - build_date: version_json.build_date.as_str(), + build_hash: version_data.build_hash.as_str(), + build_date: version_data.build_date.as_str(), } .render(); From f09bc2c5db79e71e25281d9e623db1590d1071fd Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Wed, 4 Sep 2024 01:13:06 +0700 Subject: [PATCH 35/49] chore: refactor handler functions --- internal/src/app.rs | 11 ++- internal/src/{handler.rs => handler/blog.rs} | 83 ++------------------ internal/src/handler/error.rs | 36 +++++++++ internal/src/handler/mod.rs | 8 ++ internal/src/handler/profile.rs | 21 +++++ internal/src/handler/version.rs | 31 ++++++++ 6 files changed, 106 insertions(+), 84 deletions(-) rename internal/src/{handler.rs => handler/blog.rs} (51%) create mode 100644 internal/src/handler/error.rs create mode 100644 internal/src/handler/mod.rs create mode 100644 internal/src/handler/profile.rs create mode 100644 internal/src/handler/version.rs diff --git a/internal/src/app.rs b/internal/src/app.rs index 37a98b4..751a39b 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -28,18 +28,17 @@ pub async fn app() -> () { // Axum Application let app = Router::new() - .route("/", get(handler::get_profile)) - .route("/not-found", get(handler::get_404_not_found)) - .route("/version", get(handler::get_version)) - .route("/blogs", get(handler::get_blogs)) - .route("/blogs/:blog_id", get(handler::get_blog)) + .route("/", get(handler::profile::get_profile)) + .route("/version", get(handler::version::get_version)) + .route("/blogs", get(handler::blog::get_blogs)) + .route("/blogs/:blog_id", get(handler::blog::get_blog)) .nest_service("/statics", get_service(ServeDir::new("./statics/favicon/"))) .nest_service( "/statics/styles.css", get_service(ServeFile::new("./statics/styles.css")), ) .with_state(app_state) - .fallback(get(handler::get_404_not_found)); + .fallback(get(handler::error::get_404_not_found)); // Start Axum Application let listener = tokio::net::TcpListener::bind(endpoint).await.unwrap(); diff --git a/internal/src/handler.rs b/internal/src/handler/blog.rs similarity index 51% rename from internal/src/handler.rs rename to internal/src/handler/blog.rs index db4308f..8ac94c6 100644 --- a/internal/src/handler.rs +++ b/internal/src/handler/blog.rs @@ -1,32 +1,15 @@ +use crate::handler::error::get_500_internal_server_error; use crate::model::blog::{BlogEndPage, BlogId, BlogPagination, BlogStartPage}; -use crate::model::version::Version; -use crate::model::{axum::AppState, templates::*}; +use crate::model::{ + axum::AppState, + templates::{BlogTemplate, BlogsTemplate, BlogsTemplateBlog}, +}; use askama::Template; use axum::debug_handler; use axum::extract::{Path, Query, State}; use axum::response::Html; use log::{debug, error, info}; -/// Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) -/// They show an example to return Html<&'static str> -/// Instaed of Html. 
But using static give me a headache :") - -/// get_profile -/// Serve Profile/Biography HTML file -pub async fn get_profile() -> Html { - let profile = ProfileTemplate.render(); - match profile { - Ok(res) => { - info!("Profile askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render profile.html. {}", err); - get_500_internal_server_error() - } - } -} - /// get_blogs /// Serve get_blogs HTML file /// List our blogs id and name @@ -111,59 +94,3 @@ pub async fn get_blog(Path(path): Path, State(app_state): State) -> Html { - let version_data = Version::new().expect("Failed to generate Version struct"); - let version = VersionTemplate { - version: version_data.version.as_str(), - environment: app_state.config.environment.as_str(), - build_hash: version_data.build_hash.as_str(), - build_date: version_data.build_date.as_str(), - } - .render(); - - match version { - Ok(res) => { - info!("Version askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render version.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_404_not_found -/// Serve 404 Not found HTML file -pub async fn get_404_not_found() -> Html { - let not_found = NotFoundTemplate.render(); - match not_found { - Ok(res) => { - info!("NotFound askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render 404_not_found.html. {}", err); - get_500_internal_server_error() - } - } -} - -/// get_500_internal_server_error -/// Serve 500 Internal Server Error HTML file -fn get_500_internal_server_error() -> Html { - let internal_server_error = InternalServerErrorTemplate.render(); - match internal_server_error { - Ok(res) => { - info!("InternalServerError askama template rendered."); - Html(res) - } - Err(err) => { - error!("Failed to render 500_internal_server_error.html. {}", err); - Html("We're fucked up.".to_string()) - } - } -} diff --git a/internal/src/handler/error.rs b/internal/src/handler/error.rs new file mode 100644 index 0000000..a35c995 --- /dev/null +++ b/internal/src/handler/error.rs @@ -0,0 +1,36 @@ +use crate::model::templates::{InternalServerErrorTemplate, NotFoundTemplate}; +use askama::Template; +use axum::response::Html; +use log::{error, info}; + +/// get_404_not_found +/// Serve 404 Not found HTML file +pub async fn get_404_not_found() -> Html { + let not_found = NotFoundTemplate.render(); + match not_found { + Ok(res) => { + info!("NotFound askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render 404_not_found.html. {}", err); + get_500_internal_server_error() + } + } +} + +/// get_500_internal_server_error +/// Serve 500 Internal Server Error HTML file +pub fn get_500_internal_server_error() -> Html { + let internal_server_error = InternalServerErrorTemplate.render(); + match internal_server_error { + Ok(res) => { + info!("InternalServerError askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render 500_internal_server_error.html. {}", err); + Html("We're fucked up.".to_string()) + } + } +} diff --git a/internal/src/handler/mod.rs b/internal/src/handler/mod.rs new file mode 100644 index 0000000..35db4b8 --- /dev/null +++ b/internal/src/handler/mod.rs @@ -0,0 +1,8 @@ +pub mod blog; +pub mod error; +pub mod profile; +pub mod version; + +// Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) +// They show an example to return Html<&'static str> +// Instaed of Html. 
But using static give me a headache :") diff --git a/internal/src/handler/profile.rs b/internal/src/handler/profile.rs new file mode 100644 index 0000000..4b53272 --- /dev/null +++ b/internal/src/handler/profile.rs @@ -0,0 +1,21 @@ +use crate::handler::error::get_500_internal_server_error; +use crate::model::templates::ProfileTemplate; +use askama::Template; +use axum::response::Html; +use log::{error, info}; + +/// get_profile +/// Serve Profile/Biography HTML file +pub async fn get_profile() -> Html { + let profile = ProfileTemplate.render(); + match profile { + Ok(res) => { + info!("Profile askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render profile.html. {}", err); + get_500_internal_server_error() + } + } +} diff --git a/internal/src/handler/version.rs b/internal/src/handler/version.rs new file mode 100644 index 0000000..22dda07 --- /dev/null +++ b/internal/src/handler/version.rs @@ -0,0 +1,31 @@ +use crate::handler::error::get_500_internal_server_error; +use crate::model::version::Version; +use crate::model::{axum::AppState, templates::VersionTemplate}; +use askama::Template; +use axum::extract::State; +use axum::response::Html; +use log::{error, info}; + +/// get_version +/// Serve get_version HTML file +pub async fn get_version(State(app_state): State) -> Html { + let version_data = Version::new().expect("Failed to generate Version struct"); + let version = VersionTemplate { + version: version_data.version.as_str(), + environment: app_state.config.environment.as_str(), + build_hash: version_data.build_hash.as_str(), + build_date: version_data.build_date.as_str(), + } + .render(); + + match version { + Ok(res) => { + info!("Version askama template rendered."); + Html(res) + } + Err(err) => { + error!("Failed to render version.html. 
{}", err); + get_500_internal_server_error() + } + } +} From f441ed6e9928e4bffa974be2857f77ca2ba020fa Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:01:22 +0700 Subject: [PATCH 36/49] feat: add api port, repo, and usecase --- internal/src/port/api/mod.rs | 1 + internal/src/port/api/query.rs | 8 ++++++++ internal/src/repo/api.rs | 11 +++++++++++ internal/src/usecase/api.rs | 25 +++++++++++++++++++++++++ 4 files changed, 45 insertions(+) create mode 100644 internal/src/port/api/mod.rs create mode 100644 internal/src/port/api/query.rs create mode 100644 internal/src/repo/api.rs create mode 100644 internal/src/usecase/api.rs diff --git a/internal/src/port/api/mod.rs b/internal/src/port/api/mod.rs new file mode 100644 index 0000000..67350db --- /dev/null +++ b/internal/src/port/api/mod.rs @@ -0,0 +1 @@ +pub mod query; diff --git a/internal/src/port/api/query.rs b/internal/src/port/api/query.rs new file mode 100644 index 0000000..af753a4 --- /dev/null +++ b/internal/src/port/api/query.rs @@ -0,0 +1,8 @@ +use crate::model::blog::{Blog, BlogMetadata}; +use async_trait::async_trait; + +#[async_trait] +pub trait ApiQueryPort { + async fn list_metadata(&self) -> Vec; + async fn fetch(&self, metadata: BlogMetadata) -> Blog; +} diff --git a/internal/src/repo/api.rs b/internal/src/repo/api.rs new file mode 100644 index 0000000..e238dc2 --- /dev/null +++ b/internal/src/repo/api.rs @@ -0,0 +1,11 @@ +use crate::model::blog::{Blog, BlogMetadata}; +use async_trait::async_trait; +use dyn_clone::{clone_trait_object, DynClone}; + +clone_trait_object!(ApiRepo); + +#[async_trait] +pub trait ApiRepo: DynClone { + async fn list_metadata(&self) -> Vec; + async fn fetch(&self, metadata: BlogMetadata) -> Blog; +} diff --git a/internal/src/usecase/api.rs b/internal/src/usecase/api.rs new file mode 100644 index 0000000..f39fc65 --- /dev/null +++ b/internal/src/usecase/api.rs @@ -0,0 +1,25 @@ +use crate::model::blog::{Blog, BlogMetadata}; +use crate::port::api::query::ApiQueryPort; +use crate::repo::api::ApiRepo; +use async_trait::async_trait; + +#[derive(Clone)] +pub struct ApiUseCase { + pub api_repo: Box, +} + +#[async_trait] +impl ApiQueryPort for ApiUseCase { + async fn list_metadata(&self) -> Vec { + self.api_repo.list_metadata().await + } + async fn fetch(&self, metadata: BlogMetadata) -> Blog { + self.api_repo.fetch(metadata).await + } +} + +impl ApiUseCase { + pub fn new(api_repo: Box) -> ApiUseCase { + ApiUseCase { api_repo } + } +} From 67e60f83c0ce703119d3bf7ebb955334825b71ac Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:01:58 +0700 Subject: [PATCH 37/49] feat: add implementation of api usecase for github and filesystem --- internal/src/api/filesystem.rs | 87 +++++++ internal/src/api/github.rs | 449 ++++++++++++++++----------------- internal/src/api/mod.rs | 1 + 3 files changed, 311 insertions(+), 226 deletions(-) create mode 100644 internal/src/api/filesystem.rs diff --git a/internal/src/api/filesystem.rs b/internal/src/api/filesystem.rs new file mode 100644 index 0000000..737ac03 --- /dev/null +++ b/internal/src/api/filesystem.rs @@ -0,0 +1,87 @@ +use crate::model::blog::{ + Blog, BlogBody, BlogFilename, BlogId, BlogMetadata, BlogName, BlogSource, +}; +use crate::repo::api::ApiRepo; +use crate::utils::{capitalize, md_to_html}; +use async_trait::async_trait; +use log::{debug, info}; +use std::fs; +use std::path::PathBuf; + +#[derive(Clone)] +pub struct FilesystemApiUseCase { + pub blogs_dir: String, +} + +#[async_trait] +impl ApiRepo 
for FilesystemApiUseCase { + async fn list_metadata(&self) -> Vec { + let read_dir = fs::read_dir(self.blogs_dir.clone()).expect("Failed to read dir"); + let blogs_metadata: Vec = read_dir + // Collect Blog Filename + .filter_map(|blog_path| { + let blog_path_buf = blog_path.expect("Failed to get blog DirEntry").path(); + Self::process_blog_path(&self, blog_path_buf) + }) + // Collect Blog Metadata + .map(|blog_filename| Self::process_blog_metadata(&self, blog_filename)) + .collect(); + blogs_metadata + } + async fn fetch(&self, metadata: BlogMetadata) -> Blog { + let body = + md_to_html(metadata.filename.0.clone()).expect("Failed to convert markdown to html"); + debug!("Blog Body with Id {}: {}", &metadata.id.0, &body); + + Blog { + id: metadata.id, + name: metadata.name, + source: BlogSource::Filesystem, + filename: metadata.filename, + body: BlogBody(body), + } + } +} + +impl FilesystemApiUseCase { + pub async fn new(blogs_dir: String) -> FilesystemApiUseCase { + FilesystemApiUseCase { blogs_dir } + } + /// Process Blog Path from a PathBuf + /// Returned an Option String + fn process_blog_path(&self, blog_path_buf: PathBuf) -> Option { + if blog_path_buf.is_file() { + blog_path_buf + .file_name() + .expect("Failed to get filename") + .to_str() + .map(|str| str.to_owned()) + } else { + None + } + } + /// Process Blog Metadata from Blog Filename + /// Returned BlogMetadata + fn process_blog_metadata(&self, blog_filename: String) -> BlogMetadata { + let (id, name_init) = blog_filename + .split_once("-") + .expect("Failed to split filename into id and name"); + let name_lower = name_init + .replace("_", " ") + .split_once(".") + .expect("Failed to remove file extension.") + .0 + .to_string(); + let name = capitalize(&name_lower); + let filename = format!("{}{}", self.blogs_dir, &blog_filename); + info!("Blog Metadata with Id {} has been processed.", &id); + debug!("Blog Name with Id {}: {}", &id, &name); + debug!("Blog Filename with Id {}: {}", &id, &filename); + + BlogMetadata { + id: BlogId(id.to_string()), + name: BlogName(name), + filename: BlogFilename(filename), + } + } +} diff --git a/internal/src/api/github.rs b/internal/src/api/github.rs index 9299a9b..607fdcd 100644 --- a/internal/src/api/github.rs +++ b/internal/src/api/github.rs @@ -1,263 +1,260 @@ -use crate::model::blog::{Blog, BlogBody, BlogFilename, BlogId, BlogName, BlogSource}; -use crate::model::github::{GithubTree, GithubTrees}; +use crate::model::blog::{ + Blog, BlogBody, BlogFilename, BlogId, BlogMetadata, BlogName, BlogSource, +}; +use crate::model::github::{GithubBranch, GithubOwner, GithubRepository, GithubTree, GithubTrees}; +use crate::repo::api::ApiRepo; use crate::utils::capitalize; +use async_trait::async_trait; use http_body_util::BodyExt; use log::{debug, error, info, warn}; use markdown::{to_html_with_options, Options}; use octocrab; +use octocrab::models::repos::Content; use regex::Regex; use serde_json; use std::num::IntErrorKind; -// pub struct MemoryGithubRepo {} -// -// impl MemoryGithubRepo { -// pub fn new() -> MemoryGithubRepo { -// MemoryGithubRepo {} -// } -// } -// -// impl Default for MemoryGithubRepo { -// fn default() -> Self { -// MemoryGithubRepo::new() -// } -// } - -// #[async_trait] -// impl GithubRepo for MemoryGithubRepo { -/// find all() -/// An async function that -/// take String of repository owner -/// and String of repository repo -/// and String of repository branch -/// Return an Option of GithubTrees -/// -/// Example: -/// let owner = "husni-zuhdi".to_string(); -/// let repo = 
"husni-blog-resources".to_string(); -/// let branch = "main".to_string(); -/// let gh_trees = MemoryGithubRepo::new().find_all(owner, repo, branch).await?; -// async fn find_all( -// &self, -// owner: GithubOwner, -// repo: GithubRepository, -// branch: GithubBranch, -// ) -> Option { -// let tree_endpoint = format!( -// "https://api.github.com/repos/{}/{}/git/trees/{}", -// &owner, &repo, &branch -// ); -// let gh_trees = octocrab::instance()._get(tree_endpoint).await; -// -// let trees_result = match gh_trees { -// Ok(val) => { -// let body_bytes = val.into_body().collect().await.unwrap().to_bytes(); -// let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); -// let result: GithubTrees = serde_json::from_str(&body_json).unwrap(); -// Some(result) -// } -// Err(err) => { -// error!("Failed to parse Github Trees result: {}", err); -// None -// } -// }; -// -// trees_result -// } -// } - -/// get_gh_blogs() -/// An async function that -/// take String of repository owner -/// and String of repository repo -/// and String of repository branch -/// Return an Option of GithubTrees -/// -/// Example: -/// let owner = "husni-zuhdi".to_string(); -/// let repo = "husni-blog-resources".to_string(); -/// let branch = "main".to_string(); -/// let gh_trees = get_gh_blogs(owner, repo, branch).await?; -pub async fn get_gh_blogs(owner: String, repo: String, branch: String) -> Option> { - let tree_endpoint = format!( - "https://api.github.com/repos/{}/{}/git/trees/{}", - &owner, &repo, &branch - ); - let gh_trees = octocrab::instance()._get(tree_endpoint).await; +#[derive(Clone)] +pub struct GithubApiUseCase { + pub github_owner: GithubOwner, + pub github_repo: GithubRepository, + pub github_branch: GithubBranch, +} - let trees_result = match gh_trees { - Ok(val) => { - let body_bytes = val.into_body().collect().await.unwrap().to_bytes(); - let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); - let result: GithubTrees = serde_json::from_str(&body_json).unwrap(); - Some(result) - } - Err(err) => { - error!("Failed to parse Github Trees result: {}", err); - None - } - }; +#[async_trait] +impl ApiRepo for GithubApiUseCase { + async fn list_metadata(&self) -> Vec { + let trees_result = Self::fetch_github_trees(&self).await; - let mut blog_trees: Vec = Vec::new(); - match trees_result { - Some(val) => { - for tree in val.trees { - let blog_res = - get_gh_blog(tree.clone(), owner.clone(), repo.clone(), branch.clone()).await; - match blog_res { - Some(val) => blog_trees.push(val), - None => { - debug!("Skipped tree {:?}", &tree) + let mut blogs_metadata: Vec = Vec::new(); + match trees_result { + Some(github_trees) => { + for tree in github_trees.trees { + let blog_metadata = Self::process_github_metadata(&self, tree.clone()).await; + match blog_metadata { + Some(metadata) => blogs_metadata.push(metadata), + None => { + debug!("Skipped tree with path {}", &tree.path) + } } } } - } - None => { - error!("failed to filter Github Trees result") - } - }; - Some(blog_trees) -} - -async fn get_gh_blog( - tree: GithubTree, - owner: String, - repo: String, - branch: String, -) -> Option { - let tree_path = tree.path; - let gh_blog_link = format!( - "https://github.com/{}/{}/tree/{}/{}", - &owner, &repo, &branch, &tree_path - ); - let gh_raw_blog_link = format!( - "https://raw.githubusercontent.com/{}/{}/{}/{}", - &owner, &repo, &branch, &tree_path - ); - - // Check to make sure the path doesn't have a extention - if !tree_path.contains(".") { - // Get blog id with specification of 3 digit integer - 
let blog_id = tree_path.get(0..3).unwrap(); - let blog_name = tree_path.get(4..).unwrap(); + None => { + error!("Failed to filter Github Trees result") + } + }; + blogs_metadata + } + async fn fetch(&self, metadata: BlogMetadata) -> Blog { + let content = Self::fetch_github_content(&self, metadata.filename.clone()).await; - match blog_id.parse::() { - Ok(_) => { - if &blog_id != &"000" { - info!("Blog Name: {}", &blog_name); - let blog_readme_path = format!("{}/README.md", &tree_path); - let blog_content = octocrab::instance() - .repos(&owner, &repo) - .get_content() - .path(&blog_readme_path) - .r#ref(&branch) - .send() - .await; - match blog_content { - Ok(mut res) => { - let content = res.take_items(); - let decoded_content = &content[0].decoded_content().unwrap().clone(); + let blog = match content { + Some(content) => Self::process_github_content(&self, content, metadata), + None => { + error!( + "Failed to get Blog content with Blog ID {} and Name {}: File Not Found", + &metadata.id, &metadata.name + ); + None + } + }; + blog.unwrap() + } +} - let name_formated = blog_name.replace("-", " "); - let name = capitalize(&name_formated); - info!("Markdown of {} loaded", &blog_name); +impl GithubApiUseCase { + pub async fn new( + github_owner: String, + github_repo: String, + github_branch: String, + ) -> GithubApiUseCase { + GithubApiUseCase { + github_owner: GithubOwner(github_owner), + github_repo: GithubRepository(github_repo), + github_branch: GithubBranch(github_branch), + } + } + /// Fetch Github trees + /// Based on repository data from the GithubApiUseCase fields + /// Returned Optional GithubTrees + async fn fetch_github_trees(&self) -> Option { + let trees_endpoint = format!( + "https://api.github.com/repos/{}/{}/git/trees/{}", + self.github_owner, self.github_repo, self.github_branch + ); + let github_trees = octocrab::instance()._get(trees_endpoint).await; + let trees_result = match github_trees { + Ok(github_trees) => { + let body_bytes = github_trees.into_body().collect().await.unwrap().to_bytes(); + let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); + let result: GithubTrees = serde_json::from_str(&body_json).unwrap(); + Some(result) + } + Err(err) => { + error!("Failed to parse Github Trees result: {}", err); + None + } + }; + trees_result + } + /// Get blog_id with specification of 3 digit integer and blog_name + /// Return an optional 2 string for blog_id and blog_name + fn create_tree_id_and_name(tree_path: String) -> Option<(String, String)> { + let blog_id = tree_path.get(0..3).unwrap().to_string(); + let blog_name = tree_path.get(4..).unwrap().to_string(); + Some((blog_id, blog_name)) + } + /// Process Github Metadata from a GithubTree + /// Returned Optional BlogMetadata + async fn process_github_metadata(&self, tree: GithubTree) -> Option { + let filename = format!( + "https://api.github.com/repos/{}/{}/contents/{}/README.md", + self.github_owner, self.github_repo, &tree.path + ) + .to_string(); - let body = process_gh_markdown( - decoded_content.to_string(), - gh_blog_link, - gh_raw_blog_link, - ); - debug!("HTML Body of {}: {}", &blog_name, &body); + let (blog_id, blog_name) = Self::create_tree_id_and_name(tree.path.0.clone()) + .expect("Failed to spearate Blog id and name"); + let tree_is_dir = !tree.path.0.contains("."); + // Main Infrastructure is the base-level step to replicate + // all infrastructure from `husni-blog-resource` + // Ref: https://github.com/husni-zuhdi/husni-blog-resources/tree/main/000-main-infrastructure + let 
blog_id_is_not_main_infra = &blog_id != &"000".to_string(); - let id = format!("{}-g", blog_id).to_string(); - let filename = format!( - "https://api.github.com/repos/{}/{}/contents/{}", - &owner, &repo, &blog_readme_path - ) - .to_string(); + if tree_is_dir { + match blog_id.parse::() { + Ok(_) => { + if blog_id_is_not_main_infra { + let id = format!("{}-g", blog_id); + // let id = format!("{}", blog_id); + info!( + "Blog Metadata for Id {} and Name {} is processed", + &id, &blog_name + ); - Some(Blog { - id: BlogId(id), - name: BlogName(name), - source: BlogSource::Github, - filename: BlogFilename(filename), - body: BlogBody(body), - }) - } - Err(err) => { - error!( - "Failed to get Blog content with Blog ID {} and Name {}: {}", - &blog_id, &blog_name, err - ); - None - } + Some(BlogMetadata { + id: BlogId(id), + name: BlogName(blog_name), + filename: BlogFilename(filename), + }) + } else { + debug!("Folder prefix is 000-main-infrastructure. Skip this folder"); + None + } + } + Err(err) => { + if err.kind() == &IntErrorKind::InvalidDigit { + debug!("Error Kind {:?}. Skipped.", err.kind()); } - } else { - debug!("Folder prefix is 000. Skip this folder"); + warn!( + "Failed to parse Tree Path {}. Error {:?}. Skipped", + &tree.path, + err.kind() + ); None } } + } else { + info!("Tree {} is not a directory. Skipped.", &tree.path); + None + } + } + /// Fetch Github Content + /// Take a filename with type BlogFilename (should be url instead?) + /// Returned Optional octocrab::models::Content + async fn fetch_github_content(&self, url: BlogFilename) -> Option { + let github_content = octocrab::instance()._get(url.0.clone()).await; + let content = match github_content { + Ok(content) => { + let body_bytes = content.into_body().collect().await.unwrap().to_bytes(); + let body_json = String::from_utf8(body_bytes.to_vec()).unwrap(); + let result: Content = serde_json::from_str(&body_json).unwrap(); + Some(result) + } Err(err) => { - if err.kind() == &IntErrorKind::InvalidDigit { - debug!("Error Kind {:?}. Skipped.", err.kind()); - } - warn!( - "Failed to parse Tree Path {}. Error {:?}. Skipped", - &tree_path, - err.kind() + error!( + "Failed to parse Github Content for filename {}: {}", + &url, err ); None } - } - } else { - info!("Tree {} is not a folder. Skipped.", &tree_path); - None + }; + content } -} + fn process_content_markdown( + markdown: String, + gh_blog_link: String, + gh_raw_blog_link: String, + ) -> Option { + let raw_body = to_html_with_options(&markdown, &Options::gfm()) + .expect("Failed to convert html with options"); + // Regex href=.\.\/ mean + // find string with character 'href=' + // then followed by any character (I tried to use '"' but didn't work) + // then followed by '.' 
(must use escape character) + // then followed by '/' (must use escape character) + let re_href = Regex::new(r"href=.\.\/").expect("Failed to build regex href"); -fn process_gh_markdown(markdown: String, gh_blog_link: String, gh_raw_blog_link: String) -> String { - let raw_body = to_html_with_options(&markdown, &Options::gfm()) - .expect("Failed to convert html with options"); - let body = replace_gh_link(raw_body, gh_blog_link, gh_raw_blog_link); - body -} + let replaced_str_href = format!("href=\"{}/", gh_blog_link); + debug!("Replaced str: {}", &replaced_str_href); -/// replace_gh_link -/// Replace Github Blog relative links -/// with full github content links -/// Take String of markdown body -/// and String of github blog endpoint -/// then return String of updated body -fn replace_gh_link(body: String, gh_blog_link: String, gh_raw_blog_link: String) -> String { - // Regex href=.\.\/ mean - // find string with character 'href=' - // then followed by any character (I tried to use '"' but didn't work) - // then followed by '.' (must use escape character) - // then followed by '/' (must use escape character) - let re_href = Regex::new(r"href=.\.\/").expect("Failed to build regex href"); + let res_href = re_href + .replace_all(raw_body.as_str(), replaced_str_href.as_str()) + .to_string(); + debug!("Replaced Body: {}", &res_href); - let replaced_str_href = format!("href=\"{}/", gh_blog_link); - debug!("Replaced str: {}", &replaced_str_href); + // Regex src=.\.\/ mean + // find string with character 'src=' + // then followed by any character (I tried to use '"' but didn't work) + // then followed by '.' (must use escape character) + // then followed by '/' (must use escape character) + let re_src = Regex::new(r"src=.\.\/").expect("Failed to build regex src"); - let res_href = re_href - .replace_all(body.as_str(), replaced_str_href.as_str()) - .to_string(); - debug!("Replaced Body: {}", &res_href); + let replaced_str_src = format!("src=\"{}/", gh_raw_blog_link); + debug!("Replaced str: {}", &replaced_str_src); - // Regex src=.\.\/ mean - // find string with character 'src=' - // then followed by any character (I tried to use '"' but didn't work) - // then followed by '.' 
(must use escape character) - // then followed by '/' (must use escape character) - let re_src = Regex::new(r"src=.\.\/").expect("Failed to build regex src"); + let body = re_src + .replace_all(res_href.as_str(), replaced_str_src.as_str()) + .to_string(); + debug!("Replaced Body: {}", &body); + Some(body) + } + /// Process Blog Markdown from Github + /// Included replace Github Blog relative links with full github content links + /// Take String of markdown body + /// and String of github blog endpoint + /// then return String of updated body + fn process_github_content(&self, content: Content, metadata: BlogMetadata) -> Option { + let gh_blog_link = format!( + "https://github.com/{}/{}/tree/{}/{}-{}", + self.github_owner, self.github_repo, self.github_branch, &metadata.id, &metadata.name + ); + let gh_raw_blog_link = format!( + "https://raw.githubusercontent.com/{}/{}/{}/{}-{}", + self.github_owner, self.github_repo, self.github_branch, &metadata.id, &metadata.name + ); - let replaced_str_src = format!("src=\"{}/", gh_raw_blog_link); - debug!("Replaced str: {}", &replaced_str_src); + let name_formated = metadata.name.0.replace("-", " "); + let name = capitalize(&name_formated); + // let id = format!("{}-g", &metadata.id).to_string(); - let res = re_src - .replace_all(res_href.as_str(), replaced_str_src.as_str()) - .to_string(); - debug!("Replaced Body: {}", &res); + info!( + "Markdown of Blog id {} with name {} loaded", + &metadata.id, &name + ); + + let markdown = content.decoded_content().unwrap(); + let body = Self::process_content_markdown(markdown, gh_blog_link, gh_raw_blog_link) + .expect("Failed to process content body"); - res + debug!("HTML Body of {}: {}", &metadata.name, &body); + + Some(Blog { + id: metadata.id, + name: BlogName(name), + source: BlogSource::Github, + filename: metadata.filename, + body: BlogBody(body), + }) + } } diff --git a/internal/src/api/mod.rs b/internal/src/api/mod.rs index 72246d3..50f3baa 100644 --- a/internal/src/api/mod.rs +++ b/internal/src/api/mod.rs @@ -1 +1,2 @@ +pub mod filesystem; pub mod github; From 4285d6ef82ac360e3ff80c7faf6b6704f9dd3c57 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:03:07 +0700 Subject: [PATCH 38/49] feat: add chekc_id method for blog --- internal/src/port/blog/command.rs | 3 +++ internal/src/port/blog/query.rs | 3 ++- internal/src/port/mod.rs | 1 + internal/src/repo/blog.rs | 4 +++- internal/src/repo/mod.rs | 1 + internal/src/usecase/blog.rs | 5 ++++- internal/src/usecase/mod.rs | 1 + 7 files changed, 15 insertions(+), 3 deletions(-) diff --git a/internal/src/port/blog/command.rs b/internal/src/port/blog/command.rs index 92e4cff..a1c75d6 100644 --- a/internal/src/port/blog/command.rs +++ b/internal/src/port/blog/command.rs @@ -3,6 +3,9 @@ use async_trait::async_trait; #[async_trait] pub trait BlogCommandPort { + // TODO: instead of manually input + // why don't we create a struct to input the blog + // and return BlogStored instead? 
async fn add( &mut self, id: BlogId, diff --git a/internal/src/port/blog/query.rs b/internal/src/port/blog/query.rs index e387a43..045535d 100644 --- a/internal/src/port/blog/query.rs +++ b/internal/src/port/blog/query.rs @@ -1,8 +1,9 @@ -use crate::model::blog::{Blog, BlogEndPage, BlogId, BlogStartPage}; +use crate::model::blog::{Blog, BlogEndPage, BlogId, BlogStartPage, BlogStored}; use async_trait::async_trait; #[async_trait] pub trait BlogQueryPort { async fn find(&self, id: BlogId) -> Blog; async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; + async fn check_id(&self, id: BlogId) -> BlogStored; } diff --git a/internal/src/port/mod.rs b/internal/src/port/mod.rs index 21aa2c6..e53260c 100644 --- a/internal/src/port/mod.rs +++ b/internal/src/port/mod.rs @@ -1 +1,2 @@ +pub mod api; pub mod blog; diff --git a/internal/src/repo/blog.rs b/internal/src/repo/blog.rs index 460ab1b..aacee75 100644 --- a/internal/src/repo/blog.rs +++ b/internal/src/repo/blog.rs @@ -1,6 +1,6 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, - BlogStartPage, + BlogStartPage, BlogStored, }; use async_trait::async_trait; use dyn_clone::{clone_trait_object, DynClone}; @@ -11,6 +11,7 @@ clone_trait_object!(BlogRepo); pub trait BlogRepo: DynClone { async fn find(&self, id: BlogId) -> Blog; async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec; + async fn check_id(&self, id: BlogId) -> BlogStored; async fn add( &mut self, id: BlogId, @@ -19,6 +20,7 @@ pub trait BlogRepo: DynClone { source: BlogSource, body: BlogBody, ) -> Blog; + // async fn add_bulk(&mut self, blogs: Vec) -> Vec; async fn update( &mut self, id: BlogId, diff --git a/internal/src/repo/mod.rs b/internal/src/repo/mod.rs index 21aa2c6..e53260c 100644 --- a/internal/src/repo/mod.rs +++ b/internal/src/repo/mod.rs @@ -1 +1,2 @@ +pub mod api; pub mod blog; diff --git a/internal/src/usecase/blog.rs b/internal/src/usecase/blog.rs index 5b1e777..8766272 100644 --- a/internal/src/usecase/blog.rs +++ b/internal/src/usecase/blog.rs @@ -1,6 +1,6 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, - BlogStartPage, + BlogStartPage, BlogStored, }; use crate::port::blog::{command::BlogCommandPort, query::BlogQueryPort}; use crate::repo::blog::BlogRepo; @@ -19,6 +19,9 @@ impl BlogQueryPort for BlogUseCase { async fn find_blogs(&self, start: BlogStartPage, end: BlogEndPage) -> Vec { self.blog_repo.find_blogs(start, end).await } + async fn check_id(&self, id: BlogId) -> BlogStored { + self.blog_repo.check_id(id).await + } } #[async_trait] diff --git a/internal/src/usecase/mod.rs b/internal/src/usecase/mod.rs index 21aa2c6..e53260c 100644 --- a/internal/src/usecase/mod.rs +++ b/internal/src/usecase/mod.rs @@ -1 +1,2 @@ +pub mod api; pub mod blog; From 7b47d0a458214c65d1e1fb3785d4241d63af03bb Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:04:47 +0700 Subject: [PATCH 39/49] feat: add BlogMetadata, BlogStored, and implement FromRow for BlogId --- internal/src/model/blog.rs | 37 ++++++++++++++++++++++++++++++------- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs index ef098b9..7e8558c 100644 --- a/internal/src/model/blog.rs +++ b/internal/src/model/blog.rs @@ -19,6 +19,14 @@ impl Display for BlogId { } } +impl<'r> FromRow<'r, SqliteRow> for BlogId { + fn from_row(row: &'r SqliteRow) -> Result { + use 
sqlx::Row; + let id = row.try_get("id")?; + Ok(BlogId(id)) + } +} + /// BlogName /// Name of the Blog #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -75,22 +83,27 @@ impl Display for BlogBody { #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct BlogDeleted(pub bool); +/// BlogStored +/// Blog is stored in database or not +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogStored(pub bool); + /// BlogType /// Type of Blog source /// Can be: -/// - FileSystem: Blog markdown come from filesystem +/// - Filesystem: Blog markdown come from filesystem /// - Github: Blog markdown come from github repository #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub enum BlogSource { - FileSystem, + Filesystem, Github, } impl Display for BlogSource { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self { - Self::FileSystem => { - write!(f, "FileSystem") + Self::Filesystem => { + write!(f, "Filesystem") } Self::Github => { write!(f, "Github") @@ -124,10 +137,10 @@ impl<'r> FromRow<'r, SqliteRow> for Blog { let name = row.try_get("name")?; let source = match row.try_get("source")? { "github" => BlogSource::Github, - "filesystem" => BlogSource::FileSystem, + "filesystem" => BlogSource::Filesystem, &_ => { - // Default to FileSystem - BlogSource::FileSystem + // Default to Filesystem + BlogSource::Filesystem } }; let filename = row.try_get("filename")?; @@ -159,3 +172,13 @@ pub struct BlogPagination { pub start: Option, pub end: Option, } + +/// BlogMetadata +/// Minimum Metadata to query Blog +/// filename can be full filename in filesystem or url to github blog content +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlogMetadata { + pub id: BlogId, + pub name: BlogName, + pub filename: BlogFilename, +} From ec8f851825e841a837bdc700a886251ff88fce8d Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:06:23 +0700 Subject: [PATCH 40/49] feat: add type related to GithubTree and enable access data on several struct type --- internal/src/model/github.rs | 48 +++++++++++++++++++++++++++++------- 1 file changed, 39 insertions(+), 9 deletions(-) diff --git a/internal/src/model/github.rs b/internal/src/model/github.rs index a8f6f3f..738dffb 100644 --- a/internal/src/model/github.rs +++ b/internal/src/model/github.rs @@ -1,9 +1,9 @@ use serde::{Deserialize, Serialize}; use std::fmt::Display; -/// Github Owner Name +/// Github blog repository owner name #[derive(Deserialize, Serialize, Debug, Clone)] -pub struct GithubOwner(String); +pub struct GithubOwner(pub String); impl Display for GithubOwner { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -11,9 +11,9 @@ impl Display for GithubOwner { } } -/// Github Owner Repository +/// Github blog repository name #[derive(Deserialize, Serialize, Debug, Clone)] -pub struct GithubRepository(String); +pub struct GithubRepository(pub String); impl Display for GithubRepository { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -21,9 +21,9 @@ impl Display for GithubRepository { } } -/// Github Owner Branch +/// Github blog repository branch #[derive(Deserialize, Serialize, Debug, Clone)] -pub struct GithubBranch(String); +pub struct GithubBranch(pub String); impl Display for GithubBranch { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -71,15 +71,45 @@ pub enum GithubTreeType { Commit, } +/// Github tree path name +#[derive(Deserialize, Serialize, Debug, 
Clone)] +pub struct GithubTreePath(pub String); + +impl Display for GithubTreePath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Github tree sha +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubTreeSha(pub String); + +impl Display for GithubTreeSha { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// Github tree url +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GithubTreeUrl(pub String); + +impl Display for GithubTreeUrl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + /// Tree structure of git /// Reference: https://docs.github.com/en/rest/git/trees?apiVersion=2022-11-28 #[derive(Deserialize, Serialize, Debug, Clone)] pub struct GithubTree { - pub path: String, + pub path: GithubTreePath, #[serde(rename(deserialize = "mode"))] pub tree_mode: GithubTreeMode, #[serde(rename(deserialize = "type"))] pub tree_type: GithubTreeType, - pub sha: String, - pub url: String, + pub sha: GithubTreeSha, + pub url: GithubTreeUrl, } From fe75c77b18d22e15a76ba81d2cf050650ccfa8d2 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:07:40 +0700 Subject: [PATCH 41/49] feat: add check_id method and remove api-related implementation --- internal/src/database/memory.rs | 86 +++++++-------------------------- 1 file changed, 17 insertions(+), 69 deletions(-) diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index 1c6caf4..05afbe6 100644 --- a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -1,13 +1,11 @@ -use crate::api::github::get_gh_blogs; +// use crate::api::github::get_gh_blogs; use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, - BlogStartPage, + BlogStartPage, BlogStored, }; use crate::repo::blog::BlogRepo; -use crate::utils::{capitalize, md_to_html}; use async_trait::async_trait; use log::{debug, info, warn}; -use std::fs; #[derive(Clone)] pub struct MemoryBlogRepo { @@ -49,6 +47,19 @@ impl BlogRepo for MemoryBlogRepo { let result = &self.blogs[start_seq..end_seq]; result.to_vec() } + async fn check_id(&self, id: BlogId) -> BlogStored { + let result = self.blogs.iter().filter(|blog| &blog.id == &id).next(); + match result { + Some(blog) => { + info!("Blog {} is in Memory.", &blog.id.0); + BlogStored(true) + } + None => { + info!("Blog {} is not in Memory.", &id.0); + BlogStored(false) + } + } + } async fn add( &mut self, id: BlogId, @@ -131,71 +142,8 @@ impl BlogRepo for MemoryBlogRepo { impl MemoryBlogRepo { pub fn new() -> MemoryBlogRepo { - let dir = Some("./statics/blogs/".to_string()); - Self::from_dir(dir) - } - - /// Async function to get BlogsData from github - /// Borrowed `owner`, `repo`, and `branch` String - pub async fn from_github(owner: &String, repo: &String, branch: &String) -> Self { - let dir = Some("./statics/blogs/".to_string()); - let mut blog_data = Self::from_dir(dir).blogs; - let mut gh_blog_data = - get_gh_blogs(owner.to_string(), repo.to_string(), branch.to_string()) - .await - .expect("Failed to get github blog data"); - blog_data.append(&mut gh_blog_data); - Self { blogs: blog_data } - } - - /// Create MemoryBlogRepo from directory - pub fn from_dir(dir: Option) -> Self { - let directory = dir.clone().expect("Failed to get directory"); - let static_path = fs::read_dir(directory.as_str()).unwrap(); - - let blogs_paths: 
Vec = static_path - .filter_map(|blog_path| { - let path = blog_path.ok().expect("Failed to get blog path").path(); - if path.is_file() { - path.file_name() - .expect("Failed to get filename") - .to_str() - .map(|s| s.to_owned()) - } else { - None - } - }) - .collect(); - - let blogs: Vec = blogs_paths - .iter() - .map(|blog_path| { - let (id, name_init) = blog_path - .split_once("-") - .expect("Failed to split filename into id and name"); - let name_formated = name_init.replace("_", " "); - let (name_lower, _) = name_formated - .split_once(".") - .expect("Failed to remove file extension"); - let name = capitalize(name_lower); - let fullpath = format!("{}{}", directory, blog_path); - - info!("markdown loaded: {}", fullpath); - - let body = md_to_html(fullpath).expect("Failed to convert markdown to html"); - Blog { - id: BlogId(id.to_string()), - name: BlogName(name.to_string()), - source: BlogSource::FileSystem, - filename: BlogFilename(blog_path.to_owned()), - body: BlogBody(body), - } - }) - .collect(); - - debug!("Blogs: {:?}", blogs); - - Self { blogs } + let blogs: Vec = Vec::new(); + MemoryBlogRepo { blogs } } } From 28ea638439e90fbaf366a9e2e2e1a6662268f984 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:07:56 +0700 Subject: [PATCH 42/49] feat: add check_id method --- internal/src/database/sqlite.rs | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/internal/src/database/sqlite.rs b/internal/src/database/sqlite.rs index e3fccff..7477071 100644 --- a/internal/src/database/sqlite.rs +++ b/internal/src/database/sqlite.rs @@ -1,12 +1,12 @@ use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, - BlogStartPage, + BlogStartPage, BlogStored, }; use crate::repo::blog::BlogRepo; use async_trait::async_trait; use log::{debug, info}; use sqlx::sqlite::SqlitePool; -use sqlx::{query, query_as}; +use sqlx::{query, query_as, Sqlite}; #[derive(Clone)] pub struct SqliteBlogRepo { @@ -52,6 +52,26 @@ impl BlogRepo for SqliteBlogRepo { } rows } + async fn check_id(&self, id: BlogId) -> BlogStored { + let blog_id = id.0; + let prep_query = "SELECT id FROM blogs WHERE id = $1 ORDER BY id"; + debug!("Executing query {} for id {}", &prep_query, &blog_id); + + match query_as::(&prep_query) + .bind(&blog_id) + .fetch_one(&self.pool) + .await + { + Ok(id) => { + info!("Blog {} is in Memory.", &id.0); + BlogStored(true) + } + Err(err) => { + info!("Blog {} is not in Memory. 
Error: {}", &blog_id, err); + BlogStored(false) + } + } + } async fn add( &mut self, id: BlogId, From 8f1d70cf5f89847271bfac22a80baaa8903d4e80 Mon Sep 17 00:00:00 2001 From: "husni.zuhdi@accelbyte.net" Date: Fri, 6 Sep 2024 03:08:33 +0700 Subject: [PATCH 43/49] feat: update state_factory to implement new api usecases --- internal/src/app.rs | 74 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 60 insertions(+), 14 deletions(-) diff --git a/internal/src/app.rs b/internal/src/app.rs index 751a39b..a291929 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -1,7 +1,12 @@ +use crate::api::filesystem::FilesystemApiUseCase; +use crate::api::github::GithubApiUseCase; use crate::database::memory::MemoryBlogRepo; use crate::database::sqlite::SqliteBlogRepo; use crate::handler; use crate::model::axum::AppState; +use crate::port::blog::command::BlogCommandPort; +use crate::port::blog::query::BlogQueryPort; +use crate::repo::api::ApiRepo; use crate::{config::Config, usecase::blog::BlogUseCase}; use axum::{ routing::{get, get_service}, @@ -48,25 +53,66 @@ pub async fn app() -> () { /// Build App State for Axum Application async fn state_factory(config: Config) -> AppState { // Setup blog use case - let blog_usecase = if config.data_source == "sqlite" && config.database_url != "" { + let data_source_is_configured_sqlite = + config.data_source == "sqlite" && config.database_url != ""; + let github_api_is_enabled = + !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty(); + + let mut blog_uc = if data_source_is_configured_sqlite { // Use SqliteBlogRepo let repo = SqliteBlogRepo::new(config.database_url.clone()).await; - Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) + BlogUseCase::new(Box::new(repo)) } else { - // Use MemoryBlogRepo - if !config.gh_owner.is_empty() && !config.gh_repo.is_empty() && !config.gh_branch.is_empty() - { - // Use from_github method - let repo = - MemoryBlogRepo::from_github(&config.gh_owner, &config.gh_repo, &config.gh_branch) + // // Use MemoryBlogRepo + let repo = MemoryBlogRepo::default(); + BlogUseCase::new(Box::new(repo)) + // } + }; + + let fs_usecase = FilesystemApiUseCase::new("./statics/blogs/".to_string()).await; + let blogs_metadata = fs_usecase.list_metadata().await; + for metadata in blogs_metadata { + // Check if blog id is in the database + let blog_is_not_stored = !blog_uc.check_id(metadata.id.clone()).await.0; + if blog_is_not_stored { + info!("Start to fetch Blog {}.", &metadata.id); + let blog = fs_usecase.fetch(metadata.clone()).await; + info!("Finished to fetch Blog {}.", &metadata.id); + + info!("Start to store Blog {}.", &metadata.id); + let _ = blog_uc + .add(blog.id, blog.name, blog.filename, blog.source, blog.body) + .await; + info!("Finished to store Blog {}.", &metadata.id); + } + } + + if github_api_is_enabled { + let github_usecase = GithubApiUseCase::new( + config.gh_owner.clone(), + config.gh_repo.clone(), + config.gh_branch.clone(), + ) + .await; + let blogs_metadata = github_usecase.list_metadata().await; + for metadata in blogs_metadata { + // Check if blog id is in the database + let blog_is_not_stored = !blog_uc.check_id(metadata.id.clone()).await.0; + if blog_is_not_stored { + info!("Start to fetch Blog {}.", &metadata.id); + let blog = github_usecase.fetch(metadata.clone()).await; + info!("Finished to fetch Blog {}.", &metadata.id); + + info!("Start to store Blog {}.", &metadata.id); + let _ = blog_uc + .add(blog.id, blog.name, blog.filename, blog.source, blog.body) .await; - 
Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) - } else { - // Use Default method - let repo = MemoryBlogRepo::default(); - Arc::new(Mutex::new(BlogUseCase::new(Box::new(repo)))) + info!("Finished to store Blog {}.", &metadata.id); + } } - }; + } + + let blog_usecase = Arc::new(Mutex::new(blog_uc)); AppState { config, From 764f7285c7dfd8fea0785213dee425ddc8a30bc0 Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:35:00 +0700 Subject: [PATCH 44/49] chore: change error.rs to status.rs and their implementation --- internal/src/app.rs | 2 +- internal/src/handler/blog.rs | 2 +- internal/src/handler/mod.rs | 2 +- internal/src/handler/profile.rs | 2 +- internal/src/handler/{error.rs => status.rs} | 0 internal/src/handler/version.rs | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) rename internal/src/handler/{error.rs => status.rs} (100%) diff --git a/internal/src/app.rs b/internal/src/app.rs index a291929..61204d1 100644 --- a/internal/src/app.rs +++ b/internal/src/app.rs @@ -43,7 +43,7 @@ pub async fn app() -> () { get_service(ServeFile::new("./statics/styles.css")), ) .with_state(app_state) - .fallback(get(handler::error::get_404_not_found)); + .fallback(get(handler::status::get_404_not_found)); // Start Axum Application let listener = tokio::net::TcpListener::bind(endpoint).await.unwrap(); diff --git a/internal/src/handler/blog.rs b/internal/src/handler/blog.rs index 8ac94c6..b8cefd8 100644 --- a/internal/src/handler/blog.rs +++ b/internal/src/handler/blog.rs @@ -1,4 +1,4 @@ -use crate::handler::error::get_500_internal_server_error; +use crate::handler::status::get_500_internal_server_error; use crate::model::blog::{BlogEndPage, BlogId, BlogPagination, BlogStartPage}; use crate::model::{ axum::AppState, diff --git a/internal/src/handler/mod.rs b/internal/src/handler/mod.rs index 35db4b8..f35b641 100644 --- a/internal/src/handler/mod.rs +++ b/internal/src/handler/mod.rs @@ -1,6 +1,6 @@ pub mod blog; -pub mod error; pub mod profile; +pub mod status; pub mod version; // Note: In axum [example](https://docs.rs/axum/latest/axum/response/index.html#building-responses) diff --git a/internal/src/handler/profile.rs b/internal/src/handler/profile.rs index 4b53272..148fe67 100644 --- a/internal/src/handler/profile.rs +++ b/internal/src/handler/profile.rs @@ -1,4 +1,4 @@ -use crate::handler::error::get_500_internal_server_error; +use crate::handler::status::get_500_internal_server_error; use crate::model::templates::ProfileTemplate; use askama::Template; use axum::response::Html; diff --git a/internal/src/handler/error.rs b/internal/src/handler/status.rs similarity index 100% rename from internal/src/handler/error.rs rename to internal/src/handler/status.rs diff --git a/internal/src/handler/version.rs b/internal/src/handler/version.rs index 22dda07..8d7623e 100644 --- a/internal/src/handler/version.rs +++ b/internal/src/handler/version.rs @@ -1,4 +1,4 @@ -use crate::handler::error::get_500_internal_server_error; +use crate::handler::status::get_500_internal_server_error; use crate::model::version::Version; use crate::model::{axum::AppState, templates::VersionTemplate}; use askama::Template; From 049022756a4cc295f5550783bfaca878df046ca0 Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:35:31 +0700 Subject: [PATCH 45/49] chore: remove unused comment --- internal/src/database/memory.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/src/database/memory.rs b/internal/src/database/memory.rs index 05afbe6..74e3c6c 100644 --- 
a/internal/src/database/memory.rs +++ b/internal/src/database/memory.rs @@ -1,4 +1,3 @@ -// use crate::api::github::get_gh_blogs; use crate::model::blog::{ Blog, BlogBody, BlogDeleted, BlogEndPage, BlogFilename, BlogId, BlogName, BlogSource, BlogStartPage, BlogStored, From 42aa66e9d9f46fe1d0140d46a3fdd5f256b20b01 Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:36:31 +0700 Subject: [PATCH 46/49] chore: move processing markdwon function to a method under filesystem --- internal/src/api/filesystem.rs | 30 ++++++++++++++++++++++++++++-- internal/src/utils.rs | 29 ----------------------------- 2 files changed, 28 insertions(+), 31 deletions(-) diff --git a/internal/src/api/filesystem.rs b/internal/src/api/filesystem.rs index 737ac03..a74eecd 100644 --- a/internal/src/api/filesystem.rs +++ b/internal/src/api/filesystem.rs @@ -5,6 +5,7 @@ use crate::repo::api::ApiRepo; use crate::utils::{capitalize, md_to_html}; use async_trait::async_trait; use log::{debug, info}; +use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions}; use std::fs; use std::path::PathBuf; @@ -29,8 +30,8 @@ impl ApiRepo for FilesystemApiUseCase { blogs_metadata } async fn fetch(&self, metadata: BlogMetadata) -> Blog { - let body = - md_to_html(metadata.filename.0.clone()).expect("Failed to convert markdown to html"); + let body = Self::process_markdown(metadata.filename.0.clone()) + .expect("Failed to convert markdown to html"); debug!("Blog Body with Id {}: {}", &metadata.id.0, &body); Blog { @@ -84,4 +85,29 @@ impl FilesystemApiUseCase { filename: BlogFilename(filename), } } + /// Process Markdown + /// take String of filename and convert markdown file into html with option + /// return String of converted markdown in html or String of error + fn process_markdown(filename: String) -> Result { + let body_md = + fs::read_to_string(filename.clone()).expect("Failed to read markdown blog file"); + debug!("Markdown Body for filename {}: {}", &filename, body_md); + + let html = to_html_with_options( + &body_md, + &Options { + parse: ParseOptions { + constructs: Constructs { + // In case you want to activeat frontmatter in the future + // frontmatter: true, + ..Constructs::gfm() + }, + ..ParseOptions::gfm() + }, + compile: CompileOptions::gfm(), + }, + ) + .expect("Failed to convert html with options"); + Ok(html) + } } diff --git a/internal/src/utils.rs b/internal/src/utils.rs index e3a1d23..0ae2330 100644 --- a/internal/src/utils.rs +++ b/internal/src/utils.rs @@ -1,32 +1,3 @@ -use log::debug; -use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions}; -use std::fs; - -/// md_to_html: Markdown to HTML -/// take String of filename -/// return String of converted markdown in html or String of error -pub fn md_to_html(filename: String) -> Result { - let body_md = fs::read_to_string(filename.clone()).expect("Failed to read markdown blog file"); - debug!("Markdown Body for filename {}: {}", &filename, body_md); - - let html = to_html_with_options( - &body_md, - &Options { - parse: ParseOptions { - constructs: Constructs { - // In case you want to activeat frontmatter in the future - // frontmatter: true, - ..Constructs::gfm() - }, - ..ParseOptions::gfm() - }, - compile: CompileOptions::gfm(), - }, - ) - .expect("Failed to convert html with options"); - Ok(html) -} - /// capitalize /// Capitalize the first character in s. 
/// Take borrowed str of s From 6170fc025c4b0351d6201fdb49ae7edaedb8f33d Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:37:02 +0700 Subject: [PATCH 47/49] chore: move method documentation --- internal/src/api/github.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/internal/src/api/github.rs b/internal/src/api/github.rs index 607fdcd..e166120 100644 --- a/internal/src/api/github.rs +++ b/internal/src/api/github.rs @@ -181,6 +181,11 @@ impl GithubApiUseCase { }; content } + /// Process Content Markdown + /// Included replace Github Blog relative links with full github content links + /// Take String of markdown body + /// and String of github blog endpoint and github raw blog link + /// then return an optional string of processed markdown fn process_content_markdown( markdown: String, gh_blog_link: String, @@ -219,11 +224,8 @@ impl GithubApiUseCase { debug!("Replaced Body: {}", &body); Some(body) } - /// Process Blog Markdown from Github - /// Included replace Github Blog relative links with full github content links - /// Take String of markdown body - /// and String of github blog endpoint - /// then return String of updated body + /// Process Github Content and Metadata + /// Returned an optional Blog fn process_github_content(&self, content: Content, metadata: BlogMetadata) -> Option { let gh_blog_link = format!( "https://github.com/{}/{}/tree/{}/{}-{}", @@ -236,7 +238,6 @@ impl GithubApiUseCase { let name_formated = metadata.name.0.replace("-", " "); let name = capitalize(&name_formated); - // let id = format!("{}-g", &metadata.id).to_string(); info!( "Markdown of Blog id {} with name {} loaded", From c80d715f8f5f1dea82559b9935d4a552ebfe9fa2 Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:37:30 +0700 Subject: [PATCH 48/49] chore: add TODO to change BlogId to int --- internal/src/model/blog.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/src/model/blog.rs b/internal/src/model/blog.rs index 7e8558c..41727b9 100644 --- a/internal/src/model/blog.rs +++ b/internal/src/model/blog.rs @@ -4,6 +4,7 @@ use std::fmt::Display; /// BlogId /// Identifier of Blog +/// TODO: change it to integer32 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct BlogId(pub String); From d2091d8bb8afca6290d9473a6c7ece76a99885ed Mon Sep 17 00:00:00 2001 From: Husni Zuhdi Date: Sat, 7 Sep 2024 01:39:18 +0700 Subject: [PATCH 49/49] chore: remove unused md_to_html --- internal/src/api/filesystem.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/src/api/filesystem.rs b/internal/src/api/filesystem.rs index a74eecd..c0c1d5c 100644 --- a/internal/src/api/filesystem.rs +++ b/internal/src/api/filesystem.rs @@ -2,7 +2,7 @@ use crate::model::blog::{ Blog, BlogBody, BlogFilename, BlogId, BlogMetadata, BlogName, BlogSource, }; use crate::repo::api::ApiRepo; -use crate::utils::{capitalize, md_to_html}; +use crate::utils::capitalize; use async_trait::async_trait; use log::{debug, info}; use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions};
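
The GithubTreePath, GithubTreeSha, and GithubTreeUrl wrappers added above each repeat the same Display body by hand. If that boilerplate keeps growing, a small declarative macro can generate the impls; the sketch below is standalone and illustrative only, the impl_display name is invented, and the serde derives of the real newtypes are left out to keep it dependency-free.

    use std::fmt::Display;

    // Minimal sketch: generate identical Display impls for String-backed newtypes.
    // The macro name `impl_display` is hypothetical, not part of the crate.
    macro_rules! impl_display {
        ($($name:ident),+ $(,)?) => {
            $(
                impl Display for $name {
                    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(f, "{}", self.0)
                    }
                }
            )+
        };
    }

    pub struct GithubTreePath(pub String);
    pub struct GithubTreeSha(pub String);
    pub struct GithubTreeUrl(pub String);

    impl_display!(GithubTreePath, GithubTreeSha, GithubTreeUrl);

    fn main() {
        let sha = GithubTreeSha("c80d715".to_string());
        println!("{sha}"); // prints: c80d715
    }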
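
The #[serde(rename(deserialize = ...))] attributes on GithubTree map the mode and type keys of the GitHub git/trees payload onto tree_mode and tree_type. A self-contained deserialization check, with String stand-ins for GithubTreeMode and GithubTreeType and a made-up payload shaped like a single tree entry (requires serde with the derive feature and serde_json):

    use serde::Deserialize;

    // Pared-down stand-ins for the crate's newtypes, just enough to
    // deserialize one tree entry; the real types carry more derives.
    #[derive(Deserialize, Debug)]
    struct GithubTreePath(String);
    #[derive(Deserialize, Debug)]
    struct GithubTreeSha(String);
    #[derive(Deserialize, Debug)]
    struct GithubTreeUrl(String);

    #[derive(Deserialize, Debug)]
    struct GithubTree {
        path: GithubTreePath,
        #[serde(rename(deserialize = "mode"))]
        tree_mode: String, // stand-in for GithubTreeMode
        #[serde(rename(deserialize = "type"))]
        tree_type: String, // stand-in for GithubTreeType
        sha: GithubTreeSha,
        url: GithubTreeUrl,
    }

    fn main() {
        // Illustrative payload only; field values are invented.
        let json = r#"{
            "path": "blogs/001-hello.md",
            "mode": "100644",
            "type": "blob",
            "sha": "abc123",
            "url": "https://api.github.com/repos/owner/repo/git/blobs/abc123"
        }"#;
        let tree: GithubTree = serde_json::from_str(json).expect("valid tree entry");
        println!("{:?}", tree);
    }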
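
The in-memory check_id walks the Vec with filter(..).next() only to throw the match away; Iterator::any states the existence test directly. A pared-down, runnable illustration with stand-in Blog, BlogId, and BlogStored types rather than the crate's own models:

    // Stand-ins for the crate's model types, just enough for the example.
    #[derive(Debug, Clone, PartialEq)]
    struct BlogId(String);

    #[derive(Debug, Clone)]
    struct Blog {
        id: BlogId,
    }

    struct BlogStored(bool);

    struct MemoryBlogRepo {
        blogs: Vec<Blog>,
    }

    impl MemoryBlogRepo {
        // Same behaviour as the patched check_id, using Iterator::any for the
        // existence test instead of filter(..).next().
        fn check_id(&self, id: &BlogId) -> BlogStored {
            BlogStored(self.blogs.iter().any(|blog| &blog.id == id))
        }
    }

    fn main() {
        let repo = MemoryBlogRepo {
            blogs: vec![Blog { id: BlogId("001".to_string()) }],
        };
        assert!(repo.check_id(&BlogId("001".to_string())).0);
        assert!(!repo.check_id(&BlogId("999".to_string())).0);
        println!("check_id behaves as expected");
    }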
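
With the directory scan gone, MemoryBlogRepo::new only builds an empty Vec, and app.rs already reaches it through MemoryBlogRepo::default(). Assuming blogs stays the only field and no hand-written Default impl exists elsewhere, the two constructors could collapse into a derived Default, roughly like this standalone sketch:

    // Stand-in Blog type; the real one lives in crate::model::blog.
    #[derive(Clone, Debug)]
    struct Blog;

    #[derive(Clone, Default)]
    struct MemoryBlogRepo {
        blogs: Vec<Blog>,
    }

    impl MemoryBlogRepo {
        // new() is now just an alias for the derived Default.
        fn new() -> Self {
            Self::default()
        }
    }

    fn main() {
        let repo = MemoryBlogRepo::new();
        assert!(repo.blogs.is_empty());
        println!("empty in-memory repo created");
    }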
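
The SQLite check_id selects a row and treats the Err branch of fetch_one as "not stored" (its log text still says "in Memory" even though this repo talks to SQLite). An alternative is to let the database answer the existence question itself; a sketch with sqlx::query_scalar against the same blogs table, where blog_exists and its error handling are illustrative rather than part of the series:

    use log::info;
    use sqlx::sqlite::SqlitePool;

    // Sketch of an alternative existence check: SQLite computes EXISTS(..)
    // and the result is decoded straight into a bool. Table and column names
    // match the query used by check_id above; everything else is illustrative.
    async fn blog_exists(pool: &SqlitePool, blog_id: &str) -> bool {
        let stored: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM blogs WHERE id = $1)")
            .bind(blog_id)
            .fetch_one(pool)
            .await
            .unwrap_or(false);
        info!("Blog {} stored in SQLite: {}", blog_id, stored);
        stored
    }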
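
The two ingestion loops in state_factory, one for the filesystem source and one for GitHub, differ only in which ApiRepo implementation they drive. A hypothetical helper could factor out the shared check-fetch-store sequence; the signatures below are inferred from how state_factory calls list_metadata, fetch, check_id, and add, not taken from repo/api.rs, and the function itself is not part of the patch series.

    use crate::port::blog::command::BlogCommandPort;
    use crate::port::blog::query::BlogQueryPort;
    use crate::repo::api::ApiRepo;
    use crate::usecase::blog::BlogUseCase;
    use log::info;

    // Hypothetical helper: fetch and store every blog from one source that is
    // not already present in the repository.
    async fn sync_blogs(api: &impl ApiRepo, blog_uc: &mut BlogUseCase) {
        for metadata in api.list_metadata().await {
            let already_stored = blog_uc.check_id(metadata.id.clone()).await.0;
            if already_stored {
                continue;
            }
            info!("Start to fetch Blog {}.", &metadata.id);
            let blog = api.fetch(metadata.clone()).await;
            info!("Start to store Blog {}.", &metadata.id);
            let _ = blog_uc
                .add(blog.id, blog.name, blog.filename, blog.source, blog.body)
                .await;
            info!("Finished to store Blog {}.", &metadata.id);
        }
    }

state_factory could then call sync_blogs(&fs_usecase, &mut blog_uc).await unconditionally and sync_blogs(&github_usecase, &mut blog_uc).await when the GitHub settings are present, keeping a single copy of the loop.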
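
process_markdown wires the markdown crate's GFM options to a file read from disk. The same conversion can be exercised on an in-memory string, which is handy for unit tests; the snippet assumes the 1.x markdown API already used above, and the sample document is made up:

    use markdown::{to_html_with_options, CompileOptions, Constructs, Options, ParseOptions};

    fn main() {
        // Same GFM options process_markdown uses, applied to an in-memory
        // string instead of a file read from ./statics/blogs/.
        let source = "# Hello\n\n- [x] GFM task lists render as checkboxes\n";
        let options = Options {
            parse: ParseOptions {
                constructs: Constructs::gfm(),
                ..ParseOptions::gfm()
            },
            compile: CompileOptions::gfm(),
        };
        let html = to_html_with_options(source, &options)
            .expect("Failed to convert html with options");
        println!("{html}");
    }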
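
The TODO on BlogId points at switching the identifier from String to an integer. One possible shape, following the same newtype-plus-Display pattern the models already use; the derives shown are a guess, and whatever serde or sqlx attributes the rest of the crate needs are omitted here:

    use std::fmt::Display;

    // Hypothetical future form of BlogId, per the TODO above. Serde derives
    // are left out only to keep the snippet dependency-free.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct BlogId(pub i32);

    impl Display for BlogId {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    fn main() {
        let id = BlogId(1);
        println!("Blog {}", id); // numeric ids format the same way as before
    }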