path: root/src
author    cel 🌸 <cel@bunny.garden>    2025-06-05 03:50:36 +0100
committer cel 🌸 <cel@bunny.garden>    2025-06-05 03:51:28 +0100
commit    e313c40d246ac7545a1907621841090664bf61ae
tree      1eb8d9435da628b6bae242c5cfb89f939d47fc21 /src
parent    811ff64dd08c8835d19c57fd959e42a28d800a53
feat: database functions (HEAD, main)
Diffstat (limited to 'src')
-rw-r--r--  src/db.rs         347
-rw-r--r--  src/error.rs       18
-rw-r--r--  src/lib.rs          4
-rw-r--r--  src/task.rs       110
-rw-r--r--  src/timestamp.rs    1
5 files changed, 383 insertions, 97 deletions
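In brief, this commit replaces the async sqlx pool with a synchronous rusqlite connection and fills in the CRUD surface: create/read/update/delete for tasks (including their categories), create/read/delete for logs, and the TaskFilter/LogFilter enums used for querying. A minimal usage sketch of the new Db API follows, adapted from the tests added in src/db.rs below; it is crate-internal (Db is not made public in this commit), and the log calls at the end are an assumption based only on the new method signatures, since the corresponding tests are still stubs:

    use std::collections::HashSet;

    use crate::task::{Category, Log, Task};
    use crate::Result;

    use super::{Db, LogFilter};

    #[test]
    fn sketch() -> Result<()> {
        // In-memory database; runs migrations/1.sql on open.
        let db = Db::create_connect_memory().expect("open in-memory db");

        // Create a task with one category, then read it back by name.
        let task = Task::new(
            "piano".to_string(),
            None,
            Some("practice piano".to_string()),
            Some(HashSet::from([Category::new("music".to_string())])),
        );
        db.create_task(&task)?;
        assert_eq!(task, db.read_task("piano")?);

        // Record a completion for the task and query it back by task name
        // (assumed usage; not covered by a test in this commit).
        db.create_log("piano", &Log::new_now())?;
        let logs = db.read_logs(vec![LogFilter::Task("piano".to_string())])?;
        assert_eq!(logs.len(), 1);

        Ok(())
    }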
diff --git a/src/db.rs b/src/db.rs
index db5534c..6cdf24f 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -1,62 +1,323 @@
-use sqlx::sqlite::SqlitePool;
+use std::{collections::HashSet, path::Path};
-use crate::{task::Task, Result};
+use chrono::NaiveDateTime;
+use rusqlite::{params, params_from_iter, Connection, ToSql};
+use uuid::Uuid;
-struct Db {
- pool: SqlitePool,
-}
+use crate::{
+ error::DatabaseOpenError,
+ task::{Category, Log, Task},
+ Result,
+};
+
+struct Db(Connection);
impl Db {
- pub async fn create_task(&self, task: Task) -> Result<Task> {
- let id = sqlx::query!(
- "insert into tasks ( name, cron, archived, description ) values ( ?1, ?2, ?3, ?4 )",
- task.name,
- task.cron,
- task.archived,
- task.description
- )
- .execute(&self.pool)
- .await?
- .last_insert_rowid();
-
- for category in task.categories.clone() {
- let category_id = sqlx::query!(
- "insert or ignore into categories ( name ) values ( ?1 )",
- category
- )
- .execute(&self.pool)
- .await?
- .last_insert_rowid();
-
- sqlx::query!(
+ pub fn create_connect_and_migrate(
+ path: impl AsRef<Path>,
+ ) -> std::result::Result<Self, DatabaseOpenError> {
+ if let Some(dir) = path.as_ref().parent() {
+ if dir.is_dir() {
+ } else {
+ std::fs::create_dir_all(dir)?;
+ }
+ let _file = std::fs::OpenOptions::new()
+ .append(true)
+ .create(true)
+ .open(path.as_ref())?;
+ }
+ let url = format!(
+ "{}",
+ path.as_ref()
+ .to_str()
+ .ok_or(DatabaseOpenError::InvalidPath)?
+ );
+ // let db = SqlitePool::connect(&url).await?;
+ // migrate!().run(&db).await?;
+ // Ok(Self { db })
+ let db = Connection::open(url)?;
+ db.execute_batch(include_str!("../migrations/1.sql"))?;
+ Ok(Self(db))
+ }
+
+ pub fn create_connect_memory() -> std::result::Result<Self, DatabaseOpenError> {
+ let db = Connection::open_in_memory()?;
+ db.execute_batch(include_str!("../migrations/1.sql"))?;
+ Ok(Self(db))
+ }
+
+ pub fn create_task(&self, task: &Task) -> Result<()> {
+ let task_id = Uuid::new_v4();
+ self.0.execute(
+ "insert into tasks ( id, name, cron, archived, description ) values ( ?1, ?2, ?3, ?4, ?5 )",
+ (task_id, &task.name, &task.cron, task.archived, &task.description),
+ )?;
+
+ for category in &task.categories {
+ let category_id = Uuid::new_v4();
+ self.0.execute(
+ "insert or ignore into categories ( id, name ) values ( ?1, ?2 )",
+ (category_id, category),
+ )?;
+
+ self.0.execute(
+ "insert into tasks_categories ( task_id, category_id ) values ( ?1, ?2 )",
+ [task_id, category_id],
+ )?;
+ }
+
+ Ok(())
+ }
+
+ pub fn read_task(&self, name: impl AsRef<str>) -> Result<Task> {
+ let name = name.as_ref();
+ let (task_id, mut task) = self
+ .0
+ .prepare("select id, name, cron, archived, description from tasks where name = ?1")?
+ .query_one([name], |row| {
+ Ok((
+ row.get::<_, Uuid>(0)?,
+ Task {
+ name: row.get(1)?,
+ cron: row.get(2)?,
+ archived: row.get(3)?,
+ description: row.get(4)?,
+ categories: HashSet::new(),
+ },
+ ))
+ })?;
+
+ task.categories = self
+ .0
+ .prepare("select name from tasks_categories join categories where task_id = ?1")?
+ .query_map([task_id], |row| Ok(Category::new(row.get(0)?)))?
+ .collect::<std::result::Result<HashSet<_>, _>>()?;
+
+ Ok(task)
+ }
+
+ // TODO: filter
+ pub fn read_tasks(&self, filter: Vec<TaskFilter>) -> Result<Vec<Task>> {
+ let mut tasks = self
+ .0
+ .prepare("select name, cron, archived, description, id from tasks")?
+ .query_map([], |row| {
+ Ok((
+ row.get::<_, Uuid>(4)?,
+ Task {
+ name: row.get(0)?,
+ cron: row.get(1)?,
+ archived: row.get(2)?,
+ description: row.get(3)?,
+ categories: HashSet::new(),
+ },
+ ))
+ })?
+ .collect::<std::result::Result<Vec<_>, _>>()?;
+
+ for (task_id, task) in &mut tasks {
+ let categories = self
+ .0
+ .prepare("select name from tasks_categories join categories where task_id = ?1")?
+ .query_map([*task_id], |row| Ok(Category::new(row.get(0)?)))?
+ .collect::<std::result::Result<HashSet<_>, _>>()?;
+ task.categories = categories;
+ }
+
+ Ok(tasks.into_iter().map(|(_id, task)| task).collect())
+ }
+
+ pub fn update_task(&self, name: impl AsRef<str>, updated_task: &Task) -> Result<()> {
+ let task_id = self.0
+ .prepare("update tasks set name = ?1, cron = ?2, archived = ?3, description = ?4 where name = ?5 returning id")?.query_one( (&updated_task.name, &updated_task.cron, updated_task.archived, &updated_task.description, name.as_ref()), |row| {
+ Ok(row.get::<_, Uuid>(0)?)
+ })?;
+ self.0
+ .execute("delete from tasks_categories where task_id = ?1", [task_id])?;
+ // TODO: clean up orphaned groups....
+ for category in &updated_task.categories {
+ let category_id = Uuid::new_v4();
+ self.0.execute(
+ "insert or ignore into categories ( id, name ) values ( ?1, ?2 )",
+ (category_id, category),
+ )?;
+
+ self.0.execute(
"insert into tasks_categories ( task_id, category_id ) values ( ?1, ?2 )",
- id,
- category_id
- )
- .execute(&self.pool)
- .await?;
+ [task_id, category_id],
+ )?;
}
+ Ok(())
+ }
- Ok(task.add_id(id))
+ pub fn delete_task(&self, name: impl AsRef<str>) -> Result<()> {
+ self.0
+ .execute("delete from tasks where name = ?1", [name.as_ref()])?;
+ Ok(())
}
- pub async fn read_tasks(&self, select: Vec<TaskSelect>) -> Result<Vec<Task>> {
- Ok(sqlx::query!("select * from tasks")
- .fetch_all(&self.pool)
- .await?)
+ pub fn create_log(&self, task_name: impl AsRef<str>, log: &Log) -> Result<()> {
+ self.0.execute("insert into log ( id, task_id, timestamp ) values ( ?1, (select id from tasks where name = ?2), ?3 )", (log.id, task_name.as_ref(), log.timestamp))?;
+ Ok(())
}
- pub async fn update_tasks(&self, select: Vec<TaskSelect>) -> Result<()> {
- todo!()
+ pub fn read_logs(&self, filter: Vec<LogFilter>) -> Result<Vec<Log>> {
+ let mut query = "select id, timestamp from log".to_string();
+
+ if !filter.is_empty() {
+ query.push_str(" where ")
+ }
+ query.push_str(
+ &filter
+ .iter()
+ .map(|filter| filter.where_clause())
+ .collect::<Vec<_>>()
+ .join(", "),
+ );
+
+ let logs = self
+ .0
+ .prepare(&query)?
+ .query_map(params_from_iter(filter), |row| {
+ Ok(Log {
+ id: row.get(0)?,
+ timestamp: row.get(1)?,
+ })
+ })?
+ .collect::<std::result::Result<Vec<_>, _>>()?;
+
+ Ok(logs)
}
- pub async fn delete_tasks(&self, select: Vec<TaskSelect>) -> Result<()> {
- todo!()
+ pub fn delete_log(&self, log: &Log) -> Result<()> {
+ self.0.execute("delete from logs where id = ?1", [log.id])?;
+ Ok(())
}
}
-pub enum TaskSelect {
- Name(String),
+pub enum TaskFilter {
Archived(bool),
Category(String),
}
+
+impl ToSql for TaskFilter {
+ fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+ match self {
+ TaskFilter::Archived(a) => a.to_sql(),
+ TaskFilter::Category(c) => c.to_sql(),
+ }
+ }
+}
+
+pub enum LogFilter {
+ Task(String),
+ Before(NaiveDateTime),
+ After(NaiveDateTime),
+}
+
+impl LogFilter {
+ fn where_clause(&self) -> &'static str {
+ match self {
+ LogFilter::Task(_) => "task_id = (select id from tasks where name = ?)",
+ LogFilter::Before(naive_date_time) => "timestamp < ?",
+ LogFilter::After(naive_date_time) => "timestamp > ?",
+ }
+ }
+}
+
+impl ToSql for LogFilter {
+ fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+ match self {
+ LogFilter::Task(n) => n.to_sql(),
+ LogFilter::Before(naive_date_time) => naive_date_time.to_sql(),
+ LogFilter::After(naive_date_time) => naive_date_time.to_sql(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::collections::HashSet;
+
+ use crate::task::{Category, Task};
+
+ use super::Db;
+
+ #[test]
+ fn create_task() {
+ let db = Db::create_connect_memory().unwrap();
+
+ let task = Task::new(
+ "piano".to_string(),
+ None,
+ Some("practice piano".to_string()),
+ Some(HashSet::from([Category::new("music".to_string())])),
+ );
+
+ db.create_task(&task).unwrap();
+ }
+
+ #[test]
+ fn read_task() {
+ let db = Db::create_connect_memory().unwrap();
+
+ let task = Task::new(
+ "piano".to_string(),
+ None,
+ Some("practice piano".to_string()),
+ Some(HashSet::from([Category::new("music".to_string())])),
+ );
+
+ db.create_task(&task).unwrap();
+
+ assert_eq!(task, db.read_task("piano").unwrap());
+ }
+
+ #[test]
+ fn read_tasks() {
+ let db = Db::create_connect_memory().unwrap();
+
+ let piano = Task::new(
+ "piano".to_string(),
+ None,
+ Some("practice piano".to_string()),
+ Some(HashSet::from([Category::new("music".to_string())])),
+ );
+
+ db.create_task(&piano).unwrap();
+
+ let german = Task::new(
+ "german".to_string(),
+ None,
+ Some("study german".to_string()),
+ None,
+ );
+
+ db.create_task(&german).unwrap();
+
+ assert_eq!(vec![piano, german], db.read_tasks(Vec::new()).unwrap());
+ }
+
+ // TODO: task orderings: by day, by week, by what's most urgent.
+
+ #[test]
+ fn filter_tasks() {}
+
+ #[test]
+ fn update_task() {}
+
+ #[test]
+ fn delete_task() {}
+
+ #[test]
+ fn create_log() {}
+
+ #[test]
+ fn read_logs() {}
+
+ #[test]
+ fn filter_logs() {}
+
+ #[test]
+ fn delete_log() {}
+}
diff --git a/src/error.rs b/src/error.rs
index 6716b58..580355e 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,9 +1,17 @@
+use thiserror::Error;
+
+#[derive(Debug, Error)]
pub enum Error {
- Database(sqlx::Error),
+ #[error("db: {0}")]
+ Db(#[from] rusqlite::Error),
}
-impl From<sqlx::Error> for Error {
- fn from(e: sqlx::Error) -> Self {
- Error::Database(e)
- }
+#[derive(Debug, Error)]
+pub enum DatabaseOpenError {
+ #[error("invalid path")]
+ InvalidPath,
+ #[error("io: {0}")]
+ Io(#[from] std::io::Error),
+ #[error("db: {0}")]
+ Db(#[from] rusqlite::Error),
}
diff --git a/src/lib.rs b/src/lib.rs
index 2bcd120..66f4685 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,7 @@
mod db;
mod error;
mod task;
-mod timestamp;
-pub use error::Error;
+pub use error::*;
+pub use task::*;
pub type Result<T> = std::result::Result<T, Error>;
diff --git a/src/task.rs b/src/task.rs
index 59aa502..ce2e7e6 100644
--- a/src/task.rs
+++ b/src/task.rs
@@ -1,25 +1,23 @@
use std::{
- borrow::{Borrow, Cow},
collections::HashSet,
+ ops::{Deref, DerefMut},
str::FromStr,
};
-use chrono::{NaiveDateTime, TimeDelta};
-use sqlx::{
- encode::IsNull,
- prelude::FromRow,
- sqlite::{SqliteArgumentValue, SqliteTypeInfo, SqliteValue},
- Decode, Sqlite, Type, TypeInfo, Value, ValueRef,
+use chrono::{Local, NaiveDateTime, TimeDelta};
+use rusqlite::{
+ types::{FromSql, FromSqlError, ToSqlOutput, Value},
+ ToSql,
};
+use uuid::Uuid;
-#[derive(FromRow)]
+#[derive(PartialEq, Eq, Clone, Debug)]
pub struct Task {
- id: Option<i64>,
- pub name: String,
- pub cron: Option<Schedule>,
- pub archived: bool,
- pub description: Option<String>,
- pub categories: HashSet<String>,
+ pub(crate) name: String,
+ pub(crate) cron: Option<Schedule>,
+ pub(crate) archived: bool,
+ pub(crate) description: Option<String>,
+ pub(crate) categories: HashSet<Category>,
}
impl Task {
@@ -27,10 +25,9 @@ impl Task {
name: String,
cron: Option<Schedule>,
description: Option<String>,
- categories: Option<HashSet<String>>,
+ categories: Option<HashSet<Category>>,
) -> Self {
Self {
- id: None,
name,
cron,
archived: false,
@@ -38,50 +35,57 @@ impl Task {
categories: categories.unwrap_or_else(|| HashSet::new()),
}
}
+}
- pub fn id(&self) -> Option<i64> {
- self.id
- }
+#[derive(PartialEq, Eq, Clone, Debug)]
+pub struct Schedule(cron::Schedule);
- pub fn add_id(mut self, id: i64) -> Self {
- self.id = Some(id);
- self
+impl FromSql for Schedule {
+ fn column_result(value: rusqlite::types::ValueRef<'_>) -> rusqlite::types::FromSqlResult<Self> {
+ let schedule = value.as_str()?;
+ let schedule =
+ cron::Schedule::from_str(schedule).map_err(|e| FromSqlError::Other(Box::new(e)))?;
+ Ok(Self(schedule))
}
}
-pub struct Schedule(cron::Schedule);
+impl ToSql for Schedule {
+ fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+ Ok(rusqlite::types::ToSqlOutput::Owned(
+ rusqlite::types::Value::Text(self.0.to_string()),
+ ))
+ }
+}
-impl sqlx::Encode<'_, Sqlite> for Schedule {
- fn encode_by_ref(
- &self,
- buf: &mut <Sqlite as sqlx::Database>::ArgumentBuffer<'_>,
- ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
- let schedule = &self.0;
- let schedule = schedule.to_string();
- buf.push(SqliteArgumentValue::Text(Cow::Owned(schedule)));
+#[repr(transparent)]
+#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Hash)]
+pub struct Category(String);
- Ok(IsNull::No)
+impl ToSql for Category {
+ fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
+ Ok(ToSqlOutput::Owned(Value::Text(self.0.clone())))
}
}
-impl sqlx::Decode<'_, Sqlite> for Schedule {
- fn decode(
- value: <Sqlite as sqlx::Database>::ValueRef<'_>,
- ) -> Result<Self, sqlx::error::BoxDynError> {
- let schedule = Decode::<Sqlite>::decode(value)?;
- let schedule = cron::Schedule::from_str(schedule)?;
- Ok(Self(schedule))
+impl FromSql for Category {
+ fn column_result(value: rusqlite::types::ValueRef<'_>) -> rusqlite::types::FromSqlResult<Self> {
+ Ok(Self(value.as_str()?.to_string()))
}
}
-impl Type<Sqlite> for Schedule {
- fn type_info() -> <Sqlite as sqlx::Database>::TypeInfo {
- <&str as Type<Sqlite>>::type_info()
+impl Deref for Category {
+ type Target = String;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
}
}
-#[repr(transparent)]
-pub struct Category(String);
+impl DerefMut for Category {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
impl Category {
pub fn new(category: String) -> Self {
@@ -89,11 +93,25 @@ impl Category {
}
}
-pub struct Log(NaiveDateTime);
+#[derive(Debug, Clone, Copy)]
+pub struct Log {
+ pub(crate) id: Uuid,
+ pub(crate) timestamp: NaiveDateTime,
+}
impl Log {
pub fn new(datetime: NaiveDateTime) -> Self {
- Self(datetime)
+ Self {
+ id: Uuid::new_v4(),
+ timestamp: datetime,
+ }
+ }
+
+ pub fn new_now() -> Self {
+ Self {
+ id: Uuid::new_v4(),
+ timestamp: Local::now().naive_local(),
+ }
}
}
diff --git a/src/timestamp.rs b/src/timestamp.rs
deleted file mode 100644
index 8b13789..0000000
--- a/src/timestamp.rs
+++ /dev/null
@@ -1 +0,0 @@
-