Include documentation for API and subcrates
This commit is contained in:
parent
dfdfc24ee5
commit
3e770a337b
40 changed files with 89 additions and 9 deletions
21
api/adaptors/README.md
Normal file
21
api/adaptors/README.md
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# Crab Fit Storage Adaptors
|
||||
|
||||
This directory contains sub-crates that connect Crab Fit to a database of some sort. For a list of available adaptors, see the [api readme](../README.md).
|
||||
|
||||
## Adding an adaptor
|
||||
|
||||
The suggested flow is copying an existing adaptor, such as `memory`, and altering the code to work with your chosen database.
|
||||
|
||||
Note, you will need to have the following crates as dependencies in your adaptor:
|
||||
|
||||
- `common`<br>Includes a trait for implementing your adaptor, as well as structs your adaptor needs to return.
|
||||
- `async-trait`<br>Required because the trait from `common` uses async functions, make sure you include `#[async_trait]` above your trait implementation.
|
||||
|
||||
Once you've created the adaptor, you'll need to make sure it's included as a dependency in the root [`Cargo.toml`](../Cargo.toml), and add a feature flag with the same name. Make sure you also document the new adaptor in the [api readme](../README.md).
|
||||
|
||||
Finally, add a new version of the `create_adaptor` function in the [`adaptors.rs`](../src/adaptors.rs) file that will only compile if the specific feature flag you added is set. Don't forget to add a `not` version of the feature to the default memory adaptor function at the bottom of the file.
|
||||
|
||||
## FAQ
|
||||
|
||||
Why is it spelt "adaptor" and not "adapter"?
|
||||
> The maintainer lives in Australia, where it's usually spelt "adaptor" 😎
|
||||
14
api/adaptors/datastore/Cargo.toml
Normal file
14
api/adaptors/datastore/Cargo.toml
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
[package]
|
||||
name = "datastore-adaptor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.68"
|
||||
chrono = "0.4.24"
|
||||
common = { path = "../../common" }
|
||||
# Uses custom version of google-cloud that has support for NULL values
|
||||
google-cloud = { git = "https://github.com/GRA0007/google-cloud-rs.git", features = ["datastore", "derive"] }
|
||||
serde = "1.0.163"
|
||||
serde_json = "1.0.96"
|
||||
tokio = { version = "1.28.1", features = ["rt-multi-thread"] }
|
||||
13
api/adaptors/datastore/README.md
Normal file
13
api/adaptors/datastore/README.md
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# Google Datastore Adaptor
|
||||
|
||||
This adaptor works with [Google Cloud Datastore](https://cloud.google.com/datastore). Please note that it's compatible with Firestore in Datastore mode, but not with Firestore.
|
||||
|
||||
## Environment
|
||||
|
||||
To use this adaptor, make sure you have the `GCP_CREDENTIALS` environment variable set to your service account credentials in JSON format. See [this page](https://developers.google.com/workspace/guides/create-credentials#service-account) for info on setting up a service account and generating credentials.
|
||||
|
||||
Example:
|
||||
|
||||
```env
|
||||
GCP_CREDENTIALS='{"type":"service_account","project_id":"my-project"}'
|
||||
```
|
||||
300
api/adaptors/datastore/src/lib.rs
Normal file
300
api/adaptors/datastore/src/lib.rs
Normal file
|
|
@ -0,0 +1,300 @@
|
|||
use std::{env, error::Error, fmt::Display};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime, NaiveDateTime, Utc};
|
||||
use common::{
|
||||
adaptor::Adaptor,
|
||||
event::{Event, EventDeletion},
|
||||
person::Person,
|
||||
stats::Stats,
|
||||
};
|
||||
use google_cloud::{
|
||||
authorize::ApplicationCredentials,
|
||||
datastore::{Client, Filter, FromValue, IntoValue, Key, Query},
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
/// Adaptor that persists Crab Fit data in Google Cloud Datastore.
pub struct DatastoreAdaptor {
    // The google-cloud client methods take `&mut self`, so it's wrapped in a
    // Mutex to allow calls from this adaptor's `&self` trait methods.
    client: Mutex<Client>,
}
|
||||
|
||||
// Keys
|
||||
// Keys
// Datastore entity kinds used by this adaptor.
const STATS_KIND: &str = "Stats";
const EVENT_KIND: &str = "Event";
const PERSON_KIND: &str = "Person";
// Fixed entity ids for the two counters stored under the `Stats` kind.
const STATS_EVENTS_ID: &str = "eventCount";
const STATS_PEOPLE_ID: &str = "personCount";
|
||||
|
||||
#[async_trait]
impl Adaptor for DatastoreAdaptor {
    type Error = DatastoreAdaptorError;

    /// Read both global counters, defaulting each to zero if its entity is
    /// missing.
    async fn get_stats(&self) -> Result<Stats, Self::Error> {
        let mut client = self.client.lock().await;

        let event_key = Key::new(STATS_KIND).id(STATS_EVENTS_ID);
        let event_stats: DatastoreStats = client.get(event_key).await?.unwrap_or_default();

        let person_key = Key::new(STATS_KIND).id(STATS_PEOPLE_ID);
        let person_stats: DatastoreStats = client.get(person_key).await?.unwrap_or_default();

        Ok(Stats {
            event_count: event_stats.value,
            person_count: person_stats.value,
        })
    }

    /// Read-modify-write increment of the event counter.
    // NOTE(review): the Mutex serializes increments within this process only;
    // multiple API instances could race on the same counter — confirm if that
    // matters for stats accuracy.
    async fn increment_stat_event_count(&self) -> Result<i64, Self::Error> {
        let mut client = self.client.lock().await;

        let key = Key::new(STATS_KIND).id(STATS_EVENTS_ID);
        let mut event_stats: DatastoreStats = client.get(key.clone()).await?.unwrap_or_default();

        event_stats.value += 1;
        client.put((key, event_stats.clone())).await?;
        Ok(event_stats.value)
    }

    /// Read-modify-write increment of the person counter (same caveats as
    /// the event counter above).
    async fn increment_stat_person_count(&self) -> Result<i64, Self::Error> {
        let mut client = self.client.lock().await;

        let key = Key::new(STATS_KIND).id(STATS_PEOPLE_ID);
        let mut person_stats: DatastoreStats = client.get(key.clone()).await?.unwrap_or_default();

        person_stats.value += 1;
        client.put((key, person_stats.clone())).await?;
        Ok(person_stats.value)
    }

    /// Returns `Ok(None)` when the event doesn't exist, otherwise every
    /// person whose `eventId` property matches.
    async fn get_people(&self, event_id: String) -> Result<Option<Vec<Person>>, Self::Error> {
        let mut client = self.client.lock().await;

        // Check the event exists
        if client
            .get::<DatastoreEvent, _>(Key::new(EVENT_KIND).id(event_id.clone()))
            .await?
            .is_none()
        {
            return Ok(None);
        }

        Ok(Some(
            client
                .query(
                    Query::new(PERSON_KIND)
                        .filter(Filter::Equal("eventId".into(), event_id.into_value())),
                )
                .await?
                .into_iter()
                // Entities that fail to deserialize are silently skipped
                .filter_map(|entity| {
                    DatastorePerson::from_value(entity.properties().clone())
                        .ok()
                        .map(|ds_person| ds_person.into())
                })
                .collect(),
        ))
    }

    /// Insert a person, or overwrite the existing entity with the same
    /// (event, name) pair.
    async fn upsert_person(&self, event_id: String, person: Person) -> Result<Person, Self::Error> {
        let mut client = self.client.lock().await;

        // Check if person exists
        let existing_person = client
            .query(
                Query::new(PERSON_KIND)
                    .filter(Filter::Equal(
                        "eventId".into(),
                        event_id.clone().into_value(),
                    ))
                    .filter(Filter::Equal(
                        "name".into(),
                        person.name.clone().into_value(),
                    )),
            )
            .await?;

        // Reuse the existing entity's key to update in place; otherwise a
        // fresh key lets datastore allocate a new entity id.
        let mut key = Key::new(PERSON_KIND);
        if let Some(entity) = existing_person.first() {
            key = entity.key().clone();
        }

        client
            .put((key, DatastorePerson::from_person(person.clone(), event_id)))
            .await?;

        Ok(person)
    }

    /// Fetch an event by id, bumping its `visited` timestamp as a side
    /// effect when found.
    async fn get_event(&self, id: String) -> Result<Option<Event>, Self::Error> {
        let mut client = self.client.lock().await;

        let key = Key::new(EVENT_KIND).id(id.clone());
        let existing_event = client.get::<DatastoreEvent, _>(key.clone()).await?;

        // Mark as visited if it exists
        if let Some(mut event) = existing_event.clone() {
            event.visited = Utc::now().timestamp();
            client.put((key, event)).await?;
        }

        // The returned event still carries the *previous* visited timestamp
        Ok(existing_event.map(|e| e.to_event(id)))
    }

    /// Store a new event keyed by its id.
    async fn create_event(&self, event: Event) -> Result<Event, Self::Error> {
        let mut client = self.client.lock().await;

        let key = Key::new(EVENT_KIND).id(event.id.clone());

        let ds_event: DatastoreEvent = event.clone().into();
        client.put((key, ds_event)).await?;

        Ok(event)
    }

    /// Delete an event and every person attached to it, returning how many
    /// people were removed.
    async fn delete_event(&self, id: String) -> Result<EventDeletion, Self::Error> {
        let mut client = self.client.lock().await;

        let mut keys_to_delete: Vec<Key> = client
            .query(
                Query::new(PERSON_KIND)
                    .filter(Filter::Equal("eventId".into(), id.clone().into_value())),
            )
            .await?
            .iter()
            .map(|entity| entity.key().clone())
            .collect();

        let person_count = keys_to_delete.len().try_into().unwrap();
        // Delete the event itself in the same batch as its people
        keys_to_delete.insert(0, Key::new(EVENT_KIND).id(id.clone()));

        client.delete_all(keys_to_delete).await?;

        Ok(EventDeletion { id, person_count })
    }
}
|
||||
|
||||
impl DatastoreAdaptor {
    /// Connect to Google Cloud Datastore using the service-account JSON in
    /// the `GCP_CREDENTIALS` environment variable.
    ///
    /// # Panics
    /// Panics if the variable is missing, isn't valid JSON, or the client
    /// cannot be created — failing to connect at startup is unrecoverable.
    pub async fn new() -> Self {
        // Load credentials
        let credentials: ApplicationCredentials = serde_json::from_str(
            &env::var("GCP_CREDENTIALS").expect("Expected GCP_CREDENTIALS environment variable"),
        )
        .expect("GCP_CREDENTIALS environment variable is not valid JSON");

        // Connect to datastore
        let client = Client::from_credentials(credentials.project_id.clone(), credentials.clone())
            .await
            .expect("Failed to setup datastore client");
        let client = Mutex::new(client);

        println!(
            "🎛️ Connected to datastore in project {}",
            credentials.project_id
        );

        Self { client }
    }
}
|
||||
|
||||
/// Shape of a counter entity stored under the `Stats` kind.
#[derive(FromValue, IntoValue, Default, Clone)]
struct DatastoreStats {
    value: i64,
}

/// Datastore representation of an event. Timestamps are stored as unix
/// seconds; the event id lives in the entity key, so it isn't a property.
#[derive(FromValue, IntoValue, Clone)]
struct DatastoreEvent {
    name: String,
    created: i64,
    visited: i64,
    times: Vec<String>,
    timezone: String,
}

/// Datastore representation of a person.
// `eventId` stays camelCase because it's the stored property name that the
// equality filters in the queries above match against.
#[derive(FromValue, IntoValue)]
#[allow(non_snake_case)]
struct DatastorePerson {
    name: String,
    password: Option<String>,
    created: i64,
    eventId: String,
    availability: Vec<String>,
}
|
||||
|
||||
impl From<DatastorePerson> for Person {
    /// Convert the stored representation into the shared `Person` struct.
    fn from(value: DatastorePerson) -> Self {
        Self {
            name: value.name,
            password_hash: value.password,
            created_at: unix_to_date(value.created),
            availability: value.availability,
        }
    }
}

impl DatastorePerson {
    // Not a `From` impl because the event id is stored on the person entity
    // but isn't a field of the shared `Person` struct.
    fn from_person(person: Person, event_id: String) -> Self {
        Self {
            name: person.name,
            password: person.password_hash,
            created: person.created_at.timestamp(),
            eventId: event_id,
            availability: person.availability,
        }
    }
}
|
||||
|
||||
impl From<Event> for DatastoreEvent {
    /// Convert a shared `Event` into its stored form (timestamps become unix
    /// seconds; the id is dropped because it lives in the entity key).
    fn from(value: Event) -> Self {
        Self {
            name: value.name,
            created: value.created_at.timestamp(),
            visited: value.visited_at.timestamp(),
            times: value.times,
            timezone: value.timezone,
        }
    }
}

impl DatastoreEvent {
    // Takes the id as a parameter since it comes from the entity key rather
    // than the entity's properties.
    fn to_event(&self, event_id: String) -> Event {
        Event {
            id: event_id,
            name: self.name.clone(),
            created_at: unix_to_date(self.created),
            visited_at: unix_to_date(self.visited),
            times: self.times.clone(),
            timezone: self.timezone.clone(),
        }
    }
}
|
||||
|
||||
fn unix_to_date(unix: i64) -> DateTime<Utc> {
|
||||
DateTime::from_utc(NaiveDateTime::from_timestamp_opt(unix, 0).unwrap(), Utc)
|
||||
}
|
||||
|
||||
/// Errors produced by `DatastoreAdaptor` — currently a single wrapper around
/// the google-cloud crate's error type.
#[derive(Debug)]
pub enum DatastoreAdaptorError {
    DatastoreError(google_cloud::error::Error),
}

impl Display for DatastoreAdaptorError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DatastoreAdaptorError::DatastoreError(e) => write!(f, "Datastore Error: {}", e),
        }
    }
}

impl Error for DatastoreAdaptorError {}

// Allows `?` on datastore client calls
impl From<google_cloud::error::Error> for DatastoreAdaptorError {
    fn from(value: google_cloud::error::Error) -> Self {
        Self::DatastoreError(value)
    }
}

// Entity (de)serialization failures are folded into the same variant
impl From<google_cloud::error::ConvertError> for DatastoreAdaptorError {
    fn from(value: google_cloud::error::ConvertError) -> Self {
        Self::DatastoreError(google_cloud::error::Error::Convert(value))
    }
}
|
||||
10
api/adaptors/memory/Cargo.toml
Normal file
10
api/adaptors/memory/Cargo.toml
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
[package]
|
||||
name = "memory-adaptor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.68"
|
||||
chrono = "0.4.24"
|
||||
common = { path = "../../common" }
|
||||
tokio = { version = "1.28.1", features = ["rt-multi-thread"] }
|
||||
6
api/adaptors/memory/README.md
Normal file
6
api/adaptors/memory/README.md
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# Memory Adaptor
|
||||
|
||||
This adaptor stores everything in memory, and all data is lost when the API is stopped. Useful for testing.
|
||||
|
||||
> **Warning**
|
||||
> Do not use this adaptor in production!
|
||||
146
api/adaptors/memory/src/lib.rs
Normal file
146
api/adaptors/memory/src/lib.rs
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
use std::{collections::HashMap, error::Error, fmt::Display};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use common::{
|
||||
adaptor::Adaptor,
|
||||
event::{Event, EventDeletion},
|
||||
person::Person,
|
||||
stats::Stats,
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
/// Everything the memory adaptor stores, guarded by a single lock.
struct State {
    stats: Stats,
    // Keyed by event id
    events: HashMap<String, Event>,
    // Keyed by (event id, person name), making names unique per event
    people: HashMap<(String, String), Person>,
}

/// Adaptor that keeps all data in process memory. Nothing is persisted —
/// intended for testing only.
pub struct MemoryAdaptor {
    state: Mutex<State>,
}
|
||||
|
||||
#[async_trait]
|
||||
impl Adaptor for MemoryAdaptor {
|
||||
type Error = MemoryAdaptorError;
|
||||
|
||||
async fn get_stats(&self) -> Result<Stats, Self::Error> {
|
||||
let state = self.state.lock().await;
|
||||
|
||||
Ok(state.stats.clone())
|
||||
}
|
||||
|
||||
async fn increment_stat_event_count(&self) -> Result<i64, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
state.stats.event_count += 1;
|
||||
Ok(state.stats.event_count)
|
||||
}
|
||||
|
||||
async fn increment_stat_person_count(&self) -> Result<i64, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
state.stats.person_count += 1;
|
||||
Ok(state.stats.person_count)
|
||||
}
|
||||
|
||||
async fn get_people(&self, event_id: String) -> Result<Option<Vec<Person>>, Self::Error> {
|
||||
let state = self.state.lock().await;
|
||||
|
||||
// Event doesn't exist
|
||||
if state.events.get(&event_id).is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
Ok(Some(
|
||||
state
|
||||
.people
|
||||
.clone()
|
||||
.into_iter()
|
||||
.filter_map(|((p_event_id, _), p)| {
|
||||
if p_event_id == event_id {
|
||||
Some(p)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
}
|
||||
|
||||
async fn upsert_person(&self, event_id: String, person: Person) -> Result<Person, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
state
|
||||
.people
|
||||
.insert((event_id, person.name.clone()), person.clone());
|
||||
|
||||
Ok(person)
|
||||
}
|
||||
|
||||
async fn get_event(&self, id: String) -> Result<Option<Event>, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
let event = state.events.get(&id).cloned();
|
||||
if let Some(mut event) = event.clone() {
|
||||
event.visited_at = Utc::now();
|
||||
state.events.insert(id, event);
|
||||
}
|
||||
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
async fn create_event(&self, event: Event) -> Result<Event, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
state.events.insert(event.id.clone(), event.clone());
|
||||
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
async fn delete_event(&self, id: String) -> Result<EventDeletion, Self::Error> {
|
||||
let mut state = self.state.lock().await;
|
||||
|
||||
let mut person_count: u64 = state.people.len() as u64;
|
||||
state.people = state
|
||||
.people
|
||||
.clone()
|
||||
.into_iter()
|
||||
.filter(|((event_id, _), _)| event_id != &id)
|
||||
.collect();
|
||||
person_count -= state.people.len() as u64;
|
||||
|
||||
state.events.remove(&id);
|
||||
|
||||
Ok(EventDeletion { id, person_count })
|
||||
}
|
||||
}
|
||||
|
||||
impl MemoryAdaptor {
    /// Create an adaptor with empty storage and zeroed stats.
    pub async fn new() -> Self {
        println!("🧠 Using in-memory storage");
        println!("🚨 WARNING: All data will be lost when the process ends. Make sure you choose a database adaptor before deploying.");

        let state = Mutex::new(State {
            stats: Stats {
                event_count: 0,
                person_count: 0,
            },
            events: HashMap::new(),
            people: HashMap::new(),
        });

        Self { state }
    }
}
|
||||
|
||||
/// Error type for the memory adaptor.
///
/// In-memory operations can't fail, so this enum has no variants — a value
/// of this type can never actually be constructed at runtime.
#[derive(Debug)]
pub enum MemoryAdaptorError {}

impl Display for MemoryAdaptorError {
    // Unreachable in practice (the enum is uninhabited), but required so the
    // type satisfies the `Error` bound.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Memory adaptor error")
    }
}

impl Error for MemoryAdaptorError {}
|
||||
14
api/adaptors/sql/Cargo.toml
Normal file
14
api/adaptors/sql/Cargo.toml
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
[package]
|
||||
name = "sql-adaptor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.68"
|
||||
common = { path = "../../common" }
|
||||
sea-orm = { version = "0.11.3", features = [ "macros", "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", "runtime-tokio-native-tls" ] }
|
||||
serde = { version = "1.0.162", features = [ "derive" ] }
|
||||
async-std = { version = "1", features = ["attributes", "tokio1"] }
|
||||
sea-orm-migration = "0.11.0"
|
||||
serde_json = "1.0.96"
|
||||
chrono = "0.4.24"
|
||||
13
api/adaptors/sql/README.md
Normal file
13
api/adaptors/sql/README.md
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# SQL Adaptor
|
||||
|
||||
This adaptor works with [Postgres](https://www.postgresql.org/), [MySQL](https://www.mysql.com/) or [SQLite](https://sqlite.org/index.html) databases.
|
||||
|
||||
## Environment
|
||||
|
||||
To use this adaptor, make sure you have the `DATABASE_URL` environment variable set to the database url for your chosen database.
|
||||
|
||||
Example:
|
||||
|
||||
```env
|
||||
DATABASE_URL="postgresql://username:password@localhost:5432/crabfit"
|
||||
```
|
||||
29
api/adaptors/sql/src/entity/event.rs
Normal file
29
api/adaptors/sql/src/entity/event.rs
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
/// Row model for the `event` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "event")]
pub struct Model {
    // Event ids are generated by the API, not by the database
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: String,
    pub name: String,
    pub created_at: DateTime,
    pub visited_at: DateTime,
    // Stored as a JSON array of time strings
    pub times: Json,
    pub timezone: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    // One event has many people
    #[sea_orm(has_many = "super::person::Entity")]
    Person,
}

impl Related<super::person::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Person.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
|
||||
7
api/adaptors/sql/src/entity/mod.rs
Normal file
7
api/adaptors/sql/src/entity/mod.rs
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3
|
||||
|
||||
pub mod prelude;
|
||||
|
||||
pub mod event;
|
||||
pub mod person;
|
||||
pub mod stats;
|
||||
35
api/adaptors/sql/src/entity/person.rs
Normal file
35
api/adaptors/sql/src/entity/person.rs
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
/// Row model for the `person` table.
///
/// The primary key is composite: `(name, event_id)`, so a person's name is
/// unique within an event.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "person")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub name: String,
    pub password_hash: Option<String>,
    pub created_at: DateTime,
    // Stored as a JSON array of availability strings
    pub availability: Json,
    #[sea_orm(primary_key, auto_increment = false)]
    pub event_id: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    // Each person belongs to exactly one event; deleting or renaming the
    // event cascades to its people
    #[sea_orm(
        belongs_to = "super::event::Entity",
        from = "Column::EventId",
        to = "super::event::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Event,
}

impl Related<super::event::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Event.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
|
||||
5
api/adaptors/sql/src/entity/prelude.rs
Normal file
5
api/adaptors/sql/src/entity/prelude.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3
|
||||
|
||||
pub use super::event::Entity as Event;
|
||||
pub use super::person::Entity as Person;
|
||||
pub use super::stats::Entity as Stats;
|
||||
17
api/adaptors/sql/src/entity/stats.rs
Normal file
17
api/adaptors/sql/src/entity/stats.rs
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.3
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
/// Row model for the `stats` table, which holds the global counters.
// NOTE(review): the adaptor only ever reads the first row (`find().one()`),
// so this table is expected to contain a single row in practice.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "stats")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub event_count: i32,
    pub person_count: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
229
api/adaptors/sql/src/lib.rs
Normal file
229
api/adaptors/sql/src/lib.rs
Normal file
|
|
@ -0,0 +1,229 @@
|
|||
use std::{env, error::Error};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime as ChronoDateTime, Utc};
|
||||
use common::{
|
||||
adaptor::Adaptor,
|
||||
event::{Event, EventDeletion},
|
||||
person::Person,
|
||||
stats::Stats,
|
||||
};
|
||||
use entity::{event, person, stats};
|
||||
use migration::{Migrator, MigratorTrait};
|
||||
use sea_orm::{
|
||||
strum::Display,
|
||||
ActiveModelTrait,
|
||||
ActiveValue::{NotSet, Set},
|
||||
ColumnTrait, Database, DatabaseConnection, DbErr, EntityTrait, ModelTrait, QueryFilter,
|
||||
TransactionError, TransactionTrait, TryIntoModel,
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
mod entity;
|
||||
mod migration;
|
||||
|
||||
/// Adaptor backed by a SQL database (Postgres, MySQL or SQLite) via SeaORM.
pub struct SqlAdaptor {
    db: DatabaseConnection,
}
|
||||
|
||||
#[async_trait]
impl Adaptor for SqlAdaptor {
    type Error = SqlAdaptorError;

    /// Read the global counters from the stats row.
    // The `unwrap()`s below unwrap SeaORM `ActiveValue`s, which
    // `get_stats_row` always sets — they aren't database `Option`s.
    async fn get_stats(&self) -> Result<Stats, Self::Error> {
        let stats_row = get_stats_row(&self.db).await?;
        Ok(Stats {
            event_count: stats_row.event_count.unwrap() as i64,
            person_count: stats_row.person_count.unwrap() as i64,
        })
    }

    /// Increment and persist the event counter, returning the new value.
    async fn increment_stat_event_count(&self) -> Result<i64, Self::Error> {
        let mut current_stats = get_stats_row(&self.db).await?;
        current_stats.event_count = Set(current_stats.event_count.unwrap() + 1);

        // `save` inserts when the row's id is NotSet, updates otherwise
        Ok(current_stats.save(&self.db).await?.event_count.unwrap() as i64)
    }

    /// Increment and persist the person counter, returning the new value.
    async fn increment_stat_person_count(&self) -> Result<i64, Self::Error> {
        let mut current_stats = get_stats_row(&self.db).await?;
        current_stats.person_count = Set(current_stats.person_count.unwrap() + 1);

        Ok(current_stats.save(&self.db).await?.person_count.unwrap() as i64)
    }

    /// Returns `Ok(None)` when the event doesn't exist, otherwise all people
    /// related to it.
    async fn get_people(&self, event_id: String) -> Result<Option<Vec<Person>>, Self::Error> {
        // TODO: optimize into one query
        let event_row = event::Entity::find_by_id(event_id).one(&self.db).await?;

        Ok(match event_row {
            Some(event) => Some(
                event
                    .find_related(person::Entity)
                    .all(&self.db)
                    .await?
                    .into_iter()
                    .map(|model| model.into())
                    .collect(),
            ),
            None => None,
        })
    }

    /// Insert a person, or update the row with the same (name, event id)
    /// composite key.
    async fn upsert_person(&self, event_id: String, person: Person) -> Result<Person, Self::Error> {
        let data = person::ActiveModel {
            name: Set(person.name.clone()),
            password_hash: Set(person.password_hash),
            created_at: Set(person.created_at.naive_utc()),
            availability: Set(serde_json::to_value(person.availability).unwrap_or(json!([]))),
            event_id: Set(event_id.clone()),
        };

        Ok(
            match person::Entity::find_by_id((person.name, event_id))
                .one(&self.db)
                .await?
            {
                Some(_) => data.update(&self.db).await?.try_into_model()?.into(),
                None => data.insert(&self.db).await?.try_into_model()?.into(),
            },
        )
    }

    /// Fetch an event by id, bumping its `visited_at` as a side effect.
    /// The returned event carries the *previous* visited time.
    async fn get_event(&self, id: String) -> Result<Option<Event>, Self::Error> {
        let existing_event = event::Entity::find_by_id(id).one(&self.db).await?;

        // Mark as visited
        if let Some(event) = existing_event.clone() {
            let mut event: event::ActiveModel = event.into();
            event.visited_at = Set(Utc::now().naive_utc());
            event.save(&self.db).await?;
        }

        Ok(existing_event.map(|model| model.into()))
    }

    /// Insert a new event row and return it as the common `Event` type.
    async fn create_event(&self, event: Event) -> Result<Event, Self::Error> {
        Ok(event::ActiveModel {
            id: Set(event.id),
            name: Set(event.name),
            created_at: Set(event.created_at.naive_utc()),
            visited_at: Set(event.visited_at.naive_utc()),
            times: Set(serde_json::to_value(event.times).unwrap_or(json!([]))),
            timezone: Set(event.timezone),
        }
        .insert(&self.db)
        .await?
        .try_into_model()?
        .into())
    }

    /// Delete an event and its people inside one transaction, returning how
    /// many people were removed.
    async fn delete_event(&self, id: String) -> Result<EventDeletion, Self::Error> {
        let event_id = id.clone();
        let person_count = self
            .db
            .transaction::<_, u64, DbErr>(|t| {
                Box::pin(async move {
                    // Delete people
                    let people_delete_result = person::Entity::delete_many()
                        .filter(person::Column::EventId.eq(&event_id))
                        .exec(t)
                        .await?;

                    // Delete event
                    event::Entity::delete_by_id(event_id).exec(t).await?;

                    Ok(people_delete_result.rows_affected)
                })
            })
            .await?;

        Ok(EventDeletion { id, person_count })
    }
}
|
||||
|
||||
// Get the current stats as an ActiveModel.
// When the table is empty, returns a fresh zeroed row with `id: NotSet`, so
// a later `save` inserts it rather than updating.
async fn get_stats_row(db: &DatabaseConnection) -> Result<stats::ActiveModel, DbErr> {
    let current_stats = stats::Entity::find().one(db).await?;

    Ok(match current_stats {
        Some(model) => model.into(),
        None => stats::ActiveModel {
            id: NotSet,
            event_count: Set(0),
            person_count: Set(0),
        },
    })
}
|
||||
|
||||
impl SqlAdaptor {
    /// Connect to the database named by the `DATABASE_URL` environment
    /// variable and run any pending migrations.
    ///
    /// # Panics
    /// Panics if the variable is missing, the connection fails, or the
    /// migrations can't be applied — all unrecoverable at startup.
    pub async fn new() -> Self {
        let connection_string =
            env::var("DATABASE_URL").expect("Expected DATABASE_URL environment variable");

        // Connect to the database
        let db = Database::connect(&connection_string)
            .await
            .expect("Failed to connect to SQL database");
        println!(
            "{} Connected to database at {}",
            // Pick an emoji matching the backend that actually connected
            match db {
                DatabaseConnection::SqlxMySqlPoolConnection(_) => "🐬",
                DatabaseConnection::SqlxPostgresPoolConnection(_) => "🐘",
                DatabaseConnection::SqlxSqlitePoolConnection(_) => "🪶",
                DatabaseConnection::Disconnected => panic!("Failed to connect to SQL database"),
            },
            connection_string
        );

        // Setup tables
        Migrator::up(&db, None)
            .await
            .expect("Failed to set up tables in the database");

        Self { db }
    }
}
|
||||
|
||||
impl From<event::Model> for Event {
    /// Convert a database row into the shared `Event` type. Naive DB
    /// timestamps are reinterpreted as UTC; malformed JSON in `times`
    /// silently becomes an empty list.
    fn from(value: event::Model) -> Self {
        Self {
            id: value.id,
            name: value.name,
            created_at: ChronoDateTime::<Utc>::from_utc(value.created_at, Utc),
            visited_at: ChronoDateTime::<Utc>::from_utc(value.visited_at, Utc),
            times: serde_json::from_value(value.times).unwrap_or(vec![]),
            timezone: value.timezone,
        }
    }
}

impl From<person::Model> for Person {
    /// Convert a database row into the shared `Person` type (same timestamp
    /// and JSON handling as the event conversion above).
    fn from(value: person::Model) -> Self {
        Self {
            name: value.name,
            password_hash: value.password_hash,
            created_at: ChronoDateTime::<Utc>::from_utc(value.created_at, Utc),
            availability: serde_json::from_value(value.availability).unwrap_or(vec![]),
        }
    }
}
|
||||
|
||||
/// Errors produced by `SqlAdaptor`.
// `Display` is derived via SeaORM's re-exported strum derive, so the variant
// name is used as the message.
#[derive(Display, Debug)]
pub enum SqlAdaptorError {
    DbErr(DbErr),
    TransactionError(TransactionError<DbErr>),
}

impl Error for SqlAdaptorError {}

// The two `From` impls below allow `?` on both plain queries and
// transactional closures.
impl From<DbErr> for SqlAdaptorError {
    fn from(value: DbErr) -> Self {
        Self::DbErr(value)
    }
}
impl From<TransactionError<DbErr>> for SqlAdaptorError {
    fn from(value: TransactionError<DbErr>) -> Self {
        Self::TransactionError(value)
    }
}
|
||||
122
api/adaptors/sql/src/migration/m01_setup_tables.rs
Normal file
122
api/adaptors/sql/src/migration/m01_setup_tables.rs
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `stats`, `event` and `person` tables plus the
    /// person→event foreign key. All creates are `IF NOT EXISTS`, so this is
    /// safe to re-run.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // NOTE(review): `print!` without a newline may not appear until
        // stdout is next flushed
        print!("Setting up database...");

        // Stats table
        manager
            .create_table(
                Table::create()
                    .table(Stats::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Stats::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Stats::EventCount).integer().not_null())
                    .col(ColumnDef::new(Stats::PersonCount).integer().not_null())
                    .to_owned(),
            )
            .await?;

        // Events table
        manager
            .create_table(
                Table::create()
                    .table(Event::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Event::Id).string().not_null().primary_key())
                    .col(ColumnDef::new(Event::Name).string().not_null())
                    .col(ColumnDef::new(Event::CreatedAt).timestamp().not_null())
                    .col(ColumnDef::new(Event::VisitedAt).timestamp().not_null())
                    .col(ColumnDef::new(Event::Times).json().not_null())
                    .col(ColumnDef::new(Event::Timezone).string().not_null())
                    .to_owned(),
            )
            .await?;

        // People table — composite primary key (event_id, name)
        manager
            .create_table(
                Table::create()
                    .table(Person::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Person::Name).string().not_null())
                    .col(ColumnDef::new(Person::PasswordHash).string())
                    .col(ColumnDef::new(Person::CreatedAt).timestamp().not_null())
                    .col(ColumnDef::new(Person::Availability).json().not_null())
                    .col(ColumnDef::new(Person::EventId).string().not_null())
                    .primary_key(Index::create().col(Person::EventId).col(Person::Name))
                    .to_owned(),
            )
            .await?;

        // Relation
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_person_event")
                    .from(Person::Table, Person::EventId)
                    .to(Event::Table, Event::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .on_update(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        println!(" done");
        Ok(())
    }

    /// Drop all three tables. `person` is dropped before `event` so the
    /// foreign key doesn't block the drop.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Stats::Table).to_owned())
            .await?;
        manager
            .drop_table(Table::drop().table(Person::Table).to_owned())
            .await?;
        manager
            .drop_table(Table::drop().table(Event::Table).to_owned())
            .await?;

        Ok(())
    }
}
|
||||
|
||||
/// Learn more at https://docs.rs/sea-query#iden
// These enums name the tables and columns used in the migration above; the
// `Iden` derive turns each variant into its snake_case SQL identifier.
#[derive(Iden)]
enum Stats {
    Table,
    Id,
    EventCount,
    PersonCount,
}

#[derive(Iden)]
enum Event {
    Table,
    Id,
    Name,
    CreatedAt,
    VisitedAt,
    Times,
    Timezone,
}

#[derive(Iden)]
enum Person {
    Table,
    Name,
    PasswordHash,
    CreatedAt,
    Availability,
    EventId,
}
|
||||
12
api/adaptors/sql/src/migration/mod.rs
Normal file
12
api/adaptors/sql/src/migration/mod.rs
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
pub use sea_orm_migration::prelude::*;
|
||||
|
||||
mod m01_setup_tables;
|
||||
|
||||
/// Collects every migration for the SQL adaptor.
pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    // Migrations run in list order; append new ones to the end
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![Box::new(m01_setup_tables::Migration)]
    }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue