feat: add integrations

Signed-off-by: kjuulh <contact@kjuulh.io>
This commit is contained in:
2026-03-08 23:00:14 +01:00
parent 5a5f9a3003
commit 646581ff44
65 changed files with 7774 additions and 127 deletions

View File

@@ -1,16 +1,26 @@
use std::time::Duration;
use chrono::{DateTime, Utc};
use forage_core::auth::UserEmail;
use forage_core::session::{CachedUser, SessionData, SessionError, SessionId, SessionStore};
use forage_core::session::{CachedOrg, CachedUser, SessionData, SessionError, SessionId, SessionStore};
use moka::future::Cache;
use sqlx::PgPool;
/// PostgreSQL-backed session store for horizontal scaling.
/// PostgreSQL-backed session store with a Moka write-through cache.
/// Reads check the cache first, falling back to Postgres on miss.
/// Writes update both cache and Postgres atomically.
pub struct PgSessionStore {
    // Durable source of truth: sessions table in Postgres.
    pool: PgPool,
    // In-memory write-through cache keyed by the session-id string.
    // NOTE(review): values are full SessionData clones; presumably Moka's
    // TTL/idle eviction (configured in `new`) bounds staleness — confirm.
    cache: Cache<String, SessionData>,
}
impl PgSessionStore {
pub fn new(pool: PgPool) -> Self {
Self { pool }
let cache = Cache::builder()
.max_capacity(10_000)
.time_to_idle(Duration::from_secs(30 * 60)) // evict after 30min idle
.build();
Self { pool, cache }
}
/// Remove sessions inactive for longer than `max_inactive_days`.
@@ -21,6 +31,10 @@ impl PgSessionStore {
.execute(&self.pool)
.await
.map_err(|e| SessionError::Store(e.to_string()))?;
// Moka handles its own TTL eviction, but force a sync for reaped sessions
self.cache.run_pending_tasks().await;
Ok(result.rows_affected())
}
}
@@ -29,21 +43,11 @@ impl PgSessionStore {
impl SessionStore for PgSessionStore {
async fn create(&self, data: SessionData) -> Result<SessionId, SessionError> {
let id = SessionId::generate();
let (user_id, username, emails_json) = match &data.user {
Some(u) => (
Some(u.user_id.clone()),
Some(u.username.clone()),
Some(
serde_json::to_value(&u.emails)
.map_err(|e| SessionError::Store(e.to_string()))?,
),
),
None => (None, None, None),
};
let (user_id, username, emails_json, orgs_json) = extract_user_fields(&data)?;
sqlx::query(
"INSERT INTO sessions (session_id, access_token, refresh_token, access_expires_at, user_id, username, user_emails, csrf_token, created_at, last_seen_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)",
"INSERT INTO sessions (session_id, access_token, refresh_token, access_expires_at, user_id, username, user_emails, user_orgs, csrf_token, created_at, last_seen_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)",
)
.bind(id.as_str())
.bind(&data.access_token)
@@ -52,6 +56,7 @@ impl SessionStore for PgSessionStore {
.bind(&user_id)
.bind(&username)
.bind(&emails_json)
.bind(&orgs_json)
.bind(&data.csrf_token)
.bind(data.created_at)
.bind(data.last_seen_at)
@@ -59,12 +64,21 @@ impl SessionStore for PgSessionStore {
.await
.map_err(|e| SessionError::Store(e.to_string()))?;
// Populate cache
self.cache.insert(id.as_str().to_string(), data).await;
Ok(id)
}
async fn get(&self, id: &SessionId) -> Result<Option<SessionData>, SessionError> {
// Check cache first
if let Some(data) = self.cache.get(id.as_str()).await {
return Ok(Some(data));
}
// Cache miss — fall back to Postgres
let row: Option<SessionRow> = sqlx::query_as(
"SELECT access_token, refresh_token, access_expires_at, user_id, username, user_emails, csrf_token, created_at, last_seen_at
"SELECT access_token, refresh_token, access_expires_at, user_id, username, user_emails, user_orgs, csrf_token, created_at, last_seen_at
FROM sessions WHERE session_id = $1",
)
.bind(id.as_str())
@@ -72,25 +86,22 @@ impl SessionStore for PgSessionStore {
.await
.map_err(|e| SessionError::Store(e.to_string()))?;
Ok(row.map(|r| r.into_session_data()))
if let Some(row) = row {
let data = row.into_session_data();
// Backfill cache
self.cache.insert(id.as_str().to_string(), data.clone()).await;
Ok(Some(data))
} else {
Ok(None)
}
}
async fn update(&self, id: &SessionId, data: SessionData) -> Result<(), SessionError> {
let (user_id, username, emails_json) = match &data.user {
Some(u) => (
Some(u.user_id.clone()),
Some(u.username.clone()),
Some(
serde_json::to_value(&u.emails)
.map_err(|e| SessionError::Store(e.to_string()))?,
),
),
None => (None, None, None),
};
let (user_id, username, emails_json, orgs_json) = extract_user_fields(&data)?;
sqlx::query(
"UPDATE sessions SET access_token = $1, refresh_token = $2, access_expires_at = $3, user_id = $4, username = $5, user_emails = $6, csrf_token = $7, last_seen_at = $8
WHERE session_id = $9",
"UPDATE sessions SET access_token = $1, refresh_token = $2, access_expires_at = $3, user_id = $4, username = $5, user_emails = $6, user_orgs = $7, csrf_token = $8, last_seen_at = $9
WHERE session_id = $10",
)
.bind(&data.access_token)
.bind(&data.refresh_token)
@@ -98,6 +109,7 @@ impl SessionStore for PgSessionStore {
.bind(&user_id)
.bind(&username)
.bind(&emails_json)
.bind(&orgs_json)
.bind(&data.csrf_token)
.bind(data.last_seen_at)
.bind(id.as_str())
@@ -105,6 +117,9 @@ impl SessionStore for PgSessionStore {
.await
.map_err(|e| SessionError::Store(e.to_string()))?;
// Update cache
self.cache.insert(id.as_str().to_string(), data).await;
Ok(())
}
@@ -115,10 +130,42 @@ impl SessionStore for PgSessionStore {
.await
.map_err(|e| SessionError::Store(e.to_string()))?;
// Evict from cache
self.cache.invalidate(id.as_str()).await;
Ok(())
}
}
/// Extract user fields for SQL binding, shared by `create` and `update`.
///
/// Returns `(user_id, username, emails_json, orgs_json)`. All four are
/// `None` when the session carries no user; otherwise each is `Some`,
/// with the email and org lists serialized to JSON values.
///
/// # Errors
///
/// Returns `SessionError::Store` if JSON serialization of the email or
/// org list fails.
fn extract_user_fields(
    data: &SessionData,
) -> Result<
    (
        Option<String>,
        Option<String>,
        Option<serde_json::Value>,
        Option<serde_json::Value>,
    ),
    SessionError,
> {
    // Anonymous session: nothing to bind.
    let Some(user) = &data.user else {
        return Ok((None, None, None, None));
    };

    let emails_json = serde_json::to_value(&user.emails)
        .map_err(|e| SessionError::Store(e.to_string()))?;
    let orgs_json = serde_json::to_value(&user.orgs)
        .map_err(|e| SessionError::Store(e.to_string()))?;

    Ok((
        Some(user.user_id.clone()),
        Some(user.username.clone()),
        Some(emails_json),
        Some(orgs_json),
    ))
}
#[derive(sqlx::FromRow)]
struct SessionRow {
access_token: String,
@@ -127,6 +174,7 @@ struct SessionRow {
user_id: Option<String>,
username: Option<String>,
user_emails: Option<serde_json::Value>,
user_orgs: Option<serde_json::Value>,
csrf_token: String,
created_at: DateTime<Utc>,
last_seen_at: DateTime<Utc>,
@@ -140,11 +188,15 @@ impl SessionRow {
.user_emails
.and_then(|v| serde_json::from_value(v).ok())
.unwrap_or_default();
let orgs: Vec<CachedOrg> = self
.user_orgs
.and_then(|v| serde_json::from_value(v).ok())
.unwrap_or_default();
Some(CachedUser {
user_id,
username,
emails,
orgs: vec![],
orgs,
})
}
_ => None,