RustyCMS: File-based headless CMS with REST API, admin UI, multilingual support

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
Peter Meier
2026-02-16 09:30:30 +01:00
commit aad93d145f
224 changed files with 19225 additions and 0 deletions

38
src/api/auth.rs Normal file
View File

@@ -0,0 +1,38 @@
//! Optional API key auth via env RUSTYCMS_API_KEY. Protects POST/PUT/DELETE.
use axum::http::HeaderMap;
use super::error::ApiError;
/// Read token from `Authorization: Bearer <token>` or `X-API-Key: <token>`.
///
/// The `Authorization` header wins when both are present; the `Bearer `
/// scheme prefix is matched case-sensitively.
pub fn token_from_headers(headers: &HeaderMap) -> Option<String> {
    let bearer = headers
        .get("Authorization")
        .and_then(|v| v.to_str().ok())
        .and_then(|s| s.trim().strip_prefix("Bearer "));
    if let Some(token) = bearer {
        return Some(token.trim().to_string());
    }
    headers
        .get("X-API-Key")
        .and_then(|v| v.to_str().ok())
        .map(|s| s.trim().to_string())
}
/// If `required_key` is Some, the request must send a matching token (Bearer or X-API-Key).
///
/// The token comparison is constant-time in the key contents so response
/// timing does not leak how long a matching prefix of a guessed key is.
pub fn require_api_key(required_key: Option<&String>, headers: &HeaderMap) -> Result<(), ApiError> {
    let Some(required) = required_key else {
        return Ok(());
    };
    let provided = token_from_headers(headers);
    let matches = provided
        .as_deref()
        .map(|p| constant_time_eq(p.as_bytes(), required.as_bytes()))
        .unwrap_or(false);
    if !matches {
        return Err(ApiError::Unauthorized(
            "Missing or invalid API key. Use Authorization: Bearer <key> or X-API-Key: <key>."
                .to_string(),
        ));
    }
    Ok(())
}

/// Byte-slice equality whose running time does not depend on where the
/// slices first differ. Length mismatch returns early; leaking the key
/// length is acceptable for this check.
fn constant_time_eq(a: &[u8], b: &[u8]) -> bool {
    if a.len() != b.len() {
        return false;
    }
    a.iter().zip(b.iter()).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}

133
src/api/cache.rs Normal file
View File

@@ -0,0 +1,133 @@
//! In-memory response cache for GET /api/content (TTL, invalidation on write access).
use std::collections::HashMap;
use std::time::{Duration, Instant};
use serde_json::Value;
use tokio::sync::RwLock;
/// A cache entry with expiration time.
struct CachedItem {
    // Cached JSON response body.
    value: Value,
    // Insertion timestamp; compared against the cache TTL on read.
    inserted_at: Instant,
}
/// Response cache for Content API. Keys: `e:{collection}:{slug}:{resolve}` (Entry)
/// and `l:{collection}:{query_hash}` (List). TTL configurable; on POST/PUT/DELETE
/// the affected collection is invalidated.
pub struct ContentCache {
    // Key -> cached response; RwLock because reads dominate writes.
    data: RwLock<HashMap<String, CachedItem>>,
    // Time-to-live for every entry; zero disables the cache entirely.
    ttl: Duration,
}
impl ContentCache {
    /// New cache with TTL in seconds (0 = caching disabled).
    pub fn new(ttl_secs: u64) -> Self {
        Self {
            data: RwLock::new(HashMap::new()),
            ttl: Duration::from_secs(ttl_secs),
        }
    }

    /// Returns the cached value if present and not expired.
    ///
    /// Fast path holds only the read lock, so concurrent readers do not
    /// serialize on cache hits; the write lock is taken solely to evict
    /// an expired entry.
    pub async fn get(&self, key: &str) -> Option<Value> {
        if self.ttl.is_zero() {
            return None;
        }
        {
            let guard = self.data.read().await;
            match guard.get(key) {
                Some(item) if item.inserted_at.elapsed() <= self.ttl => {
                    return Some(item.value.clone());
                }
                Some(_) => {} // expired — fall through and evict below
                None => return None,
            }
        }
        // Evict under the write lock, re-checking freshness: another task
        // may have refreshed the key between the two lock acquisitions.
        let mut guard = self.data.write().await;
        if let Some(item) = guard.get(key) {
            if item.inserted_at.elapsed() > self.ttl {
                guard.remove(key);
            } else {
                return Some(item.value.clone());
            }
        }
        None
    }

    /// Stores a value under the given key.
    pub async fn set(&self, key: String, value: Value) {
        if self.ttl.is_zero() {
            return;
        }
        let mut guard = self.data.write().await;
        guard.insert(
            key,
            CachedItem {
                value,
                inserted_at: Instant::now(),
            },
        );
    }

    /// Removes all entries for the given collection (after create/update/delete).
    /// Invalidates all locales for this collection (e:collection:*, l:collection:*).
    pub async fn invalidate_collection(&self, collection: &str) {
        // Trailing ':' prevents "post" from also matching "posts".
        let prefix_e = format!("e:{}:", collection);
        let prefix_l = format!("l:{}:", collection);
        let mut guard = self.data.write().await;
        guard.retain(|k, _| !k.starts_with(&prefix_e) && !k.starts_with(&prefix_l));
    }
}
/// Cache key for a single entry (incl. _resolve and optional _locale).
///
/// Shape: `e:{collection}:{slug}:{resolve_key}:{locale}` where a missing
/// locale collapses to the empty string.
pub fn entry_cache_key(collection: &str, slug: &str, resolve_key: &str, locale: Option<&str>) -> String {
    format!(
        "e:{collection}:{slug}:{resolve_key}:{loc}",
        loc = locale.unwrap_or_default()
    )
}
/// Cache key for a list (collection + hash of query params + optional locale).
///
/// Shape: `l:{collection}:{locale}:{query_hash}` where a missing locale
/// collapses to the empty string.
pub fn list_cache_key(collection: &str, query_hash: u64, locale: Option<&str>) -> String {
    format!(
        "l:{collection}:{loc}:{query_hash}",
        loc = locale.unwrap_or_default()
    )
}
// ---------------------------------------------------------------------------
// Transform Cache (image response bytes + content-type)
// ---------------------------------------------------------------------------
/// A cached transformed image: raw response bytes plus their content type.
struct CachedImage {
    // Encoded image payload as returned to the client.
    bytes: Vec<u8>,
    // MIME type sent in the Content-Type header.
    content_type: String,
    // Insertion timestamp; compared against the cache TTL on read.
    inserted_at: Instant,
}
/// In-memory cache for GET /api/transform (same URL + params = cache hit).
pub struct TransformCache {
    // Key -> cached image; RwLock because reads dominate writes.
    data: RwLock<HashMap<String, CachedImage>>,
    // Time-to-live for every entry; zero disables the cache entirely.
    ttl: Duration,
}
impl TransformCache {
    /// New cache with TTL in seconds (0 = caching disabled).
    pub fn new(ttl_secs: u64) -> Self {
        Self {
            data: RwLock::new(HashMap::new()),
            ttl: Duration::from_secs(ttl_secs),
        }
    }

    /// Returns `(bytes, content_type)` if present and not expired.
    ///
    /// Fast path holds only the read lock, so concurrent readers do not
    /// serialize on cache hits; the write lock is taken solely to evict
    /// an expired entry.
    pub async fn get(&self, key: &str) -> Option<(Vec<u8>, String)> {
        if self.ttl.is_zero() {
            return None;
        }
        {
            let guard = self.data.read().await;
            match guard.get(key) {
                Some(item) if item.inserted_at.elapsed() <= self.ttl => {
                    return Some((item.bytes.clone(), item.content_type.clone()));
                }
                Some(_) => {} // expired — fall through and evict below
                None => return None,
            }
        }
        // Evict under the write lock, re-checking freshness: another task
        // may have refreshed the key between the two lock acquisitions.
        let mut guard = self.data.write().await;
        if let Some(item) = guard.get(key) {
            if item.inserted_at.elapsed() > self.ttl {
                guard.remove(key);
            } else {
                return Some((item.bytes.clone(), item.content_type.clone()));
            }
        }
        None
    }

    /// Stores the transformed image bytes and content type under `key`.
    pub async fn set(&self, key: String, bytes: Vec<u8>, content_type: String) {
        if self.ttl.is_zero() {
            return;
        }
        let mut guard = self.data.write().await;
        guard.insert(
            key,
            CachedImage {
                bytes,
                content_type,
                inserted_at: Instant::now(),
            },
        );
    }
}

60
src/api/error.rs Normal file
View File

@@ -0,0 +1,60 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use serde_json::json;
/// API error type that maps to appropriate HTTP status codes.
#[derive(Debug)]
pub enum ApiError {
    /// 404 — the requested collection or entry does not exist.
    NotFound(String),
    /// 400 — malformed request or invalid input.
    BadRequest(String),
    /// 401 — missing or invalid API key.
    Unauthorized(String),
    /// 409 — the resource already exists.
    Conflict(String),
    /// 500 — unexpected server-side failure.
    Internal(String),
    /// 400 — schema validation failed; one message per violation,
    /// serialized under the "errors" key instead of "error".
    ValidationFailed(Vec<String>),
}
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
let (status, body) = match self {
ApiError::NotFound(msg) => (
StatusCode::NOT_FOUND,
json!({ "error": msg }),
),
ApiError::BadRequest(msg) => (
StatusCode::BAD_REQUEST,
json!({ "error": msg }),
),
ApiError::Unauthorized(msg) => (
StatusCode::UNAUTHORIZED,
json!({ "error": msg }),
),
ApiError::Conflict(msg) => (
StatusCode::CONFLICT,
json!({ "error": msg }),
),
ApiError::Internal(msg) => (
StatusCode::INTERNAL_SERVER_ERROR,
json!({ "error": msg }),
),
ApiError::ValidationFailed(errors) => (
StatusCode::BAD_REQUEST,
json!({ "errors": errors }),
),
};
(status, axum::Json(body)).into_response()
}
}
impl From<anyhow::Error> for ApiError {
    /// Classify an opaque error by its message text: "not found" /
    /// "does not exist" → 404, "already exists" → 409, otherwise 500.
    fn from(err: anyhow::Error) -> Self {
        let msg = err.to_string();
        let missing = msg.contains("not found") || msg.contains("does not exist");
        if missing {
            return ApiError::NotFound(msg);
        }
        if msg.contains("already exists") {
            return ApiError::Conflict(msg);
        }
        ApiError::Internal(msg)
    }
}

666
src/api/handlers.rs Normal file
View File

@@ -0,0 +1,666 @@
use std::collections::HashMap;
use std::hash::{DefaultHasher, Hash, Hasher};
use std::path::PathBuf;
use std::sync::Arc;
use axum::extract::{Path, Query, State};
use axum::http::header::HeaderValue;
use axum::http::{HeaderMap, StatusCode};
use axum::response::{IntoResponse, Response};
use axum::Json;
use serde_json::{json, Value};
use tokio::sync::RwLock;
use crate::schema::types::{SchemaDefinition, VALID_FIELD_TYPES};
use crate::schema::validator;
use crate::schema::SchemaRegistry;
use crate::store::query::QueryParams;
use crate::store::ContentStore;
use crate::store::slug;
use super::auth;
use super::cache::{self, ContentCache, TransformCache};
use super::error::ApiError;
use super::response::{format_references, parse_resolve};
/// Shared application state. Registry and OpenAPI spec are behind RwLock for hot-reload.
/// Store is selected via RUSTYCMS_STORE=file|sqlite.
/// If api_key is set (RUSTYCMS_API_KEY), POST/PUT/DELETE require it (Bearer or X-API-Key).
/// When locales is set (RUSTYCMS_LOCALES e.g. "de,en"), API accepts _locale query param.
pub struct AppState {
    /// Loaded content-type schemas; behind RwLock so hot-reload can swap them.
    pub registry: Arc<RwLock<SchemaRegistry>>,
    /// Content persistence backend (trait object: file or sqlite).
    pub store: Arc<dyn ContentStore>,
    /// Generated OpenAPI document served at /api-docs/openapi.json.
    pub openapi_spec: Arc<RwLock<serde_json::Value>>,
    /// Path to types directory (e.g. ./types) for schema file writes.
    pub types_dir: PathBuf,
    /// Optional write-protection key (RUSTYCMS_API_KEY).
    pub api_key: Option<String>,
    /// Response cache for the content API (lists and single entries).
    pub cache: Arc<ContentCache>,
    /// Cache for transformed image bytes served by /api/transform.
    pub transform_cache: Arc<TransformCache>,
    /// HTTP client; presumably used to fetch external images for
    /// /api/transform — confirm against src/api/transform.rs.
    pub http_client: reqwest::Client,
    /// If set, first element is default locale. Enables content/{locale}/{collection}/.
    pub locales: Option<Vec<String>>,
}
/// Resolve effective locale from query _locale and state.locales. Returns None when i18n is off.
///
/// An unknown or blank `_locale` falls back to the first configured
/// locale (the default).
pub fn effective_locale(params: &HashMap<String, String>, locales: Option<&[String]>) -> Option<String> {
    // i18n disabled: no locale list, or an empty one.
    let locales = locales.filter(|l| !l.is_empty())?;
    let requested = params
        .get("_locale")
        .map(|s| s.trim())
        .filter(|s| !s.is_empty());
    let chosen = match requested {
        Some(loc) if locales.iter().any(|l| l == loc) => loc,
        _ => locales[0].as_str(),
    };
    Some(chosen.to_string())
}
// ---------------------------------------------------------------------------
// GET /health
// ---------------------------------------------------------------------------
/// GET /health — liveness probe; always answers 200 with {"status":"ok"}.
pub async fn health() -> (StatusCode, Json<Value>) {
    let body = json!({ "status": "ok" });
    (StatusCode::OK, Json(body))
}
// ---------------------------------------------------------------------------
// GET /api/collections
// ---------------------------------------------------------------------------
/// GET /api/collections — summarize all registered content collections,
/// skipping reusable partials.
pub async fn list_collections(
    State(state): State<Arc<AppState>>,
) -> Json<Value> {
    let registry = state.registry.read().await;
    let mut collections = Vec::new();
    for (name, schema) in registry.list().iter() {
        // Reusable partials are building blocks, not content collections.
        if schema.reusable {
            continue;
        }
        collections.push(json!({
            "name": name,
            "field_count": schema.fields.len(),
            "extends": schema.extends,
            "strict": schema.strict,
            "description": schema.description,
            "tags": schema.tags,
            "category": schema.category,
        }));
    }
    Json(json!({ "collections": collections }))
}
// ---------------------------------------------------------------------------
// POST /api/schemas Create new content type (schema)
// ---------------------------------------------------------------------------
/// Content type name: lowercase letters, digits, underscore only.
/// Length must be 1..=64 bytes.
fn is_valid_schema_name(name: &str) -> bool {
    let allowed = |c: char| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_';
    (1..=64).contains(&name.len()) && name.chars().all(allowed)
}
/// POST /api/schemas — create a new content type (schema file).
///
/// Validates the name and field types, refuses to overwrite an existing
/// schema (409 Conflict), writes `<types_dir>/<name>.json` and ensures the
/// collection's storage location exists. Requires the API key if one is set.
pub async fn create_schema(
    State(state): State<Arc<AppState>>,
    headers: HeaderMap,
    Json(schema): Json<SchemaDefinition>,
) -> Result<(StatusCode, Json<Value>), ApiError> {
    auth::require_api_key(state.api_key.as_ref(), &headers)?;
    if !is_valid_schema_name(&schema.name) {
        return Err(ApiError::BadRequest(
            "name: lowercase letters, digits and underscore only, max 64 chars".to_string(),
        ));
    }
    if schema.reusable {
        return Err(ApiError::BadRequest(
            "reusable must not be true for new content types".to_string(),
        ));
    }
    // Reject unknown field types, collecting every violation at once.
    let mut errors = Vec::new();
    for (field_name, fd) in &schema.fields {
        if !VALID_FIELD_TYPES.contains(&fd.field_type.as_str()) {
            errors.push(format!(
                "Field '{}': unknown type '{}'",
                field_name, fd.field_type
            ));
        }
    }
    if !errors.is_empty() {
        return Err(ApiError::ValidationFailed(errors));
    }
    let path = state.types_dir.join(format!("{}.json", schema.name));
    // POST creates; silently overwriting an existing content type would
    // destroy its definition, so report a conflict instead.
    if tokio::fs::try_exists(&path).await.unwrap_or(false) {
        return Err(ApiError::Conflict(format!(
            "Content type '{}' already exists",
            schema.name
        )));
    }
    let contents =
        serde_json::to_string_pretty(&schema).map_err(|e| ApiError::Internal(e.to_string()))?;
    tokio::fs::write(&path, contents)
        .await
        .map_err(|e| ApiError::Internal(format!("Failed to write schema file: {}", e)))?;
    state
        .store
        .ensure_collection_dir(&schema.name)
        .await
        .map_err(ApiError::from)?;
    tracing::info!("Schema created: {} ({})", schema.name, path.display());
    Ok((
        StatusCode::CREATED,
        Json(serde_json::to_value(&schema).unwrap()),
    ))
}
// ---------------------------------------------------------------------------
// GET /api/collections/:collection
// ---------------------------------------------------------------------------
/// GET /api/collections/:collection — return the schema definition.
/// Reusable partials are hidden and answer 404 like unknown collections.
pub async fn get_collection_schema(
    State(state): State<Arc<AppState>>,
    Path(collection): Path<String>,
) -> Result<Json<Value>, ApiError> {
    let registry = state.registry.read().await;
    let schema = registry
        .get(&collection)
        .ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?;
    if schema.reusable {
        return Err(ApiError::NotFound(format!(
            "Collection '{}' is a reusable partial, not a content collection",
            collection
        )));
    }
    // Serialize from the borrowed schema; no clone needed.
    Ok(Json(serde_json::to_value(schema).unwrap()))
}
// ---------------------------------------------------------------------------
// GET /api/collections/:collection/slug-check?slug=xxx&exclude=yyy&_locale=zzz
// ---------------------------------------------------------------------------
/// Response: { valid, normalized, available, error? }
/// - valid: slug format is ok (after normalize)
/// - normalized: normalized slug string
/// - available: not used by another entry (or equals exclude when editing)
pub async fn slug_check(
    State(state): State<Arc<AppState>>,
    Path(collection): Path<String>,
    Query(params): Query<HashMap<String, String>>,
) -> Result<Json<Value>, ApiError> {
    // Missing or whitespace-only ?slug= is treated as the empty string.
    let slug_raw = params
        .get("slug")
        .filter(|s| !s.trim().is_empty())
        .cloned()
        .unwrap_or_default();
    let valid = slug::validate_slug(&slug_raw).is_ok();
    let normalized = slug::normalize_slug(&slug_raw);
    // Pick the most specific human-readable reason for an invalid slug.
    let error = if !valid {
        if slug_raw.is_empty() {
            Some("Slug must not be empty.".to_string())
        } else if normalized.is_empty() {
            Some("Slug contains invalid characters. Allowed: lowercase, digits, hyphens (a-z, 0-9, -).".to_string())
        } else if slug_raw.contains('/') || slug_raw.contains('\\') || slug_raw.contains("..") {
            Some("Slug must not contain path characters (/, \\, ..).".to_string())
        } else {
            Some("Invalid slug.".to_string())
        }
    } else {
        None
    };
    let available = if !valid || normalized.is_empty() {
        false
    } else {
        // ?exclude= carries the entry's current slug while editing so the
        // entry does not collide with itself.
        let exclude = params.get("exclude").map(|s| s.trim()).filter(|s| !s.is_empty());
        let locale = effective_locale(&params, state.locales.as_deref());
        let locale_ref = locale.as_deref();
        let registry = state.registry.read().await;
        let _schema = match registry.get(&collection) {
            Some(s) if !s.reusable => s,
            _ => {
                // Unknown (or partial-only) collection: answer 200 with
                // available=false so the UI can show an inline message.
                return Ok(Json(json!({
                    "valid": valid,
                    "normalized": normalized,
                    "available": false,
                    "error": "Collection not found."
                })));
            }
        };
        // Release the registry read lock before awaiting on the store.
        drop(registry);
        if exclude.as_deref() == Some(normalized.as_str()) {
            true
        } else {
            let exists = state
                .store
                .get(&collection, &normalized, locale_ref)
                .await
                .map_err(ApiError::from)?
                .is_some();
            !exists
        }
    };
    Ok(Json(json!({
        "valid": valid,
        "normalized": normalized,
        "available": available,
        "error": error,
    })))
}
// ---------------------------------------------------------------------------
// GET /api/content/:collection
// ---------------------------------------------------------------------------
/// GET /api/content/:collection — list entries.
///
/// Responses are cached per collection + locale + hash of the full query
/// string; the cache is invalidated by any write to the collection.
pub async fn list_entries(
    State(state): State<Arc<AppState>>,
    Path(collection): Path<String>,
    Query(params): Query<HashMap<String, String>>,
) -> Result<Json<Value>, ApiError> {
    // Clone the schema so the registry read lock is not held across awaits.
    let schema = {
        let registry = state.registry.read().await;
        registry
            .get(&collection)
            .ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?
            .clone()
    };
    if schema.reusable {
        return Err(ApiError::NotFound(format!(
            "Collection '{}' is a reusable partial, not a content collection",
            collection
        )));
    }
    let locale = effective_locale(&params, state.locales.as_deref());
    let locale_ref = locale.as_deref();
    // Hash all query params in sorted key order so the cache key is stable
    // regardless of parameter order in the URL.
    let mut keys: Vec<_> = params.keys().collect();
    keys.sort();
    let mut hasher = DefaultHasher::new();
    for k in &keys {
        k.hash(&mut hasher);
        params[*k].hash(&mut hasher);
    }
    let query_hash = hasher.finish();
    let cache_key = cache::list_cache_key(&collection, query_hash, locale_ref);
    if let Some(cached) = state.cache.get(&cache_key).await {
        return Ok(Json(cached));
    }
    let entries = state.store.list(&collection, locale_ref).await.map_err(ApiError::from)?;
    // Parse _resolve before `params` is consumed by QueryParams below.
    let resolve = parse_resolve(params.get("_resolve").map(|s| s.as_str()));
    let query = QueryParams::from_map(params);
    let mut result = query.apply(entries);
    // Rewrite reference fields in place on each returned item;
    // std::mem::take avoids cloning the Value being replaced.
    for item in result.items.iter_mut() {
        *item = format_references(
            std::mem::take(item),
            &schema,
            state.store.as_ref(),
            resolve.as_ref(),
            locale_ref,
        )
        .await;
    }
    let response_value = serde_json::to_value(&result).unwrap();
    state.cache.set(cache_key, response_value.clone()).await;
    Ok(Json(response_value))
}
// ---------------------------------------------------------------------------
// GET /api/content/:collection/:slug
// ---------------------------------------------------------------------------
pub async fn get_entry(
State(state): State<Arc<AppState>>,
Path((collection, slug)): Path<(String, String)>,
Query(params): Query<HashMap<String, String>>,
headers: HeaderMap,
) -> Result<Response, ApiError> {
let schema = {
let registry = state.registry.read().await;
registry
.get(&collection)
.ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?
.clone()
};
if schema.reusable {
return Err(ApiError::NotFound(format!(
"Collection '{}' is a reusable partial, not a content collection",
collection
)));
}
let locale = effective_locale(&params, state.locales.as_deref());
let locale_ref = locale.as_deref();
let resolve_key = params.get("_resolve").map(|s| s.as_str()).unwrap_or("");
let cache_key = cache::entry_cache_key(&collection, &slug, resolve_key, locale_ref);
if let Some(ref cached) = state.cache.get(&cache_key).await {
let json_str = serde_json::to_string(cached).unwrap_or_default();
let mut hasher = DefaultHasher::new();
json_str.hash(&mut hasher);
let etag_plain = format!("\"{:016x}\"", hasher.finish());
let etag_header =
HeaderValue::from_str(&etag_plain).unwrap_or_else(|_| HeaderValue::from_static("\"0\""));
let if_none_match = headers
.get("if-none-match")
.and_then(|v| v.to_str().ok())
.map(|s| s.trim().trim_matches('"').to_string());
if if_none_match.as_deref() == Some(etag_plain.trim_matches('"')) {
return Ok((
StatusCode::NOT_MODIFIED,
[("ETag", etag_header.clone())],
)
.into_response());
}
return Ok((
StatusCode::OK,
[("ETag", etag_header)],
Json(cached.clone()),
)
.into_response());
}
let entry = state
.store
.get(&collection, &slug, locale_ref)
.await
.map_err(ApiError::from)?
.ok_or_else(|| {
ApiError::NotFound(format!("Entry '{}' not found in '{}'", slug, collection))
})?;
let resolve = parse_resolve(params.get("_resolve").map(|s| s.as_str()));
let formatted = format_references(
entry,
&schema,
state.store.as_ref(),
resolve.as_ref(),
locale_ref,
)
.await;
state
.cache
.set(cache_key, formatted.clone())
.await;
let json_str = serde_json::to_string(&formatted).unwrap_or_default();
let mut hasher = DefaultHasher::new();
json_str.hash(&mut hasher);
let etag_plain = format!("\"{:016x}\"", hasher.finish());
let etag_header =
HeaderValue::from_str(&etag_plain).unwrap_or_else(|_| HeaderValue::from_static("\"0\""));
let if_none_match = headers
.get("if-none-match")
.and_then(|v| v.to_str().ok())
.map(|s| s.trim().trim_matches('"').to_string());
if if_none_match.as_deref() == Some(etag_plain.trim_matches('"')) {
return Ok((
StatusCode::NOT_MODIFIED,
[("ETag", etag_header.clone())],
)
.into_response());
}
Ok((
StatusCode::OK,
[("ETag", etag_header)],
Json(formatted),
)
.into_response())
}
// ---------------------------------------------------------------------------
// POST /api/content/:collection
// ---------------------------------------------------------------------------
/// POST /api/content/:collection — create a new entry.
///
/// Pipeline: auth → schema lookup → _slug extraction/normalization →
/// defaults → schema validation → unique check → reference check →
/// persist → cache invalidation → return the stored entry.
pub async fn create_entry(
    State(state): State<Arc<AppState>>,
    Path(collection): Path<String>,
    Query(params): Query<HashMap<String, String>>,
    headers: HeaderMap,
    Json(mut body): Json<Value>,
) -> Result<(StatusCode, Json<Value>), ApiError> {
    auth::require_api_key(state.api_key.as_ref(), &headers)?;
    let locale = effective_locale(&params, state.locales.as_deref());
    let locale_ref = locale.as_deref();
    // Clone the schema so the registry read lock is not held across awaits.
    let schema = {
        let registry = state.registry.read().await;
        registry
            .get(&collection)
            .ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?
            .clone()
    };
    if schema.reusable {
        return Err(ApiError::NotFound(format!(
            "Collection '{}' is a reusable partial, not a content collection",
            collection
        )));
    }
    // Extract and normalize _slug from body
    let slug_raw = body
        .as_object()
        .and_then(|o| o.get("_slug"))
        .and_then(|v| v.as_str())
        .map(|s| s.to_string())
        .ok_or_else(|| ApiError::BadRequest("Field '_slug' is required".to_string()))?;
    slug::validate_slug(&slug_raw).map_err(ApiError::BadRequest)?;
    let slug = slug::normalize_slug(&slug_raw);
    // Remove _slug from content data
    if let Some(obj) = body.as_object_mut() {
        obj.remove("_slug");
    }
    // Apply defaults and auto-generated values
    validator::apply_defaults(&schema, &mut body);
    // Validate against schema (type checks, constraints, strict mode, …)
    let errors = validator::validate_content(&schema, &body);
    if !errors.is_empty() {
        let messages: Vec<String> = errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Unique constraint check (within same locale)
    let entries = state.store.list(&collection, locale_ref).await.map_err(ApiError::from)?;
    let unique_errors = validator::validate_unique(&schema, &body, None, &entries);
    if !unique_errors.is_empty() {
        let messages: Vec<String> = unique_errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Reference validation (blocking: we need sync closure; use tokio::task::block_in_place or spawn)
    // NOTE(review): block_in_place panics on a current_thread runtime —
    // confirm the server always starts the multi-thread Tokio runtime.
    let store = &state.store;
    let ref_errors = validator::validate_references(&schema, &body, &|coll, s| {
        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                store.get(coll, s, locale_ref).await.ok().flatten().is_some()
            })
        })
    });
    if !ref_errors.is_empty() {
        let messages: Vec<String> = ref_errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Persist to filesystem
    state
        .store
        .create(&collection, &slug, &body, locale_ref)
        .await
        .map_err(ApiError::from)?;
    state.cache.invalidate_collection(&collection).await;
    // Return created entry (with reference format)
    // unwrap: the entry was just created above, so re-reading it must succeed.
    let entry = state
        .store
        .get(&collection, &slug, locale_ref)
        .await
        .map_err(ApiError::from)?
        .unwrap();
    let formatted = format_references(entry, &schema, state.store.as_ref(), None, locale_ref).await;
    Ok((StatusCode::CREATED, Json(formatted)))
}
// ---------------------------------------------------------------------------
// PUT /api/content/:collection/:slug
// ---------------------------------------------------------------------------
/// PUT /api/content/:collection/:slug — replace an existing entry.
///
/// Pipeline: auth → schema lookup → load existing → readonly check →
/// schema validation → unique check (excluding self) → reference check →
/// persist → cache invalidation → return the stored entry.
pub async fn update_entry(
    State(state): State<Arc<AppState>>,
    Path((collection, slug)): Path<(String, String)>,
    Query(params): Query<HashMap<String, String>>,
    headers: HeaderMap,
    Json(mut body): Json<Value>,
) -> Result<Json<Value>, ApiError> {
    auth::require_api_key(state.api_key.as_ref(), &headers)?;
    let locale = effective_locale(&params, state.locales.as_deref());
    let locale_ref = locale.as_deref();
    // Clone the schema so the registry read lock is not held across awaits.
    let schema = {
        let registry = state.registry.read().await;
        registry
            .get(&collection)
            .ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?
            .clone()
    };
    if schema.reusable {
        return Err(ApiError::NotFound(format!(
            "Collection '{}' is a reusable partial, not a content collection",
            collection
        )));
    }
    // Remove _slug if present in body
    if let Some(obj) = body.as_object_mut() {
        obj.remove("_slug");
    }
    // Load existing content for readonly check
    let existing = state
        .store
        .get(&collection, &slug, locale_ref)
        .await
        .map_err(ApiError::from)?
        .ok_or_else(|| {
            ApiError::NotFound(format!("Entry '{}' not found in '{}'", slug, collection))
        })?;
    // Readonly violation check
    let readonly_errors = validator::check_readonly_violations(&schema, &existing, &body);
    if !readonly_errors.is_empty() {
        let messages: Vec<String> = readonly_errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Validate against schema
    let errors = validator::validate_content(&schema, &body);
    if !errors.is_empty() {
        let messages: Vec<String> = errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Unique constraint check (exclude self, within same locale)
    let entries = state.store.list(&collection, locale_ref).await.map_err(ApiError::from)?;
    let unique_errors = validator::validate_unique(&schema, &body, Some(&slug), &entries);
    if !unique_errors.is_empty() {
        let messages: Vec<String> = unique_errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Reference validation
    // NOTE(review): block_in_place panics on a current_thread runtime —
    // confirm the server always starts the multi-thread Tokio runtime.
    let store = &state.store;
    let ref_errors = validator::validate_references(&schema, &body, &|coll, s| {
        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                store.get(coll, s, locale_ref).await.ok().flatten().is_some()
            })
        })
    });
    if !ref_errors.is_empty() {
        let messages: Vec<String> = ref_errors.iter().map(|e| e.to_string()).collect();
        return Err(ApiError::ValidationFailed(messages));
    }
    // Persist to filesystem
    state
        .store
        .update(&collection, &slug, &body, locale_ref)
        .await
        .map_err(ApiError::from)?;
    state.cache.invalidate_collection(&collection).await;
    // Return updated entry (with reference format)
    // unwrap: the entry existed and was just updated, so re-reading succeeds.
    let entry = state
        .store
        .get(&collection, &slug, locale_ref)
        .await
        .map_err(ApiError::from)?
        .unwrap();
    let formatted = format_references(entry, &schema, state.store.as_ref(), None, locale_ref).await;
    Ok(Json(formatted))
}
// ---------------------------------------------------------------------------
// DELETE /api/content/:collection/:slug
// ---------------------------------------------------------------------------
/// DELETE /api/content/:collection/:slug — remove an entry and answer 204.
/// Requires the API key if one is configured; invalidates the collection's
/// response cache afterwards.
pub async fn delete_entry(
    State(state): State<Arc<AppState>>,
    Path((collection, slug)): Path<(String, String)>,
    Query(params): Query<HashMap<String, String>>,
    headers: HeaderMap,
) -> Result<StatusCode, ApiError> {
    auth::require_api_key(state.api_key.as_ref(), &headers)?;
    let locale = effective_locale(&params, state.locales.as_deref());
    // Reusable partials are not addressable as content; treat like unknown.
    let registry = state.registry.read().await;
    let schema = registry
        .get(&collection)
        .ok_or_else(|| ApiError::NotFound(format!("Collection '{}' not found", collection)))?;
    if schema.reusable {
        return Err(ApiError::NotFound(format!(
            "Collection '{}' is a reusable partial, not a content collection",
            collection
        )));
    }
    // Release the registry read lock before awaiting on the store.
    drop(registry);
    state
        .store
        .delete(&collection, &slug, locale.as_deref())
        .await
        .map_err(ApiError::from)?;
    state.cache.invalidate_collection(&collection).await;
    Ok(StatusCode::NO_CONTENT)
}

8
src/api/mod.rs Normal file
View File

@@ -0,0 +1,8 @@
pub mod auth;
pub mod cache;
pub mod error;
pub mod handlers;
pub mod openapi;
pub mod response;
pub mod routes;
pub mod transform;

604
src/api/openapi.rs Normal file
View File

@@ -0,0 +1,604 @@
//! Dynamic OpenAPI 3.0 spec generation from the SchemaRegistry.
//!
//! Instead of static annotations, the spec is built at startup based on
//! the loaded content type definitions. Each collection gets its own
//! paths and component schemas in the resulting OpenAPI document.
use std::sync::Arc;
use axum::extract::State;
use axum::response::{Html, IntoResponse, Response};
use axum::Json;
use axum::http::header;
use indexmap::IndexMap;
use serde_json::{json, Value};
use crate::schema::types::FieldDefinition;
use crate::schema::SchemaRegistry;
use super::handlers::AppState;
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/// GET /api and GET /api/ Living-Doc index with links to Swagger UI and overview.
pub async fn api_index() -> Html<&'static str> {
    // Static HTML constant compiled into the binary (defined elsewhere in this module).
    Html(API_INDEX_HTML)
}
/// GET /swagger-ui Serves the Swagger UI (loaded from CDN).
pub async fn swagger_ui() -> Html<&'static str> {
    // Static HTML shell constant (defined elsewhere in this module).
    Html(SWAGGER_HTML)
}
/// GET /api-docs/openapi.json Returns the generated OpenAPI spec.
/// Cache-Control: no-cache so Swagger UI always gets the latest after hot-reload.
pub async fn openapi_json(State(state): State<Arc<AppState>>) -> Response {
    // Clone under a short-lived read lock so the lock is not held while responding.
    let spec = state.openapi_spec.read().await.clone();
    let mut res = Json(spec).into_response();
    res.headers_mut().insert(
        header::CACHE_CONTROL,
        // Static, known-valid header value: no runtime parse/unwrap needed.
        header::HeaderValue::from_static("no-cache, no-store, must-revalidate"),
    );
    res
}
// ---------------------------------------------------------------------------
// Spec Generation
// ---------------------------------------------------------------------------
/// Generate a complete OpenAPI 3.0.3 spec from the schema registry.
pub fn generate_spec(registry: &SchemaRegistry, server_url: &str) -> Value {
let mut paths = serde_json::Map::new();
let mut schemas = serde_json::Map::new();
// ── Static: GET /api/collections ─────────────────────────────────────
paths.insert(
"/api/collections".to_string(),
json!({
"get": {
"summary": "List all collections",
"operationId": "listCollections",
"tags": ["Collections"],
"responses": {
"200": {
"description": "List of registered content type collections",
"content": { "application/json": { "schema": {
"type": "object",
"properties": {
"collections": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": { "type": "string" },
"field_count": { "type": "integer" },
"extends": { "type": "string", "nullable": true }
}
}
}
}
}}}
}
}
}
}),
);
// ── Per-Collection paths & schemas (skip reusable partials) ─────────────
for (name, schema) in registry.list().iter().filter(|(_, s)| !s.reusable) {
let tag = name.clone();
let pascal = to_pascal_case(name);
// Component schema for the content type (response)
let response_schema = fields_to_json_schema(&schema.fields, true);
schemas.insert(name.clone(), response_schema);
// Component schema for creation input (includes _slug, no readOnly _slug)
let input_schema = build_input_schema(&schema.fields);
schemas.insert(format!("{}_input", name), input_schema);
// GET /api/collections/:collection
paths.insert(
format!("/api/collections/{}", name),
json!({
"get": {
"summary": format!("Get schema definition for '{}'", name),
"operationId": format!("get{}Schema", pascal),
"tags": ["Collections"],
"responses": {
"200": { "description": "Schema definition" },
"404": { "description": "Collection not found" }
}
}
}),
);
// GET + POST /api/content/:collection
paths.insert(
format!("/api/content/{}", name),
json!({
"get": {
"summary": format!("List '{}' entries", name),
"operationId": format!("list{}", pascal),
"tags": [tag],
"parameters": query_parameters(&schema.fields),
"responses": {
"200": {
"description": "Paginated list of entries",
"content": { "application/json": { "schema": {
"type": "object",
"properties": {
"items": { "type": "array", "items": { "$ref": format!("#/components/schemas/{}", name) } },
"total": { "type": "integer" },
"page": { "type": "integer" },
"per_page": { "type": "integer" },
"total_pages": { "type": "integer" }
}
}}}
}
}
},
"post": {
"summary": format!("Create a new '{}' entry", name),
"operationId": format!("create{}", pascal),
"tags": [tag],
"parameters": content_get_query_params(),
"requestBody": {
"required": true,
"content": { "application/json": {
"schema": { "$ref": format!("#/components/schemas/{}_input", name) }
}}
},
"responses": {
"201": {
"description": "Entry created",
"content": { "application/json": {
"schema": { "$ref": format!("#/components/schemas/{}", name) }
}}
},
"400": { "description": "Validation error" },
"409": { "description": "Entry already exists" }
}
}
}),
);
// GET + PUT + DELETE /api/content/:collection/:slug
let slug_param = json!({
"name": "slug", "in": "path", "required": true,
"schema": { "type": "string" },
"description": "Entry identifier (filename without extension)"
});
let mut get_params = vec![slug_param.clone()];
get_params.extend(content_get_query_params().as_array().cloned().unwrap_or_default());
let get_params = json!(get_params);
let mut put_params = vec![slug_param.clone()];
put_params.extend(content_get_query_params().as_array().cloned().unwrap_or_default());
let put_params = json!(put_params);
let mut delete_params = vec![slug_param.clone()];
delete_params.extend(content_get_query_params().as_array().cloned().unwrap_or_default());
let delete_params = json!(delete_params);
paths.insert(
format!("/api/content/{}/{{slug}}", name),
json!({
"get": {
"summary": format!("Get '{}' entry by slug", name),
"operationId": format!("get{}", pascal),
"tags": [tag],
"parameters": get_params,
"responses": {
"200": {
"description": "The entry",
"content": { "application/json": {
"schema": { "$ref": format!("#/components/schemas/{}", name) }
}}
},
"404": { "description": "Entry not found" }
}
},
"put": {
"summary": format!("Update '{}' entry", name),
"operationId": format!("update{}", pascal),
"tags": [tag],
"parameters": put_params,
"requestBody": {
"required": true,
"content": { "application/json": {
"schema": { "$ref": format!("#/components/schemas/{}", name) }
}}
},
"responses": {
"200": {
"description": "Entry updated",
"content": { "application/json": {
"schema": { "$ref": format!("#/components/schemas/{}", name) }
}}
},
"400": { "description": "Validation error" },
"404": { "description": "Entry not found" }
}
},
"delete": {
"summary": format!("Delete '{}' entry", name),
"operationId": format!("delete{}", pascal),
"tags": [tag],
"parameters": delete_params,
"responses": {
"204": { "description": "Entry deleted" },
"404": { "description": "Entry not found" }
}
}
}),
);
}
// ── GET /api/transform (Image transformation) ─────────────────────────
paths.insert(
"/api/transform".to_string(),
json!({
"get": {
"summary": "Transform image from external URL",
"description": "Fetches an image from the given URL and returns it resized/cropped. Supports JPEG, PNG, WebP, AVIF output.",
"operationId": "transformImage",
"tags": ["Transform"],
"parameters": [
{ "name": "url", "in": "query", "required": true, "schema": { "type": "string", "format": "uri" }, "description": "External image URL" },
{ "name": "w", "in": "query", "required": false, "schema": { "type": "integer", "minimum": 1 }, "description": "Target width (alias: width)" },
{ "name": "h", "in": "query", "required": false, "schema": { "type": "integer", "minimum": 1 }, "description": "Target height (alias: height)" },
{ "name": "ar", "in": "query", "required": false, "schema": { "type": "string", "example": "1:1" }, "description": "Aspect ratio before resize, e.g. 1:1 or 16:9 (center crop)" },
{ "name": "fit", "in": "query", "required": false, "schema": { "type": "string", "enum": ["fill", "contain", "cover"], "default": "contain" }, "description": "fill = exact w×h, contain = fit inside, cover = fill with crop" },
{ "name": "format", "in": "query", "required": false, "schema": { "type": "string", "enum": ["jpeg", "png", "webp", "avif"], "default": "jpeg" }, "description": "Output format" },
{ "name": "quality", "in": "query", "required": false, "schema": { "type": "integer", "minimum": 1, "maximum": 100, "default": 85 }, "description": "JPEG quality (1100)" }
],
"responses": {
"200": {
"description": "Transformed image",
"content": {
"image/jpeg": { "schema": { "type": "string", "format": "binary" } },
"image/png": { "schema": { "type": "string", "format": "binary" } },
"image/webp": { "schema": { "type": "string", "format": "binary" } },
"image/avif": { "schema": { "type": "string", "format": "binary" } }
}
},
"400": { "description": "Invalid URL or not a valid image" },
"500": { "description": "Failed to fetch or process image" }
}
}
}),
);
// ── Assemble the full spec ───────────────────────────────────────────
json!({
"openapi": "3.0.3",
"info": {
"title": "RustyCMS API",
"description": "File-based Headless CMS REST API.\n\nContent types and their fields are defined in `types/*.json5`.\nContent is stored as flat files under `content/<collection>/<slug>.json5`.",
"version": "0.1.0"
},
"servers": [
{ "url": server_url, "description": "Local development" }
],
"tags": build_tags(registry),
"paths": paths,
"components": { "schemas": schemas }
})
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Build the JSON Schema (`type: object`) for a set of field definitions.
///
/// When `include_slug` is true a read-only `_slug` property is added so the
/// output schema describes entries as returned by the API. Fields that are
/// required but auto-generated are excluded from the `required` list, since
/// the server fills them in.
fn fields_to_json_schema(
    fields: &IndexMap<String, FieldDefinition>,
    include_slug: bool,
) -> Value {
    let mut properties = serde_json::Map::new();
    if include_slug {
        properties.insert(
            "_slug".to_string(),
            json!({ "type": "string", "description": "Entry identifier (filename)", "readOnly": true }),
        );
    }
    for (name, fd) in fields {
        properties.insert(name.clone(), field_to_json_schema(fd));
    }
    // A field is listed as required only when the caller must supply it.
    let required: Vec<Value> = fields
        .iter()
        .filter(|(_, fd)| fd.required && !fd.auto)
        .map(|(name, _)| json!(name))
        .collect();
    let mut schema = json!({ "type": "object", "properties": properties });
    if !required.is_empty() {
        schema["required"] = json!(required);
    }
    schema
}
/// Build the input (creation) schema: `_slug` is always present and required,
/// while auto-generated and readonly fields are omitted entirely — the client
/// cannot set them, so advertising them in the request body would mislead.
fn build_input_schema(fields: &IndexMap<String, FieldDefinition>) -> Value {
    let mut properties = serde_json::Map::new();
    properties.insert(
        "_slug".to_string(),
        json!({ "type": "string", "description": "URL slug (used as filename)" }),
    );
    let mut required = vec![json!("_slug")];
    // Only user-settable fields appear in the input schema.
    for (name, fd) in fields.iter().filter(|(_, fd)| !fd.auto && !fd.readonly) {
        properties.insert(name.clone(), field_to_json_schema(fd));
        if fd.required {
            required.push(json!(name));
        }
    }
    json!({ "type": "object", "properties": properties, "required": required })
}
/// Convert a single FieldDefinition to a JSON Schema property,
/// including all constraints (minLength, max, pattern, nullable, …).
///
/// Recurses through `items` (arrays) and `fields` (nested objects);
/// any unrecognized field type falls back to a plain string schema.
fn field_to_json_schema(fd: &FieldDefinition) -> Value {
    // Base schema derived from the declared field type.
    let mut schema = match fd.field_type.as_str() {
        "string" => json!({ "type": "string" }),
        // Rich-text variants are still transported as plain strings.
        "richtext" | "html" | "markdown" => json!({ "type": "string" }),
        "number" => json!({ "type": "number" }),
        "integer" => json!({ "type": "integer" }),
        "boolean" => json!({ "type": "boolean" }),
        "datetime" => json!({ "type": "string", "format": "date-time" }),
        "array" => {
            // Recurse into the item definition when one is declared.
            if let Some(ref items) = fd.items {
                json!({ "type": "array", "items": field_to_json_schema(items) })
            } else {
                json!({ "type": "array" })
            }
        }
        "object" => {
            // Nested objects reuse the field-set builder (no _slug injected).
            if let Some(ref nested) = fd.fields {
                fields_to_json_schema(nested, false)
            } else {
                json!({ "type": "object" })
            }
        }
        "reference" => {
            // References are serialized as the target entry's slug (a string);
            // the description documents which collection(s) are valid targets.
            let desc = if let Some(ref list) = fd.collections {
                if list.is_empty() {
                    "Reference (slug)".to_string()
                } else {
                    format!("Reference (slug) to one of: {}", list.join(", "))
                }
            } else if let Some(ref c) = fd.collection {
                format!("Reference (slug) to collection '{}'", c)
            } else {
                "Reference (slug)".to_string()
            };
            json!({ "type": "string", "description": desc })
        },
        // Unknown/custom field types degrade gracefully to a string schema.
        _ => json!({ "type": "string" }),
    };
    // ── Description ─────────────────────────────────────────────────────
    if let Some(ref desc) = fd.description {
        schema["description"] = json!(desc);
    }
    // ── Enum / default ──────────────────────────────────────────────────
    if let Some(ref ev) = fd.enum_values {
        schema["enum"] = json!(ev);
    }
    if let Some(ref dv) = fd.default {
        schema["default"] = dv.clone();
    }
    // ── Nullable (OpenAPI 3.0 keyword, not JSON Schema `type: [..,null]`) ─
    if fd.nullable {
        schema["nullable"] = json!(true);
    }
    // ── Readonly ────────────────────────────────────────────────────────
    if fd.readonly {
        schema["readOnly"] = json!(true);
    }
    // ── String constraints ──────────────────────────────────────────────
    if let Some(v) = fd.min_length {
        schema["minLength"] = json!(v);
    }
    if let Some(v) = fd.max_length {
        schema["maxLength"] = json!(v);
    }
    if let Some(ref p) = fd.pattern {
        schema["pattern"] = json!(p);
    }
    // ── Number constraints ──────────────────────────────────────────────
    if let Some(v) = fd.min {
        schema["minimum"] = json!(v);
    }
    if let Some(v) = fd.max {
        schema["maximum"] = json!(v);
    }
    // ── Array constraints ───────────────────────────────────────────────
    if let Some(v) = fd.min_items {
        schema["minItems"] = json!(v);
    }
    if let Some(v) = fd.max_items {
        schema["maxItems"] = json!(v);
    }
    schema
}
/// Common query parameters for content GET (list + get by slug): _resolve, _locale.
fn content_get_query_params() -> Value {
json!([
{ "name": "_resolve", "in": "query", "required": false,
"schema": { "type": "string" },
"description": "Resolve references: 'all' or comma-separated field names (e.g. row1Content,topFullwidthBanner). Resolved entries are embedded instead of { _type, _slug }." },
{ "name": "_locale", "in": "query", "required": false,
"schema": { "type": "string", "example": "de" },
"description": "Locale for content (e.g. de, en). Only used when RUSTYCMS_LOCALES is set. Default = first locale. References are resolved in the same locale." }
])
}
/// Generate query parameters for the list endpoint: pagination/sorting,
/// the common content GET params, and one filter parameter per schema field.
fn query_parameters(fields: &IndexMap<String, FieldDefinition>) -> Value {
    // Fixed pagination / sorting parameters.
    let mut params = vec![
        json!({ "name": "_sort", "in": "query", "schema": { "type": "string" }, "description": "Field name to sort by" }),
        json!({ "name": "_order", "in": "query", "schema": { "type": "string", "enum": ["asc", "desc"] }, "description": "Sort order (default: asc)" }),
        json!({ "name": "_page", "in": "query", "schema": { "type": "integer", "default": 1 }, "description": "Page number (1-indexed)" }),
        json!({ "name": "_per_page", "in": "query", "schema": { "type": "integer", "default": 50 }, "description": "Items per page" }),
    ];
    // Shared _resolve / _locale parameters.
    params.extend(content_get_query_params().as_array().cloned().unwrap_or_default());
    // One filter parameter per schema field, typed to match the field.
    params.extend(fields.iter().map(|(name, fd)| {
        let schema_type = match fd.field_type.as_str() {
            "number" => "number",
            "integer" => "integer",
            "boolean" => "boolean",
            _ => "string",
        };
        json!({
            "name": name,
            "in": "query",
            "required": false,
            "schema": { "type": schema_type },
            "description": format!("Filter by {}", name)
        })
    }));
    json!(params)
}
/// Build the OpenAPI tags array: the fixed "Collections" and "Transform"
/// tags, followed by one tag per non-reusable content type.
fn build_tags(registry: &SchemaRegistry) -> Value {
    let mut tags = vec![
        json!({ "name": "Collections", "description": "Schema / type management" }),
        json!({ "name": "Transform", "description": "Image transformation from external URL (resize, crop, format)" }),
    ];
    // Reusable (embedded-only) types get no tag of their own.
    tags.extend(
        registry
            .list()
            .iter()
            .filter(|(_, s)| !s.reusable)
            .map(|(name, schema)| {
                let desc = if let Some(ref ext) = schema.extends {
                    format!("Content type '{}' (extends {})", name, ext.names().join(", "))
                } else {
                    format!("Content type '{}'", name)
                };
                json!({ "name": name, "description": desc })
            }),
    );
    json!(tags)
}
/// Convert a snake_case string to PascalCase.
///
/// Each `_`-separated part has its first character uppercased; empty parts
/// (from leading/trailing/double underscores) contribute nothing.
fn to_pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for part in s.split('_') {
        let mut chars = part.chars();
        if let Some(first) = chars.next() {
            // `to_uppercase` may yield more than one char (e.g. 'ß' → "SS").
            out.extend(first.to_uppercase());
            out.push_str(chars.as_str());
        }
    }
    out
}
// ---------------------------------------------------------------------------
// API Index (GET /api, GET /api/) Living Documentation entry point
// ---------------------------------------------------------------------------
/// Static HTML served at `GET /api` and `GET /api/`: a human-readable entry
/// point linking to Swagger UI, the raw OpenAPI spec, and listing all
/// endpoints. No templating — served verbatim.
const API_INDEX_HTML: &str = r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>RustyCMS API</title>
<style>
body { font-family: system-ui, sans-serif; max-width: 42rem; margin: 2rem auto; padding: 0 1rem; color: #1a1a1a; line-height: 1.5; }
h1 { font-size: 1.5rem; margin-bottom: 0.5rem; }
p { color: #444; margin-bottom: 1.5rem; }
ul { list-style: none; padding: 0; margin: 0 0 1.5rem 0; }
li { padding: 0.35rem 0; border-bottom: 1px solid #eee; }
li:last-child { border-bottom: none; }
a { color: #0066cc; text-decoration: none; }
a:hover { text-decoration: underline; }
.card { background: #f6f8fa; border-radius: 8px; padding: 1rem 1.25rem; margin-bottom: 1rem; }
.card h2 { font-size: 1rem; margin: 0 0 0.5rem 0; font-weight: 600; }
.card p { margin: 0; color: #555; font-size: 0.9rem; }
code { background: #eee; padding: 0.15em 0.4em; border-radius: 4px; font-size: 0.9em; }
</style>
</head>
<body>
<h1>RustyCMS API</h1>
<p>Headless CMS REST API. Schema-first, content as JSON5, optional SQLite.</p>
<div class="card">
<h2>Living Documentation</h2>
<p><a href="/swagger-ui">Swagger UI</a> Interactive API documentation (all endpoints, try-it-out).</p>
<p><a href="/api-docs/openapi.json">OpenAPI 3.0 Spec</a> Machine-readable specification (JSON).</p>
</div>
<h2>Endpoint Overview</h2>
<ul>
<li><code>GET</code> <a href="/api/collections">/api/collections</a> All content types</li>
<li><code>GET</code> /api/collections/:type Schema for a type</li>
<li><code>GET</code> /api/content/:type List entries</li>
<li><code>GET</code> /api/content/:type/:slug Get one entry</li>
<li><code>POST</code> /api/content/:type Create entry</li>
<li><code>PUT</code> /api/content/:type/:slug Update entry</li>
<li><code>DELETE</code> /api/content/:type/:slug Delete entry</li>
<li><code>GET</code> <a href="/api/transform?url=https://httpbin.org/image/png&w=80&h=80">/api/transform</a> Transform image from URL (w, h, ar, fit, format)</li>
<li><code>GET</code> <a href="/health">/health</a> Health check</li>
</ul>
</body>
</html>"#;
// ---------------------------------------------------------------------------
// Embedded Swagger UI HTML (loads JS/CSS from CDN)
// ---------------------------------------------------------------------------
/// Static HTML shell for Swagger UI. The JS/CSS bundle is loaded from the
/// unpkg CDN at runtime (pinned to major version 5), so this page needs
/// outbound network access in the browser; the spec itself is served locally
/// from `/api-docs/openapi.json`.
const SWAGGER_HTML: &str = r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>RustyCMS API Documentation</title>
<link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist@5/swagger-ui.css">
<style>
body { margin: 0; background: #fafafa; }
.topbar { display: none !important; }
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js"></script>
<script>
SwaggerUIBundle({
url: '/api-docs/openapi.json',
dom_id: '#swagger-ui',
presets: [SwaggerUIBundle.presets.apis],
layout: 'BaseLayout',
deepLinking: true,
defaultModelsExpandDepth: 2,
defaultModelExpandDepth: 2,
});
</script>
</body>
</html>"#;

138
src/api/response.rs Normal file
View File

@@ -0,0 +1,138 @@
//! Response formatting: reference fields as { _type, _slug } and optional _resolve.
use serde_json::{json, Value};
use crate::schema::types::SchemaDefinition;
use crate::store::ContentStore;
/// Parse the `_resolve` query param: `"all"` (case-insensitive) resolves every
/// reference field; otherwise a comma-separated field list. Returns `None`
/// when the param is absent, blank, or contains no usable field names.
pub fn parse_resolve(resolve_param: Option<&str>) -> Option<ResolveSet> {
    let s = resolve_param?.trim();
    if s.is_empty() {
        return None;
    }
    if s.eq_ignore_ascii_case("all") {
        return Some(ResolveSet::All);
    }
    // Keep only non-empty, trimmed field names.
    let fields: Vec<String> = s
        .split(',')
        .filter_map(|f| {
            let f = f.trim();
            (!f.is_empty()).then(|| f.to_string())
        })
        .collect();
    (!fields.is_empty()).then(|| ResolveSet::Fields(fields))
}

/// Which reference fields should be embedded (resolved) in the response.
pub enum ResolveSet {
    /// Resolve every reference field.
    All,
    /// Resolve only the listed field names.
    Fields(Vec<String>),
}

impl ResolveSet {
    /// True when `field_name` is covered by this resolve set.
    fn should_resolve(&self, field_name: &str) -> bool {
        match self {
            ResolveSet::All => true,
            ResolveSet::Fields(list) => list.iter().any(|f| f == field_name),
        }
    }
}
/// Format an entry for API response: reference fields become { _type, _slug };
/// if resolve set includes the field (or all), embed the referenced entry.
/// When locale is set, resolved references are loaded from that locale.
///
/// Handles two shapes: a single reference field (string slug) and an array
/// of reference slugs. When a reference points at multiple candidate
/// collections, each is tried in order and the first store hit wins; if no
/// lookup succeeds the compact { _type, _slug } stub is kept.
pub async fn format_references(
    mut entry: Value,
    schema: &SchemaDefinition,
    store: &dyn ContentStore,
    resolve: Option<&ResolveSet>,
    locale: Option<&str>,
) -> Value {
    // Non-object values pass through untouched.
    let obj = match entry.as_object_mut() {
        Some(o) => o,
        None => return entry,
    };
    for (field_name, fd) in &schema.fields {
        // ── Single reference field ──────────────────────────────────────
        if fd.field_type == "reference" {
            let colls = fd.reference_collections();
            // Only rewrite when the stored value is actually a string slug.
            if !colls.is_empty() && obj.get(field_name).and_then(|v| v.as_str()).is_some() {
                let slug = obj.get(field_name).and_then(|v| v.as_str()).unwrap();
                let first_coll = colls[0];
                // Compact stub used when not resolving or when lookup fails.
                let ref_obj = json!({ "_type": first_coll, "_slug": slug });
                let should_resolve =
                    resolve.map(|r| r.should_resolve(field_name)).unwrap_or(false);
                if should_resolve {
                    // Try each candidate collection; first successful get wins.
                    let mut resolved_opt = None;
                    for coll in &colls {
                        if let Ok(Some(mut resolved)) = store.get(coll, slug, locale).await {
                            // Tag the embedded entry so clients still see its origin.
                            if let Some(res_obj) = resolved.as_object_mut() {
                                res_obj.insert("_type".to_string(), Value::String(coll.to_string()));
                                res_obj.insert("_slug".to_string(), Value::String(slug.to_string()));
                            }
                            resolved_opt = Some((coll.to_string(), resolved));
                            break;
                        }
                    }
                    obj.insert(
                        field_name.clone(),
                        resolved_opt
                            .map(|(_, v)| v)
                            .unwrap_or_else(|| ref_obj.clone()),
                    );
                } else {
                    obj.insert(field_name.clone(), ref_obj);
                }
            }
            continue;
        }
        // ── Array of references ─────────────────────────────────────────
        if fd.field_type == "array" {
            if let Some(ref items) = fd.items {
                let colls = items.reference_collections();
                if items.field_type == "reference" && !colls.is_empty() {
                    if let Some(arr) = obj.get_mut(field_name).and_then(|v| v.as_array_mut()) {
                        let should_resolve =
                            resolve.map(|r| r.should_resolve(field_name)).unwrap_or(false);
                        let first_coll = colls[0];
                        // Rebuild the array: each string slug is replaced by a stub
                        // or resolved entry; non-string items are kept as-is.
                        let mut new_arr = Vec::new();
                        for item in arr.drain(..) {
                            if let Some(slug) = item.as_str() {
                                let ref_obj = json!({ "_type": first_coll, "_slug": slug });
                                if should_resolve {
                                    let mut resolved_opt = None;
                                    for coll in &colls {
                                        if let Ok(Some(mut resolved)) = store.get(coll, slug, locale).await
                                        {
                                            if let Some(ro) = resolved.as_object_mut() {
                                                ro.insert(
                                                    "_type".to_string(),
                                                    Value::String(coll.to_string()),
                                                );
                                                ro.insert(
                                                    "_slug".to_string(),
                                                    Value::String(slug.to_string()),
                                                );
                                            }
                                            resolved_opt = Some(resolved);
                                            break;
                                        }
                                    }
                                    new_arr.push(resolved_opt.unwrap_or(ref_obj));
                                } else {
                                    new_arr.push(ref_obj);
                                }
                            } else {
                                new_arr.push(item);
                            }
                        }
                        *arr = new_arr;
                    }
                }
            }
        }
    }
    entry
}

46
src/api/routes.rs Normal file
View File

@@ -0,0 +1,46 @@
use std::sync::Arc;
use axum::routing::{get, post};
use axum::Router;
use super::handlers;
use super::handlers::AppState;
use super::openapi;
use super::transform;
pub fn create_router(state: Arc<AppState>) -> Router {
Router::new()
// Health (for Load Balancer / K8s)
.route("/health", get(handlers::health))
// API index (Living Documentation entry point)
.route("/api", get(openapi::api_index))
.route("/api/", get(openapi::api_index))
// Swagger UI & OpenAPI spec
.route("/swagger-ui", get(openapi::swagger_ui))
.route("/api-docs/openapi.json", get(openapi::openapi_json))
// Collection schema endpoints
.route("/api/collections", get(handlers::list_collections))
.route("/api/schemas", post(handlers::create_schema))
.route(
"/api/collections/:collection",
get(handlers::get_collection_schema),
)
.route(
"/api/collections/:collection/slug-check",
get(handlers::slug_check),
)
// Content CRUD endpoints
.route(
"/api/content/:collection",
get(handlers::list_entries).post(handlers::create_entry),
)
.route(
"/api/content/:collection/:slug",
get(handlers::get_entry)
.put(handlers::update_entry)
.delete(handlers::delete_entry),
)
// Image transformation (external URL → transformed image)
.route("/api/transform", get(transform::transform_image))
.with_state(state)
}

221
src/api/transform.rs Normal file
View File

@@ -0,0 +1,221 @@
//! Image transform endpoint: external URL + params (w, h, ar, fit, format) → transformed image.
use std::hash::{Hash, Hasher};
use std::io::Cursor;
use std::sync::Arc;
use axum::body::Body;
use axum::extract::{Query, State};
use axum::http::header::{HeaderValue, CONTENT_TYPE};
use axum::response::Response;
use image::codecs::jpeg::JpegEncoder;
use image::imageops::FilterType;
use image::{DynamicImage, ImageFormat};
use serde::Deserialize;
use std::collections::hash_map::DefaultHasher;
use super::error::ApiError;
use super::handlers::AppState;
/// Query parameters for GET /api/transform.
///
/// `w`, `h` and `ar` are accepted as serde aliases for the long names.
/// Missing dimensions default to the source image's size in the handler.
#[derive(Debug, Deserialize)]
pub struct TransformParams {
    /// External image URL (required; validated by the handler).
    pub url: String,
    /// Target width in pixels (alias: `w`).
    #[serde(alias = "w")]
    pub width: Option<u32>,
    /// Target height in pixels (alias: `h`).
    #[serde(alias = "h")]
    pub height: Option<u32>,
    /// Aspect ratio applied as a centered crop before scaling, e.g. "1:1" or "16:9" (alias: `ar`).
    #[serde(alias = "ar")]
    pub aspect_ratio: Option<String>,
    /// Fit mode: "fill" (exact w×h), "contain" (fit in rect), "cover" (fill, crop).
    #[serde(default = "default_fit")]
    pub fit: String,
    /// Output format: "jpeg" | "png" | "webp" | "avif". Default: jpeg.
    #[serde(default = "default_format")]
    pub format: String,
    /// JPEG quality, 1–100 (clamped in the handler). Only used when format=jpeg.
    #[serde(default = "default_quality")]
    pub quality: u8,
}
/// Serde default for `TransformParams::fit`.
fn default_fit() -> String {
    String::from("contain")
}

/// Serde default for `TransformParams::format`.
fn default_format() -> String {
    String::from("jpeg")
}

/// Serde default for `TransformParams::quality`.
fn default_quality() -> u8 {
    85
}
/// Cache key covering every transform parameter: the same URL with the same
/// params always maps to the same key within a process. (Hashing the fields
/// as one tuple produces the identical byte stream as hashing them one by one.)
fn transform_cache_key(params: &TransformParams) -> String {
    let mut hasher = DefaultHasher::new();
    (
        &params.url,
        params.width,
        params.height,
        &params.aspect_ratio,
        &params.fit,
        &params.format,
        params.quality,
    )
        .hash(&mut hasher);
    format!("t:{}", hasher.finish())
}
/// Parses "1:1" or "16:9" into (numerator, denominator).
///
/// Whitespace around the string and around each side of the colon is ignored;
/// missing colon, non-numeric parts, or a zero on either side yield `None`.
fn parse_aspect_ratio(s: &str) -> Option<(u32, u32)> {
    let (left, right) = s.trim().split_once(':')?;
    let num: u32 = left.trim().parse().ok()?;
    let den: u32 = right.trim().parse().ok()?;
    (num != 0 && den != 0).then_some((num, den))
}
/// Centered crop to target aspect ratio (num:den).
///
/// Keeps the largest centered window matching `num:den`: if the image is
/// wider than the target ratio, the width is reduced; otherwise the height.
/// Uses `crop_imm`, so the source image is left unmodified.
fn crop_to_aspect_ratio(img: &DynamicImage, num: u32, den: u32) -> DynamicImage {
    let (w, h) = (img.width(), img.height());
    let target_ratio = num as f64 / den as f64;
    let current_ratio = w as f64 / h as f64;
    // Shrink exactly one dimension to hit the target ratio; `.min(..)` guards
    // against float rounding pushing the crop size past the original bounds.
    let (crop_w, crop_h) = if current_ratio > target_ratio {
        let crop_w = (h as f64 * target_ratio) as u32;
        (crop_w.min(w), h)
    } else {
        let crop_h = (w as f64 / target_ratio) as u32;
        (w, crop_h.min(h))
    };
    // Center the crop window inside the source image.
    let x = (w - crop_w) / 2;
    let y = (h - crop_h) / 2;
    img.crop_imm(x, y, crop_w, crop_h)
}
/// GET /api/transform — fetch an external image and return it transformed.
///
/// Pipeline: response-cache lookup → fetch `url` → optional centered crop to
/// `ar` → resize according to `fit` → encode as `format` → store in cache.
///
/// # Errors
/// - `BadRequest`: missing/unparsable `url`, or bytes that are not a decodable image.
/// - `Internal`: upstream fetch failure, non-success upstream status, or encoder failure.
pub async fn transform_image(
    State(state): State<Arc<AppState>>,
    Query(params): Query<TransformParams>,
) -> Result<Response, ApiError> {
    if params.url.is_empty() {
        return Err(ApiError::BadRequest("Parameter 'url' is required".to_string()));
    }
    // Cache hit: the exact same URL + params were transformed recently.
    let cache_key = transform_cache_key(&params);
    if let Some((bytes, content_type)) = state.transform_cache.get(&cache_key).await {
        let mut response = Response::new(Body::from(bytes));
        response.headers_mut().insert(
            CONTENT_TYPE,
            HeaderValue::from_str(&content_type).unwrap_or(HeaderValue::from_static("image/jpeg")),
        );
        return Ok(response);
    }
    // NOTE(review): the URL is fully caller-controlled, so this endpoint can be
    // pointed at internal hosts (SSRF surface). Consider an allow-list or
    // blocking private address ranges before exposing it publicly.
    let url = params
        .url
        .parse::<reqwest::Url>()
        .map_err(|_| ApiError::BadRequest("Invalid URL".to_string()))?;
    let resp = state
        .http_client
        .get(url)
        .send()
        .await
        .map_err(|e| ApiError::Internal(format!("Image could not be loaded: {}", e)))?;
    if !resp.status().is_success() {
        return Err(ApiError::Internal(format!(
            "Image URL returned status {}",
            resp.status()
        )));
    }
    let bytes = resp
        .bytes()
        .await
        .map_err(|e| ApiError::Internal(format!("Image data invalid: {}", e)))?;
    let mut img = image::load_from_memory(&bytes)
        .map_err(|e| ApiError::BadRequest(format!("Invalid image: {}", e)))?;
    // Optional: crop to aspect ratio first (e.g. 1:1); unparsable ratios are ignored.
    if let Some(ref ar) = params.aspect_ratio {
        if let Some((num, den)) = parse_aspect_ratio(ar) {
            img = crop_to_aspect_ratio(&img, num, den);
        }
    }
    // Missing dimensions default to the (possibly cropped) source size.
    let w = params.width.unwrap_or(img.width());
    let h = params.height.unwrap_or(img.height());
    // When both dimensions are set, the implicit default "contain" is upgraded
    // to "fill" (exact w×h); an explicit "cover" is respected.
    let fit = params.fit.to_lowercase();
    let effective_fit =
        if params.width.is_some() && params.height.is_some() && fit.as_str() == "contain" {
            "fill"
        } else {
            fit.as_str()
        };
    img = match effective_fit {
        "fill" => img.resize_exact(w, h, FilterType::Lanczos3),
        "cover" => img.resize_to_fill(w, h, FilterType::Lanczos3),
        _ => img.thumbnail(w, h), // explicit "contain" or unknown fit mode
    };
    let quality = params.quality.clamp(1, 100); // only consumed by the JPEG encoder
    let format_str = params.format.to_lowercase();
    // Encode. `content_type` is always one of four &'static str values.
    let (body_bytes, content_type) = match format_str.as_str() {
        "png" => {
            let mut buf = Vec::new();
            img.write_to(&mut Cursor::new(&mut buf), ImageFormat::Png)
                .map_err(|e| ApiError::Internal(format!("PNG encoding failed: {}", e)))?;
            (buf, "image/png")
        }
        "webp" => {
            let mut buf = Vec::new();
            img.write_to(&mut Cursor::new(&mut buf), ImageFormat::WebP)
                .map_err(|e| ApiError::Internal(format!("WebP encoding failed: {}", e)))?;
            (buf, "image/webp")
        }
        "avif" => {
            let mut buf = Vec::new();
            img.write_to(&mut Cursor::new(&mut buf), ImageFormat::Avif)
                .map_err(|e| ApiError::Internal(format!("AVIF encoding failed: {}", e)))?;
            (buf, "image/avif")
        }
        // Anything else (including the default) falls back to JPEG.
        _ => {
            let mut buf = Vec::new();
            img.write_with_encoder(JpegEncoder::new_with_quality(&mut buf, quality))
                .map_err(|e| ApiError::Internal(format!("JPEG encoding failed: {}", e)))?;
            (buf, "image/jpeg")
        }
    };
    state
        .transform_cache
        .set(cache_key, body_bytes.clone(), content_type.to_string())
        .await;
    // `content_type` is a &'static str chosen in the match above, so the header
    // can be built directly — the previous if/else chain re-dispatched on the
    // same string a second time for no benefit.
    let mut response = Response::new(Body::from(body_bytes));
    response
        .headers_mut()
        .insert(CONTENT_TYPE, HeaderValue::from_static(content_type));
    Ok(response)
}

View File

@@ -0,0 +1,83 @@
//! Export JSON Schema files from types/*.json5 for editor validation (VS Code / Cursor).
//!
//! Run: cargo run --bin export-json-schema
//! Then open a content file (e.g. content/blog_post/foo.json5) and the editor will validate it.
use std::path::PathBuf;
use clap::Parser;
use rustycms::schema::json_schema;
use rustycms::schema::SchemaRegistry;
/// CLI arguments for the JSON Schema exporter binary.
#[derive(Parser)]
#[command(name = "export-json-schema", about = "Export JSON Schema files for editor validation")]
struct Args {
    /// Directory containing type definitions (types/*.json5)
    #[arg(long, default_value = "./types")]
    types_dir: PathBuf,
    /// Output directory for .schema.json files
    #[arg(long, default_value = "./schemas")]
    out_dir: PathBuf,
    /// Write or update .vscode/settings.json so content/*.json5 is validated with the exported schemas
    #[arg(long)]
    vscode: bool,
}
/// Export one `<name>.schema.json` per content type and, with `--vscode`,
/// write `.vscode/settings.json` so the editor validates content files
/// against the exported schemas.
fn main() -> anyhow::Result<()> {
    let args = Args::parse();
    let registry = SchemaRegistry::load(&args.types_dir)?;
    let names = registry.collection_names();
    if names.is_empty() {
        anyhow::bail!("No content types found in {}", args.types_dir.display());
    }
    std::fs::create_dir_all(&args.out_dir)?;
    for name in &names {
        // `name` came from the registry itself, so this lookup cannot miss.
        let schema = registry
            .get(name)
            .expect("collection name listed by the registry");
        let json_schema = json_schema::content_schema_for_editor(schema);
        let path = args.out_dir.join(format!("{}.schema.json", name));
        // Propagate serialization errors instead of panicking (was `.unwrap()`).
        std::fs::write(&path, serde_json::to_string_pretty(&json_schema)?)?;
        println!("Written {}", path.display());
    }
    if args.vscode {
        let vscode_dir = std::path::Path::new(".vscode");
        std::fs::create_dir_all(vscode_dir)?;
        let settings_path = vscode_dir.join("settings.json");
        // One schema mapping per collection:
        // content/<name>/*.json5|*.json → <out_dir>/<name>.schema.json
        let schema_entries: Vec<serde_json::Value> = names
            .iter()
            .map(|name| {
                let schema_path = args.out_dir.join(format!("{}.schema.json", name));
                // VS Code expects forward slashes even on Windows.
                let schema_url = schema_path.to_string_lossy().replace('\\', "/");
                serde_json::json!({
                    "fileMatch": [
                        format!("content/{}/*.json5", name),
                        format!("content/{}/*.json", name),
                    ],
                    "url": schema_url,
                })
            })
            .collect();
        // NOTE(review): this overwrites any existing settings.json rather than
        // merging into it — acceptable for a dev helper, but worth confirming.
        let settings = serde_json::json!({
            "json.schemas": schema_entries,
        });
        std::fs::write(&settings_path, serde_json::to_string_pretty(&settings)?)?;
        println!("Written {}", settings_path.display());
    } else {
        println!(
            "\nTip: run with --vscode to write .vscode/settings.json for editor validation."
        );
    }
    Ok(())
}

5
src/lib.rs Normal file
View File

@@ -0,0 +1,5 @@
//! Library for RustyCMS (used by the main server binary and by tools like export-json-schema).
pub mod api; // HTTP layer: router, handlers, OpenAPI spec, caching, image transform
pub mod schema; // Schema registry loaded from types/*.json5
pub mod store; // Content persistence backends (filesystem, SQLite)

244
src/main.rs Normal file
View File

@@ -0,0 +1,244 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use clap::Parser;
use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
use tokio::sync::RwLock;
use axum::http::header::HeaderValue;
use tower_http::cors::{AllowOrigin, CorsLayer};
use tower_http::trace::{DefaultOnResponse, TraceLayer};
use tracing::Level;
use tracing_subscriber::EnvFilter;
use rustycms::api::handlers::AppState;
use rustycms::schema::SchemaRegistry;
use rustycms::store::{filesystem::FileStore, sqlite::SqliteStore, ContentStore};
/// Command-line arguments for the RustyCMS server binary.
#[derive(Parser)]
#[command(name = "rustycms", about = "A file-based headless CMS written in Rust")]
struct Cli {
    /// Path to the directory containing type definitions (*.json5)
    #[arg(long, default_value = "./types")]
    types_dir: PathBuf,
    /// Path to the directory containing content files
    #[arg(long, default_value = "./content")]
    content_dir: PathBuf,
    /// Port to listen on
    #[arg(short = 'p', long, default_value_t = 3000)]
    port: u16,
    /// Host address to bind to
    #[arg(long, default_value = "127.0.0.1")]
    host: String,
}
/// Reload all schemas from `types_dir`, regenerate the OpenAPI spec, and swap
/// both into the shared state. The (blocking) file I/O runs on a dedicated OS
/// thread so it never stalls the async runtime; failures are logged, not fatal.
fn reload_schemas(
    types_dir: &std::path::Path,
    server_url: &str,
    registry: &Arc<RwLock<SchemaRegistry>>,
    openapi_spec: &Arc<RwLock<serde_json::Value>>,
) {
    let types_dir = types_dir.to_path_buf();
    let server_url = server_url.to_string();
    let registry = Arc::clone(registry);
    let openapi_spec = Arc::clone(openapi_spec);
    std::thread::spawn(move || {
        // BUG FIX: the previous version called tokio::runtime::Handle::current()
        // inside this freshly spawned std thread. A plain OS thread carries no
        // tokio runtime context, so that call panics ("must be called from the
        // context of a Tokio runtime") and every hot-reload died. Nothing here
        // is async except taking the locks, and tokio's RwLock provides
        // blocking_write() for exactly this off-runtime situation.
        match SchemaRegistry::load(&types_dir) {
            Ok(new_registry) => {
                let spec = rustycms::api::openapi::generate_spec(&new_registry, &server_url);
                *registry.blocking_write() = new_registry;
                *openapi_spec.blocking_write() = spec;
                tracing::info!("Hot-reload: schemas and OpenAPI spec updated");
            }
            Err(e) => {
                tracing::error!("Hot-reload failed: {}", e);
            }
        }
    });
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
dotenvy::dotenv().ok();
tracing_subscriber::fmt()
.with_env_filter(
EnvFilter::try_from_default_env()
.unwrap_or_else(|_| EnvFilter::new("rustycms=info,tower_http=info")),
)
.init();
let cli = Cli::parse();
tracing::info!("Loading schemas from {}", cli.types_dir.display());
let registry = SchemaRegistry::load(&cli.types_dir)?;
tracing::info!(
"Loaded {} schema(s): {:?}",
registry.names().len(),
registry.names()
);
let store: std::sync::Arc<dyn ContentStore> = {
let kind = std::env::var("RUSTYCMS_STORE").unwrap_or_else(|_| "file".into());
match kind.as_str() {
"sqlite" => {
let url = std::env::var("RUSTYCMS_DATABASE_URL")
.or_else(|_| std::env::var("DATABASE_URL"))
.unwrap_or_else(|_| "sqlite:content.db".into());
tracing::info!("Using SQLite store: {}", url);
let s = SqliteStore::new(&url).await?;
for name in registry.collection_names() {
s.ensure_collection_dir(&name).await?;
}
std::sync::Arc::new(s)
}
_ => {
tracing::info!("Using file store: {}", cli.content_dir.display());
let s = FileStore::new(&cli.content_dir);
for name in registry.collection_names() {
s.ensure_collection_dir(&name).await?;
}
std::sync::Arc::new(s)
}
}
};
let server_url = format!("http://{}:{}", cli.host, cli.port);
let openapi_spec = rustycms::api::openapi::generate_spec(&registry, &server_url);
tracing::info!("OpenAPI spec generated");
let registry = Arc::new(RwLock::new(registry));
let openapi_spec = Arc::new(RwLock::new(openapi_spec));
let api_key = std::env::var("RUSTYCMS_API_KEY").ok();
if api_key.is_some() {
tracing::info!("API key auth enabled (POST/PUT/DELETE require key)");
}
let cache_ttl_secs = std::env::var("RUSTYCMS_CACHE_TTL_SECS")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(60);
let cache = Arc::new(rustycms::api::cache::ContentCache::new(cache_ttl_secs));
let transform_cache = Arc::new(rustycms::api::cache::TransformCache::new(cache_ttl_secs));
if cache_ttl_secs > 0 {
tracing::info!("Response cache enabled, TTL {}s", cache_ttl_secs);
}
let http_client = reqwest::Client::new();
let locales: Option<Vec<String>> = std::env::var("RUSTYCMS_LOCALES")
.ok()
.map(|s| s.split(',').map(|l| l.trim().to_string()).filter(|l| !l.is_empty()).collect())
.filter(|v: &Vec<String>| !v.is_empty());
if let Some(ref locs) = locales {
tracing::info!("Multilingual: locales {:?} (default: {})", locs, &locs[0]);
}
let state = Arc::new(AppState {
registry: Arc::clone(&registry),
store,
openapi_spec: Arc::clone(&openapi_spec),
types_dir: cli.types_dir.clone(),
api_key,
cache,
transform_cache,
http_client,
locales,
});
// Hot-reload: watch types_dir and reload schemas on change
let types_dir_for_callback = cli.types_dir.canonicalize().unwrap_or_else(|_| cli.types_dir.clone());
let (tx, rx) = std::sync::mpsc::channel();
let mut watcher = RecommendedWatcher::new(
move |res: Result<notify::Event, notify::Error>| {
if let Ok(ev) = res {
if ev.kind.is_modify() || ev.kind.is_create() || ev.kind.is_remove() {
// Any change under types_dir triggers reload (robust for editors that use temp files)
let under_types = ev.paths.iter().any(|p| {
p.canonicalize()
.map(|c| c.starts_with(&types_dir_for_callback))
.unwrap_or_else(|_| {
p.extension()
.map(|e| e == "json5" || e == "json")
.unwrap_or(false)
})
});
if under_types {
let _ = tx.send(());
}
}
}
},
Config::default(),
)?;
watcher.watch(&cli.types_dir, RecursiveMode::Recursive)?;
let types_dir_watch = cli.types_dir.clone();
let server_url_watch = server_url.clone();
std::thread::spawn(move || {
let _watcher = watcher;
while rx.recv().is_ok() {
// Debounce: wait for editor to finish writing, drain extra events, then reload once
std::thread::sleep(Duration::from_millis(500));
while rx.try_recv().is_ok() {}
reload_schemas(&types_dir_watch, &server_url_watch, &registry, &openapi_spec);
}
});
tracing::info!("Hot-reload: watching {}", cli.types_dir.display());
let cors = match std::env::var("RUSTYCMS_CORS_ORIGIN") {
Ok(s) if s.trim().is_empty() || s.trim() == "*" => CorsLayer::permissive(),
Ok(s) => {
let o = s.trim().to_string();
match HeaderValue::try_from(o) {
Ok(h) => CorsLayer::new().allow_origin(AllowOrigin::exact(h)),
Err(_) => CorsLayer::permissive(),
}
}
Err(_) => CorsLayer::permissive(),
};
let trace = TraceLayer::new_for_http().on_response(
DefaultOnResponse::new()
.level(Level::INFO)
.latency_unit(tower_http::LatencyUnit::Millis),
);
let app = rustycms::api::routes::create_router(state)
.layer(cors)
.layer(trace);
let addr = format!("{}:{}", cli.host, cli.port);
tracing::info!("RustyCMS v0.1.0 listening on http://{}", addr);
tracing::info!("Swagger UI: http://{}/swagger-ui", addr);
let listener = tokio::net::TcpListener::bind(&addr).await?;
let shutdown = async {
#[cfg(unix)]
{
if let Ok(mut sig) = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) {
tokio::select! {
_ = tokio::signal::ctrl_c() => {}
_ = sig.recv() => {}
}
} else {
tokio::signal::ctrl_c().await.ok();
}
}
#[cfg(not(unix))]
{
tokio::signal::ctrl_c().await.ok();
}
tracing::info!("Shutdown signal received, draining requests...");
};
axum::serve(listener, app)
.with_graceful_shutdown(shutdown)
.await?;
Ok(())
}

131
src/schema/json_schema.rs Normal file
View File

@@ -0,0 +1,131 @@
//! Export resolved content-type schemas as JSON Schema for editor validation (e.g. VS Code).
use serde_json::{json, Value};
use crate::schema::types::{FieldDefinition, SchemaDefinition};
/// Build a JSON Schema for a content type so editors can validate `content/<type>/*.json5`.
/// Includes _slug and all fields; required = [_slug] + fields that are required and not auto.
/// Build a JSON Schema for a content type so editors can validate `content/<type>/*.json5`.
///
/// The result contains `_slug` plus one property per schema field; the
/// `required` list is `_slug` plus every field that is required and not
/// auto-generated. `additionalProperties` mirrors the schema's strict flag.
pub fn content_schema_for_editor(schema: &SchemaDefinition) -> Value {
    let mut properties = serde_json::Map::new();
    properties.insert(
        "_slug".to_string(),
        json!({
            "type": "string",
            "description": "Entry identifier (URL slug / filename without extension)"
        }),
    );

    // _slug is always mandatory; schema fields join it unless auto-generated.
    let mut required = vec!["_slug".to_string()];
    for (name, fd) in &schema.fields {
        properties.insert(name.clone(), field_to_json_schema(fd));
        if fd.required && !fd.auto {
            required.push(name.clone());
        }
    }

    json!({
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "properties": properties,
        "required": required,
        "additionalProperties": !schema.strict,
    })
}
/// Translate a single `FieldDefinition` into a JSON Schema fragment.
///
/// Recurses into array `items` and nested object `fields`. Constraint
/// keywords (enum, minLength/maxLength, pattern, minimum/maximum,
/// minItems/maxItems) are attached when present. Nullable fields are
/// expressed as a `["<type>", "null"]` type union, because JSON Schema
/// draft-07 has no `nullable` keyword (that is OpenAPI 3.0) — editors
/// validating against draft-07 would otherwise reject explicit nulls.
fn field_to_json_schema(fd: &FieldDefinition) -> Value {
    // Base schema chosen by field type.
    let mut schema = match fd.field_type.as_str() {
        "string" => json!({ "type": "string" }),
        "richtext" | "html" | "markdown" => json!({ "type": "string" }),
        "number" => json!({ "type": "number" }),
        "integer" => json!({ "type": "integer" }),
        "boolean" => json!({ "type": "boolean" }),
        "datetime" => json!({ "type": "string", "format": "date-time" }),
        "array" => {
            if let Some(ref items) = fd.items {
                json!({ "type": "array", "items": field_to_json_schema(items) })
            } else {
                json!({ "type": "array" })
            }
        }
        "object" => {
            if let Some(ref nested) = fd.fields {
                let mut props = serde_json::Map::new();
                let mut req = Vec::new();
                for (n, f) in nested {
                    props.insert(n.clone(), field_to_json_schema(f));
                    if f.required && !f.auto {
                        req.push(json!(n));
                    }
                }
                let mut o = serde_json::Map::new();
                o.insert("type".to_string(), json!("object"));
                o.insert("properties".to_string(), Value::Object(props));
                if !req.is_empty() {
                    o.insert("required".to_string(), json!(req));
                }
                Value::Object(o)
            } else {
                json!({ "type": "object" })
            }
        }
        "reference" => {
            // References are stored as slugs; surface the target collection(s)
            // in the description so editors show something useful.
            let desc = if let Some(ref list) = fd.collections {
                if list.is_empty() {
                    "Reference (slug)".to_string()
                } else {
                    format!("Reference (slug) to one of: {}", list.join(", "))
                }
            } else if let Some(ref c) = fd.collection {
                format!("Reference (slug) to collection '{}'", c)
            } else {
                "Reference (slug)".to_string()
            };
            json!({ "type": "string", "description": desc })
        }
        // Unknown types degrade to plain strings (the schema loader rejects
        // them anyway, so this branch is effectively unreachable in practice).
        _ => json!({ "type": "string" }),
    };
    // Every branch above produced a JSON object, so this cannot fail.
    let obj = schema.as_object_mut().unwrap();
    if let Some(ref desc) = fd.description {
        obj.insert("description".to_string(), json!(desc));
    }
    if let Some(ref ev) = fd.enum_values {
        obj.insert("enum".to_string(), json!(ev));
    }
    if let Some(ref dv) = fd.default {
        obj.insert("default".to_string(), dv.clone());
    }
    if fd.nullable {
        // Fix: `nullable: true` is OpenAPI 3.0, not draft-07. Widen the
        // declared type to a union with "null" so editors accept nulls.
        match obj.get("type").cloned() {
            Some(Value::String(t)) => {
                obj.insert("type".to_string(), json!([t, "null"]));
            }
            Some(Value::Array(mut ts)) => {
                if !ts.iter().any(|t| t == "null") {
                    ts.push(json!("null"));
                }
                obj.insert("type".to_string(), Value::Array(ts));
            }
            _ => {}
        }
    }
    if fd.readonly {
        obj.insert("readOnly".to_string(), json!(true));
    }
    if let Some(v) = fd.min_length {
        obj.insert("minLength".to_string(), json!(v));
    }
    if let Some(v) = fd.max_length {
        obj.insert("maxLength".to_string(), json!(v));
    }
    if let Some(ref p) = fd.pattern {
        obj.insert("pattern".to_string(), json!(p));
    }
    if let Some(v) = fd.min {
        obj.insert("minimum".to_string(), json!(v));
    }
    if let Some(v) = fd.max {
        obj.insert("maximum".to_string(), json!(v));
    }
    if let Some(v) = fd.min_items {
        obj.insert("minItems".to_string(), json!(v));
    }
    if let Some(v) = fd.max_items {
        obj.insert("maxItems".to_string(), json!(v));
    }
    schema
}

219
src/schema/loader.rs Normal file
View File

@@ -0,0 +1,219 @@
use std::collections::HashSet;
use std::path::Path;
use anyhow::{Context, Result};
use indexmap::IndexMap;
use super::types::SchemaDefinition;
/// Load all schema definitions from `types/*.json5` files.
///
/// Files are read in sorted filename order so load order is deterministic.
/// Fails fast on: missing directory, unreadable or unparsable files,
/// unknown field types, invalid regex patterns, and duplicate schema names.
pub fn load_schemas(types_dir: &Path) -> Result<IndexMap<String, SchemaDefinition>> {
    let mut schemas = IndexMap::new();
    if !types_dir.exists() {
        anyhow::bail!(
            "Types directory does not exist: {}",
            types_dir.display()
        );
    }
    // Collect *.json5 / *.json entries and sort for deterministic ordering.
    let mut entries: Vec<_> = std::fs::read_dir(types_dir)
        .context("Failed to read types directory")?
        .filter_map(|e| e.ok())
        .filter(|e| {
            e.path()
                .extension()
                .map(|ext| ext == "json5" || ext == "json")
                .unwrap_or(false)
        })
        .collect();
    entries.sort_by_key(|e| e.file_name());
    for entry in entries {
        let path = entry.path();
        let content = std::fs::read_to_string(&path)
            .with_context(|| format!("Failed to read {}", path.display()))?;
        let schema: SchemaDefinition = json5::from_str(&content)
            .with_context(|| format!("Failed to parse {}", path.display()))?;
        // Validate field types
        for (field_name, field_def) in &schema.fields {
            if !field_def.is_valid_type() {
                anyhow::bail!(
                    "Schema '{}', field '{}': unknown type '{}'",
                    schema.name,
                    field_name,
                    field_def.field_type
                );
            }
        }
        // Validate regex patterns at load time (fail fast)
        for (field_name, field_def) in &schema.fields {
            if let Some(ref pattern) = field_def.pattern {
                regex::Regex::new(pattern).with_context(|| {
                    format!(
                        "Schema '{}', field '{}': invalid regex pattern '{}'",
                        schema.name, field_name, pattern
                    )
                })?;
            }
        }
        // Fix: two files declaring the same `name` used to silently shadow
        // each other (last file won via insert). Make that a hard error.
        if schemas.contains_key(&schema.name) {
            anyhow::bail!(
                "Duplicate schema name '{}' (redefined in {})",
                schema.name,
                path.display()
            );
        }
        tracing::info!("Loaded schema: {}", schema.name);
        schemas.insert(schema.name.clone(), schema);
    }
    Ok(schemas)
}
/// Resolve `extends` inheritance: merge parent fields into child schemas.
///
/// Supports:
/// - Single extends: `extends: "parent"`
/// - Multiple extends: `extends: ["parent_a", "parent_b"]`
/// - Transitive extends: A extends B extends C
/// - `pick`: keep only listed fields from parents
/// - `omit`: remove listed fields from parents
/// - `partial`: make all fields optional after merge
///
/// Detects circular dependencies and missing parents.
pub fn resolve_extends(schemas: &mut IndexMap<String, SchemaDefinition>) -> Result<()> {
    let names: Vec<String> = schemas.keys().cloned().collect();
    let mut resolved: HashSet<String> = HashSet::new();
    // Schemas without extends are already resolved
    for name in &names {
        if schemas.get(name).unwrap().extends.is_none() {
            resolved.insert(name.clone());
        }
    }
    // Iteratively resolve schemas whose ALL parents are already resolved.
    // Fixed-point loop: every pass resolves at least one schema or stops,
    // so transitive chains (A extends B extends C) settle bottom-up; cycles
    // and missing parents simply never resolve and are reported below.
    let mut changed = true;
    while changed {
        changed = false;
        for name in &names {
            if resolved.contains(name) {
                continue;
            }
            let parent_names: Vec<String> = schemas
                .get(name)
                .unwrap()
                .extends
                .as_ref()
                .unwrap()
                .names()
                .iter()
                .map(|s| s.to_string())
                .collect();
            // All parents must be resolved first
            if !parent_names.iter().all(|p| resolved.contains(p)) {
                continue;
            }
            // ── 1. Merge all parent fields in declaration order ──────────
            // Later parents overwrite earlier ones on key collision.
            let mut merged: IndexMap<String, _> = IndexMap::new();
            for parent_name in &parent_names {
                let parent = schemas.get(parent_name).with_context(|| {
                    format!(
                        "Schema '{}' extends '{}', but '{}' was not found",
                        name, parent_name, parent_name
                    )
                })?;
                for (key, value) in &parent.fields {
                    merged.insert(key.clone(), value.clone());
                }
            }
            // Read child-level modifiers (clone to release the borrow)
            let pick = schemas.get(name).unwrap().pick.clone();
            let omit = schemas.get(name).unwrap().omit.clone();
            let is_partial = schemas.get(name).unwrap().partial;
            let child_fields = schemas.get(name).unwrap().fields.clone();
            // ── 2. Apply pick (keep only listed parent fields) ───────────
            if let Some(ref pick_fields) = pick {
                let keep: HashSet<&str> = pick_fields.iter().map(|s| s.as_str()).collect();
                merged.retain(|k, _| keep.contains(k.as_str()));
            }
            // ── 3. Apply omit (remove listed parent fields) ─────────────
            if let Some(ref omit_fields) = omit {
                let remove: HashSet<&str> = omit_fields.iter().map(|s| s.as_str()).collect();
                merged.retain(|k, _| !remove.contains(k.as_str()));
            }
            // ── 4. Merge child's own fields (override parents) ──────────
            // Child fields are applied after pick/omit, so a child can
            // re-introduce a field that pick/omit removed from the parents.
            for (key, value) in child_fields {
                merged.insert(key, value);
            }
            // ── 5. Apply partial (make all fields optional) ─────────────
            if is_partial {
                for fd in merged.values_mut() {
                    fd.required = false;
                }
            }
            schemas.get_mut(name).unwrap().fields = merged;
            resolved.insert(name.clone());
            changed = true;
            tracing::info!("Resolved extends: {} <- {:?}", name, parent_names);
        }
    }
    // Check for unresolved schemas (missing parent or circular dependency)
    for name in &names {
        if !resolved.contains(name) {
            let parents = schemas
                .get(name)
                .unwrap()
                .extends
                .as_ref()
                .unwrap();
            anyhow::bail!(
                "Cannot resolve extends for '{}': parent(s) {:?} not found or circular dependency",
                name,
                parents.names()
            );
        }
    }
    Ok(())
}
/// Resolve `useFields` on object fields: copy fields from the referenced schema (partial).
/// The referenced schema should have `reusable: true` and is not exposed as a collection.
pub fn resolve_use_fields(schemas: &mut IndexMap<String, SchemaDefinition>) -> Result<()> {
    // Phase 1: collect (schema, field, referenced schema) triples immutably,
    // so phase 2 can mutate the map without aliasing borrows.
    let to_resolve: Vec<(String, String, String)> = schemas
        .iter()
        .flat_map(|(schema_name, schema)| {
            schema.fields.iter().filter_map(move |(field_name, fd)| {
                if fd.field_type != "object" {
                    return None;
                }
                fd.use_fields
                    .as_ref()
                    .map(|r| (schema_name.clone(), field_name.clone(), r.clone()))
            })
        })
        .collect();

    // Phase 2: splice the referenced schema's fields into each object field.
    for (schema_name, field_name, ref_name) in to_resolve {
        let partial_fields = schemas
            .get(&ref_name)
            .with_context(|| {
                format!(
                    "Schema '{}': useFields '{}' not found (define a reusable schema with that name)",
                    schema_name, ref_name
                )
            })?
            .fields
            .clone();
        let schema = schemas.get_mut(&schema_name).unwrap();
        if let Some(fd) = schema.fields.get_mut(&field_name) {
            fd.fields = Some(partial_fields);
        }
        tracing::debug!("Resolved useFields: {} -> {}", schema_name, ref_name);
    }
    Ok(())
}

72
src/schema/mod.rs Normal file
View File

@@ -0,0 +1,72 @@
pub mod json_schema;
pub mod loader;
pub mod types;
pub mod validator;
use std::path::Path;
use anyhow::Result;
use indexmap::IndexMap;
use types::SchemaDefinition;
/// Central registry holding all loaded and resolved schema definitions.
pub struct SchemaRegistry {
    // All schemas keyed by name, in load order; extends/useFields are
    // already resolved by `SchemaRegistry::load`.
    schemas: IndexMap<String, SchemaDefinition>,
}
impl SchemaRegistry {
    /// Load all schemas from the given directory, resolve extends and useFields.
    pub fn load(types_dir: &Path) -> Result<Self> {
        let mut schemas = loader::load_schemas(types_dir)?;
        loader::resolve_extends(&mut schemas)?;
        loader::resolve_use_fields(&mut schemas)?;
        Ok(Self { schemas })
    }

    /// Look up a single schema by name.
    pub fn get(&self, name: &str) -> Option<&SchemaDefinition> {
        self.schemas.get(name)
    }

    /// Borrow the full, ordered map of schemas.
    pub fn list(&self) -> &IndexMap<String, SchemaDefinition> {
        &self.schemas
    }

    /// Names of every loaded schema, including reusable partials.
    pub fn names(&self) -> Vec<String> {
        self.schemas.keys().cloned().collect()
    }

    /// Names of schemas that are content types (reusable partials excluded).
    pub fn collection_names(&self) -> Vec<String> {
        self.schemas
            .iter()
            .filter_map(|(name, s)| (!s.reusable).then(|| name.clone()))
            .collect()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke test: loading the project's own `./types` directory succeeds and
    // yields at least one known collection. Soft-skips (prints and returns)
    // when run from a directory without the project layout.
    #[test]
    fn load_schemas_from_types_dir() {
        // Requires ./types to exist (project layout)
        let path = Path::new("./types");
        if !path.exists() {
            eprintln!("Skipping load test: ./types not found");
            return;
        }
        let registry = SchemaRegistry::load(path).expect("load schemas");
        assert!(!registry.names().is_empty());
        let collections = registry.collection_names();
        assert!(
            collections.contains(&"page".to_string()) || collections.contains(&"tag".to_string()),
            "expected at least one known collection, got {:?}",
            collections
        );
    }
}

200
src/schema/types.rs Normal file
View File

@@ -0,0 +1,200 @@
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// Serde `skip_serializing_if` helper: true when a boolean flag is unset,
/// so default-`false` flags are omitted from serialized schemas.
fn is_false(b: &bool) -> bool {
    matches!(*b, false)
}
// ---------------------------------------------------------------------------
// Extends supports both a single string and an array of strings
// ---------------------------------------------------------------------------
/// Value of the schema-level `extends` key: either a single parent name
/// (`extends: "parent"`) or a list (`extends: ["a", "b"]`). `untagged`
/// lets serde accept both JSON shapes.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Extends {
    Single(String),
    Multiple(Vec<String>),
}

impl Extends {
    /// Parent names in declaration order, regardless of which form was used.
    pub fn names(&self) -> Vec<&str> {
        match self {
            Self::Single(name) => vec![name.as_str()],
            Self::Multiple(names) => names.iter().map(String::as_str).collect(),
        }
    }
}
// ---------------------------------------------------------------------------
// SchemaDefinition
// ---------------------------------------------------------------------------
/// A complete content type schema definition.
/// Loaded from `types/*.json5` files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SchemaDefinition {
    /// Schema name; non-reusable schemas are exposed as collections under
    /// this name.
    pub name: String,
    /// Human-readable description of the content type (e.g. for admin UI).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub description: Option<String>,
    /// Tags for grouping/filtering in the admin (e.g. ["content", "blog"]).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub tags: Option<Vec<String>>,
    /// Category for grouping collections in the admin (e.g. "content", "components").
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub category: Option<String>,
    /// Parent type(s) to inherit fields from. Supports a single string
    /// (`"blog_post"`) or an array (`["blog_post", "seo_meta"]`).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub extends: Option<Extends>,
    /// When true, reject any fields not defined in the schema.
    #[serde(default, skip_serializing_if = "is_false")]
    pub strict: bool,
    /// After extends resolution, keep **only** these fields from parents.
    /// (Like TypeScript `Pick<Parent, "a" | "b">`.)
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub pick: Option<Vec<String>>,
    /// After extends resolution, **remove** these fields from parents.
    /// (Like TypeScript `Omit<Parent, "a">`.)
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub omit: Option<Vec<String>>,
    /// Make all inherited fields optional.
    /// (Like TypeScript `Partial<Parent>`.)
    #[serde(default, skip_serializing_if = "is_false")]
    pub partial: bool,
    /// When true, this schema is only for reuse via useFields (no collection, no API).
    #[serde(default, skip_serializing_if = "is_false")]
    pub reusable: bool,
    /// Field definitions in declaration order (IndexMap preserves it).
    pub fields: IndexMap<String, FieldDefinition>,
}
// ---------------------------------------------------------------------------
// FieldDefinition
// ---------------------------------------------------------------------------
/// Definition of a single field within a schema.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldDefinition {
    /// Field type: "string", "number", "boolean", "datetime",
    /// "richtext", "html", "markdown", "integer", "array", "object", "reference"
    #[serde(rename = "type")]
    pub field_type: String,
    /// Value must be present (enforced by validation unless `auto` is set).
    #[serde(default, skip_serializing_if = "is_false")]
    pub required: bool,
    /// Value must be unique across all entries in the collection.
    #[serde(default, skip_serializing_if = "is_false")]
    pub unique: bool,
    /// Auto-generate value (e.g. current timestamp for datetime fields).
    #[serde(default, skip_serializing_if = "is_false")]
    pub auto: bool,
    /// Field cannot be changed after creation (enforced on PUT).
    #[serde(default, skip_serializing_if = "is_false")]
    pub readonly: bool,
    /// Field explicitly allows `null` as a value.
    #[serde(default, skip_serializing_if = "is_false")]
    pub nullable: bool,
    /// Default value applied when the field is missing on create.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub default: Option<Value>,
    /// Allowed values (enum constraint).
    #[serde(rename = "enum", skip_serializing_if = "Option::is_none", default)]
    pub enum_values: Option<Vec<Value>>,
    /// Item definition for array fields.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub items: Option<Box<FieldDefinition>>,
    /// Nested field definitions for object fields.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub fields: Option<IndexMap<String, FieldDefinition>>,
    /// Reuse fields from another schema (partial). Only for type "object".
    #[serde(rename = "useFields", skip_serializing_if = "Option::is_none", default)]
    pub use_fields: Option<String>,
    /// Target collection for reference fields (single).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub collection: Option<String>,
    /// Target collections for polymorphic reference fields (slug can be in any of these).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub collections: Option<Vec<String>>,
    /// Human-readable description (appears in Swagger UI).
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub description: Option<String>,
    // ── String constraints ───────────────────────────────────────────────
    #[serde(skip_serializing_if = "Option::is_none", default, rename = "minLength")]
    pub min_length: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none", default, rename = "maxLength")]
    pub max_length: Option<usize>,
    /// Regular expression the string value must match.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub pattern: Option<String>,
    // ── Number constraints ───────────────────────────────────────────────
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub min: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub max: Option<f64>,
    // ── Array constraints ────────────────────────────────────────────────
    #[serde(skip_serializing_if = "Option::is_none", default, rename = "minItems")]
    pub min_items: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none", default, rename = "maxItems")]
    pub max_items: Option<usize>,
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
pub const VALID_FIELD_TYPES: &[&str] = &[
"string", "number", "integer", "boolean", "datetime",
"richtext", "html", "markdown",
"array", "object", "reference",
];
impl FieldDefinition {
pub fn is_valid_type(&self) -> bool {
VALID_FIELD_TYPES.contains(&self.field_type.as_str())
}
/// Collections to try for reference resolution/validation (polymorphic or single).
pub fn reference_collections(&self) -> Vec<&str> {
if let Some(ref list) = self.collections {
if !list.is_empty() {
return list.iter().map(String::as_str).collect();
}
}
if let Some(ref c) = self.collection {
return vec![c.as_str()];
}
vec![]
}
}

440
src/schema/validator.rs Normal file
View File

@@ -0,0 +1,440 @@
use serde_json::Value;
use super::types::{FieldDefinition, SchemaDefinition};
// ---------------------------------------------------------------------------
// ValidationError
// ---------------------------------------------------------------------------
/// A single schema-validation failure for one field.
#[derive(Debug)]
pub struct ValidationError {
    /// Path of the offending field as shown to the user (e.g. "title",
    /// "meta.title", "tags[2]", or "_root" for non-object content).
    pub field: String,
    /// Human-readable description of what failed.
    pub message: String,
}

impl std::fmt::Display for ValidationError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { field, message } = self;
        write!(f, "Field '{field}': {message}")
    }
}
// ---------------------------------------------------------------------------
// Core validation (pure — no store dependency)
// ---------------------------------------------------------------------------
/// Validate content against a schema definition.
/// Returns a list of validation errors (empty = valid).
///
/// Checks, in order: content is an object; strict-mode unknown fields;
/// required fields; then per-field type/constraint validation. Keys
/// starting with `_` (system fields like `_slug`) are always skipped.
pub fn validate_content(
    schema: &SchemaDefinition,
    content: &Value,
) -> Vec<ValidationError> {
    let mut errors = Vec::new();
    let Some(obj) = content.as_object() else {
        errors.push(ValidationError {
            field: "_root".to_string(),
            message: "Content must be a JSON object".to_string(),
        });
        return errors;
    };

    // ── Strict mode: reject unknown fields ───────────────────────────────
    if schema.strict {
        for key in obj.keys().filter(|k| !k.starts_with('_')) {
            if !schema.fields.contains_key(key) {
                errors.push(ValidationError {
                    field: key.clone(),
                    message: "Unknown field (strict mode is enabled)".to_string(),
                });
            }
        }
    }

    // ── Required fields ──────────────────────────────────────────────────
    for (name, field_def) in &schema.fields {
        if !field_def.required || field_def.auto {
            continue;
        }
        // Missing entirely, or null on a field that doesn't allow null.
        let missing = match obj.get(name) {
            None => true,
            Some(v) => v.is_null() && !field_def.nullable,
        };
        if missing {
            errors.push(ValidationError {
                field: name.clone(),
                message: "Field is required".to_string(),
            });
        }
    }

    // ── Per-field type & constraint validation ───────────────────────────
    for (name, value) in obj {
        if name.starts_with('_') {
            continue;
        }
        if let Some(field_def) = schema.fields.get(name) {
            validate_field(name, field_def, value, &mut errors);
        }
    }
    errors
}
// ---------------------------------------------------------------------------
// Field-level validation
// ---------------------------------------------------------------------------
/// Validate one value against one field definition, appending problems to
/// `errors`. `field_name` is the display path used in messages (e.g.
/// "meta.title", "tags[2]"). Recurses into array items and nested object
/// fields.
fn validate_field(
    field_name: &str,
    fd: &FieldDefinition,
    value: &Value,
    errors: &mut Vec<ValidationError>,
) {
    // ── Null handling ────────────────────────────────────────────────────
    // Null short-circuits: either it is permitted (nullable) or it is an
    // error; no other constraint applies to a null value.
    if value.is_null() {
        if !fd.nullable {
            errors.push(ValidationError {
                field: field_name.to_string(),
                message: "Field does not allow null (set nullable: true to permit)".to_string(),
            });
        }
        return;
    }
    // ── Type check ──────────────────────────────────────────────────────
    let type_ok = match fd.field_type.as_str() {
        "string" | "richtext" | "html" | "markdown" | "datetime" => value.is_string(),
        "number" => value.is_number(),
        // Accepts i64/u64, or a finite float with no fractional part (e.g. 3.0).
        "integer" => value.is_i64() || value.is_u64() || value.as_f64().map(|f| f.fract() == 0.0 && f.is_finite()).unwrap_or(false),
        "boolean" => value.is_boolean(),
        "array" => value.is_array(),
        "object" => value.is_object(),
        // References are stored as slug strings; existence is checked elsewhere
        // (see validate_references).
        "reference" => value.is_string(),
        // Unknown types were rejected at schema load time; pass here.
        _ => true,
    };
    if !type_ok {
        errors.push(ValidationError {
            field: field_name.to_string(),
            message: format!(
                "Expected type '{}', got '{}'",
                fd.field_type,
                type_name(value)
            ),
        });
        return; // no point checking constraints if type is wrong
    }
    // ── Enum constraint ─────────────────────────────────────────────────
    if let Some(ref allowed) = fd.enum_values {
        if !allowed.contains(value) {
            errors.push(ValidationError {
                field: field_name.to_string(),
                message: format!("Value must be one of: {:?}", allowed),
            });
        }
    }
    // ── String constraints ──────────────────────────────────────────────
    // NOTE(review): lengths are byte lengths (str::len), not character
    // counts — multi-byte UTF-8 text hits limits sooner; confirm intended.
    if let Some(s) = value.as_str() {
        if let Some(min) = fd.min_length {
            if s.len() < min {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("String too short (min {} characters)", min),
                });
            }
        }
        if let Some(max) = fd.max_length {
            if s.len() > max {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("String too long (max {} characters)", max),
                });
            }
        }
        if let Some(ref pattern) = fd.pattern {
            // Pattern was validated at schema load time, so unwrap is safe
            if let Ok(re) = regex::Regex::new(pattern) {
                if !re.is_match(s) {
                    errors.push(ValidationError {
                        field: field_name.to_string(),
                        message: format!("Value does not match pattern '{}'", pattern),
                    });
                }
            }
        }
    }
    // ── Number constraints ──────────────────────────────────────────────
    if let Some(n) = value.as_f64() {
        if let Some(min) = fd.min {
            if n < min {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("Value {} is below minimum {}", n, min),
                });
            }
        }
        if let Some(max) = fd.max {
            if n > max {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("Value {} exceeds maximum {}", n, max),
                });
            }
        }
    }
    // ── Array constraints & item validation ─────────────────────────────
    if let Some(arr) = value.as_array() {
        if let Some(min) = fd.min_items {
            if arr.len() < min {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("Array too short (min {} items)", min),
                });
            }
        }
        if let Some(max) = fd.max_items {
            if arr.len() > max {
                errors.push(ValidationError {
                    field: field_name.to_string(),
                    message: format!("Array too long (max {} items)", max),
                });
            }
        }
        // Recurse into items with an indexed path like "tags[0]".
        if let Some(ref items_def) = fd.items {
            for (i, item) in arr.iter().enumerate() {
                validate_field(
                    &format!("{}[{}]", field_name, i),
                    items_def,
                    item,
                    errors,
                );
            }
        }
    }
    // ── Nested object validation ────────────────────────────────────────
    // Recurse into declared nested fields with a dotted path like "meta.title".
    // Undeclared keys inside the object are not checked here.
    if fd.field_type == "object" {
        if let (Some(ref nested_fields), Some(obj)) = (&fd.fields, value.as_object()) {
            for (nested_name, nested_def) in nested_fields {
                let full_name = format!("{}.{}", field_name, nested_name);
                if let Some(nested_value) = obj.get(nested_name) {
                    validate_field(&full_name, nested_def, nested_value, errors);
                } else if nested_def.required {
                    errors.push(ValidationError {
                        field: full_name,
                        message: "Field is required".to_string(),
                    });
                }
            }
        }
    }
}
fn type_name(value: &Value) -> &'static str {
match value {
Value::Null => "null",
Value::Bool(_) => "boolean",
Value::Number(_) => "number",
Value::String(_) => "string",
Value::Array(_) => "array",
Value::Object(_) => "object",
}
}
// ---------------------------------------------------------------------------
// Defaults & auto-generation
// ---------------------------------------------------------------------------
/// Apply default values and auto-generated fields to content.
pub fn apply_defaults(schema: &SchemaDefinition, content: &mut Value) {
if let Some(obj) = content.as_object_mut() {
for (name, fd) in &schema.fields {
if obj.contains_key(name) {
continue;
}
if let Some(ref default_value) = fd.default {
obj.insert(name.clone(), default_value.clone());
continue;
}
if fd.auto && fd.field_type == "datetime" {
obj.insert(
name.clone(),
Value::String(chrono::Utc::now().to_rfc3339()),
);
}
}
}
}
// ---------------------------------------------------------------------------
// Readonly check (used by update handler)
// ---------------------------------------------------------------------------
/// Check whether any readonly field has been changed.
/// `old` is the current stored content, `new` is the incoming update.
///
/// A violation requires the field to be present in BOTH documents with
/// different values; omitting a readonly field from the update, or setting
/// one that was never stored, is allowed.
pub fn check_readonly_violations(
    schema: &SchemaDefinition,
    old: &Value,
    new: &Value,
) -> Vec<ValidationError> {
    let mut errors = Vec::new();
    for (name, _) in schema.fields.iter().filter(|(_, fd)| fd.readonly) {
        if let (Some(new_val), Some(old_val)) = (new.get(name), old.get(name)) {
            if new_val != old_val {
                errors.push(ValidationError {
                    field: name.clone(),
                    message: "Field is readonly and cannot be changed after creation"
                        .to_string(),
                });
            }
        }
    }
    errors
}
// ---------------------------------------------------------------------------
// Unique validation (needs list of existing entries from the handler)
// ---------------------------------------------------------------------------
/// Check that all unique fields have values that don't collide with existing entries.
/// `current_slug` should be `Some(slug)` on update (to exclude self) or `None` on create.
/// `existing_entries` is the (slug, content) list for the collection, supplied
/// by the handler — this module deliberately has no store dependency.
pub fn validate_unique(
    schema: &SchemaDefinition,
    content: &Value,
    current_slug: Option<&str>,
    existing_entries: &[(String, Value)],
) -> Vec<ValidationError> {
    let mut errors = Vec::new();
    // Names of all fields declared `unique: true`.
    let unique_fields: Vec<&String> = schema
        .fields
        .iter()
        .filter(|(_, fd)| fd.unique)
        .map(|(name, _)| name)
        .collect();
    if unique_fields.is_empty() {
        return errors;
    }
    for field_name in &unique_fields {
        let Some(value) = content.get(field_name.as_str()) else {
            continue;
        };
        // Null never collides — uniqueness applies to concrete values only.
        if value.is_null() {
            continue;
        }
        for (slug, entry) in existing_entries {
            // Skip self on update
            if Some(slug.as_str()) == current_slug {
                continue;
            }
            if let Some(existing_val) = entry.get(field_name.as_str()) {
                if existing_val == value {
                    // NOTE(review): the message reads "unique '<v>' is already
                    // used" — a dash/word between "unique" and the value may
                    // have been lost; confirm intended wording.
                    errors.push(ValidationError {
                        field: field_name.to_string(),
                        message: format!(
                            "Value must be unique '{}' is already used by entry '{}'",
                            value, slug
                        ),
                    });
                    // One collision per field is enough.
                    break;
                }
            }
        }
    }
    errors
}
// ---------------------------------------------------------------------------
// Reference validation (handler provides a lookup function)
// ---------------------------------------------------------------------------
/// Check that all reference fields point to existing entries.
/// `entry_exists(collection, slug) -> bool` is provided by the handler.
/// Supports single `collection` and polymorphic `collections` (a slug is
/// valid if found in any of them). Covers both bare reference fields and
/// arrays of references; references with no declared target collections,
/// and non-string values, are skipped (type errors are reported elsewhere).
pub fn validate_references(
    schema: &SchemaDefinition,
    content: &Value,
    entry_exists: &dyn Fn(&str, &str) -> bool,
) -> Vec<ValidationError> {
    let mut errors = Vec::new();
    let Some(obj) = content.as_object() else {
        return errors;
    };
    for (field_name, fd) in &schema.fields {
        match fd.field_type.as_str() {
            "reference" => {
                let colls = fd.reference_collections();
                if colls.is_empty() {
                    continue;
                }
                let Some(Value::String(slug)) = obj.get(field_name) else {
                    continue;
                };
                if !colls.iter().any(|c| entry_exists(c, slug)) {
                    errors.push(ValidationError {
                        field: field_name.clone(),
                        message: format!(
                            "Referenced entry '{}' not found in any of {:?}",
                            slug, colls
                        ),
                    });
                }
            }
            "array" => {
                let Some(items) = fd.items.as_deref() else {
                    continue;
                };
                if items.field_type != "reference" {
                    continue;
                }
                let colls = items.reference_collections();
                if colls.is_empty() {
                    continue;
                }
                let Some(Value::Array(arr)) = obj.get(field_name) else {
                    continue;
                };
                for (i, item) in arr.iter().enumerate() {
                    let Some(slug) = item.as_str() else {
                        continue;
                    };
                    if !colls.iter().any(|c| entry_exists(c, slug)) {
                        errors.push(ValidationError {
                            field: format!("{}[{}]", field_name, i),
                            message: format!(
                                "Referenced entry '{}' not found in any of {:?}",
                                slug, colls
                            ),
                        });
                    }
                }
            }
            _ => {}
        }
    }
    errors
}

214
src/store/filesystem.rs Normal file
View File

@@ -0,0 +1,214 @@
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use async_trait::async_trait;
use serde_json::Value;
use tokio::fs;
use tokio::io::AsyncWriteExt;
use super::store::ContentStore;
/// File-based content store (async I/O via tokio::fs).
/// Each collection is a subdirectory, each entry a `.json5` file.
pub struct FileStore {
    // Root directory that holds all collections; locale-specific content
    // lives in per-locale subdirectories underneath (see collection_dir).
    content_dir: PathBuf,
}
impl FileStore {
    /// Create a store rooted at `content_dir`.
    pub fn new(content_dir: &Path) -> Self {
        Self {
            content_dir: content_dir.to_path_buf(),
        }
    }

    /// Base path for a collection. When locale is Some, content is under content/{locale}/{collection}/.
    fn collection_dir(&self, collection: &str, locale: Option<&str>) -> PathBuf {
        match locale {
            Some(loc) => self.content_dir.join(loc).join(collection),
            None => self.content_dir.join(collection),
        }
    }

    /// Path of the entry file: `{collection_dir}/{slug}.json5`.
    fn entry_path(&self, collection: &str, slug: &str, locale: Option<&str>) -> PathBuf {
        self.collection_dir(collection, locale)
            .join(format!("{}.json5", slug))
    }

    /// Ensure the collection directory exists (no locale: used at startup for all collections).
    async fn ensure_collection_dir_impl(&self, collection: &str) -> Result<()> {
        let dir = self.collection_dir(collection, None);
        // create_dir_all is idempotent, so no racy `exists()` pre-check is needed.
        fs::create_dir_all(&dir)
            .await
            .with_context(|| format!("Failed to create directory: {}", dir.display()))?;
        Ok(())
    }

    /// List all entries in a collection (optionally for a locale).
    /// A missing directory counts as an empty collection; unreadable or
    /// unparseable files are skipped with a warning instead of failing the list.
    async fn list_impl(&self, collection: &str, locale: Option<&str>) -> Result<Vec<(String, Value)>> {
        let dir = self.collection_dir(collection, locale);
        let mut read_dir = match fs::read_dir(&dir).await {
            Ok(rd) => rd,
            // No directory yet => no entries. Other errors (permissions, ...) propagate.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(Vec::new()),
            Err(e) => {
                return Err(e)
                    .with_context(|| format!("Failed to read directory: {}", dir.display()))
            }
        };
        let mut paths = Vec::new();
        while let Some(entry) = read_dir.next_entry().await? {
            let path = entry.path();
            let is_content = path
                .extension()
                .map(|ext| ext == "json5" || ext == "json")
                .unwrap_or(false);
            if is_content {
                paths.push(path);
            }
        }
        // Sort for a deterministic order independent of directory iteration order.
        paths.sort();
        let mut entries = Vec::with_capacity(paths.len());
        for path in paths {
            let slug = path
                .file_stem()
                .and_then(|s| s.to_str())
                .unwrap_or_default()
                .to_string();
            match self.read_file(&path).await {
                Ok(mut value) => {
                    if let Some(obj) = value.as_object_mut() {
                        obj.insert("_slug".to_string(), Value::String(slug.clone()));
                    }
                    entries.push((slug, value));
                }
                Err(e) => {
                    tracing::warn!("Skipping {}: {}", path.display(), e);
                }
            }
        }
        Ok(entries)
    }

    /// Get a single entry by slug (optionally for a locale). Returns None if missing.
    async fn get_impl(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<Option<Value>> {
        let path = self.entry_path(collection, slug, locale);
        // Read directly and map NotFound to None: avoids both the blocking
        // `Path::exists()` call in an async fn and the exists/read race.
        let content = match fs::read_to_string(&path).await {
            Ok(c) => c,
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
            Err(e) => {
                return Err(e).with_context(|| format!("Failed to read {}", path.display()))
            }
        };
        let mut value: Value = json5::from_str(&content)
            .with_context(|| format!("Failed to parse {}", path.display()))?;
        if let Some(obj) = value.as_object_mut() {
            obj.insert("_slug".to_string(), Value::String(slug.to_string()));
        }
        Ok(Some(value))
    }

    /// Create a new entry (optionally under a locale path). Errors if it already exists.
    async fn create_impl(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()> {
        let dir = self.collection_dir(collection, locale);
        fs::create_dir_all(&dir)
            .await
            .with_context(|| format!("Failed to create directory: {}", dir.display()))?;
        let path = self.entry_path(collection, slug, locale);
        let content = serde_json::to_string_pretty(data)?;
        // `create_new` makes the existence check atomic: no check-then-write race.
        let mut file = match fs::OpenOptions::new()
            .write(true)
            .create_new(true)
            .open(&path)
            .await
        {
            Ok(f) => f,
            Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {
                anyhow::bail!(
                    "Entry '{}' already exists in collection '{}'",
                    slug,
                    collection
                );
            }
            Err(e) => {
                return Err(e).with_context(|| format!("Failed to create {}", path.display()))
            }
        };
        file.write_all(content.as_bytes())
            .await
            .with_context(|| format!("Failed to write {}", path.display()))?;
        // tokio file writes are buffered; flush so nothing is silently lost on drop.
        file.flush()
            .await
            .with_context(|| format!("Failed to write {}", path.display()))?;
        Ok(())
    }

    /// Update an existing entry (optionally under a locale path). Errors if missing.
    async fn update_impl(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()> {
        let path = self.entry_path(collection, slug, locale);
        // Async metadata lookup instead of the blocking `Path::exists()`;
        // like exists(), an inaccessible path is treated as missing.
        if fs::metadata(&path).await.is_err() {
            anyhow::bail!(
                "Entry '{}' not found in collection '{}'",
                slug,
                collection
            );
        }
        self.write_file(&path, data).await
    }

    /// Delete an entry (optionally under a locale path). Errors if missing.
    async fn delete_impl(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<()> {
        let path = self.entry_path(collection, slug, locale);
        match fs::remove_file(&path).await {
            Ok(()) => Ok(()),
            // Report a missing entry with the domain message, not a raw I/O error.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                anyhow::bail!(
                    "Entry '{}' not found in collection '{}'",
                    slug,
                    collection
                )
            }
            Err(e) => Err(e).with_context(|| format!("Failed to delete {}", path.display())),
        }
    }

    /// Read and parse one entry file (JSON5 parser also accepts plain JSON).
    async fn read_file(&self, path: &Path) -> Result<Value> {
        let content = fs::read_to_string(path)
            .await
            .with_context(|| format!("Failed to read {}", path.display()))?;
        let value: Value = json5::from_str(&content)
            .with_context(|| format!("Failed to parse {}", path.display()))?;
        Ok(value)
    }

    /// Serialize `data` as pretty JSON and write it. `fs::write` completes the
    /// write before returning; the previous File + write_all variant never
    /// flushed, so buffered data could be lost on drop.
    async fn write_file(&self, path: &Path, data: &Value) -> Result<()> {
        let content = serde_json::to_string_pretty(data)?;
        fs::write(path, content.as_bytes())
            .await
            .with_context(|| format!("Failed to write {}", path.display()))?;
        Ok(())
    }
}
#[async_trait]
impl ContentStore for FileStore {
    // Thin trait adapter: every method forwards to the matching *_impl
    // inherent method above; all logic (and error wording) lives there.
    async fn ensure_collection_dir(&self, collection: &str) -> Result<()> {
        self.ensure_collection_dir_impl(collection).await
    }
    async fn list(&self, collection: &str, locale: Option<&str>) -> Result<Vec<(String, Value)>> {
        self.list_impl(collection, locale).await
    }
    async fn get(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<Option<Value>> {
        self.get_impl(collection, slug, locale).await
    }
    async fn create(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()> {
        self.create_impl(collection, slug, data, locale).await
    }
    async fn update(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()> {
        self.update_impl(collection, slug, data, locale).await
    }
    async fn delete(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<()> {
        self.delete_impl(collection, slug, locale).await
    }
}

7
src/store/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
pub mod filesystem;
pub mod query;
pub mod slug;
pub mod sqlite;
pub mod store;
pub use store::ContentStore;

228
src/store/query.rs Normal file
View File

@@ -0,0 +1,228 @@
use std::collections::HashMap;
use serde::Serialize;
use serde_json::Value;
/// Filter operation for a field, selected by the query-string key suffix
/// (see `QueryParams::from_map`).
#[derive(Debug, Clone)]
pub enum FilterOp {
    /// Exact match (default; bare `field=value`)
    Exact(String),
    /// String or array contains (`field_contains=`)
    Contains(String),
    /// String starts with (`field_prefix=`)
    Prefix(String),
    /// Numeric or datetime >= (`field_min=`)
    Min(String),
    /// Numeric or datetime <= (`field_max=`)
    Max(String),
}
/// Parsed query parameters for list endpoints.
///
/// Special parameters (prefixed with `_`):
/// - `_sort`, `_order`, `_page`, `_per_page`
///
/// Field filters support suffixes: `field`, `field_prefix`, `field_contains`, `field_min`, `field_max`.
pub struct QueryParams {
    // Field name to sort by (`_sort`); None leaves the input order untouched.
    pub sort: Option<String>,
    // Sort direction (`_order`); only "desc" reverses, anything else sorts ascending.
    pub order: Option<String>,
    // 1-based page number (`_page`); `apply` defaults to 1 when unset/unparseable.
    pub page: Option<usize>,
    // Page size (`_per_page`); `apply` defaults to 50 when unset/unparseable.
    pub per_page: Option<usize>,
    /// (field_name, filter_op)
    pub filters: Vec<(String, FilterOp)>,
}
impl QueryParams {
    /// Parse query parameters. Extracts the system params (`_sort`, `_order`,
    /// `_page`, `_per_page`) and turns every remaining key into a
    /// (field, `FilterOp`) pair based on its suffix.
    pub fn from_map(mut map: HashMap<String, String>) -> Self {
        let sort = map.remove("_sort");
        let order = map.remove("_order");
        let page = map.remove("_page").and_then(|v| v.parse().ok());
        let per_page = map.remove("_per_page").and_then(|v| v.parse().ok());
        // Drop any remaining unknown `_`-prefixed params so they never become field filters.
        map.retain(|k, _| !k.starts_with('_'));
        let mut filters = Vec::new();
        for (key, value) in map {
            // The suffix selects the operator; a bare key means exact match.
            let (field, op) = if let Some(f) = key.strip_suffix("_prefix") {
                (f.to_string(), FilterOp::Prefix(value))
            } else if let Some(f) = key.strip_suffix("_contains") {
                (f.to_string(), FilterOp::Contains(value))
            } else if let Some(f) = key.strip_suffix("_min") {
                (f.to_string(), FilterOp::Min(value))
            } else if let Some(f) = key.strip_suffix("_max") {
                (f.to_string(), FilterOp::Max(value))
            } else {
                (key, FilterOp::Exact(value))
            };
            filters.push((field, op));
        }
        Self {
            sort,
            order,
            page,
            per_page,
            filters,
        }
    }

    /// Apply filters, sorting, and pagination to a list of `(slug, value)` entries.
    ///
    /// `total` and `total_pages` reflect the count *after* filtering; `_slug`
    /// is injected into every returned item.
    pub fn apply(&self, mut entries: Vec<(String, Value)>) -> QueryResult {
        entries.retain(|(_, value)| self.matches_filters(value));
        let total = entries.len();
        if let Some(ref sort_field) = self.sort {
            let desc = self.order.as_deref() == Some("desc");
            entries.sort_by(|(_, a), (_, b)| {
                let cmp = compare_values(a.get(sort_field), b.get(sort_field));
                if desc {
                    cmp.reverse()
                } else {
                    cmp
                }
            });
        }
        // Clamp to >= 1: a client-supplied `_per_page=0` would otherwise divide
        // by zero in the total_pages computation below (and paginate nothing).
        let per_page = self.per_page.unwrap_or(50).max(1);
        let page = self.page.unwrap_or(1).max(1);
        // saturating_mul guards against overflow for absurd page numbers;
        // skip() past the end simply yields an empty page.
        let offset = (page - 1).saturating_mul(per_page);
        let total_pages = if total == 0 {
            0
        } else {
            (total + per_page - 1) / per_page
        };
        let items: Vec<Value> = entries
            .into_iter()
            .skip(offset)
            .take(per_page)
            .map(|(slug, mut v)| {
                if let Some(obj) = v.as_object_mut() {
                    obj.insert("_slug".to_string(), Value::String(slug));
                }
                v
            })
            .collect();
        QueryResult {
            items,
            total,
            page,
            per_page,
            total_pages,
        }
    }

    /// True when the entry satisfies every filter (AND semantics).
    fn matches_filters(&self, value: &Value) -> bool {
        self.filters
            .iter()
            .all(|(field_name, op)| filter_matches(value.get(field_name), op))
    }
}
/// Dispatch a single filter operation against an (optional) field value.
fn filter_matches(actual: Option<&Value>, op: &FilterOp) -> bool {
    match op {
        FilterOp::Exact(want) => value_matches_exact(actual, want),
        FilterOp::Prefix(want) => value_matches_prefix(actual, want),
        FilterOp::Contains(want) => value_matches_contains(actual, want),
        FilterOp::Min(bound) => value_matches_min(actual, bound),
        FilterOp::Max(bound) => value_matches_max(actual, bound),
    }
}
/// Exact-match filter. Missing fields and JSON null both match the literal
/// string "null"; arrays match when any element matches.
fn value_matches_exact(actual: Option<&Value>, expected: &str) -> bool {
    let Some(v) = actual else {
        return expected == "null";
    };
    match v {
        Value::Null => expected == "null",
        Value::String(s) => s == expected,
        Value::Number(n) => n.to_string() == expected,
        Value::Bool(b) => (*b && expected == "true") || (!*b && expected == "false"),
        Value::Array(items) => items
            .iter()
            .any(|item| value_matches_exact(Some(item), expected)),
        // Objects never match a scalar filter value.
        Value::Object(_) => false,
    }
}
/// Prefix filter: string starts-with; arrays match when any element does.
fn value_matches_prefix(actual: Option<&Value>, prefix: &str) -> bool {
    let Some(v) = actual else {
        return false;
    };
    match v {
        Value::String(s) => s.starts_with(prefix),
        Value::Array(items) => items
            .iter()
            .any(|item| value_matches_prefix(Some(item), prefix)),
        _ => false,
    }
}
/// Contains filter: substring match for strings. For arrays this is exact
/// element *membership* (the array "contains" the value), not a substring
/// test on each element — note the deliberate use of value_matches_exact.
fn value_matches_contains(actual: Option<&Value>, sub: &str) -> bool {
    let Some(v) = actual else {
        return false;
    };
    if let Value::String(s) = v {
        return s.contains(sub);
    }
    if let Value::Array(items) = v {
        return items.iter().any(|item| value_matches_exact(Some(item), sub));
    }
    false
}
/// Lower-bound filter (>=). Numbers compare numerically; strings compare
/// lexicographically, which works for ISO-8601 dates.
fn value_matches_min(actual: Option<&Value>, min_str: &str) -> bool {
    let Some(v) = actual else {
        return false;
    };
    match v {
        Value::Number(n) => {
            // An unparseable bound falls back to f64::MIN, i.e. every number passes.
            let lhs = n.as_f64().unwrap_or(f64::MIN);
            let rhs = min_str.parse::<f64>().unwrap_or(f64::MIN);
            lhs >= rhs
        }
        Value::String(s) => s.as_str() >= min_str,
        _ => false,
    }
}
/// Upper-bound filter (<=). Mirror image of value_matches_min.
fn value_matches_max(actual: Option<&Value>, max_str: &str) -> bool {
    let Some(v) = actual else {
        return false;
    };
    match v {
        Value::Number(n) => {
            // An unparseable bound falls back to f64::MAX, i.e. every number passes.
            let lhs = n.as_f64().unwrap_or(f64::MAX);
            let rhs = max_str.parse::<f64>().unwrap_or(f64::MAX);
            lhs <= rhs
        }
        Value::String(s) => s.as_str() <= max_str,
        _ => false,
    }
}
/// Compare two optional JSON values for sorting. Missing values sort first;
/// mixed or unsupported types compare as equal (stable sort keeps their order).
fn compare_values(a: Option<&Value>, b: Option<&Value>) -> std::cmp::Ordering {
    use std::cmp::Ordering;
    match (a, b) {
        (None, None) => Ordering::Equal,
        (None, Some(_)) => Ordering::Less,
        (Some(_), None) => Ordering::Greater,
        (Some(Value::String(x)), Some(Value::String(y))) => x.cmp(y),
        (Some(Value::Number(x)), Some(Value::Number(y))) => {
            let x = x.as_f64().unwrap_or(0.0);
            let y = y.as_f64().unwrap_or(0.0);
            // NaN pairs compare as Equal rather than panicking.
            x.partial_cmp(&y).unwrap_or(Ordering::Equal)
        }
        (Some(Value::Bool(x)), Some(Value::Bool(y))) => x.cmp(y),
        _ => Ordering::Equal,
    }
}
/// Result of a paginated query.
#[derive(Debug, Serialize)]
pub struct QueryResult {
    // Entries of the requested page, each with `_slug` injected.
    pub items: Vec<Value>,
    // Total number of entries after filtering (across all pages).
    pub total: usize,
    // 1-based page number that was served.
    pub page: usize,
    // Page size used for the slicing.
    pub per_page: usize,
    // Number of pages at this page size; 0 when nothing matched.
    pub total_pages: usize,
}

135
src/store/slug.rs Normal file
View File

@@ -0,0 +1,135 @@
//! Slug normalization for safe filenames and URLs.
//! Only allows [a-z0-9-]. Rejects empty and invalid slugs.
/// Normalize a slug for use as filename and API identifier.
/// - Lowercase
/// - Replace spaces and repeated hyphens with single `-`
/// - Replace common umlauts (ä->ae, ö->oe, ü->ue, ß->ss)
/// - Remove any character not in [a-z0-9-]
/// - Trim leading/trailing hyphens
pub fn normalize_slug(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            // Already-valid characters pass straight through.
            'a'..='z' | '0'..='9' => out.push(c),
            'A'..='Z' => out.push(c.to_ascii_lowercase()),
            // German special characters expand to two-letter transliterations.
            'ä' | 'Ä' => out.push_str("ae"),
            'ö' | 'Ö' => out.push_str("oe"),
            'ü' | 'Ü' => out.push_str("ue"),
            'ß' => out.push_str("ss"),
            // Separator characters become a single hyphen; never emit a
            // leading hyphen or two hyphens in a row.
            ' ' | '\t' | '_' | '.' | '-' => {
                if !out.is_empty() && !out.ends_with('-') {
                    out.push('-');
                }
            }
            // Anything else (punctuation, other unicode) is dropped.
            _ => {}
        }
    }
    // At most one trailing hyphen can remain when the input ends in separators.
    out.trim_end_matches('-').to_string()
}
/// Validate that a slug is acceptable (no path traversal, not empty after normalize).
pub fn validate_slug(s: &str) -> Result<(), String> {
    // Reject path separators and parent-directory components outright, before
    // normalization, so they can never reach the filesystem layer.
    if s.contains('/') || s.contains('\\') || s.contains("..") {
        return Err("Slug must not contain /, \\ or ..".to_string());
    }
    // The slug must still contain usable characters once normalized.
    if normalize_slug(s).is_empty() {
        return Err("Slug is empty after normalization".to_string());
    }
    Ok(())
}
// Unit tests for slug normalization and validation.
#[cfg(test)]
mod tests {
    use super::*;
    // Casing, separator replacement, and hyphen collapsing.
    #[test]
    fn normalize_lowercase_and_hyphens() {
        assert_eq!(normalize_slug("Hello World"), "hello-world");
        assert_eq!(normalize_slug("foo_bar"), "foo-bar");
        assert_eq!(normalize_slug("a--b"), "a-b");
    }
    // German umlauts/eszett expand to two-letter ASCII transliterations.
    #[test]
    fn normalize_umlauts() {
        assert_eq!(normalize_slug("Grüße"), "gruesse");
        assert_eq!(normalize_slug("Straße"), "strasse");
    }
    // Characters outside [a-z0-9-] and surrounding separators are removed.
    #[test]
    fn normalize_strips_invalid() {
        assert_eq!(normalize_slug("foo!bar"), "foobar");
        assert_eq!(normalize_slug(" trim "), "trim");
    }
    #[test]
    fn validate_accepts_ok() {
        assert!(validate_slug("hello").is_ok());
        assert!(validate_slug("hello-world").is_ok());
    }
    // Path traversal attempts must be rejected before touching the filesystem.
    #[test]
    fn validate_rejects_path_traversal() {
        assert!(validate_slug("..").is_err());
        assert!(validate_slug("foo/bar").is_err());
        assert!(validate_slug("foo\\bar").is_err());
    }
    // A slug that normalizes away to nothing is unusable as a filename.
    #[test]
    fn validate_rejects_empty_after_normalize() {
        assert!(validate_slug("!!!").is_err());
        assert!(validate_slug("").is_err());
    }
}

165
src/store/sqlite.rs Normal file
View File

@@ -0,0 +1,165 @@
//! SQLite-backed content store. One table stores all collections; JSON in `data`.
use std::str::FromStr;
use anyhow::{Context, Result};
use async_trait::async_trait;
use serde_json::Value;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePool, SqlitePoolOptions};
use sqlx::Row;
use super::store::ContentStore;
/// SQLite content store. Content is stored as JSON in a single table.
pub struct SqliteStore {
    // Connection pool shared by all store operations.
    pool: SqlitePool,
}
impl SqliteStore {
    /// Create pool and run migrations (create table if not exists).
    /// Creates the database file if missing (sqlx default is not to create).
    pub async fn new(database_url: &str) -> Result<Self> {
        // Parse the URL first so a bad config fails before any connection attempt.
        let connect_opts = SqliteConnectOptions::from_str(database_url)
            .context("Invalid SQLite URL")?
            .create_if_missing(true);
        let pool = SqlitePoolOptions::new()
            .connect_with(connect_opts)
            .await
            .context("Failed to connect to SQLite")?;
        // Idempotent migration: one table holds every collection's entries.
        let ddl = r#"
        CREATE TABLE IF NOT EXISTS content_entries (
            collection TEXT NOT NULL,
            slug TEXT NOT NULL,
            data TEXT NOT NULL,
            PRIMARY KEY (collection, slug)
        )
        "#;
        sqlx::query(ddl)
            .execute(&pool)
            .await
            .context("Failed to create content_entries table")?;
        Ok(Self { pool })
    }
}
#[async_trait]
impl ContentStore for SqliteStore {
async fn ensure_collection_dir(&self, _collection: &str) -> Result<()> {
// Single table for all collections; no per-collection setup.
Ok(())
}
async fn list(&self, collection: &str, _locale: Option<&str>) -> Result<Vec<(String, Value)>> {
let rows = sqlx::query(
"SELECT slug, data FROM content_entries WHERE collection = ? ORDER BY slug",
)
.bind(collection)
.fetch_all(&self.pool)
.await
.context("Failed to list entries")?;
let mut entries = Vec::with_capacity(rows.len());
for row in rows {
let slug: String = row.try_get("slug").context("slug")?;
let data_str: String = row.try_get("data").context("data")?;
let mut value: Value =
serde_json::from_str(&data_str).context("Invalid JSON in content_entries")?;
if let Some(obj) = value.as_object_mut() {
obj.insert("_slug".to_string(), Value::String(slug.clone()));
}
entries.push((slug, value));
}
Ok(entries)
}
async fn get(&self, collection: &str, slug: &str, _locale: Option<&str>) -> Result<Option<Value>> {
let row = sqlx::query("SELECT data FROM content_entries WHERE collection = ? AND slug = ?")
.bind(collection)
.bind(slug)
.fetch_optional(&self.pool)
.await
.context("Failed to get entry")?;
let Some(row) = row else {
return Ok(None);
};
let data_str: String = row.try_get("data").context("data")?;
let mut value: Value =
serde_json::from_str(&data_str).context("Invalid JSON in content_entries")?;
if let Some(obj) = value.as_object_mut() {
obj.insert("_slug".to_string(), Value::String(slug.to_string()));
}
Ok(Some(value))
}
async fn create(&self, collection: &str, slug: &str, data: &Value, _locale: Option<&str>) -> Result<()> {
let data_str = serde_json::to_string(data).context("Serialize content")?;
let res = sqlx::query(
"INSERT INTO content_entries (collection, slug, data) VALUES (?, ?, ?)",
)
.bind(collection)
.bind(slug)
.bind(&data_str)
.execute(&self.pool)
.await;
match res {
Ok(_) => Ok(()),
Err(e) => {
if let Some(db_err) = e.as_database_error() {
if db_err.is_unique_violation() {
anyhow::bail!(
"Entry '{}' already exists in collection '{}'",
slug,
collection
);
}
}
Err(e.into())
}
}
}
async fn update(&self, collection: &str, slug: &str, data: &Value, _locale: Option<&str>) -> Result<()> {
let data_str = serde_json::to_string(data).context("Serialize content")?;
let res = sqlx::query(
"UPDATE content_entries SET data = ? WHERE collection = ? AND slug = ?",
)
.bind(&data_str)
.bind(collection)
.bind(slug)
.execute(&self.pool)
.await
.context("Failed to update entry")?;
if res.rows_affected() == 0 {
anyhow::bail!(
"Entry '{}' not found in collection '{}'",
slug,
collection
);
}
Ok(())
}
async fn delete(&self, collection: &str, slug: &str, _locale: Option<&str>) -> Result<()> {
let res = sqlx::query("DELETE FROM content_entries WHERE collection = ? AND slug = ?")
.bind(collection)
.bind(slug)
.execute(&self.pool)
.await
.context("Failed to delete entry")?;
if res.rows_affected() == 0 {
anyhow::bail!(
"Entry '{}' not found in collection '{}'",
slug,
collection
);
}
Ok(())
}
}

33
src/store/store.rs Normal file
View File

@@ -0,0 +1,33 @@
//! Abstract storage layer for content. Implementations: file-based (FileStore) and SQLite (SqliteStore).
use anyhow::Result;
use async_trait::async_trait;
use serde_json::Value;
/// Backend-agnostic content store. Implemented by [super::filesystem::FileStore]
/// and [super::sqlite::SqliteStore]. Selected via `RUSTYCMS_STORE=file|sqlite`.
///
/// When `locale` is `Some`, the store may return locale-specific content (e.g. `content/{locale}/{collection}/`).
/// When `None`, behaviour is unchanged (single locale or legacy path).
///
/// NOTE(review): the SQLite implementation currently ignores `locale`
/// entirely — confirm whether per-locale separation is required there too.
#[async_trait]
pub trait ContentStore: Send + Sync {
    /// Ensure the collection exists (e.g. create directory or table). No-op for DB if schema is generic.
    async fn ensure_collection_dir(&self, collection: &str) -> Result<()>;
    /// List all entries in a collection. Returns (slug, content) with `_slug` already set.
    /// `locale`: when set (and store supports it), list from that locale's content.
    async fn list(&self, collection: &str, locale: Option<&str>) -> Result<Vec<(String, Value)>>;
    /// Get one entry by slug. Returns `None` if not found. Injects `_slug` into the value.
    /// `locale`: when set, fetch from that locale's content.
    async fn get(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<Option<Value>>;
    /// Create a new entry. Errors if slug already exists.
    async fn create(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()>;
    /// Update an existing entry. Errors if slug does not exist.
    async fn update(&self, collection: &str, slug: &str, data: &Value, locale: Option<&str>) -> Result<()>;
    /// Delete an entry. Errors if slug does not exist.
    async fn delete(&self, collection: &str, slug: &str, locale: Option<&str>) -> Result<()>;
}